focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * SQL {@code INDETERMINATE} operator for {@code IPPREFIX}.
 *
 * <p>An IPPREFIX value is indeterminate exactly when it is SQL NULL; the slice
 * payload itself is never inspected.
 */
@ScalarOperator(INDETERMINATE)
@SqlType(StandardTypes.BOOLEAN)
public static boolean indeterminate(@SqlType(StandardTypes.IPPREFIX) Slice value, @IsNull boolean isNull)
{
    return isNull;
}
// Covers both branches of the INDETERMINATE operator: a SQL NULL IPPREFIX is
// indeterminate, a concrete IPPREFIX literal is not.
@Test
public void testIndeterminate() {
    assertOperator(INDETERMINATE, "CAST(null AS IPPREFIX)", BOOLEAN, true);
    assertOperator(INDETERMINATE, "IPPREFIX '::2222/128'", BOOLEAN, false);
}
/**
 * Delegates the retry decision to the superclass; when a retry is granted,
 * rewinds both byte counters so the repeated segment transfer starts from zero.
 */
@Override
public boolean retry(final BackgroundException failure, final ProgressListener progress, final BackgroundActionState cancel) {
    if (!super.retry(failure, progress, cancel)) {
        return false;
    }
    // Negate the current totals to reset the counters before the next attempt.
    counter.recv(-counter.getRecv());
    counter.sent(-counter.getSent());
    return true;
}
// With 2 retries configured, a persistently failing segment callable must be
// attempted 3 times in total and the terminal ConnectionRefusedException must
// still propagate to the caller.
@Test public void testRetry() throws Exception { final AtomicInteger count = new AtomicInteger(); final SegmentRetryCallable<Void> c = new SegmentRetryCallable<Void>(new Host(new TestProtocol(Scheme.file)), 2, 0, new BackgroundExceptionCallable<Void>() { @Override public Void call() throws BackgroundException { throw new ConnectionRefusedException("d", new SocketException()); } }, new TransferStatus(), new BytecountStreamListener()) { @Override public boolean retry(final BackgroundException failure, final ProgressListener progress, final BackgroundActionState cancel) { count.incrementAndGet(); return super.retry(failure, progress, cancel); } }; try { c.call(); fail(); } catch(ConnectionRefusedException e) { // Expected } assertEquals(3, count.get()); }
/**
 * Translates a fully qualified Java type into its Kotlin equivalent,
 * recursively converting every generic type argument as well.
 */
public static FullyQualifiedKotlinType convert(FullyQualifiedJavaType javaType) {
    FullyQualifiedKotlinType result = convertBaseType(javaType);
    javaType.getTypeArguments()
            .forEach(typeArgument -> result.addTypeArgument(convert(typeArgument)));
    return result;
}
// Java byte[] must map to Kotlin's built-in ByteArray, which needs no import.
@Test
void testPrimitiveByteArray() {
    FullyQualifiedJavaType jt = new FullyQualifiedJavaType("byte[]");
    FullyQualifiedKotlinType kt = JavaToKotlinTypeConverter.convert(jt);
    assertThat(kt.getShortNameWithTypeArguments()).isEqualTo("ByteArray");
    assertThat(kt.getImportList()).isEmpty();
}
/**
 * Produces the next value: sequential via {@code next0()} when the domain is
 * unbounded, otherwise a uniformly random element of the restricted domain.
 */
@Override
public Long next(Object ignored) {
    if (!limitedDomain) {
        return next0();
    }
    // Pick a random offset, then walk the domain to that position.
    int offset = random.nextInt(domain.size());
    return domain
            .stream()
            .skip(offset)
            .findFirst()
            .orElseThrow(() -> new RuntimeException("Should never happen"));
}
// A bounded sequence generator starting at 1 must emit 1, 2, ..., 10 in order.
@Test
public void testSequence() {
    SequenceGenerator generator = new SequenceGenerator.Builder()
            .setMin(1L)
            .setMax(100L)
            .build();
    for (int expected = 1; expected <= 10; expected++) {
        assertEquals(expected, generator.next(null).intValue());
    }
}
@Override @CacheEvict(cacheNames = RedisKeyConstants.MAIL_TEMPLATE, allEntries = true) // allEntries 清空所有缓存,因为 id 不是直接的缓存 code,不好清理 public void deleteMailTemplate(Long id) { // 校验是否存在 validateMailTemplateExists(id); // 删除 mailTemplateMapper.deleteById(id); }
// Deleting a non-existent template id must raise MAIL_TEMPLATE_NOT_EXISTS.
@Test
public void testDeleteMailTemplate_notExists() {
    // Prepare parameters
    Long id = randomLongId();
    // Invoke and assert the expected service exception
    assertServiceException(() -> mailTemplateService.deleteMailTemplate(id), MAIL_TEMPLATE_NOT_EXISTS);
}
@ApiOperation(value = "Delete a job", tags = { "Jobs" }, code = 204)
@ApiResponses(value = { @ApiResponse(code = 204, message = "Indicates the job was found and has been deleted. Response-body is intentionally empty."), @ApiResponse(code = 404, message = "Indicates the requested job was not found.") })
@DeleteMapping("/management/jobs/{jobId}")
@ResponseStatus(HttpStatus.NO_CONTENT)
public void deleteJob(@ApiParam(name = "jobId") @PathVariable String jobId) {
    // Resolves the job (presumably throws when absent — behavior lives in getJobById).
    Job job = getJobById(jobId);
    // Optional interceptor hook: lets deployments veto/augment the deletion.
    if (restApiInterceptor != null) {
        restApiInterceptor.deleteJob(job);
    }
    try {
        managementService.deleteJob(jobId);
    } catch (FlowableObjectNotFoundException aonfe) {
        // Re-throw to have consistent error-messaging across REST-api
        // NOTE(review): the original exception is dropped as cause here; consider chaining it.
        throw new FlowableObjectNotFoundException("Could not find a job with id '" + jobId + "'.", Job.class);
    }
}
// End-to-end REST check: DELETE on a timer job URL must answer 204 and remove
// the job from the job query.
@Test @Deployment(resources = { "org/flowable/rest/service/api/management/JobResourceTest.testTimerProcess.bpmn20.xml" }) public void testDeleteJob() throws Exception { ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("timerProcess"); Job timerJob = managementService.createTimerJobQuery().processInstanceId(processInstance.getId()).singleResult(); assertThat(timerJob).isNotNull(); HttpDelete httpDelete = new HttpDelete(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_TIMER_JOB, timerJob.getId())); CloseableHttpResponse response = executeRequest(httpDelete, HttpStatus.SC_NO_CONTENT); closeResponse(response); // Job should be deleted assertThat(managementService.createJobQuery().processInstanceId(processInstance.getId()).singleResult()).isNull(); }
/**
 * Closes the outbound side of the TLS connection.
 *
 * @return the future completed by {@link #closeOutbound()}
 * @deprecated use {@link #closeOutbound()} directly instead
 */
@Deprecated
public ChannelFuture close() {
    return closeOutbound();
}
// Verifies reference-count bookkeeping of an OpenSSL SslContext/SSLEngine pair:
// creating the engine bumps the context to 2, releasing the channel drops the
// engine to 0 and the context back to 1, and the final release frees it.
@Test public void testReleaseSslEngine() throws Exception { OpenSsl.ensureAvailability(); SelfSignedCertificate cert = new SelfSignedCertificate(); try { SslContext sslContext = SslContextBuilder.forServer(cert.certificate(), cert.privateKey()) .sslProvider(SslProvider.OPENSSL) .build(); try { assertEquals(1, ((ReferenceCounted) sslContext).refCnt()); SSLEngine sslEngine = sslContext.newEngine(ByteBufAllocator.DEFAULT); EmbeddedChannel ch = new EmbeddedChannel(new SslHandler(sslEngine)); assertEquals(2, ((ReferenceCounted) sslContext).refCnt()); assertEquals(1, ((ReferenceCounted) sslEngine).refCnt()); assertTrue(ch.finishAndReleaseAll()); ch.close().syncUninterruptibly(); assertEquals(1, ((ReferenceCounted) sslContext).refCnt()); assertEquals(0, ((ReferenceCounted) sslEngine).refCnt()); } finally { ReferenceCountUtil.release(sslContext); } } finally { cert.delete(); } }
/**
 * Creates a subscription bound to a service name, listener and config key.
 *
 * <p>Best-effort service lookup: when {@code NacosDynamicConfigService} is not
 * enabled, the field stays {@code null} and the failure is logged instead of
 * propagating, so construction never throws.
 *
 * @param serviceName service the subscription belongs to
 * @param listener    callback invoked on config changes
 * @param key         config key to subscribe to
 */
public DynamicConfigSubscribe(String serviceName, DynamicConfigListener listener, String key) {
    this.serviceName = serviceName;
    this.listener = listener;
    this.key = key;
    try {
        this.nacosDynamicConfigService = ServiceManager.getService(NacosDynamicConfigService.class);
    } catch (IllegalArgumentException e) {
        // Fix: log the exception itself instead of swallowing it — the message
        // alone gave no clue why the service lookup failed.
        LOGGER.log(Level.SEVERE, "nacosDynamicConfigService is not enabled!", e);
    }
}
// Full subscribe/unsubscribe lifecycle: while subscribed, publishes on three
// label groups must each reach the listener and three NacosListeners must be
// registered; after unsubscribing the listener list is empty and further
// publishes are ignored. The finally-block removes the published configs.
@Test public void testDynamicConfigSubscribe() throws NoSuchFieldException, IllegalAccessException, InterruptedException { try { // test subscription TestListener testListener = new TestListener(); dynamicConfigSubscribe = new DynamicConfigSubscribe("testServiceName", testListener, "testSubscribeKey"); Assert.assertTrue(dynamicConfigSubscribe.subscribe()); Thread.sleep(SLEEP_TIME_MILLIS); Assert.assertTrue( nacosDynamicConfigService.doPublishConfig("testSubscribeKey", "app:testApplication_environment:testEnvironment", "content:1")); checkChangeTrue(testListener, "content:1"); Assert.assertTrue( nacosDynamicConfigService.doPublishConfig("testSubscribeKey", "app:testApplication_environment:testEnvironment_service:testServiceName", "content:2")); checkChangeTrue(testListener, "content:2"); Assert.assertTrue( nacosDynamicConfigService.doPublishConfig("testSubscribeKey", "testCustomLabel:testCustomLabelValue", "content:3")); checkChangeTrue(testListener, "content:3"); Optional<Object> listeners = ReflectUtils.getFieldValueByClazz(nacosDynamicConfigService.getClass(), nacosDynamicConfigService, "listeners"); Assert.assertTrue(listeners.orElse(Collections.emptyList()) instanceof List); Assert.assertEquals(3, ((List<NacosListener>) listeners.orElse(Collections.emptyList())).size()); // test delete subscription Assert.assertTrue(dynamicConfigSubscribe.unSubscribe()); Assert.assertEquals(0, ((List<NacosListener>) listeners.orElse(Collections.emptyList())).size()); Assert.assertTrue( nacosDynamicConfigService.doPublishConfig("testSubscribeKey", "app:testApplication_environment:testEnvironment", "content:11")); checkChangeFalse(testListener); Assert.assertTrue( nacosDynamicConfigService.doPublishConfig("testSubscribeKey", "app:testApplication_environment:testEnvironment_service:testServiceName", "content:22")); checkChangeFalse(testListener); Assert.assertTrue( nacosDynamicConfigService.doPublishConfig("testSubscribeKey", "testCustomLabel:testCustomLabelValue", "content:33")); 
checkChangeFalse(testListener); } finally { nacosDynamicConfigService.doRemoveConfig("testSubscribeKey", "app:testApplication_environment:testEnvironment"); nacosDynamicConfigService.doRemoveConfig("testSubscribeKey", "app:testApplication_environment:testEnvironment_service:testServiceName"); nacosDynamicConfigService.doRemoveConfig("testSubscribeKey", "testCustomLabel:testCustomLabelValue"); } }
/**
 * Step initialization: resolves variables in every field XPath, normalizes
 * attribute paths to carry a leading '@' on their final element, validates and
 * absolutizes the loop XPath, and prepares the optional prune path.
 *
 * @return true when initialization succeeded, false on an empty loop XPath or
 *         when the superclass init fails
 */
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
  meta = (GetXMLDataMeta) smi;
  data = (GetXMLDataData) sdi;
  if ( super.init( smi, sdi ) ) {
    data.rownr = 1L;
    data.nrInputFields = meta.getInputFields().length;
    // Normalize each field's XPath once up front.
    for ( int i = 0; i < data.nrInputFields; i++ ) {
      GetXMLDataField xmlDataField = meta.getInputFields()[i];
      // Resolve variable substitution
      String XPathValue = environmentSubstitute( xmlDataField.getXPath() );
      if ( xmlDataField.getElementType() == GetXMLDataField.ELEMENT_TYPE_ATTRIBUT ) {
        // Attribute field: only the last path element gets the '@' prefix,
        // never the front of the whole path.
        int last = XPathValue.lastIndexOf( GetXMLDataMeta.N0DE_SEPARATOR );
        if ( last > -1 ) {
          last++;
          String attribut = XPathValue.substring( last, XPathValue.length() );
          if ( !attribut.startsWith( GetXMLDataMeta.AT ) ) {
            XPathValue = XPathValue.substring( 0, last ) + GetXMLDataMeta.AT + attribut;
          }
        } else {
          // Single-element path: prefix it directly when '@' is missing.
          if ( !XPathValue.startsWith( GetXMLDataMeta.AT ) ) {
            XPathValue = GetXMLDataMeta.AT + XPathValue;
          }
        }
      }
      xmlDataField.setResolvedXPath( XPathValue );
    }
    data.PathValue = environmentSubstitute( meta.getLoopXPath() );
    if ( Utils.isEmpty( data.PathValue ) ) {
      logError( BaseMessages.getString( PKG, "GetXMLData.Error.EmptyPath" ) );
      return false;
    }
    // Ensure the loop XPath is absolute (leading node separator).
    if ( !data.PathValue.substring( 0, 1 ).equals( GetXMLDataMeta.N0DE_SEPARATOR ) ) {
      data.PathValue = GetXMLDataMeta.N0DE_SEPARATOR + data.PathValue;
    }
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "GetXMLData.Log.LoopXPath", data.PathValue ) );
    }
    data.prunePath = environmentSubstitute( meta.getPrunePath() );
    if ( data.prunePath != null ) {
      if ( Utils.isEmpty( data.prunePath.trim() ) ) {
        data.prunePath = null;
      } else {
        // ensure a leading slash
        if ( !data.prunePath.startsWith( GetXMLDataMeta.N0DE_SEPARATOR ) ) {
          data.prunePath = GetXMLDataMeta.N0DE_SEPARATOR + data.prunePath;
        }
        // check if other conditions apply that do not allow pruning
        if ( meta.isInFields() ) {
          data.prunePath = null; // not possible by design, could be changed later on
        }
      }
    }
    return true;
  }
  return false;
}
// Builds a small injector -> GetXMLData -> dummy transformation, runs it, then
// calls init() directly on a fresh step instance to check that ${xml_path} in a
// field XPath is left untouched in the meta but resolved into ResolvedXPath.
@Test public void testInit() throws Exception { KettleEnvironment.init(); // // Create a new transformation... // TransMeta transMeta = new TransMeta(); transMeta.setName( "getxmldata1" ); PluginRegistry registry = PluginRegistry.getInstance(); // // create an injector step... // String injectorStepname = "injector step"; InjectorMeta im = new InjectorMeta(); // Set the information of the injector. String injectorPid = registry.getPluginId( StepPluginType.class, im ); StepMeta injectorStep = new StepMeta( injectorPid, injectorStepname, im ); transMeta.addStep( injectorStep ); // // Create a Get XML Data step // String getXMLDataName = "get xml data step"; GetXMLDataMeta gxdm = new GetXMLDataMeta(); String getXMLDataPid = registry.getPluginId( StepPluginType.class, gxdm ); StepMeta getXMLDataStep = new StepMeta( getXMLDataPid, getXMLDataName, gxdm ); transMeta.addStep( getXMLDataStep ); GetXMLDataField[] fields = new GetXMLDataField[5]; for ( int idx = 0; idx < fields.length; idx++ ) { fields[idx] = new GetXMLDataField(); } fields[0].setName( "objectid" ); fields[0].setXPath( "${xml_path}" ); fields[0].setElementType( GetXMLDataField.ELEMENT_TYPE_NODE ); fields[0].setType( ValueMetaInterface.TYPE_STRING ); fields[0].setFormat( "" ); fields[0].setLength( -1 ); fields[0].setPrecision( -1 ); fields[0].setCurrencySymbol( "" ); fields[0].setDecimalSymbol( "" ); fields[0].setGroupSymbol( "" ); fields[0].setTrimType( GetXMLDataField.TYPE_TRIM_NONE ); gxdm.setEncoding( "UTF-8" ); gxdm.setIsAFile( false ); gxdm.setInFields( true ); gxdm.setLoopXPath( "Level1/Level2/Props" ); gxdm.setXMLField( "field1" ); gxdm.setInputFields( fields ); TransHopMeta hi = new TransHopMeta( injectorStep, getXMLDataStep ); transMeta.addTransHop( hi ); // // Create a dummy step 1 // String dummyStepname1 = "dummy step 1"; DummyTransMeta dm1 = new DummyTransMeta(); String dummyPid1 = registry.getPluginId( StepPluginType.class, dm1 ); StepMeta dummyStep1 = new StepMeta( dummyPid1, dummyStepname1, 
dm1 ); transMeta.addStep( dummyStep1 ); TransHopMeta hi1 = new TransHopMeta( getXMLDataStep, dummyStep1 ); transMeta.addTransHop( hi1 ); // Now execute the transformation... Trans trans = new Trans( transMeta ); trans.prepareExecution( null ); StepInterface si = trans.getStepInterface( dummyStepname1, 0 ); RowStepCollector dummyRc1 = new RowStepCollector(); si.addRowListener( dummyRc1 ); RowProducer rp = trans.addRowProducer( injectorStepname, 0 ); trans.startThreads(); // add rows List<RowMetaAndData> inputList = createData(); Iterator<RowMetaAndData> it = inputList.iterator(); while ( it.hasNext() ) { RowMetaAndData rm = it.next(); rp.putRow( rm.getRowMeta(), rm.getData() ); } rp.finished(); trans.waitUntilFinished(); GetXMLDataData getXMLDataData = new GetXMLDataData(); GetXMLData getXmlData = new GetXMLData( dummyStep1, getXMLDataData, 0, transMeta, trans ); getXmlData.setVariable( "xml_path", "data/owner" ); getXmlData.init( gxdm, getXMLDataData ); assertEquals( "${xml_path}", gxdm.getInputFields()[0].getXPath() ); assertEquals( "data/owner", gxdm.getInputFields()[0].getResolvedXPath() ); }
/**
 * Merges the given entries into this builder's parameter map (delegating to the
 * static {@code appendParameters(Map, Map)} helper) and returns the builder for
 * chaining.
 */
public MonitorBuilder appendParameters(Map<String, String> appendParameters) {
    this.parameters = appendParameters(parameters, appendParameters);
    return getThis();
}
// Both source entries must be visible in the built monitor's parameters:
// the "default."-prefixed key is kept and the plain key keeps its own value.
@Test
void appendParameters() {
    Map<String, String> source = new HashMap<>();
    source.put("default.num", "one");
    source.put("num", "ONE");
    MonitorBuilder builder = MonitorBuilder.newBuilder();
    builder.appendParameters(source);
    Map<String, String> parameters = builder.build().getParameters();
    Assertions.assertTrue(parameters.containsKey("default.num"));
    Assertions.assertEquals("ONE", parameters.get("num"));
}
/**
 * Converts a snake_case name to lower case with the given separator, e.g.
 * {@code AB_CD -> ab-cd} for split {@code "-"}. Names that are not snake case
 * (per {@code isSnakeCase}) are returned unchanged so camelCase survives.
 *
 * @param snakeName candidate name
 * @param split     separator replacing each underscore
 * @return the converted name, or {@code snakeName} itself when not snake case
 */
public static String snakeToSplitName(String snakeName, String split) {
    // Only lower-case when the result is actually used; the original computed
    // it unconditionally and discarded it on the non-snake path.
    if (isSnakeCase(snakeName)) {
        return replace(snakeName.toLowerCase(), "_", split);
    }
    return snakeName;
}
// Snake-case and ALL-CAPS names are lowered and re-joined with '-', while
// camelCase and plain lower-case names pass through unchanged.
@Test
void testSnakeCaseToSplitName() throws Exception {
    assertEquals("ab-cd-ef", StringUtils.snakeToSplitName("ab_Cd_Ef", "-"));
    assertEquals("ab-cd-ef", StringUtils.snakeToSplitName("Ab_Cd_Ef", "-"));
    assertEquals("ab-cd-ef", StringUtils.snakeToSplitName("ab_cd_ef", "-"));
    assertEquals("ab-cd-ef", StringUtils.snakeToSplitName("AB_CD_EF", "-"));
    assertEquals("abcdef", StringUtils.snakeToSplitName("abcdef", "-"));
    assertEquals("qosEnable", StringUtils.snakeToSplitName("qosEnable", "-"));
    assertEquals("name", StringUtils.snakeToSplitName("NAME", "-"));
}
@Override
public void onMsg(TbContext ctx, TbMsg msg) {
    // Single-entity mode resolves the target from the message first and deletes
    // that one relation; otherwise all relations of the configured
    // type/direction are deleted on the DB callback executor.
    ListenableFuture<Boolean> deleteResultFuture = config.isDeleteForSingleEntity()
            ? Futures.transformAsync(getTargetEntityId(ctx, msg), targetEntityId -> deleteRelationToSpecificEntity(ctx, msg, targetEntityId), MoreExecutors.directExecutor())
            : deleteRelationsByTypeAndDirection(ctx, msg, ctx.getDbCallbackExecutor());
    withCallback(deleteResultFuture, deleted -> {
        if (deleted) {
            ctx.tellSuccess(msg);
            return;
        }
        // Deletion reported false: route the message down the failure chain.
        ctx.tellFailure(msg, new RuntimeException("Failed to delete relation(s) with originator!"));
    }, t -> ctx.tellFailure(msg, t), MoreExecutors.directExecutor());
}
// When deleteRelationAsync resolves to false, the node must report failure with
// the exact "Failed to delete relation(s) with originator!" message and never
// route the message to a success/next relation.
@Test void givenSupportedEntityType_whenOnMsgAndDeleteForSingleEntityIsFalse_thenVerifyRelationFailedToDeleteAndOutMsgFailure() throws TbNodeException { // GIVEN config.setEntityType(EntityType.DEVICE); config.setEntityNamePattern("${name}"); config.setEntityTypePattern("${type}"); var nodeConfiguration = new TbNodeConfiguration(JacksonUtil.valueToTree(config)); node.init(ctxMock, nodeConfiguration); when(ctxMock.getTenantId()).thenReturn(tenantId); when(ctxMock.getRelationService()).thenReturn(relationServiceMock); when(ctxMock.getDbCallbackExecutor()).thenReturn(dbExecutor); var relationToDelete = new EntityRelation(); when(relationServiceMock.findByFromAndTypeAsync(any(), any(), any(), any())).thenReturn(Futures.immediateFuture(List.of(relationToDelete))); when(relationServiceMock.deleteRelationAsync(any(), any())).thenReturn(Futures.immediateFuture(false)); var md = getMetadataWithNameTemplate(); var msg = getTbMsg(originatorId, md); // WHEN node.onMsg(ctxMock, msg); verify(relationServiceMock).findByFromAndTypeAsync(eq(tenantId), eq(originatorId), eq(EntityRelation.CONTAINS_TYPE), eq(RelationTypeGroup.COMMON)); verify(relationServiceMock).deleteRelationAsync(eq(tenantId), eq(relationToDelete)); var throwableCaptor = ArgumentCaptor.forClass(Throwable.class); verify(ctxMock).tellFailure(eq(msg), throwableCaptor.capture()); verify(ctxMock, never()).tellNext(any(), anyString()); verify(ctxMock, never()).tellNext(any(), anySet()); verify(ctxMock, never()).tellSuccess(any()); verifyNoMoreInteractions(ctxMock, relationServiceMock); assertThat(throwableCaptor.getValue()).isInstanceOf(RuntimeException.class).hasMessage("Failed to delete relation(s) with originator!"); }
/**
 * Evaluates a sanitized SEL expression with the given parameters and maps the
 * typed result to a plain Java object.
 *
 * @param expr   expression source (sanitized before evaluation)
 * @param params variables visible to the expression
 * @return scalar internal value for STRING/LONG/DOUBLE/BOOLEAN, the unboxed
 *         value for array/MAP results
 * @throws MaestroInvalidExpressionException for ERROR results, unsupported
 *         result types, or evaluator ExecutionExceptions
 * @throws MaestroInternalError for any other unexpected failure
 */
public Object eval(String expr, Map<String, Object> params) {
    try {
        // Extension hook is optional; evaluate with it when configured.
        Extension ext = extensionRepo == null ? null : extensionRepo.get();
        SelType result = evaluator.evaluate(sanitize(expr), params, ext);
        switch (result.type()) {
            case STRING:
            case LONG:
            case DOUBLE:
            case BOOLEAN:
                return result.getInternalVal();
            case STRING_ARRAY:
            case LONG_ARRAY:
            case DOUBLE_ARRAY:
            case BOOLEAN_ARRAY:
            case MAP:
                return result.unbox();
            case ERROR:
                throw new MaestroInvalidExpressionException(
                    "Expression throws an error [%s] for expr=[%s]", result, expr);
            default:
                throw new MaestroInvalidExpressionException(
                    "Invalid return type [%s] for expr=[%s]", result.type(), expr);
        }
    } catch (MaestroRuntimeException me) {
        // Already a domain exception (includes the two thrown above) — rethrow as-is.
        throw me;
    } catch (ExecutionException ee) {
        throw new MaestroInvalidExpressionException(
            ee, "Expression evaluation throws an exception for expr=[%s]", expr);
    } catch (Exception e) {
        throw new MaestroInternalError(
            e, "Expression evaluation is failed with an exception for expr=[%s]", expr);
    }
}
// Util.dateIntsBetween must be reachable from evaluated expressions; with step
// 7 over [20210101, 20210101 + 15) it yields three weekly dateints.
@Test
public void testIncludedMethods() {
    assertArrayEquals(
        new long[] {20210101, 20210108, 20210115},
        (long[]) evaluator.eval(
            "return Util.dateIntsBetween(startDateTime, startDateTime + 15, 7);",
            Collections.singletonMap("startDateTime", 20210101)));
}
/**
 * A code point is an inner-word letter when the base keyboard already treats it
 * as one, or when it is listed in this keyboard's additional letter exceptions.
 */
@Override
public boolean isInnerWordLetter(int keyValue) {
    if (super.isInnerWordLetter(keyValue)) {
        return true;
    }
    return mAdditionalIsLetterExceptions.contains(keyValue);
}
// Checks the inner-word-letter classification: plain letters, the generic
// quote characters, the keyboard's additional exception code points (including
// a ZWJ emoji sequence's first code point and combining marks) are letters;
// whitespace, digits and punctuation are not.
@Test public void testInnerCharacters() { ExternalAnyKeyboard keyboard = new ExternalAnyKeyboard( mDefaultAddOn, mContext, R.xml.keyboard_with_codes_as_letters, R.xml.keyboard_with_codes_as_letters, "test", R.drawable.sym_keyboard_notification_icon, 0, "en", "*&\uD83D\uDC71\u200D♂!️", "", Keyboard.KEYBOARD_ROW_MODE_NORMAL); keyboard.loadKeyboard(SIMPLE_KeyboardDimens); // sanity: known characters Assert.assertTrue(keyboard.isInnerWordLetter('a')); Assert.assertTrue(keyboard.isInnerWordLetter('b')); // known, generic, inner letters Assert.assertTrue(keyboard.isInnerWordLetter('\'')); Assert.assertTrue(keyboard.isInnerWordLetter(Dictionary.CURLY_QUOTE)); // additional Assert.assertTrue(keyboard.isInnerWordLetter('*')); Assert.assertTrue(keyboard.isInnerWordLetter('&')); Assert.assertTrue(keyboard.isInnerWordLetter(Character.codePointAt("\uD83D\uDC71\u200D♂️", 0))); Assert.assertTrue(keyboard.isInnerWordLetter('!')); // COMBINING_SPACING_MARK Assert.assertTrue(keyboard.isInnerWordLetter('ಂ')); // NON_SPACING_MARK Assert.assertTrue(keyboard.isInnerWordLetter('\u032A')); // whitespaces are not Assert.assertFalse(keyboard.isInnerWordLetter(' ')); Assert.assertFalse(keyboard.isInnerWordLetter('\n')); Assert.assertFalse(keyboard.isInnerWordLetter('\t')); // digits are not Assert.assertFalse(keyboard.isInnerWordLetter('0')); Assert.assertFalse(keyboard.isInnerWordLetter('1')); // punctuation are not Assert.assertFalse(keyboard.isInnerWordLetter('?')); Assert.assertFalse(keyboard.isInnerWordLetter('(')); Assert.assertFalse(keyboard.isInnerWordLetter('.')); Assert.assertFalse(keyboard.isInnerWordLetter(',')); Assert.assertFalse(keyboard.isInnerWordLetter(':')); Assert.assertFalse(keyboard.isInnerWordLetter('-')); }
@Override
public IcebergEnumeratorState snapshotState(long checkpointId) {
    // Capture the enumerator position, the assigner's pending-split state and
    // the enumeration history in a single checkpoint object.
    return new IcebergEnumeratorState(
        enumeratorPosition.get(), assigner.state(), enumerationHistory.snapshot());
}
// With no reader registered, a discovered split must stay in the enumerator's
// checkpoint state as a pending UNASSIGNED split rather than being lost.
@Test public void testDiscoverSplitWhenNoReaderRegistered() throws Exception { TestingSplitEnumeratorContext<IcebergSourceSplit> enumeratorContext = new TestingSplitEnumeratorContext<>(4); ScanContext scanContext = ScanContext.builder() .streaming(true) .startingStrategy(StreamingStartingStrategy.TABLE_SCAN_THEN_INCREMENTAL) .build(); ManualContinuousSplitPlanner splitPlanner = new ManualContinuousSplitPlanner(scanContext, 0); ContinuousIcebergEnumerator enumerator = createEnumerator(enumeratorContext, scanContext, splitPlanner); Collection<IcebergSourceSplitState> pendingSplitsEmpty = enumerator.snapshotState(1).pendingSplits(); assertThat(pendingSplitsEmpty).isEmpty(); // make one split available and trigger the periodic discovery List<IcebergSourceSplit> splits = SplitHelpers.createSplitsFromTransientHadoopTable(temporaryFolder, 1, 1); splitPlanner.addSplits(splits); enumeratorContext.triggerAllActions(); Collection<IcebergSourceSplitState> pendingSplits = enumerator.snapshotState(2).pendingSplits(); assertThat(pendingSplits).hasSize(1); IcebergSourceSplitState pendingSplit = pendingSplits.iterator().next(); assertThat(pendingSplit.split().splitId()).isEqualTo(splits.get(0).splitId()); assertThat(pendingSplit.status()).isEqualTo(IcebergSourceSplitStatus.UNASSIGNED); }
/**
 * Converts a JSON object string into a GET query string
 * ({@code k1=v1&k2=v2...}), URL-decoding each value.
 *
 * @param json JSON object whose entries become query parameters; blank input
 *             yields {@code EMPTY}
 * @return the joined query string without a trailing separator
 */
public String toGetParam(final String json) {
    if (StringUtils.isBlank(json)) {
        return EMPTY;
    }
    final Map<String, String> map = toStringMap(json);
    StringBuilder stringBuilder = new StringBuilder();
    map.forEach((k, v) -> {
        try {
            stringBuilder.append(k)
                    .append(EQUAL_SIGN)
                    .append(URLDecoder.decode(v, Constants.DECODE))
                    .append(AND);
        } catch (UnsupportedEncodingException e) {
            // Best-effort: skip the undecodable entry but keep the rest.
            LOG.error("decode:{} failed !", v, e);
        }
    });
    final String joined = stringBuilder.toString();
    // Strip the trailing separator. Guard against an empty result (empty JSON
    // object, or every value failed to decode): the original
    // substring(0, lastIndexOf(AND)) threw StringIndexOutOfBoundsException
    // because lastIndexOf returned -1.
    final int lastSeparator = joined.lastIndexOf(AND);
    return lastSeparator < 0 ? joined : joined.substring(0, lastSeparator);
}
// Round-trips a map through JSON into a query string (order-independent via a
// map comparison, including a non-ASCII value) and checks blank input -> "".
@Test public void testToGetParam() { Map<String, String> param = ImmutableMap.of("id", "123", "name", "test", "data", "测试"); String json = GsonUtils.getGson().toJson(param, new TypeToken<Map<String, String>>() { }.getType()); String resultParam = GsonUtils.getInstance().toGetParam(json); Map<String, String> resultMap = Arrays.stream(resultParam.split("&")) .collect(Collectors.toMap(s -> s.split("=")[0], s -> s.split("=")[1])); param.forEach((key, value) -> { assertTrue(resultMap.containsKey(key)); assertEquals(value, resultMap.get(key)); }); assertEquals("", GsonUtils.getInstance().toGetParam("")); }
/**
 * Pages through roles, optionally filtered by exact username and/or role name.
 * Blank filters are ignored; values are bound as JDBC ? parameters.
 */
@Override
public Page<RoleInfo> getRolesByUserNameAndRoleName(String username, String role, int pageNo, int pageSize) {
    AuthPaginationHelper<RoleInfo> helper = createPaginationHelper();
    String sqlCountRows = "SELECT count(*) FROM roles ";
    String sqlFetchRows = "SELECT role,username FROM roles ";
    // "WHERE 1 = 1" lets the optional predicates be appended uniformly with AND.
    StringBuilder where = new StringBuilder(" WHERE 1 = 1 ");
    List<String> params = new ArrayList<>();
    if (StringUtils.isNotBlank(username)) {
        where.append(" AND username = ? ");
        params.add(username);
    }
    if (StringUtils.isNotBlank(role)) {
        where.append(" AND role = ? ");
        params.add(role);
    }
    return helper.fetchPage(sqlCountRows + where, sqlFetchRows + where, params.toArray(), pageNo, pageSize, ROLE_INFO_ROW_MAPPER);
}
// Smoke test: the paging query must return a non-null page even when no row
// matches the given username/role.
@Test
void testGetRolesByUserName() {
    Page<RoleInfo> page = embeddedRolePersistService.getRolesByUserNameAndRoleName("userName", "roleName", 1, 10);
    assertNotNull(page);
}
/**
 * Maps the first three digits of a product code to a country identifier, or
 * null when no registered range contains that prefix.
 *
 * <p>Assumes {@code ranges} is sorted ascending — the early {@code null}
 * return relies on it (TODO confirm against the range-initialization code).
 */
String lookupCountryIdentifier(String productCode) {
    initIfNeeded();
    int prefix = Integer.parseInt(productCode.substring(0, 3));
    for (int index = 0; index < ranges.size(); index++) {
        int[] range = ranges.get(index);
        int rangeStart = range[0];
        if (prefix < rangeStart) {
            // Ranges ascend, so no later range can contain the prefix either.
            return null;
        }
        // A one-element range denotes a single prefix value.
        int rangeEnd = range.length == 1 ? rangeStart : range[1];
        if (prefix <= rangeEnd) {
            return countryIdentifiers.get(index);
        }
    }
    return null;
}
// Spot checks of the prefix -> country mapping: 472 has no registered range,
// 000 is US/CA, 958 is MO, and both boundaries of the GB range (500/509) match.
@Test
public void testLookup() {
    EANManufacturerOrgSupport support = new EANManufacturerOrgSupport();
    assertNull(support.lookupCountryIdentifier("472000"));
    assertEquals("US/CA", support.lookupCountryIdentifier("000000"));
    assertEquals("MO", support.lookupCountryIdentifier("958000"));
    assertEquals("GB", support.lookupCountryIdentifier("500000"));
    assertEquals("GB", support.lookupCountryIdentifier("509000"));
}
/**
 * Reads the application name attachment from the invocation.
 *
 * @param invocation   invocation to inspect; must be non-null with a non-null
 *                     attachments map
 * @param defaultValue value returned when the attachment is absent
 * @return the attached application name or {@code defaultValue}
 * @throws IllegalArgumentException when the invocation or its attachments are null
 */
public static String getApplication(Invocation invocation, String defaultValue) {
    boolean invalid = invocation == null || invocation.getAttachments() == null;
    if (invalid) {
        throw new IllegalArgumentException("Bad invocation instance");
    }
    return invocation.getAttachment(DUBBO_APPLICATION_KEY, defaultValue);
}
// A null attachments map must make getApplication throw IllegalArgumentException.
// NOTE(review): the getAttachment stub below is dead code — the method throws
// before any attachment is read — and could be removed (strict Mockito would flag it).
@Test(expected = IllegalArgumentException.class)
public void testGetApplicationNoAttachments() {
    Invocation invocation = mock(Invocation.class);
    when(invocation.getAttachments()).thenReturn(null);
    when(invocation.getAttachment(DubboUtils.DUBBO_APPLICATION_KEY, "")).thenReturn("consumerA");
    DubboUtils.getApplication(invocation, "");
    fail("No attachments in invocation, IllegalArgumentException should be thrown!");
}
/**
 * Records the outcome of a mail send attempt on the log row: on success stores
 * the provider message id, on failure stores the root-cause message.
 */
@Override
public void updateMailSendResult(Long logId, String messageId, Exception exception) {
    // 1. Success path: mark SUCCESS and keep the provider message id.
    if (exception == null) {
        mailLogMapper.updateById(new MailLogDO().setId(logId).setSendTime(LocalDateTime.now())
            .setSendStatus(MailSendStatusEnum.SUCCESS.getStatus()).setSendMessageId(messageId));
        return;
    }
    // 2. Failure path: mark FAILURE and persist the root-cause message.
    mailLogMapper.updateById(new MailLogDO().setId(logId).setSendTime(LocalDateTime.now())
        .setSendStatus(MailSendStatusEnum.FAILURE.getStatus()).setSendException(getRootCauseMessage(exception)));
}
// The failure path must flip the log row to FAILURE, stamp the send time, keep
// the message id null, and store the root-cause message text.
@Test
public void testUpdateMailSendResult_exception() {
    // Mock data: a log row still in INIT state with no send result recorded
    MailLogDO log = randomPojo(MailLogDO.class, o -> {
        o.setSendStatus(MailSendStatusEnum.INIT.getStatus());
        o.setSendTime(null).setSendMessageId(null).setSendException(null)
            .setTemplateParams(randomTemplateParams());
    });
    mailLogMapper.insert(log);
    // Prepare parameters
    Long logId = log.getId();
    Exception exception = new NullPointerException("测试异常");
    // Invoke
    mailLogService.updateMailSendResult(logId, null, exception);
    // Assert
    MailLogDO dbLog = mailLogMapper.selectById(logId);
    assertEquals(MailSendStatusEnum.FAILURE.getStatus(), dbLog.getSendStatus());
    assertNotNull(dbLog.getSendTime());
    assertNull(dbLog.getSendMessageId());
    assertEquals("NullPointerException: 测试异常", dbLog.getSendException());
}
/**
 * Runs the task immediately on the calling thread; rejects {@code null}.
 */
@Override
public void execute(Runnable command) {
    Runnable task = ObjectUtil.checkNotNull(command, "command");
    task.run();
}
// ImmediateExecutor must run the task synchronously: by the time execute()
// returns, the future is done, not cancelled, and yields its null result.
@Test
public void testExecuteNonNullRunnable() throws Exception {
    FutureTask<Void> task = new FutureTask<Void>(new Runnable() {
        @Override
        public void run() {
            // NOOP
        }
    }, null);
    ImmediateExecutor.INSTANCE.execute(task);
    assertTrue(task.isDone());
    assertFalse(task.isCancelled());
    assertNull(task.get());
}
/**
 * Splits source text into individual statements across block boundaries,
 * discarding empty fragments and single-line comments left over from the split.
 */
public static List<String> splitStatementsAcrossBlocks(CharSequence string) {
    return codeAwareSplitOnChar(string, false, true, ';', '\n', '{', '}')
            .stream()
            .filter(stmt -> !stmt.isEmpty() && !stmt.startsWith("//"))
            .collect(Collectors.toList());
}
// Statements inside a commented-out if-block must still be extracted; the
// commented "// if" and "// }" lines themselves are filtered out.
@Test public void splitStatementsAcrossBlocksCommentedIf() { String text = "// if (true) {\n" + " $fact.value1 = 2;\n" + " drools.update($fact);\n" + "// }"; List<String> statements = splitStatementsAcrossBlocks(text); assertThat(statements.get(0)).isEqualTo("$fact.value1 = 2"); assertThat(statements.get(1)).isEqualTo("drools.update($fact)"); }
/**
 * Returns the start time of this pane, in milliseconds.
 *
 * @return the value of {@code startInMs}
 */
public long getStartInMs() {
    return this.startInMs;
}
// NOTE(review): this asserts against a Mockito stub of getStartInMs(), so it
// exercises the mock rather than the real Pane accessor — consider testing a
// concrete Pane instance instead.
@Test
void testStartInMs() {
    Pane<?> pane = mock(Pane.class);
    long startTime = System.currentTimeMillis();
    when(pane.getStartInMs()).thenReturn(startTime);
    assertEquals(startTime, pane.getStartInMs());
}
/**
 * Ensures no other member level record uses the given level value; a record
 * with the same id as {@code id} is allowed (it is the one being updated).
 *
 * @throws the LEVEL_VALUE_EXISTS service exception on a duplicate
 */
@VisibleForTesting
void validateLevelUnique(List<MemberLevelDO> list, Long id, Integer level) {
    for (MemberLevelDO existing : list) {
        if (ObjUtil.notEqual(existing.getLevel(), level)) {
            continue;
        }
        // Same level value: only acceptable when it is the record under update.
        boolean isSameRecord = id != null && id.equals(existing.getId());
        if (!isSameRecord) {
            throw exception(LEVEL_VALUE_EXISTS, existing.getLevel(), existing.getName());
        }
    }
}
// A different record already holding the same level value must trigger the
// LEVEL_VALUE_EXISTS exception carrying that record's level and name.
@Test
public void testUpdateLevel_levelUnique() {
    // Prepare parameters
    Long id = randomLongId();
    Integer level = randomInteger();
    String name = randomString();
    // Mock data: persist an existing record with the same level value
    memberlevelMapper.insert(randomLevelDO(o -> {
        o.setLevel(level);
        o.setName(name);
    }));
    // Invoke and assert the duplicate-level exception
    List<MemberLevelDO> list = memberlevelMapper.selectList();
    assertServiceException(() -> levelService.validateLevelUnique(list, id, level),
        LEVEL_VALUE_EXISTS, level, name);
}
/**
 * Builds the selector parent path for a plugin:
 * {@code SELECTOR_PARENT + PATH_SEPARATOR + pluginName}.
 */
public static String buildSelectorParentPath(final String pluginName) {
    // Equivalent to String.join(PATH_SEPARATOR, SELECTOR_PARENT, pluginName).
    return SELECTOR_PARENT + PATH_SEPARATOR + pluginName;
}
// The selector parent path must be SELECTOR_PARENT joined with the plugin name
// by the path separator, for an arbitrary alphanumeric plugin name.
@Test
public void testBuildSelectorParentPath() {
    String pluginName = RandomStringUtils.randomAlphanumeric(10);
    String selectorParentPath = DefaultPathConstants.buildSelectorParentPath(pluginName);
    assertThat(selectorParentPath, notNullValue());
    assertThat(String.join(SEPARATOR, SELECTOR_PARENT, pluginName), equalTo(selectorParentPath));
}
/**
 * Creates a registry node directory.
 *
 * @param path          registry path to create
 * @param createParents when true, missing parent directories are created too
 * @return false when the path already exists (no-op), true when it was created
 * @throws PathNotFoundException when {@code createParents} is false and the
 *         parent exists but is not a directory
 */
@Override
public boolean mknode(String path, boolean createParents) throws PathNotFoundException, InvalidPathnameException, IOException {
    Path registryPath = makePath(path);
    // getFileStatus throws FileNotFound if the path doesn't exist. If the
    // file already exists, return.
    try {
        fs.getFileStatus(registryPath);
        return false;
    } catch (FileNotFoundException e) {
        // Expected: the path does not exist yet — fall through and create it.
    }
    if (createParents) {
        // By default, mkdirs creates any parent dirs it needs
        fs.mkdirs(registryPath);
    } else {
        FileStatus parentStatus = null;
        if (registryPath.getParent() != null) {
            parentStatus = fs.getFileStatus(registryPath.getParent());
        }
        // A null parent means the path is at the root, which always "exists".
        if (registryPath.getParent() == null || parentStatus.isDirectory()) {
            fs.mkdirs(registryPath);
        } else {
            throw new PathNotFoundException("no parent for " + path);
        }
    }
    return true;
}
// mknode on an already existing path must be a no-op returning false,
// regardless of the createParents flag.
@Test
public void testMkNodeAlreadyExists() throws IOException {
    System.out.println("pre-create test path");
    fs.mkdirs(new Path("test/registryTestNode"));
    System.out.println(
        "Try to mknode existing path -- should be noop and return false");
    Assert.assertFalse(registry.mknode("test/registryTestNode", true));
    Assert.assertFalse(registry.mknode("test/registryTestNode", false));
}
/**
 * Runs every registered pre-measures-computation check; a check failure is
 * converted into a generic CE task message instead of aborting the step.
 */
@Override
public void execute(Context context) {
    PreMeasuresComputationCheck.Context extensionContext = new ContextImpl();
    for (PreMeasuresComputationCheck extension : extensions) {
        try {
            extension.onCheck(extensionContext);
        } catch (PreMeasuresComputationCheckException e) {
            // Surface the failed check to the user as a task message.
            CeTaskMessages.Message message =
                new CeTaskMessages.Message(e.getMessage(), System2.INSTANCE.now(), MessageType.GENERIC);
            ceTaskMessages.add(message);
        }
    }
}
// The Context handed to each check must expose the branch taken from the
// analysis metadata holder.
@Test
public void context_contains_branch_from_analysis_metadata_holder() throws PreMeasuresComputationCheckException {
    mockBranch("branchName");
    PreMeasuresComputationCheck check = mock(PreMeasuresComputationCheck.class);
    newStep(check).execute(new TestComputationStepContext());
    ArgumentCaptor<Context> contextArgumentCaptor = ArgumentCaptor.forClass(Context.class);
    verify(check).onCheck(contextArgumentCaptor.capture());
    assertThat(contextArgumentCaptor.getValue().getBranch().getName()).isEqualTo("branchName");
}
/**
 * Decodes complete MySQL binlog events from the inbound buffer. Only proceeds
 * while at least the status byte plus an event header is readable; incomplete
 * events rewind the reader index (via checkEventIntegrity) and wait for more
 * bytes. Depending on {@code decodeWithTX}, events are grouped by transaction
 * or emitted individually; the per-event checksum trailer is always skipped.
 */
@Override
protected void decode(final ChannelHandlerContext ctx, final ByteBuf in, final List<Object> out) {
    while (in.readableBytes() >= 1 + MySQLBinlogEventHeader.MYSQL_BINLOG_EVENT_HEADER_LENGTH) {
        // Mark so an incomplete event can be re-read once more bytes arrive.
        in.markReaderIndex();
        MySQLPacketPayload payload = new MySQLPacketPayload(in, ctx.channel().attr(CommonConstants.CHARSET_ATTRIBUTE_KEY).get());
        checkPayload(payload);
        MySQLBinlogEventHeader binlogEventHeader = new MySQLBinlogEventHeader(payload, binlogContext.getChecksumLength());
        if (!checkEventIntegrity(in, binlogEventHeader)) {
            return;
        }
        Optional<MySQLBaseBinlogEvent> binlogEvent = decodeEvent(binlogEventHeader, payload);
        if (!binlogEvent.isPresent()) {
            // Unhandled event type: consume its checksum and wait for the next call.
            skipChecksum(binlogEventHeader.getEventType(), in);
            return;
        }
        if (binlogEvent.get() instanceof PlaceholderBinlogEvent) {
            // Placeholders bypass transaction grouping and are emitted directly.
            out.add(binlogEvent.get());
            skipChecksum(binlogEventHeader.getEventType(), in);
            return;
        }
        if (decodeWithTX) {
            processEventWithTX(binlogEvent.get(), out);
        } else {
            processEventIgnoreTX(binlogEvent.get(), out);
        }
        skipChecksum(binlogEventHeader.getEventType(), in);
    }
}
/**
 * Decodes a captured WRITE_ROWS binlog packet (plus a follow-up packet) and
 * checks the resulting event type and the decoded column values.
 */
@Test
void assertDecodeWriteRowEvent() {
    ByteBuf byteBuf = ByteBufAllocator.DEFAULT.buffer();
    // the hex data is from INSERT INTO t_order(order_id, user_id, status, t_numeric) VALUES (1, 1, 'SUCCESS',null);
    byteBuf.writeBytes(StringUtil.decodeHexDump("007a36a9621e0100000038000000bb7c000000007b00000000000100020004ff08010000000000000001000000075355434345535365eff9ff"));
    byteBuf.writeBytes(StringUtil.decodeHexDump("006acb656410010000001f000000fa29000000001643000000000000b13f8340"));
    // table id 123 must resolve to the mocked table-map packet for column metadata
    binlogContext.getTableMap().put(123L, tableMapEventPacket);
    when(tableMapEventPacket.getColumnDefs()).thenReturn(columnDefs);
    List<Object> decodedEvents = new LinkedList<>();
    binlogEventPacketDecoder.decode(channelHandlerContext, byteBuf, decodedEvents);
    assertThat(decodedEvents.size(), is(1));
    LinkedList<?> actualEventList = (LinkedList<?>) decodedEvents.get(0);
    assertThat(actualEventList.get(0), instanceOf(MySQLWriteRowsBinlogEvent.class));
    MySQLWriteRowsBinlogEvent actual = (MySQLWriteRowsBinlogEvent) actualEventList.get(0);
    assertThat(actual.getAfterRows().get(0), is(new Serializable[]{1L, 1, new MySQLBinaryString("SUCCESS".getBytes()), null}));
}
/**
 * Builds the serde for the given format and schema, wrapping any failure in a
 * {@link SchemaNotSupportedException} that names the target, the format and
 * the offending schema.
 *
 * @param target human-readable name of what the serde is for (used in errors)
 * @param formatInfo format name plus format-specific properties
 * @param schema the persistence schema the serde must support
 * @param isKey whether this serde is for the key (vs. the value)
 */
Serde<List<?>> createFormatSerde(
    final String target,
    final FormatInfo formatInfo,
    final PersistenceSchema schema,
    final KsqlConfig ksqlConfig,
    final Supplier<SchemaRegistryClient> schemaRegistryClientFactory,
    final boolean isKey
) {
    final Format format = formatFactory.apply(formatInfo);
    try {
        return format
            .getSerde(schema, formatInfo.getProperties(), ksqlConfig, schemaRegistryClientFactory, isKey
        );
    } catch (final Exception e) {
        // re-throw with full context so the user can see which schema/format failed
        throw new SchemaNotSupportedException(target + " format does not support schema."
            + System.lineSeparator()
            + "format: " + format.name()
            + System.lineSeparator()
            + "schema: " + schema
            + System.lineSeparator()
            + "reason: " + e.getMessage(),
            e
        );
    }
}
/**
 * A failure inside Format.getSerde must surface as SchemaNotSupportedException
 * whose message contains the target, format name, schema and root cause.
 */
@Test
public void shouldThrowIfGetSerdeThrows() {
    // Given:
    when(format.getSerde(any(), any(), any(), any(), eq(false))).thenThrow(new RuntimeException("boom"));
    // When:
    final Exception actual = assertThrows(
        SchemaNotSupportedException.class,
        () -> serdeFactory
            .createFormatSerde("Target-A", formatInfo, schema, config, srClientFactory, false)
    );
    // Then:
    assertThat(actual.getMessage(), is("Target-A format does not support schema."
        + System.lineSeparator()
        + "format: FormatName"
        + System.lineSeparator()
        + "schema: schemaText"
        + System.lineSeparator()
        + "reason: boom"
    ));
}
/**
 * Interprets a Flink scala-shell snippet, propagating the Zeppelin context
 * (GUI and note GUI) and temporarily swapping the thread context classloader
 * to the Flink scala-shell loader so codegen can resolve shell-defined
 * classes. The original classloader is always restored.
 */
@Override
public InterpreterResult interpret(String st, InterpreterContext context) throws InterpreterException {
    LOGGER.debug("Interpret code: {}", st);
    this.z.setInterpreterContext(context);
    this.z.setGui(context.getGui());
    this.z.setNoteGui(context.getNoteGui());
    // set ClassLoader of current Thread to be the ClassLoader of Flink scala-shell,
    // otherwise codegen will fail to find classes defined in scala-shell
    ClassLoader originClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getFlinkScalaShellLoader());
        createPlannerAgain();
        setParallelismIfNecessary(context);
        setSavepointPathIfNecessary(context);
        return innerIntp.interpret(st, context);
    } finally {
        // always restore the caller's classloader, even on failure
        Thread.currentThread().setContextClassLoader(originClassLoader);
    }
}
/**
 * End-to-end streaming word count through the Flink interpreter: builds a
 * small stream, runs flatMap/map/keyBy/sum, and checks the printed counts.
 */
@Test
void testStreamWordCount() throws InterpreterException, IOException {
    InterpreterContext context = getInterpreterContext();
    InterpreterResult result = interpreter.interpret(
        "val data = senv.fromElements(\"hello world\", \"hello flink\", \"hello hadoop\")", context);
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    context = getInterpreterContext();
    result = interpreter.interpret(
        "data.flatMap(line => line.split(\"\\\\s\"))\n" +
            "  .map(w => (w, 1))\n" +
            "  .keyBy(0)\n" +
            "  .sum(1)\n" +
            "  .print()\n" +
            "senv.execute()",
        context);
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    String[] expectedCounts = {"(hello,3)", "(world,1)", "(flink,1)", "(hadoop,1)"};
    String output = context.out.toInterpreterResultMessage().get(0).getData();
    for (String expectedCount : expectedCounts) {
        // pass the full output as the failure message for easier debugging
        assertTrue(output.contains(expectedCount), output);
    }
}
/**
 * Evaluates each authorization context against the configured strategy.
 * A null or empty list is a no-op; the strategy is expected to throw when a
 * context is denied.
 *
 * @param contexts contexts to evaluate, may be {@code null} or empty
 */
public void evaluate(List<AuthorizationContext> contexts) {
    if (CollectionUtils.isEmpty(contexts)) {
        return;
    }
    for (AuthorizationContext context : contexts) {
        this.authorizationStrategy.evaluate(context);
    }
}
/**
 * Exercises ACL precedence for user "test": a global ALLOW, a Topic:* DENY,
 * a Topic:test* ALLOW and a Topic:test-1 DENY. The most specific matching
 * rule should win: test-1 is denied, test-2 allowed, abc denied, and the
 * group resource falls back to the global ALLOW.
 */
@Test
public void evaluate5() {
    // skipped on macOS, presumably due to an environment limitation — TODO confirm
    if (MixAll.isMac()) {
        return;
    }
    User user = User.of("test", "test");
    this.authenticationMetadataManager.createUser(user).join();
    Acl acl = AuthTestHelper.buildAcl("User:test", "*", "Pub,Sub", "192.168.0.0/24", Decision.ALLOW);
    this.authorizationMetadataManager.createAcl(acl).join();
    acl = AuthTestHelper.buildAcl("User:test", "Topic:*", "Pub,Sub", "192.168.0.0/24", Decision.DENY);
    this.authorizationMetadataManager.updateAcl(acl).join();
    acl = AuthTestHelper.buildAcl("User:test", "Topic:test*", "Pub,Sub", "192.168.0.0/24", Decision.ALLOW);
    this.authorizationMetadataManager.updateAcl(acl).join();
    acl = AuthTestHelper.buildAcl("User:test", "Topic:test-1", "Pub,Sub", "192.168.0.0/24", Decision.DENY);
    this.authorizationMetadataManager.updateAcl(acl).join();
    // exact-match DENY on Topic:test-1 beats the Topic:test* ALLOW
    Assert.assertThrows(AuthorizationException.class, () -> {
        Subject subject = Subject.of("User:test");
        Resource resource = Resource.ofTopic("test-1");
        Action action = Action.PUB;
        String sourceIp = "192.168.0.1";
        DefaultAuthorizationContext context = DefaultAuthorizationContext.of(subject, resource, action, sourceIp);
        context.setRpcCode("10");
        this.evaluator.evaluate(Collections.singletonList(context));
    });
    // Topic:test* ALLOW applies to test-2; no exception expected
    {
        Subject subject = Subject.of("User:test");
        Resource resource = Resource.ofTopic("test-2");
        Action action = Action.PUB;
        String sourceIp = "192.168.0.1";
        DefaultAuthorizationContext context = DefaultAuthorizationContext.of(subject, resource, action, sourceIp);
        context.setRpcCode("10");
        this.evaluator.evaluate(Collections.singletonList(context));
    }
    // Topic:* DENY applies to a topic that matches no more specific rule
    Assert.assertThrows(AuthorizationException.class, () -> {
        Subject subject = Subject.of("User:test");
        Resource resource = Resource.ofTopic("abc");
        Action action = Action.PUB;
        String sourceIp = "192.168.0.1";
        DefaultAuthorizationContext context = DefaultAuthorizationContext.of(subject, resource, action, sourceIp);
        context.setRpcCode("10");
        this.evaluator.evaluate(Collections.singletonList(context));
    });
    // group resources are not covered by the Topic rules, so the global ALLOW applies
    {
        Subject subject = Subject.of("User:test");
        Resource resource = Resource.ofGroup("test-2");
        Action action = Action.SUB;
        String sourceIp = "192.168.0.1";
        DefaultAuthorizationContext context = DefaultAuthorizationContext.of(subject, resource, action, sourceIp);
        context.setRpcCode("10");
        this.evaluator.evaluate(Collections.singletonList(context));
    }
}
/**
 * Instantiates the named class via its no-arg declared constructor.
 * <p>
 * Reflection failures are wrapped in {@link IllegalStateException}. Note that
 * {@code forName} is a local helper; a missing class is presumably reported by
 * it rather than by this catch list — TODO confirm against forName().
 *
 * @param name fully-qualified class name
 * @return a new instance of the class
 * @throws IllegalStateException if reflective instantiation fails
 */
public static Object newInstance(String name) {
    try {
        return forName(name).getDeclaredConstructor().newInstance();
    } catch (InstantiationException | IllegalAccessException | InvocationTargetException
        | NoSuchMethodException e) {
        throw new IllegalStateException(e.getMessage(), e);
    }
}
/**
 * Instantiating a non-existent class must fail with IllegalStateException.
 */
@Test
void testNewInstance2() {
    Assertions.assertThrows(
        IllegalStateException.class,
        () -> ClassUtils.newInstance("org.apache.dubbo.common.compiler.support.internal.NotExistsImpl"));
}
/**
 * Returns {@code object} unless it is considered null (per {@code isNull}),
 * in which case {@code defaultValue} is returned instead.
 *
 * @param <T>          the value type
 * @param object       the value to test, may be {@code null}
 * @param defaultValue the fallback when {@code object} is null
 * @return {@code object} if non-null, otherwise {@code defaultValue}
 */
public static <T> T defaultIfNull(final T object, final T defaultValue) {
    if (isNull(object)) {
        return defaultValue;
    }
    return object;
}
/**
 * Covers the supplier-based defaultIfNull overload: both a non-null source
 * (mapped value used) and a null source (default used) must yield non-null
 * results, for strings and for custom objects.
 */
@Test
public void defaultIfNullTest() {
    final String nullValue = null;
    final String dateStr = "2020-10-23 15:12:30";
    // non-null source: the mapping function is applied
    Instant result1 = ObjectUtil.defaultIfNull(dateStr,
        (source) -> DateUtil.parse(source, DatePattern.NORM_DATETIME_PATTERN).toInstant(), Instant.now());
    assertNotNull(result1);
    // null source: falls back to the supplied default
    Instant result2 = ObjectUtil.defaultIfNull(nullValue,
        (source) -> DateUtil.parse(source, DatePattern.NORM_DATETIME_PATTERN).toInstant(), Instant.now());
    assertNotNull(result2);
    Obj obj = new Obj();
    Obj objNull = null;
    String result3 = ObjectUtil.defaultIfNull(obj, (a) -> obj.doSomeThing(), "fail");
    assertNotNull(result3);
    String result4 = ObjectUtil.defaultIfNull(objNull, Obj::doSomeThing, "fail");
    assertNotNull(result4);
}
/**
 * Matches when the numeric value of the message field is strictly greater
 * than the rule's value; the rule's inverted flag flips the result. Returns
 * {@code false} when either side cannot be parsed as a double.
 */
@Override
public boolean match(Message msg, StreamRule rule) {
    final Double messageValue = getDouble(msg.getField(rule.getField()));
    if (messageValue == null) {
        return false;
    }
    final Double ruleValue = getDouble(rule.getValue());
    if (ruleValue == null) {
        return false;
    }
    final boolean greaterThan = messageValue > ruleValue;
    // XOR with the inverted flag flips the outcome for inverted rules
    return rule.getInverted() ^ greaterThan;
}
/**
 * Strictly-greater-than semantics: equal values must NOT match.
 */
@Test
public void testMissedDoubleMatchWithEqualValues() {
    StreamRule rule = getSampleRule();
    rule.setValue("-9001.45");
    Message msg = getSampleMessage();
    msg.addField("something", "-9001.45");
    StreamRuleMatcher matcher = getMatcher(rule);
    assertFalse(matcher.match(msg, rule));
}
/**
 * Parses a configuration-qualifier string into {@code out}.
 * Delegates with the third argument set to {@code true} — presumably enabling
 * default/compat handling; verify against the three-arg overload.
 *
 * @return {@code true} if the string was parsed successfully
 */
public static boolean parse(final String str, ResTable_config out) {
    return parse(str, out, true);
}
/**
 * Parsing the "square" qualifier must set the square orientation constant.
 */
@Test
public void parse_orientation_square() {
    ResTable_config config = new ResTable_config();
    ConfigDescription.parse("square", config);
    assertThat(config.orientation).isEqualTo(ORIENTATION_SQUARE);
}
/**
 * Parses this URI's query component into key/value pairs.
 * Decoding of percent-escapes is delegated to {@code URIUtils.parseQueryString}.
 *
 * @return the parsed query parameters
 */
public Map<String, String> getQueryMap() {
    return URIUtils.parseQueryString(mUri.getQuery());
}
/**
 * Query-string parsing must percent-decode keys and values and preserve
 * literal characters that need no escaping.
 */
@Test
public void basicConstructorQuery() {
    /*
     * Some encodings:
     * '&' -> %26
     * '=' -> %3D
     * ' ' -> %20
     * '%' -> %25
     * '+' -> %2B
     */
    String queryPart = "k1=v1&k2= spaces &k3=%3D%20escapes %20%25%26%2B&!@#$^*()-_=[]{};\"'<>,./";
    AlluxioURI uri = new AlluxioURI("hdfs://localhost/a?" + queryPart);
    Map<String, String> queryMap = uri.getQueryMap();
    assertEquals(4, queryMap.size());
    assertEquals("v1", queryMap.get("k1"));
    assertEquals(" spaces ", queryMap.get("k2"));
    assertEquals("= escapes %&+", queryMap.get("k3"));
    assertEquals("[]{};\"'<>,./", queryMap.get("!@#$^*()-_"));
}
/**
 * Two proxy-info instances are equal when their configuration, start time,
 * uptime and version all match.
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (!(o instanceof AlluxioProxyInfo)) {
        return false;
    }
    final AlluxioProxyInfo other = (AlluxioProxyInfo) o;
    // compare cheap primitive fields first; Objects.equal is null-safe
    return mStartTimeMs == other.mStartTimeMs
        && mUptimeMs == other.mUptimeMs
        && Objects.equal(mConfiguration, other.mConfiguration)
        && Objects.equal(mVersion, other.mVersion);
}
/**
 * Delegates to the shared equals/hashCode contract checker for AlluxioProxyInfo.
 */
@Test
public void equals() {
    alluxio.test.util.CommonUtils.testEquals(AlluxioProxyInfo.class);
}
protected boolean isThrottled() { // Throttle logging the HotKeyDetection to every 5 minutes. long nowMs = clock.currentTimeMillis(); if (nowMs - prevHotKeyDetectionLogMs < loggingPeriod.getMillis()) { return true; } prevHotKeyDetectionLogMs = nowMs; return false; }
/**
 * Within each 5-minute window the first isThrottled() call returns false
 * (logging allowed) and subsequent calls return true (throttled); advancing
 * the clock by the period re-opens the window.
 */
@Test
public void throttlesLoggingHotKeyMessage() {
    HotKeyLogger hotKeyLogger = new HotKeyLogger(clock);
    clock.setTime(Clock.SYSTEM.currentTimeMillis());
    assertFalse(hotKeyLogger.isThrottled());
    assertTrue(hotKeyLogger.isThrottled());
    // The logger throttles per 5-minute window: the first call in a window is
    // NOT throttled (false) and also resets the window; the second call is
    // throttled (true).
    clock.setTime(clock.currentTimeMillis() + Duration.standardMinutes(5L).getMillis());
    assertFalse(hotKeyLogger.isThrottled());
    assertTrue(hotKeyLogger.isThrottled());
    // Test that the state variable is set and can log again in 5 minutes.
    clock.setTime(clock.currentTimeMillis() + Duration.standardMinutes(5L).getMillis());
    assertFalse(hotKeyLogger.isThrottled());
    assertTrue(hotKeyLogger.isThrottled());
}
/**
 * Serializes an elastic-profile configuration map to its JSON request body.
 * Null values are preserved in the output (serializeNulls).
 *
 * @param configuration profile property name/value pairs, values may be null
 * @return the JSON body sent to the plugin for validation
 */
String validateElasticProfileRequestBody(Map<String, String> configuration) {
    final JsonObject properties = mapToJsonObject(configuration);
    // serializeNulls keeps explicit null-valued keys in the payload
    return new GsonBuilder()
        .serializeNulls()
        .create()
        .toJson(properties);
}
/**
 * The validation request body must contain every configured key, including
 * keys whose value is null.
 */
@Test
public void shouldConstructValidationRequest() {
    HashMap<String, String> configuration = new HashMap<>();
    configuration.put("key1", "value1");
    configuration.put("key2", "value2");
    configuration.put("key3", null);
    String requestBody = new ElasticAgentExtensionConverterV4().validateElasticProfileRequestBody(configuration);
    // JSON comparison is order-insensitive; null value for key3 must survive
    assertThatJson(requestBody).isEqualTo("{\"key3\":null,\"key2\":\"value2\",\"key1\":\"value1\"}");
}
/**
 * Mock-server request dispatch: answers CORS pre-flight requests, strips the
 * configured path prefix, then walks every mock feature's scenarios and runs
 * the first one whose expression matches the request. The matched scenario's
 * {@code response*} variables drive the HTTP response; a failed scenario
 * yields a 500 with the error message, and no match yields a 404.
 * Synchronized because mock state (globals, thread-locals) is shared across
 * concurrent requests.
 */
@Override
public synchronized Response handle(Request req) { // note the [synchronized]
    if (corsEnabled && "OPTIONS".equals(req.getMethod())) {
        // CORS pre-flight: answer directly without consulting any scenario
        Response response = new Response(200);
        response.setHeader("Allow", ALLOWED_METHODS);
        response.setHeader("Access-Control-Allow-Origin", "*");
        response.setHeader("Access-Control-Allow-Methods", ALLOWED_METHODS);
        List<String> requestHeaders = req.getHeaderValues("Access-Control-Request-Headers");
        if (requestHeaders != null) {
            response.setHeader("Access-Control-Allow-Headers", requestHeaders);
        }
        return response;
    }
    if (prefix != null && req.getPath().startsWith(prefix)) {
        req.setPath(req.getPath().substring(prefix.length()));
    }
    // rare case when http-client is active within same jvm
    // snapshot existing thread-local to restore
    ScenarioEngine prevEngine = ScenarioEngine.get();
    for (Map.Entry<Feature, ScenarioRuntime> entry : scenarioRuntimes.entrySet()) {
        Feature feature = entry.getKey();
        ScenarioRuntime runtime = entry.getValue();
        // important for graal to work properly
        Thread.currentThread().setContextClassLoader(runtime.featureRuntime.suite.classLoader);
        LOCAL_REQUEST.set(req);
        req.processBody();
        ScenarioEngine engine = initEngine(runtime, globals, req);
        for (FeatureSection fs : feature.getSections()) {
            if (fs.isOutline()) {
                // scenario outlines are not supported in mock mode
                runtime.logger.warn("skipping scenario outline - {}:{}", feature, fs.getScenarioOutline().getLine());
                break;
            }
            Scenario scenario = fs.getScenario();
            if (isMatchingScenario(scenario, engine)) {
                Map<String, Object> configureHeaders;
                Variable response, responseStatus, responseHeaders, responseDelay;
                ScenarioActions actions = new ScenarioActions(engine);
                Result result = executeScenarioSteps(feature, runtime, scenario, actions);
                engine.mockAfterScenario();
                configureHeaders = engine.mockConfigureHeaders();
                // remove (not read) so the variables do not leak into later requests
                response = engine.vars.remove(ScenarioEngine.RESPONSE);
                responseStatus = engine.vars.remove(ScenarioEngine.RESPONSE_STATUS);
                responseHeaders = engine.vars.remove(ScenarioEngine.RESPONSE_HEADERS);
                responseDelay = engine.vars.remove(RESPONSE_DELAY);
                // persist remaining scenario variables as mock-global state
                globals.putAll(engine.shallowCloneVariables());
                Response res = new Response(200);
                if (result.isFailed()) {
                    // scenario error becomes the body with a 500 status
                    response = new Variable(result.getError().getMessage());
                    responseStatus = new Variable(500);
                } else {
                    if (corsEnabled) {
                        res.setHeader("Access-Control-Allow-Origin", "*");
                    }
                    res.setHeaders(configureHeaders);
                    if (responseHeaders != null && responseHeaders.isMap()) {
                        res.setHeaders(responseHeaders.getValue());
                    }
                    if (responseDelay != null) {
                        res.setDelay(responseDelay.getAsInt());
                    }
                }
                if (response != null && !response.isNull()) {
                    res.setBody(response.getAsByteArray());
                    if (res.getContentType() == null) {
                        // infer content type from the response value's shape
                        ResourceType rt = ResourceType.fromObject(response.getValue());
                        if (rt != null) {
                            res.setContentType(rt.contentType);
                        }
                    }
                }
                if (responseStatus != null) {
                    res.setStatus(responseStatus.getAsInt());
                }
                if (prevEngine != null) {
                    // restore the caller's thread-local engine
                    ScenarioEngine.set(prevEngine);
                }
                if (mockInterceptor != null) {
                    mockInterceptor.intercept(req, res, scenario);
                }
                return res;
            }
        }
    }
    logger.warn("no scenarios matched, returning 404: {}", req); // NOTE: not logging with engine.logger
    if (prevEngine != null) {
        ScenarioEngine.set(prevEngine);
    }
    return new Response(404);
}
/**
 * A scenario-defined responseStatus variable must drive the HTTP status code.
 */
@Test
void testResponseStatus() {
    background().scenario(
        "pathMatches('/hello')",
        "def response = { success: false }",
        "def responseStatus = 404"
    );
    request.path("/hello");
    handle();
    match(response.getBodyConverted(), "{ success: false }");
    match(response.getStatus(), 404);
}
/**
 * Extracts the output type of a {@link MapFunction} given its input type.
 * Delegates to the four-arg overload with no function name and non-lenient
 * missing-type handling.
 *
 * @param mapInterface the user map function to inspect
 * @param inType the type information of the function's input
 * @return the inferred output type information
 */
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getMapReturnTypes(
        MapFunction<IN, OUT> mapInterface, TypeInformation<IN> inType) {
    return getMapReturnTypes(mapInterface, inType, null, false);
}
/**
 * Type extraction must infer the generic parameter of EdgeMapper2 from the
 * input type (Boolean) and produce a Tuple3&lt;Long, Long, Boolean&gt;.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
void testInputInference2() {
    EdgeMapper2<Boolean> em = new EdgeMapper2<Boolean>();
    TypeInformation<?> ti = TypeExtractor.getMapReturnTypes((MapFunction) em, Types.BOOLEAN);
    assertThat(ti.isTupleType()).isTrue();
    assertThat(ti.getArity()).isEqualTo(3);
    TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti;
    assertThat(tti.getTypeAt(0)).isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
    assertThat(tti.getTypeAt(1)).isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
    assertThat(tti.getTypeAt(2)).isEqualTo(BasicTypeInfo.BOOLEAN_TYPE_INFO);
}
/**
 * Tests whether the string is a hex color WITH an alpha channel, as defined
 * by the precompiled {@code ALPHA_HEX_PATTERN}.
 *
 * @param hex candidate hex color string
 * @return {@code true} if it matches the alpha-hex pattern
 */
public static boolean isAlphaHex(String hex) {
    return ALPHA_HEX_PATTERN.matcher(hex).matches();
}
/**
 * Alpha-hex strings match; plain hex colors and invalid strings do not.
 */
@Test
public void isAlphaHex() {
    COLOR_ALPHA_HEXSTRING_MAP.values().forEach((hex) -> {
        assertTrue(ColorUtil.isAlphaHex(hex));
    });
    COLOR_HEXSTRING_MAP.values().forEach((hex) -> {
        assertFalse(ColorUtil.isAlphaHex(hex));
    });
    INVALID_COLOR_HEXSTRING_LIST.forEach((string) -> {
        assertFalse(ColorUtil.isAlphaHex(string));
    });
}
/**
 * Null-safe emptiness check for collections.
 * <p>
 * The parameter is declared as {@code Collection<?>} rather than the raw
 * {@code Collection} type; this is source-compatible for all callers and
 * avoids raw-type warnings at call sites.
 *
 * @param coll the collection to test, may be {@code null}
 * @return {@code true} if {@code coll} is {@code null} or contains no elements
 */
public static boolean isEmpty(Collection<?> coll) {
    return (coll == null || coll.isEmpty());
}
/**
 * isEmpty must be true for null and empty collections, false otherwise.
 */
@Test
void testIsEmpty() {
    assertTrue(CollectionUtils.isEmpty(null));
    assertTrue(CollectionUtils.isEmpty(new ArrayList<String>()));
    assertFalse(CollectionUtils.isEmpty(Arrays.asList("aa")));
}
/**
 * Returns the number of split units in this string.
 * <p>
 * Lazily triggers {@link #split()}; the {@code - 1} suggests {@code splitted}
 * holds boundary offsets (n + 1 entries for n units) rather than the units
 * themselves — TODO confirm against split()'s implementation.
 */
public int length() {
    split();
    return splitted.size() - 1;
}
/**
 * Grapheme-cluster length: surrogate pairs and combining sequences count as
 * a single unit each.
 */
@Test
public void testLengthGrapheme() {
    final UnicodeHelper lh = new UnicodeHelper("a", Method.GRAPHEME);
    assertEquals(1, lh.length());
    // an emoji outside the BMP (surrogate pair) is one grapheme
    final UnicodeHelper lh2 = new UnicodeHelper(new String(Character.toChars(0x1f600)), Method.GRAPHEME);
    assertEquals(1, lh2.length());
    final UnicodeHelper lh3 = new UnicodeHelper(UCSTR, Method.GRAPHEME);
    assertEquals(1, lh3.length());
    final UnicodeHelper lh4 = new UnicodeHelper("a" + UCSTR + "A", Method.GRAPHEME);
    assertEquals(3, lh4.length());
    // 'k' + combining double macron + 'h' forms two graphemes
    final UnicodeHelper lh5 = new UnicodeHelper("k\u035fh", Method.GRAPHEME);
    assertEquals(2, lh5.length());
}
/**
 * Parses a config entry of the form {@code "<name> [<|>] <qty>"} into an
 * {@link ItemThreshold}. Defaults to MORE_THAN with quantity 0 when no
 * operator/quantity suffix is present or the quantity is malformed.
 * <p>
 * Scans from the END of the string: trailing digits and whitespace are
 * skipped, then the first other character is inspected once — this keeps the
 * parse linear even for pathologically long item names.
 */
static ItemThreshold fromConfigEntry(String entry) {
    if (Strings.isNullOrEmpty(entry)) {
        return null;
    }
    Inequality operator = Inequality.MORE_THAN;
    int qty = 0;
    for (int i = entry.length() - 1; i >= 0; i--) {
        char c = entry.charAt(i);
        if (c >= '0' && c <= '9' || Character.isWhitespace(c)) {
            // part of the trailing quantity; keep scanning left
            continue;
        }
        switch (c) {
            case '<':
                operator = Inequality.LESS_THAN;
                // fallthrough
            case '>':
                if (i + 1 < entry.length()) {
                    try {
                        qty = Integer.parseInt(entry.substring(i + 1).trim());
                    } catch (NumberFormatException e) {
                        // malformed quantity: revert to the defaults
                        qty = 0;
                        operator = Inequality.MORE_THAN;
                    }
                    // strip the operator+quantity suffix from the name
                    entry = entry.substring(0, i);
                }
        }
        // any non-digit character (operator or not) terminates the scan
        break;
    }
    return new ItemThreshold(entry.trim(), qty, operator);
}
/**
 * Regression test: parsing an entry with a ~50k-character item name must stay
 * linear (finish well within 100 ms) and still parse the operator/quantity.
 */
@Test(timeout = 100)
public void testExplosive() {
    String name = "archer" + Strings.repeat('e', 50000) + "s ring";
    Assert.assertEquals(ItemThreshold.fromConfigEntry(name + " < 387"), new ItemThreshold(name, 387, LESS_THAN));
}
/**
 * Per the JDBC {@code DatabaseMetaData} contract, 0 means the limit is
 * unknown or there is no limit.
 */
@Override
public int getMaxColumnsInSelect() {
    return 0;
}
/**
 * getMaxColumnsInSelect must report 0 (no limit / unknown).
 */
@Test
void assertGetMaxColumnsInSelect() {
    assertThat(metaData.getMaxColumnsInSelect(), is(0));
}
/**
 * @return {@code true} if this component's qualifier marks it as a portfolio
 *         (view) or sub-portfolio (subview)
 */
public boolean isPortfolio() {
    return Qualifiers.VIEW.equals(qualifier) || Qualifiers.SUBVIEW.equals(qualifier);
}
/**
 * A project with the VIEW qualifier must be recognized as a portfolio.
 */
@Test
void isPortfolio_whenQualifierIsPortfolio_shouldReturnTrue() {
    ProjectDto projectDto = new ProjectDto();
    projectDto.setQualifier(Qualifiers.VIEW);
    boolean projectOrApp = projectDto.isPortfolio();
    assertThat(projectOrApp).isTrue();
}
/**
 * Completes the member's pending sync future with the given response and
 * clears it.
 *
 * @param member   the group member whose sync future may be pending
 * @param response the sync-group response used to complete the future
 * @return {@code true} if a pending future was completed, {@code false} if
 *         the member was not awaiting a sync
 */
public boolean completeSyncFuture(
    ClassicGroupMember member,
    SyncGroupResponseData response
) {
    if (!member.isAwaitingSync()) {
        return false;
    }
    member.awaitingSyncFuture().complete(response);
    // clear the future so the member is no longer considered awaiting sync
    member.setAwaitingSyncFuture(null);
    return true;
}
/**
 * Completing a member's pending sync future must return true, clear the
 * awaiting-sync state, and deliver the response through the future.
 */
@Test
public void testCompleteSyncFuture() throws Exception {
    JoinGroupRequestProtocolCollection protocols = new JoinGroupRequestProtocolCollection();
    protocols.add(new JoinGroupRequestProtocol()
        .setName("roundrobin")
        .setMetadata(new byte[0]));
    ClassicGroupMember member = new ClassicGroupMember(
        memberId,
        Optional.empty(),
        clientId,
        clientHost,
        rebalanceTimeoutMs,
        sessionTimeoutMs,
        protocolType,
        protocols
    );
    group.add(member);
    CompletableFuture<SyncGroupResponseData> syncGroupFuture = new CompletableFuture<>();
    member.setAwaitingSyncFuture(syncGroupFuture);
    assertTrue(group.completeSyncFuture(member, new SyncGroupResponseData()
        .setErrorCode(Errors.NONE.code())));
    assertEquals(0, group.numAwaitingJoinResponse());
    assertFalse(member.isAwaitingSync());
    // the response passed to completeSyncFuture must flow through the future
    assertEquals(Errors.NONE.code(), syncGroupFuture.get().errorCode());
}
/**
 * Stores the boolean as its legacy string representation:
 * {@code "Y"} for true, {@code "N"} for false.
 */
@Override
public void setBoolean( boolean bool ) {
    if ( bool ) {
        this.string = "Y";
    } else {
        this.string = "N";
    }
}
/**
 * setBoolean must map false to "N" and true to "Y".
 */
@Test
public void testSetBoolean() {
    ValueString vs = new ValueString();
    vs.setBoolean( false );
    assertEquals( "N", vs.getString() );
    vs.setBoolean( true );
    assertEquals( "Y", vs.getString() );
}
/**
 * Releases all worker-task resources: stops the source task (only if it was
 * started), closes the producer and the admin client with bounded timeouts,
 * then the offset reader, offset backing store and header converter. Each
 * close is wrapped in {@code closeQuietly} so one failure cannot prevent the
 * remaining resources from being released.
 */
@Override
protected void close() {
    if (started) {
        Utils.closeQuietly(task::stop, "source task");
    }
    closeProducer(Duration.ofSeconds(30));
    if (admin != null) {
        Utils.closeQuietly(() -> admin.close(Duration.ofSeconds(30)), "source task admin");
    }
    Utils.closeQuietly(offsetReader, "offset reader");
    Utils.closeQuietly(offsetStore::stop, "offset backing store");
    Utils.closeQuietly(headerConverter, "header converter");
}
/**
 * Source-task metric groups must aggregate poll/write recordings per task,
 * keep two tasks' metrics independent, and remove all metrics and sensors on
 * close without disturbing the sibling group.
 */
@Test
public void testMetricsGroup() {
    AbstractWorkerSourceTask.SourceTaskMetricsGroup group = new AbstractWorkerSourceTask.SourceTaskMetricsGroup(taskId, metrics);
    AbstractWorkerSourceTask.SourceTaskMetricsGroup group1 = new AbstractWorkerSourceTask.SourceTaskMetricsGroup(taskId1, metrics);
    // task 1: 10 polls of 100 records, batch times 1000..1900; 10 writes of 10 (2 active each)
    for (int i = 0; i != 10; ++i) {
        group.recordPoll(100, 1000 + i * 100);
        group.recordWrite(10, 2);
    }
    // task 2: 20 polls, batch times 1000..2900; 20 writes of 10 (4 active each)
    for (int i = 0; i != 20; ++i) {
        group1.recordPoll(100, 1000 + i * 100);
        group1.recordWrite(10, 4);
    }
    assertEquals(1900.0, metrics.currentMetricValueAsDouble(group.metricGroup(), "poll-batch-max-time-ms"), 0.001d);
    assertEquals(1450.0, metrics.currentMetricValueAsDouble(group.metricGroup(), "poll-batch-avg-time-ms"), 0.001d);
    assertEquals(33.333, metrics.currentMetricValueAsDouble(group.metricGroup(), "source-record-poll-rate"), 0.001d);
    assertEquals(1000, metrics.currentMetricValueAsDouble(group.metricGroup(), "source-record-poll-total"), 0.001d);
    assertEquals(2.666, metrics.currentMetricValueAsDouble(group.metricGroup(), "source-record-write-rate"), 0.001d);
    assertEquals(80, metrics.currentMetricValueAsDouble(group.metricGroup(), "source-record-write-total"), 0.001d);
    assertEquals(900.0, metrics.currentMetricValueAsDouble(group.metricGroup(), "source-record-active-count"), 0.001d);
    // Close the group
    group.close();
    for (MetricName metricName : group.metricGroup().metrics().metrics().keySet()) {
        // Metrics for this group should no longer exist
        assertFalse(group.metricGroup().groupId().includes(metricName));
    }
    // Sensors for this group should no longer exist
    assertNull(group.metricGroup().metrics().getSensor("sink-record-read"));
    assertNull(group.metricGroup().metrics().getSensor("sink-record-send"));
    assertNull(group.metricGroup().metrics().getSensor("sink-record-active-count"));
    assertNull(group.metricGroup().metrics().getSensor("partition-count"));
    assertNull(group.metricGroup().metrics().getSensor("offset-seq-number"));
    assertNull(group.metricGroup().metrics().getSensor("offset-commit-completion"));
    assertNull(group.metricGroup().metrics().getSensor("offset-commit-completion-skip"));
    assertNull(group.metricGroup().metrics().getSensor("put-batch-time"));
    // closing group must not affect group1's metrics
    assertEquals(2900.0, metrics.currentMetricValueAsDouble(group1.metricGroup(), "poll-batch-max-time-ms"), 0.001d);
    assertEquals(1950.0, metrics.currentMetricValueAsDouble(group1.metricGroup(), "poll-batch-avg-time-ms"), 0.001d);
    assertEquals(66.667, metrics.currentMetricValueAsDouble(group1.metricGroup(), "source-record-poll-rate"), 0.001d);
    assertEquals(2000, metrics.currentMetricValueAsDouble(group1.metricGroup(), "source-record-poll-total"), 0.001d);
    assertEquals(4.0, metrics.currentMetricValueAsDouble(group1.metricGroup(), "source-record-write-rate"), 0.001d);
    assertEquals(120, metrics.currentMetricValueAsDouble(group1.metricGroup(), "source-record-write-total"), 0.001d);
    assertEquals(1800.0, metrics.currentMetricValueAsDouble(group1.metricGroup(), "source-record-active-count"), 0.001d);
}
/**
 * SQL scalar function: length of a VARBINARY value in bytes.
 */
@Description("length of the given binary")
@ScalarFunction
@SqlType(StandardTypes.BIGINT)
public static long length(@SqlType(StandardTypes.VARBINARY) Slice slice) {
    return slice.length();
}
/**
 * length() on VARBINARY must return the byte count, including zero for the
 * empty value.
 */
@Test
public void testLength() {
    assertFunction("length(CAST('' AS VARBINARY))", BIGINT, 0L);
    assertFunction("length(CAST('a' AS VARBINARY))", BIGINT, 1L);
    assertFunction("length(CAST('abc' AS VARBINARY))", BIGINT, 3L);
}
/**
 * @return {@code true} if the Gradle project has the 'war' plugin applied
 */
@Override
public boolean isWarProject() {
    return project.getPlugins().hasPlugin(WarPlugin.class);
}
/**
 * Applying the 'war' plugin must make isWarProject() report true.
 */
@Test
public void testIsWarProject() {
    project.getPlugins().apply("war");
    assertThat(gradleProjectProperties.isWarProject()).isTrue();
}
/**
 * Collapses a queue of pending VPLS operations into the single operation that
 * has the same net effect, by looking only at the first and last queued
 * operations (intermediate ones are subsumed). For example REMOVE..ADD folds
 * to UPDATE and ADD..REMOVE cancels out entirely.
 *
 * @param operations pending operations for one VPLS, oldest first
 * @return the equivalent single operation, or {@code null} if the queue is
 *         empty or the operations cancel out
 */
protected static VplsOperation getOptimizedVplsOperation(Deque<VplsOperation> operations) {
    if (operations.isEmpty()) {
        return null;
    }
    // no need to optimize if the queue contains only one operation
    if (operations.size() == 1) {
        return operations.getFirst();
    }
    final VplsOperation firstOperation = operations.peekFirst();
    final VplsOperation lastOperation = operations.peekLast();
    final VplsOperation.Operation firstOp = firstOperation.op();
    final VplsOperation.Operation lastOp = lastOperation.op();
    if (firstOp.equals(VplsOperation.Operation.REMOVE)) {
        if (lastOp.equals(VplsOperation.Operation.REMOVE)) {
            // case 1: both first and last operation are REMOVE; do remove
            return firstOperation;
        } else if (lastOp.equals(VplsOperation.Operation.ADD)) {
            // case 2: if first is REMOVE, and last is ADD; do update
            return VplsOperation.of(lastOperation.vpls(),
                                    VplsOperation.Operation.UPDATE);
        } else {
            // case 3: first is REMOVE, last is UPDATE; do update
            return lastOperation;
        }
    } else if (firstOp.equals(VplsOperation.Operation.ADD)) {
        if (lastOp.equals(VplsOperation.Operation.REMOVE)) {
            // case 4: first is ADD, last is REMOVE; nothing to do
            return null;
        } else if (lastOp.equals(VplsOperation.Operation.ADD)) {
            // case 5: both first and last are ADD, do add
            return VplsOperation.of(lastOperation.vpls(),
                                    VplsOperation.Operation.ADD);
        } else {
            // case 6: first is ADD and last is update, do add
            return VplsOperation.of(lastOperation.vpls(),
                                    VplsOperation.Operation.ADD);
        }
    } else {
        if (lastOp.equals(VplsOperation.Operation.REMOVE)) {
            // case 7: last is remove, do remove
            return lastOperation;
        } else if (lastOp.equals(VplsOperation.Operation.ADD)) {
            // case 8: do update only
            return VplsOperation.of(lastOperation.vpls(),
                                    VplsOperation.Operation.UPDATE);
        } else {
            // case 9: from UPDATE to UPDATE
            // only need last UPDATE operation
            return VplsOperation.of(lastOperation.vpls(),
                                    VplsOperation.Operation.UPDATE);
        }
    }
}
/**
 * REMOVE followed by REMOVE must optimize to the FIRST remove operation
 * (case 1 of getOptimizedVplsOperation).
 */
@Test
public void testOptimizeOperationsRToR() {
    Deque<VplsOperation> operations = new ArrayDeque<>();
    VplsData vplsData = VplsData.of(VPLS1);
    vplsData.addInterfaces(ImmutableSet.of(V100H1));
    VplsOperation vplsOperation = VplsOperation.of(vplsData, VplsOperation.Operation.REMOVE);
    operations.add(vplsOperation);
    // second REMOVE with a different payload; the first one should win
    vplsData = VplsData.of(VPLS1, EncapsulationType.VLAN);
    vplsData.addInterfaces(ImmutableSet.of(V100H1, V100H2));
    vplsOperation = VplsOperation.of(vplsData, VplsOperation.Operation.REMOVE);
    operations.add(vplsOperation);
    vplsOperation = VplsOperationManager.getOptimizedVplsOperation(operations);
    vplsData = VplsData.of(VPLS1);
    vplsData.addInterfaces(ImmutableSet.of(V100H1));
    assertEquals(VplsOperation.of(vplsData, VplsOperation.Operation.REMOVE), vplsOperation);
}
/**
 * @return the static-resource URL patterns served without authentication
 *         (backed by the {@code STATIC_RESOURCES} constant)
 */
public static Collection<String> patterns() {
    return STATIC_RESOURCES;
}
/**
 * The static-resource pattern list must never be empty.
 */
@Test
public void patterns_shouldNotBeEmpty() {
    assertThat(StaticResources.patterns()).isNotEmpty();
}
/**
 * Writes the string's UTF-8 bytes as the content octets of an ASN.1
 * UTF8String; the tag and length framing is presumably handled by the
 * surrounding stream/encoder — TODO confirm against Asn1OutputStream.
 */
@Override
public void serialize(Asn1OutputStream out, String obj) {
    out.write(obj.getBytes(StandardCharsets.UTF_8));
}
/**
 * "€-" must serialize to the 3-byte UTF-8 encoding of '€' followed by '-'.
 */
@Test
public void shouldSerialize() {
    assertArrayEquals(
        new byte[] { -30, -126, -84, '-' },
        serialize(new Utf8StringConverter(), String.class, "€-")
    );
}
/**
 * Creates a Flatten transform that merges a PCollectionList into a single
 * PCollection.
 *
 * @param <T> the element type of the input and output collections
 */
public static <T> PCollections<T> pCollections() {
    return new PCollections<>();
}
/**
 * Flatten must honor duplicate inputs: including the same PCollection twice
 * yields each of its elements twice in the output.
 */
@Test
@Category(ValidatesRunner.class)
public void testFlattenInputMultipleCopies() {
    int count = 5;
    PCollection<Long> longs = p.apply("mkLines", GenerateSequence.from(0).to(count));
    PCollection<Long> biggerLongs =
        p.apply("mkOtherLines", GenerateSequence.from(0).to(count))
            .apply(
                MapElements.via(
                    new SimpleFunction<Long, Long>() {
                        @Override
                        public Long apply(Long input) {
                            return input + 10L;
                        }
                    }));
    // "longs" appears twice in the list, so its elements must appear twice
    PCollection<Long> flattened =
        PCollectionList.of(longs).and(longs).and(biggerLongs).apply(Flatten.pCollections());
    List<Long> expectedLongs = new ArrayList<>();
    for (int i = 0; i < count; i++) {
        // The duplicated input
        expectedLongs.add((long) i);
        expectedLongs.add((long) i);
        // The bigger longs
        expectedLongs.add(i + 10L);
    }
    PAssert.that(flattened).containsInAnyOrder(expectedLongs);
    p.run();
}
/**
 * Creates a gRPC data writer for the given worker, acquiring a block-worker
 * client from the context. The client is released if construction fails so
 * the resource is never leaked.
 *
 * @param id the block or UFS file id being written
 * @param length the total number of bytes to write
 * @param type whether this is a block write or a UFS file write
 */
public static GrpcDataWriter create(FileSystemContext context, WorkerNetAddress address,
    long id, long length, RequestType type, OutStreamOptions options)
    throws IOException {
    long chunkSize = context.getClusterConf()
        .getBytes(PropertyKey.USER_STREAMING_WRITER_CHUNK_SIZE_BYTES);
    CloseableResource<BlockWorkerClient> grpcClient = context.acquireBlockWorkerClient(address);
    try {
        return new GrpcDataWriter(context, address, id, length, chunkSize, type, options, grpcClient);
    } catch (Exception e) {
        // release the acquired client on any construction failure
        grpcClient.close();
        throw e;
    }
}
/**
 * Writing no data through the writer must produce an empty stream: the
 * checksum over the observed write requests is 0.
 */
@Test(timeout = 1000 * 60)
public void writeEmptyFile() throws Exception {
    long checksumActual;
    try (DataWriter writer = create(10)) {
        // writer is opened and closed without writing; verify no payload was sent
        checksumActual = verifyWriteRequests(mClient, 0, 10);
    }
    assertEquals(0, checksumActual);
}
/**
 * @return the UUID of the member that owns this transaction
 */
@Override
public UUID getOwnerUuid() {
    return txOwnerUuid;
}
/**
 * The owner UUID passed at construction must be returned unchanged.
 */
@Test
public void getOwnerUUID() {
    UUID ownerUUID = UUID.randomUUID();
    TransactionImpl tx = new TransactionImpl(txManagerService, nodeEngine, options, ownerUUID);
    assertEquals(ownerUUID, tx.getOwnerUuid());
}
/**
 * @return a freshly generated random string of the configured length
 */
@Override
public String random() {
    return random.generate(length);
}
/**
 * The alphanumeric random string service must never return null.
 */
@Test
public void random() {
    assertNotNull(new AlphanumericRandomStringService().random());
}
/**
 * Replays a ConsumerGroupMemberMetadata record. A non-null value creates or
 * updates the member; a tombstone (null value) removes the member, but only
 * after verifying that the companion current-assignment and target-assignment
 * tombstones were already applied — otherwise the log is inconsistent and an
 * IllegalStateException is raised. The group's topic subscriptions index is
 * refreshed afterwards.
 */
public void replay(
    ConsumerGroupMemberMetadataKey key,
    ConsumerGroupMemberMetadataValue value
) {
    String groupId = key.groupId();
    String memberId = key.memberId();
    ConsumerGroup consumerGroup = getOrMaybeCreatePersistedConsumerGroup(groupId, value != null);
    // snapshot subscriptions before the mutation to diff afterwards
    Set<String> oldSubscribedTopicNames = new HashSet<>(consumerGroup.subscribedTopicNames().keySet());
    if (value != null) {
        ConsumerGroupMember oldMember = consumerGroup.getOrMaybeCreateMember(memberId, true);
        consumerGroup.updateMember(new ConsumerGroupMember.Builder(oldMember)
            .updateWith(value)
            .build());
    } else {
        ConsumerGroupMember oldMember = consumerGroup.getOrMaybeCreateMember(memberId, false);
        if (oldMember.memberEpoch() != LEAVE_GROUP_MEMBER_EPOCH) {
            // the current-assignment tombstone must arrive before this one
            throw new IllegalStateException("Received a tombstone record to delete member " + memberId
                + " but did not receive ConsumerGroupCurrentMemberAssignmentValue tombstone.");
        }
        if (consumerGroup.targetAssignment().containsKey(memberId)) {
            // likewise the target-assignment tombstone
            throw new IllegalStateException("Received a tombstone record to delete member " + memberId
                + " but did not receive ConsumerGroupTargetAssignmentMetadataValue tombstone.");
        }
        consumerGroup.removeMember(memberId);
    }
    updateGroupsByTopics(groupId, oldSubscribedTopicNames, consumerGroup.subscribedTopicNames().keySet());
}
// Walks a share group through its lifecycle records (epoch, member subscription,
// target assignment, current assignment) and asserts the expected EMPTY/STABLE states.
@Test public void testShareGroupStates() { String groupId = "fooup"; String memberId1 = Uuid.randomUuid().toString(); Uuid fooTopicId = Uuid.randomUuid(); String fooTopicName = "foo"; MockPartitionAssignor assignor = new MockPartitionAssignor("share-range"); GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder() .withShareGroupAssignor(assignor) .withShareGroup(new ShareGroupBuilder(groupId, 10)) .build(); context.replay(GroupCoordinatorRecordHelpers.newShareGroupEpochRecord(groupId, 10)); assertEquals(ShareGroup.ShareGroupState.EMPTY, context.shareGroupState(groupId)); context.replay(GroupCoordinatorRecordHelpers.newShareGroupMemberSubscriptionRecord(groupId, new ShareGroupMember.Builder(memberId1) .setState(MemberState.STABLE) .setSubscribedTopicNames(Collections.singletonList(fooTopicName)) .build())); context.replay(GroupCoordinatorRecordHelpers.newShareGroupEpochRecord(groupId, 11)); assertEquals(ShareGroup.ShareGroupState.STABLE, context.shareGroupState(groupId)); context.replay(GroupCoordinatorRecordHelpers.newShareGroupTargetAssignmentRecord(groupId, memberId1, mkAssignment( mkTopicAssignment(fooTopicId, 1, 2, 3)))); context.replay(GroupCoordinatorRecordHelpers.newShareGroupTargetAssignmentEpochRecord(groupId, 11)); assertEquals(ShareGroup.ShareGroupState.STABLE, context.shareGroupState(groupId)); context.replay(GroupCoordinatorRecordHelpers.newShareGroupCurrentAssignmentRecord(groupId, new ShareGroupMember.Builder(memberId1) .setState(MemberState.STABLE) .setMemberEpoch(11) .setPreviousMemberEpoch(10) .setAssignedPartitions(mkAssignment(mkTopicAssignment(fooTopicId, 1, 2))) .build())); assertEquals(ShareGroup.ShareGroupState.STABLE, context.shareGroupState(groupId)); context.replay(GroupCoordinatorRecordHelpers.newShareGroupCurrentAssignmentRecord(groupId, new ShareGroupMember.Builder(memberId1) .setState(MemberState.STABLE) .setMemberEpoch(11) .setPreviousMemberEpoch(10) 
 .setAssignedPartitions(mkAssignment(mkTopicAssignment(fooTopicId, 1, 2, 3))) .build())); assertEquals(ShareGroup.ShareGroupState.STABLE, context.shareGroupState(groupId)); }
// Static factory: builds an Applier over the given source text and end-position table.
public static Applier fromSource(CharSequence source, EndPosTable endPositions) { return new Applier(source, endPositions); }
// A replacement range extending past the end of the source ("Hello" has length 5, end index 6)
// must be rejected with IllegalArgumentException.
@Test public void shouldThrowIfReplacementOutsideSource() { AppliedFix.Applier applier = AppliedFix.fromSource("Hello", endPositions); SuggestedFix fix = SuggestedFix.replace(0, 6, "World!"); assertThrows(IllegalArgumentException.class, () -> applier.apply(fix)); }
/**
 * Builds the KSQL authorization validator, if any access validator is
 * configured for this deployment (external provider or backend-derived).
 *
 * @param ksqlConfig the server configuration
 * @param serviceContext the service context used to resolve an access validator
 * @param externalAuthorizationProvider an optional externally supplied provider
 * @return a validator wrapping the (possibly cached) access validator, or empty
 *         when no access validator is available
 */
public static Optional<KsqlAuthorizationValidator> create(
    final KsqlConfig ksqlConfig,
    final ServiceContext serviceContext,
    final Optional<KsqlAuthorizationProvider> externalAuthorizationProvider
) {
  return getAccessValidator(ksqlConfig, serviceContext, externalAuthorizationProvider)
      .map(accessValidator ->
          new KsqlAuthorizationValidatorImpl(cacheIfEnabled(ksqlConfig, accessValidator)));
}
// When the Kafka authorizer reports null authorized operations, no validator should be created.
@Test public void shouldReturnEmptyValidatorIfAuthorizedOperationsReturnNull() { // Given: givenKafkaAuthorizer("an-authorizer-class", null); // When: final Optional<KsqlAuthorizationValidator> validator = KsqlAuthorizationValidatorFactory.create( ksqlConfig, serviceContext, Optional.empty() ); // Then assertThat(validator, is(Optional.empty())); }
// Returns element (i, j), delegating to the backing storage A via the index(i, j) mapping.
@Override public double get(int i, int j) { return A.get(index(i, j)); }
// Spot-checks matrix element access, including zero entries off the stored band/diagonal.
@Test public void testGet() { System.out.println("get"); assertEquals(0.9, matrix.get(0, 0), 1E-7); assertEquals(0.8, matrix.get(2, 2), 1E-7); assertEquals(0.5, matrix.get(1, 1), 1E-7); assertEquals(0.0, matrix.get(2, 0), 1E-7); assertEquals(0.0, matrix.get(0, 2), 1E-7); assertEquals(0.4, matrix.get(0, 1), 1E-7); }
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { if(log.isDebugEnabled()) { log.debug(String.format("List containers for %s", session)); } try { final AttributedList<Path> buckets = new AttributedList<>(); // List all buckets owned for(StorageBucket b : session.getClient().listAllBuckets()) { final PathAttributes attr = new PathAttributes(); final Path bucket = new Path(PathNormalizer.normalize(b.getName()), EnumSet.of(Path.Type.volume, Path.Type.directory), attr); if(b.getOwner() != null) { // Null if the owner is not available attr.setOwner(b.getOwner().getId()); } attr.setCreationDate(b.getCreationDate().getTime()); if(b.isLocationKnown()) { attr.setRegion(b.getLocation()); } else { if(region.getIdentifier() != null) { final String location; if(!b.isLocationKnown()) { location = session.getFeature(Location.class).getLocation(bucket).getIdentifier(); } else { location = b.getLocation(); } if(!StringUtils.equals(location, region.getIdentifier())) { log.warn(String.format("Skip bucket %s in region %s", bucket, location)); continue; } attr.setRegion(location); } } buckets.add(bucket); listener.chunk(directory, buckets); } return buckets; } catch(ServiceException e) { throw new S3ExceptionMappingService().map("Listing directory {0} failed", e, directory); } }
// When the service is restricted to eu-central-1, every listed bucket must report that region
// and its attributes must match what the attributes-finder feature resolves.
@Test public void testListRestrictRegion() throws Exception { final AttributedList<Path> list = new S3BucketListService(session, new S3LocationFeature.S3Region("eu-central-1")).list( new Path(String.valueOf(Path.DELIMITER), EnumSet.of(Path.Type.volume, Path.Type.directory)), new DisabledListProgressListener()); assertFalse(list.isEmpty()); for(Path bucket : list) { assertEquals("eu-central-1", bucket.attributes().getRegion()); assertEquals(bucket.attributes(), new S3AttributesFinderFeature(session, new S3AccessControlListFeature(session)).find(bucket, new DisabledListProgressListener())); } }
/**
 * Recursively deletes every object under {@code delPrefix} in the configured COS bucket.
 * Objects are listed and removed in batches of up to 1000 until the listing is no
 * longer truncated.
 *
 * @param delPrefix key prefix (directory path relative to the bucket) to delete
 * @throws CosClientException on client-side errors
 * @throws CosServiceException on service-side errors
 */
public void deleteDir(String delPrefix) throws CosClientException, CosServiceException {
    ListObjectsRequest listObjectsRequest = new ListObjectsRequest();
    // Set the bucket name
    listObjectsRequest.setBucketName(cosClientConfig.getBucket());
    // prefix: list only objects whose key starts with this prefix
    // (the directory path relative to the bucket)
    listObjectsRequest.setPrefix(delPrefix);
    // Maximum number of objects per listing call; listObjects supports at most 1000
    listObjectsRequest.setMaxKeys(1000);
    // Holds the result of each listing round
    ObjectListing objectListing = null;
    do {
        objectListing = cosClient.listObjects(listObjectsRequest);
        // The objects returned by this round
        List<COSObjectSummary> cosObjectSummaries = objectListing.getObjectSummaries();
        if (CollUtil.isEmpty(cosObjectSummaries)) {
            break;
        }
        ArrayList<DeleteObjectsRequest.KeyVersion> delObjects = new ArrayList<>();
        for (COSObjectSummary cosObjectSummary : cosObjectSummaries) {
            delObjects.add(new DeleteObjectsRequest.KeyVersion(cosObjectSummary.getKey()));
        }
        DeleteObjectsRequest deleteObjectsRequest = new DeleteObjectsRequest(cosClientConfig.getBucket());
        deleteObjectsRequest.setKeys(delObjects);
        cosClient.deleteObjects(deleteObjectsRequest);
        // Marker for where the next listing round starts
        String nextMarker = objectListing.getNextMarker();
        listObjectsRequest.setMarker(nextMarker);
    } while (objectListing.isTruncated());
}
// Smoke test against the live bucket: deleting the "/test/" prefix must not throw.
@Test void deleteDir() { cosManager.deleteDir("/test/"); }
/**
 * Returns the UTF-16 string for the given Unicode code point.
 *
 * @param decimalCode the decimal code point to convert
 * @return the corresponding string, or null when the input is null or not a
 *         valid Unicode code point
 */
@Udf
public String chr(@UdfParameter(
    description = "Decimal codepoint") final Integer decimalCode) {
  // Reject missing and out-of-range code points in one guard.
  if (decimalCode == null || !Character.isValidCodePoint(decimalCode)) {
    return null;
  }
  return new String(Character.toChars(decimalCode));
}
// Exercises the String overload of chr(): a UTF-16 escape literal must decode to the CJK character.
@Test public void shouldConvertZhFromUTF16() { final String result = udf.chr("\\u597d"); assertThat(result, is("好")); }
// Initializes the TriggerManager's metrics (rate-limit, concurrency-throttle, nothing-to-trigger
// counters plus one triggered-counter per maintenance task), registers the keyed state used to
// track the next evaluation time, accumulated table changes and last trigger times, and opens
// the table loader.
@Override public void open(Configuration parameters) throws Exception { this.rateLimiterTriggeredCounter = getRuntimeContext() .getMetricGroup() .addGroup( TableMaintenanceMetrics.GROUP_KEY, TableMaintenanceMetrics.GROUP_VALUE_DEFAULT) .counter(TableMaintenanceMetrics.RATE_LIMITER_TRIGGERED); this.concurrentRunThrottledCounter = getRuntimeContext() .getMetricGroup() .addGroup( TableMaintenanceMetrics.GROUP_KEY, TableMaintenanceMetrics.GROUP_VALUE_DEFAULT) .counter(TableMaintenanceMetrics.CONCURRENT_RUN_THROTTLED); this.nothingToTriggerCounter = getRuntimeContext() .getMetricGroup() .addGroup( TableMaintenanceMetrics.GROUP_KEY, TableMaintenanceMetrics.GROUP_VALUE_DEFAULT) .counter(TableMaintenanceMetrics.NOTHING_TO_TRIGGER); this.triggerCounters = taskNames.stream() .map( name -> getRuntimeContext() .getMetricGroup() .addGroup(TableMaintenanceMetrics.GROUP_KEY, name) .counter(TableMaintenanceMetrics.TRIGGERED)) .collect(Collectors.toList()); this.nextEvaluationTimeState = getRuntimeContext() .getState(new ValueStateDescriptor<>("triggerManagerNextTriggerTime", Types.LONG)); this.accumulatedChangesState = getRuntimeContext() .getListState( new ListStateDescriptor<>( "triggerManagerAccumulatedChange", TypeInformation.of(TableChange.class))); this.lastTriggerTimesState = getRuntimeContext() .getListState(new ListStateDescriptor<>("triggerManagerLastTriggerTime", Types.LONG)); tableLoader.open(); }
// Verifies that while the maintenance lock is held no trigger fires, and that after
// releasing the lock the trigger only fires once the configured DELAY has elapsed.
@Test void testLockCheckDelay() throws Exception { TableLoader tableLoader = sql.tableLoader(TABLE_NAME); TriggerManager manager = manager(tableLoader, 1, DELAY); try (KeyedOneInputStreamOperatorTestHarness<Boolean, TableChange, Trigger> testHarness = harness(manager)) { testHarness.open(); addEventAndCheckResult(testHarness, TableChange.builder().commitCount(2).build(), 1); // Create a lock to prevent execution, and check that there is no result assertThat(lock.tryLock()).isTrue(); addEventAndCheckResult(testHarness, TableChange.builder().commitCount(2).build(), 1); long currentTime = testHarness.getProcessingTime(); // Remove the lock, and still no trigger lock.unlock(); assertThat(testHarness.extractOutputValues()).hasSize(1); // Check that the trigger fired after the delay testHarness.setProcessingTime(currentTime + DELAY); assertThat(testHarness.extractOutputValues()).hasSize(2); } }
// Converts a Pinot Instance into a Helix InstanceConfig: derives the Helix instance id,
// marks the instance enabled, then copies the remaining fields via updateHelixInstanceConfig.
public static InstanceConfig toHelixInstanceConfig(Instance instance) { InstanceConfig instanceConfig = new InstanceConfig(getHelixInstanceId(instance)); instanceConfig.setInstanceEnabled(true); updateHelixInstanceConfig(instanceConfig, instance); return instanceConfig; }
// Covers conversion for all four instance types: controller (no tags/ports), broker (tags only),
// server (tags, pools, gRPC/admin/multi-stage ports, queriesDisabled) and minion (tag only),
// asserting both the InstanceConfig fields and the raw ZNRecord contents.
@Test public void testToHelixInstanceConfig() { Instance instance = new Instance("localhost", 1234, InstanceType.CONTROLLER, null, null, 0, 0, 0, 0, false); InstanceConfig instanceConfig = InstanceUtils.toHelixInstanceConfig(instance); assertEquals(instanceConfig.getInstanceName(), "Controller_localhost_1234"); assertTrue(instanceConfig.getInstanceEnabled()); assertEquals(instanceConfig.getHostName(), "localhost"); assertEquals(instanceConfig.getPort(), "1234"); assertTrue(instanceConfig.getTags().isEmpty()); ZNRecord znRecord = instanceConfig.getRecord(); assertNull(znRecord.getMapField(InstanceUtils.POOL_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.GRPC_PORT_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.ADMIN_PORT_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.MULTI_STAGE_QUERY_ENGINE_SERVICE_PORT_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.MULTI_STAGE_QUERY_ENGINE_MAILBOX_PORT_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.QUERIES_DISABLED)); List<String> tags = Collections.singletonList("DefaultTenant_BROKER"); instance = new Instance("localhost", 2345, InstanceType.BROKER, tags, null, 0, 0, 0, 0, false); instanceConfig = InstanceUtils.toHelixInstanceConfig(instance); assertEquals(instanceConfig.getInstanceName(), "Broker_localhost_2345"); assertTrue(instanceConfig.getInstanceEnabled()); assertEquals(instanceConfig.getHostName(), "localhost"); assertEquals(instanceConfig.getPort(), "2345"); assertEquals(instanceConfig.getTags(), tags); znRecord = instanceConfig.getRecord(); assertNull(znRecord.getMapField(InstanceUtils.POOL_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.GRPC_PORT_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.ADMIN_PORT_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.MULTI_STAGE_QUERY_ENGINE_SERVICE_PORT_KEY)); 
 assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.MULTI_STAGE_QUERY_ENGINE_MAILBOX_PORT_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.QUERIES_DISABLED)); tags = Arrays.asList("T1_OFFLINE", "T2_REALTIME"); Map<String, Integer> poolMap = new TreeMap<>(); poolMap.put("T1_OFFLINE", 0); poolMap.put("T2_REALTIME", 1); instance = new Instance("localhost", 3456, InstanceType.SERVER, tags, poolMap, 123, 234, 345, 456, true); instanceConfig = InstanceUtils.toHelixInstanceConfig(instance); assertEquals(instanceConfig.getInstanceName(), "Server_localhost_3456"); assertTrue(instanceConfig.getInstanceEnabled()); assertEquals(instanceConfig.getHostName(), "localhost"); assertEquals(instanceConfig.getPort(), "3456"); assertEquals(instanceConfig.getTags(), tags); znRecord = instanceConfig.getRecord(); Map<String, String> expectedPoolMap = new TreeMap<>(); expectedPoolMap.put("T1_OFFLINE", "0"); expectedPoolMap.put("T2_REALTIME", "1"); assertEquals(znRecord.getMapField(InstanceUtils.POOL_KEY), expectedPoolMap); assertEquals(znRecord.getSimpleField(CommonConstants.Helix.Instance.GRPC_PORT_KEY), "123"); assertEquals(znRecord.getSimpleField(CommonConstants.Helix.Instance.ADMIN_PORT_KEY), "234"); assertEquals(znRecord.getSimpleField(CommonConstants.Helix.Instance.MULTI_STAGE_QUERY_ENGINE_SERVICE_PORT_KEY), "345"); assertEquals(znRecord.getSimpleField(CommonConstants.Helix.Instance.MULTI_STAGE_QUERY_ENGINE_MAILBOX_PORT_KEY), "456"); assertEquals(znRecord.getSimpleField(CommonConstants.Helix.QUERIES_DISABLED), "true"); tags = Collections.singletonList("minion_untagged"); instance = new Instance("localhost", 4567, InstanceType.MINION, tags, null, 0, 0, 0, 0, false); instanceConfig = InstanceUtils.toHelixInstanceConfig(instance); assertEquals(instanceConfig.getInstanceName(), "Minion_localhost_4567"); assertTrue(instanceConfig.getInstanceEnabled()); assertEquals(instanceConfig.getHostName(), "localhost"); assertEquals(instanceConfig.getPort(), "4567"); 
 assertEquals(instanceConfig.getTags(), tags); znRecord = instanceConfig.getRecord(); assertNull(znRecord.getMapField(InstanceUtils.POOL_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.GRPC_PORT_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.ADMIN_PORT_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.MULTI_STAGE_QUERY_ENGINE_SERVICE_PORT_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.Instance.MULTI_STAGE_QUERY_ENGINE_MAILBOX_PORT_KEY)); assertNull(znRecord.getSimpleField(CommonConstants.Helix.QUERIES_DISABLED)); }
static TreeSet<Integer> brokerNodesBecomingControllerOnlyNodes(KafkaNodePool pool, Set<Integer> current, Set<Integer> desired) { if (pool.getStatus() != null && pool.getSpec().getRoles() != null && pool.getStatus().getRoles() != null && pool.getStatus().getRoles().contains(ProcessRoles.BROKER) // Used to have the broker role && !pool.getSpec().getRoles().contains(ProcessRoles.BROKER)) { // But should not have it anymore // Collect all node IDs that are both current and desired (i.e. we do not care about old nodes being scaled down or new nodes being scaled up) TreeSet<Integer> usedToBeBroker = new TreeSet<>(desired); usedToBeBroker.retainAll(current); return usedToBeBroker; } else { return new TreeSet<>(); } }
// Exercises broker-role-removal detection for new pools, unchanged pools, role changes that
// keep the broker role, pure removals, removals combined with scale-up/scale-down, and legacy
// pools with no roles in status.
@Test public void testBrokerRoleRemoval() { // New node pool KafkaNodePool pool = createPool("pool", 3, Map.of()); assertThat(NodeIdAssignor.brokerNodesBecomingControllerOnlyNodes(pool, Set.of(), Set.of(0, 1, 2)).size(), is(0)); // Existing node pool without role change pool = createPool("pool", 3, Map.of(), List.of(0, 1, 2), List.of(ProcessRoles.BROKER), List.of(ProcessRoles.BROKER)); assertThat(NodeIdAssignor.brokerNodesBecomingControllerOnlyNodes(pool, Set.of(0, 1, 2), Set.of(0, 1, 2)).size(), is(0)); // Existing node pool with role change that keeps broker role pool = createPool("pool", 3, Map.of(), List.of(0, 1, 2), List.of(ProcessRoles.BROKER), List.of(ProcessRoles.BROKER, ProcessRoles.CONTROLLER)); assertThat(NodeIdAssignor.brokerNodesBecomingControllerOnlyNodes(pool, Set.of(0, 1, 2), Set.of(0, 1, 2)).size(), is(0)); // Existing node pool with role change that removes broker role pool = createPool("pool", 3, Map.of(), List.of(0, 1, 2), List.of(ProcessRoles.CONTROLLER), List.of(ProcessRoles.BROKER, ProcessRoles.CONTROLLER)); assertThat(NodeIdAssignor.brokerNodesBecomingControllerOnlyNodes(pool, Set.of(0, 1, 2), Set.of(0, 1, 2)).size(), is(3)); assertThat(NodeIdAssignor.brokerNodesBecomingControllerOnlyNodes(pool, Set.of(0, 1, 2), Set.of(0, 1, 2)), hasItems(0, 1, 2)); // Existing node pool with role change that removes broker role and scale-up pool = createPool("pool", 4, Map.of(), List.of(0, 1, 2), List.of(ProcessRoles.CONTROLLER), List.of(ProcessRoles.BROKER, ProcessRoles.CONTROLLER)); assertThat(NodeIdAssignor.brokerNodesBecomingControllerOnlyNodes(pool, Set.of(0, 1, 2), Set.of(0, 1, 2, 4)).size(), is(3)); assertThat(NodeIdAssignor.brokerNodesBecomingControllerOnlyNodes(pool, Set.of(0, 1, 2), Set.of(0, 1, 2, 4)), hasItems(0, 1, 2)); // Existing node pool with role change that removes broker role and scale-down pool = createPool("pool", 2, Map.of(), List.of(0, 1, 2), List.of(ProcessRoles.CONTROLLER), List.of(ProcessRoles.BROKER, ProcessRoles.CONTROLLER)); 
 assertThat(NodeIdAssignor.brokerNodesBecomingControllerOnlyNodes(pool, Set.of(0, 1, 2), Set.of(0, 1)).size(), is(2)); assertThat(NodeIdAssignor.brokerNodesBecomingControllerOnlyNodes(pool, Set.of(0, 1, 2), Set.of(0, 1)), hasItems(0, 1)); // Existing node pool without roles in status => might be some legacy cluster before we added them pool = createPool("pool", 3, Map.of(), List.of(0, 1, 2), List.of(ProcessRoles.CONTROLLER), null); assertThat(NodeIdAssignor.brokerNodesBecomingControllerOnlyNodes(pool, Set.of(0, 1, 2), Set.of(0, 1, 2)).size(), is(0)); }
// Splits a path into its steps; delegates to splitToSteps with includeTrailingSep=false.
public static String[] split(String path) { return splitToSteps(path, false); }
// A path containing no separator must come back unchanged as a single-element array.
@Test void splitWithoutSeparatorShouldReturnInput() { // by observation, Camel devs were uncertain what is returned ... assertArrayEquals(new String[] { "a path" }, FilesPath.split("a path")); }
/**
 * Returns the proxy implementing {@code iface}, creating and caching it on first use.
 * Uses double-checked locking: a lock-free cache read first, then a synchronized
 * re-check before building the proxy and publishing an updated computedProperties
 * snapshot.
 *
 * @param iface the PipelineOptions sub-interface to view these options as; must be an interface
 * @return the cached or newly created proxy for {@code iface}
 */
<T extends PipelineOptions> T as(Class<T> iface) {
  checkNotNull(iface);
  checkArgument(iface.isInterface(), "Not an interface: %s", iface);
  T existingOption = computedProperties.interfaceToProxyCache.getInstance(iface);
  if (existingOption == null) {
    synchronized (this) {
      // double check
      existingOption = computedProperties.interfaceToProxyCache.getInstance(iface);
      if (existingOption == null) {
        // Validate the interface against the already-known interfaces before proxying it.
        Registration<T> registration =
            PipelineOptionsFactory.CACHE
                .get()
                .validateWellFormed(iface, computedProperties.knownInterfaces);
        List<PropertyDescriptor> propertyDescriptors = registration.getPropertyDescriptors();
        Class<T> proxyClass = registration.getProxyClass();
        existingOption =
            InstanceBuilder.ofType(proxyClass)
                .fromClass(proxyClass)
                .withArg(InvocationHandler.class, this)
                .build();
        // Publish a new immutable snapshot containing the freshly built proxy.
        computedProperties =
            computedProperties.updated(iface, existingOption, propertyDescriptors);
      }
    }
  }
  return existingOption;
}
// Verifies that DisplayData derived from options preserves typed values:
// integer, timestamp, class, and an arbitrary object rendered via toString().
@Test public void testDisplayDataTypes() { Instant now = Instant.now(); TypedOptions options = PipelineOptionsFactory.as(TypedOptions.class); options.setInteger(1234); options.setTimestamp(now); options.setJavaClass(ProxyInvocationHandlerTest.class); options.setObject( new Serializable() { @Override public String toString() { return "foobar"; } }); DisplayData displayData = DisplayData.from(options); assertThat(displayData, hasDisplayItem("integer", 1234)); assertThat(displayData, hasDisplayItem("timestamp", now)); assertThat(displayData, hasDisplayItem("javaClass", ProxyInvocationHandlerTest.class)); assertThat(displayData, hasDisplayItem("object", "foobar")); }
// Mutation is forbidden on the immutable variant; always throws.
@Override public void addNullOrEmptyTarget() { throw new UnsupportedOperationException("Can't modify an immutable MultiResult"); }
// The immutable MultiResult must reject mutation with UnsupportedOperationException.
@Test(expected = UnsupportedOperationException.class) public void testAddNullOrEmptyTarget() { immutableMultiResult.addNullOrEmptyTarget(); }
/**
 * Intersects multiple sorted range sets. Each input list holds inclusive
 * [left, right] IntPair ranges sorted ascending; the result contains the ranges
 * present in every list, with contiguous result ranges merged.
 * Returns an empty list when the input is null/empty or any list is empty, and
 * the single input list unchanged when only one list is given.
 *
 * @param sortedRangeSetList the sorted range lists to intersect
 * @return the merged intersection ranges
 */
public static List<IntPair> intersectSortedRangeSets(List<List<IntPair>> sortedRangeSetList) {
    if (sortedRangeSetList == null || sortedRangeSetList.isEmpty()) {
        return Collections.emptyList();
    }
    if (sortedRangeSetList.size() == 1) {
        return sortedRangeSetList.get(0);
    }
    // if any list is empty return empty
    for (List<IntPair> rangeSet : sortedRangeSetList) {
        if (rangeSet.isEmpty()) {
            return Collections.emptyList();
        }
    }
    // One cursor per list pointing at the range currently under consideration.
    int[] currentRangeSetIndex = new int[sortedRangeSetList.size()];
    Arrays.fill(currentRangeSetIndex, 0);
    int maxHead = -1;
    int maxHeadIndex = -1;
    boolean reachedEnd = false;
    List<IntPair> result = new ArrayList<IntPair>();
    while (!reachedEnd) {
        // find max Head in the current pointers
        for (int i = 0; i < sortedRangeSetList.size(); i++) {
            int head = sortedRangeSetList.get(i).get(currentRangeSetIndex[i]).getLeft();
            if (head > maxHead) {
                maxHead = head;
                maxHeadIndex = i;
            }
        }
        // move all pointers forward such that range they point to contain maxHead
        int j = -1;
        while (j++ < sortedRangeSetList.size() - 1) {
            if (j == maxHeadIndex) {
                continue;
            }
            boolean found = false;
            while (!found && currentRangeSetIndex[j] < sortedRangeSetList.get(j).size()) {
                IntPair range = sortedRangeSetList.get(j).get(currentRangeSetIndex[j]);
                if (maxHead >= range.getLeft() && maxHead <= range.getRight()) {
                    found = true;
                    break;
                }
                if (range.getLeft() > maxHead) {
                    // A larger candidate head was found; restart the alignment scan
                    // from the beginning (j is reset to -1, then incremented to 0).
                    maxHead = range.getLeft();
                    maxHeadIndex = j;
                    j = -1;
                    break;
                }
                currentRangeSetIndex[j] = currentRangeSetIndex[j] + 1;
            }
            // new maxHead found
            if (j == -1) {
                continue;
            }
            if (!found) {
                reachedEnd = true;
                break;
            }
        }
        if (reachedEnd) {
            break;
        }
        // there is definitely some intersection possible here
        IntPair intPair = sortedRangeSetList.get(0).get(currentRangeSetIndex[0]);
        IntPair intersection = Pairs.intPair(intPair.getLeft(), intPair.getRight());
        for (int i = 1; i < sortedRangeSetList.size(); i++) {
            IntPair pair = sortedRangeSetList.get(i).get(currentRangeSetIndex[i]);
            int start = Math.max(intersection.getLeft(), pair.getLeft());
            int end = Math.min(intersection.getRight(), pair.getRight());
            intersection.setLeft(start);
            intersection.setRight(end);
        }
        if (!result.isEmpty()) {
            // if new range is contiguous merge it
            IntPair prevIntersection = result.get(result.size() - 1);
            if (intersection.getLeft() == prevIntersection.getRight() + 1) {
                prevIntersection.setRight(intersection.getRight());
            } else {
                result.add(intersection);
            }
        } else {
            result.add(intersection);
        }
        // move the pointers forward for rangesets where the currenttail == intersection.tail
        for (int i = 0; i < sortedRangeSetList.size(); i++) {
            IntPair pair = sortedRangeSetList.get(i).get(currentRangeSetIndex[i]);
            if (pair.getRight() == intersection.getRight()) {
                currentRangeSetIndex[i] = currentRangeSetIndex[i] + 1;
                if (currentRangeSetIndex[i] == sortedRangeSetList.get(i).size()) {
                    reachedEnd = true;
                    break;
                }
            }
        }
    }
    return result;
}
// Randomized oracle test: builds random sorted range lists plus brute-force per-id membership
// sets, computes the expected intersection id-by-id, and compares against the implementation.
// Logs the random seed on failure so the case can be reproduced.
@Test public void testRandom() { int totalDocs = 1_000_000; int maxRange = 10000; int minRange = 1000; long randomSeed = System.currentTimeMillis(); Random r = new Random(randomSeed); int numLists = 3; List<List<IntPair>> sortedRangePairsList = new ArrayList<>(); List<Set<Integer>> rawIdSetList = new ArrayList<>(); for (int i = 0; i < numLists; i++) { List<IntPair> pairs = new ArrayList<>(); Set<Integer> rawIdSet = new HashSet<>(); int docId = 0; while (docId < totalDocs) { int start = docId + r.nextInt(maxRange); int end = start + Math.max(minRange, r.nextInt(maxRange)); if (end < totalDocs) { pairs.add(Pairs.intPair(start, end)); for (int id = start; id <= end; id++) { rawIdSet.add(id); } } docId = end + 1; } sortedRangePairsList.add(pairs); rawIdSetList.add(rawIdSet); } // expected intersection List<IntPair> expected = new ArrayList<>(); int tempStart = -1; for (int id = 0; id < totalDocs; id++) { boolean foundInAll = true; for (int i = 0; i < numLists; i++) { if (!rawIdSetList.get(i).contains(id)) { foundInAll = false; break; } } if (foundInAll) { if (tempStart == -1) { tempStart = id; } } else { if (tempStart != -1) { expected.add(Pairs.intPair(tempStart, id - 1)); tempStart = -1; } } } List<IntPair> actual = SortedRangeIntersection.intersectSortedRangeSets(sortedRangePairsList); if (!actual.equals(expected)) { LOGGER.error("Actual pairs not equal to expected pairs."); LOGGER.error("Actual pairs: {}", actual); LOGGER.error("Expected pairs: {}", expected); LOGGER.error("Random seed: {}", randomSeed); LOGGER.error("Sorted range pairs list: {}", sortedRangePairsList); Assert.fail(); } }
/**
 * Runs the field-content alert check: searches the stream for messages where
 * {@code field} contains exactly {@code value} within the configured check interval.
 * When a backlog size is configured, matching messages are collected as summaries.
 *
 * @return a positive CheckResult when at least one message matched, a
 *         NegativeCheckResult otherwise; null only on the (unreachable)
 *         invalid-timerange path
 */
@Override
public CheckResult runCheck() {
    String filter = buildQueryFilter(stream.getId(), query);
    // NOTE(review): this local `query` shadows the field of the same name that was
    // just used to build the filter above — presumably intentional, but worth confirming.
    String query = field + ":\"" + value + "\"";
    Integer backlogSize = getBacklog();
    boolean backlogEnabled = false;
    int searchLimit = 1;
    // Only fetch more than one hit when a backlog is requested.
    if(backlogSize != null && backlogSize > 0) {
        backlogEnabled = true;
        searchLimit = backlogSize;
    }
    try {
        SearchResult result = searches.search(
            query,
            filter,
            RelativeRange.create(configuration.getAlertCheckInterval()),
            searchLimit,
            0,
            new Sorting(Message.FIELD_TIMESTAMP, Sorting.Direction.DESC)
        );
        final List<MessageSummary> summaries;
        if (backlogEnabled) {
            summaries = Lists.newArrayListWithCapacity(result.getResults().size());
            for (ResultMessage resultMessage : result.getResults()) {
                final Message msg = resultMessage.getMessage();
                summaries.add(new MessageSummary(resultMessage.getIndex(), msg));
            }
        } else {
            summaries = Collections.emptyList();
        }
        final long count = result.getTotalResults();
        final String resultDescription = "Stream received messages matching <" + query + "> "
            + "(Current grace time: " + grace + " minutes)";
        if (count > 0) {
            LOG.debug("Alert check <{}> found [{}] messages.", id, count);
            return new CheckResult(true, this, resultDescription, Tools.nowUTC(), summaries);
        } else {
            LOG.debug("Alert check <{}> returned no results.", id);
            return new NegativeCheckResult();
        }
    } catch (InvalidRangeParametersException e) {
        // cannot happen lol
        LOG.error("Invalid timerange.", e);
        return null;
    }
}
// With a mocked search returning zero results, the check must not trigger.
@Test public void testRunNoMatchingMessages() throws Exception { final DateTime now = DateTime.now(DateTimeZone.UTC); final IndexRange indexRange = MongoIndexRange.create("graylog_test", now.minusDays(1), now, now, 0); final Set<IndexRange> indexRanges = Sets.newHashSet(indexRange); final SearchResult searchResult = spy(new SearchResult(Collections.emptyList(), 0L, indexRanges, "message:something", null, 100L)); when(searches.search( anyString(), anyString(), any(RelativeRange.class), anyInt(), anyInt(), any(Sorting.class))) .thenReturn(searchResult); final FieldContentValueAlertCondition condition = getCondition(getParametersMap(0, "message", "something"), alertConditionTitle); final AlertCondition.CheckResult result = condition.runCheck(); assertNotTriggered(result); }
// Convenience overload: substitutes variables using a single property container
// (no secondary fallback container).
public static String substVars(String val, PropertyContainer pc1) throws ScanException { return substVars(val, pc1, null); }
// A self-referencing variable with an empty nested reference must be detected as a
// circular reference and reported with the expected message.
@Test public void recursionErrorWithNullLiteralPayload() throws ScanException { Exception e = assertThrows(IllegalArgumentException.class, () -> { OptionHelper.substVars("abc${AA$AA${}}}xyz", context); }); String expectedMessage = CIRCULAR_VARIABLE_REFERENCE_DETECTED+"${AA} --> ${}]"; assertEquals(expectedMessage, e.getMessage()); }
// Converts a flat partition->assignment map into the nested AssignReplicasToDirs request
// layout (directory -> topics -> partitions), grouping partitions under their directory id
// and topic id. DirectoryData/TopicData instances are created lazily via computeIfAbsent.
static AssignReplicasToDirsRequestData buildRequestData( int nodeId, long brokerEpoch, Map<TopicIdPartition, Assignment> assignments ) { Map<Uuid, DirectoryData> directoryMap = new HashMap<>(); Map<Uuid, Map<Uuid, TopicData>> topicMap = new HashMap<>(); for (Map.Entry<TopicIdPartition, Assignment> entry : assignments.entrySet()) { TopicIdPartition topicPartition = entry.getKey(); Uuid directoryId = entry.getValue().directoryId(); DirectoryData directory = directoryMap.computeIfAbsent(directoryId, d -> new DirectoryData().setId(directoryId)); TopicData topic = topicMap.computeIfAbsent(directoryId, d -> new HashMap<>()) .computeIfAbsent(topicPartition.topicId(), topicId -> { TopicData data = new TopicData().setTopicId(topicId); directory.topics().add(data); return data; }); PartitionData partition = new PartitionData().setPartitionIndex(topicPartition.partitionId()); topic.partitions().add(partition); } return new AssignReplicasToDirsRequestData() .setBrokerId(nodeId) .setBrokerEpoch(brokerEpoch) .setDirectories(new ArrayList<>(directoryMap.values())); }
// Builds a request from five partition assignments spanning three directories and two topics
// and compares it against the fully spelled-out expected nested structure.
@Test void testBuildRequestData() { Map<TopicIdPartition, Uuid> assignments = new LinkedHashMap<>(); assignments.put(new TopicIdPartition(TOPIC_1, 1), DIR_1); assignments.put(new TopicIdPartition(TOPIC_1, 2), DIR_2); assignments.put(new TopicIdPartition(TOPIC_1, 3), DIR_3); assignments.put(new TopicIdPartition(TOPIC_1, 4), DIR_1); assignments.put(new TopicIdPartition(TOPIC_2, 5), DIR_2); Map<TopicIdPartition, Assignment> targetAssignments = new LinkedHashMap<>(); assignments.entrySet().forEach(e -> targetAssignments.put(e.getKey(), new Assignment(e.getKey(), e.getValue(), 0, () -> { }))); AssignReplicasToDirsRequestData built = AssignmentsManager.buildRequestData(8, 100L, targetAssignments); AssignReplicasToDirsRequestData expected = new AssignReplicasToDirsRequestData(). setBrokerId(8). setBrokerEpoch(100L). setDirectories(Arrays.asList( new AssignReplicasToDirsRequestData.DirectoryData(). setId(DIR_2). setTopics(Arrays.asList( new AssignReplicasToDirsRequestData.TopicData(). setTopicId(TOPIC_1). setPartitions(Collections.singletonList( new AssignReplicasToDirsRequestData.PartitionData(). setPartitionIndex(2))), new AssignReplicasToDirsRequestData.TopicData(). setTopicId(TOPIC_2). setPartitions(Collections.singletonList( new AssignReplicasToDirsRequestData.PartitionData(). setPartitionIndex(5))))), new AssignReplicasToDirsRequestData.DirectoryData(). setId(DIR_3). setTopics(Collections.singletonList( new AssignReplicasToDirsRequestData.TopicData(). setTopicId(TOPIC_1). setPartitions(Collections.singletonList( new AssignReplicasToDirsRequestData.PartitionData(). setPartitionIndex(3))))), new AssignReplicasToDirsRequestData.DirectoryData(). setId(DIR_1). setTopics(Collections.singletonList( new AssignReplicasToDirsRequestData.TopicData(). setTopicId(TOPIC_1). setPartitions(Arrays.asList( new AssignReplicasToDirsRequestData.PartitionData(). setPartitionIndex(1), new AssignReplicasToDirsRequestData.PartitionData(). 
 setPartitionIndex(4))))))); assertEquals(expected, built); }
// REST endpoint: asynchronously fetches the current offsets for a connector from the herder
// and blocks on the callback via the request handler.
@GET @Path("/{connector}/offsets") @Operation(summary = "Get the current offsets for the specified connector") public ConnectorOffsets getOffsets(final @PathParam("connector") String connector) throws Throwable { FutureCallback<ConnectorOffsets> cb = new FutureCallback<>(); herder.connectorOffsets(connector, cb); return requestHandler.completeRequest(cb); }
// A NotFoundException raised by the herder callback must propagate out of the REST resource.
@Test public void testGetOffsetsConnectorNotFound() { final ArgumentCaptor<Callback<ConnectorOffsets>> cb = ArgumentCaptor.forClass(Callback.class); expectAndCallbackException(cb, new NotFoundException("Connector not found")) .when(herder).connectorOffsets(anyString(), cb.capture()); assertThrows(NotFoundException.class, () -> connectorsResource.getOffsets("unknown-connector")); }
/**
 * Returns the HTTP status code of the wrapped response, or {@code 0} when no
 * response has been received yet.
 */
@Override
public int statusCode() {
    return response == null ? 0 : response.getCode();
}
// A wrapper constructed without a response must report status code 0.
@Test void statusCode_zeroWhenNoResponse() { assertThat(new HttpResponseWrapper(null, new HttpRequestWrapper(request, null), null).statusCode()).isZero(); }
// Returns the loaded configuration properties.
public Properties getProperties() { return properties; }
// Verifies which application properties are present: the ".no" key is absent,
// the base key resolves to "true".
@Test public void testApplicationProperties() { assertNull(Configuration.INSTANCE.getProperties().getProperty("hibernate.types.app.props.no")); assertEquals("true", Configuration.INSTANCE.getProperties().getProperty("hibernate.types.app.props")); }
// Always reports migrations as new: Jedis has no schema, so every migration is
// re-run and decides internally whether it has work to do.
@Override protected boolean isNewMigration(NoSqlMigration noSqlMigration) { // why: as Jedis does not have a schema, each migration checks if it needs to do something return true; }
// Migrations must always be considered new (before and after running) and
// running them repeatedly must be idempotent and exception-free.
@Test void testMigrationsHappyPath() { assertThat(jedisRedisDBCreator.isNewMigration(new NoSqlMigrationByClass(M001_JedisRemoveJobStatsAndUseMetadata.class))).isTrue(); assertThatCode(jedisRedisDBCreator::runMigrations).doesNotThrowAnyException(); assertThatCode(jedisRedisDBCreator::runMigrations).doesNotThrowAnyException(); assertThat(jedisRedisDBCreator.isNewMigration(new NoSqlMigrationByClass(M001_JedisRemoveJobStatsAndUseMetadata.class))).isTrue(); }
/**
 * Parses an OSGi-style {@code Export-Package} attribute into its individual exports.
 *
 * @param exportAttribute the raw attribute value; surrounding whitespace is ignored
 * @return the parsed exports; never empty
 * @throws RuntimeException via {@code ParsingContext.fail} when the attribute is
 *         empty or contains trailing, unparseable content
 */
public static List<ExportPackages.Export> parseExports(String exportAttribute) {
    ParsingContext p = new ParsingContext(exportAttribute.trim());
    List<ExportPackages.Export> exports = parseExportPackage(p);
    if (exports.isEmpty()) {
        p.fail("Expected a list of exports");
    } else if (!p.atEnd()) {
        // Idiomatic negation instead of `== false`; leftover input means the
        // attribute was malformed rather than merely long.
        p.fail("Exports not fully processed");
    }
    return exports;
}
// A single bare package name parses into exactly one export with no parameters.
@Test
void require_that_package_is_parsed_correctly() {
    List<Export> exports = ExportPackageParser.parseExports("sample.exported.package");

    assertEquals(1, exports.size());
    assertTrue(exports.get(0).getParameters().isEmpty());
    assertTrue(exports.get(0).getPackageNames().contains("sample.exported.package"));
}
/**
 * Stores a versioned profile for the given account.
 *
 * <p>The Redis cache is written first, then the backing {@code profiles} store;
 * NOTE(review): whether callers rely on this write ordering is not visible here —
 * confirm before reordering.
 *
 * @param uuid             the account identifier
 * @param versionedProfile the profile to persist
 */
public void set(UUID uuid, VersionedProfile versionedProfile) {
    redisSet(uuid, versionedProfile);
    profiles.set(uuid, versionedProfile);
}
// set() must write the profile to the Redis hash (keyed by UUID and version)
// and to the backing store, and do nothing else.
@Test
public void testSet() {
    final UUID uuid = UUID.randomUUID();
    final byte[] name = TestRandomUtil.nextBytes(81);

    final VersionedProfile profile = new VersionedProfile("someversion", name, "someavatar", null, null,
        null, null, "somecommitment".getBytes());
    profilesManager.set(uuid, profile);

    // Exactly one Redis hash write, nothing more.
    verify(commands, times(1)).hset(eq("profiles::" + uuid), eq("someversion"), any());
    verifyNoMoreInteractions(commands);

    // Exactly one backing-store write, nothing more.
    verify(profiles, times(1)).set(eq(uuid), eq(profile));
    verifyNoMoreInteractions(profiles);
}
/**
 * Serializes an event's key portion to JSON bytes.
 *
 * <p>Schema-change events update the per-table serializer cache and return
 * {@code null} (they produce no key payload). Data-change events are serialized
 * using the cached serializer for their table; for DELETEs the "before" image
 * is used, otherwise the "after" image.
 *
 * @param event a schema-change or data-change event
 * @return serialized key bytes, or {@code null} for schema-change events
 */
@Override
public byte[] serialize(Event event) {
    if (event instanceof SchemaChangeEvent) {
        Schema schema;
        SchemaChangeEvent schemaChangeEvent = (SchemaChangeEvent) event;
        if (event instanceof CreateTableEvent) {
            // A brand-new table carries its full schema directly.
            CreateTableEvent createTableEvent = (CreateTableEvent) event;
            schema = createTableEvent.getSchema();
        } else {
            // Otherwise evolve the previously cached schema with this change.
            schema =
                    SchemaUtils.applySchemaChangeEvent(
                            jsonSerializers.get(schemaChangeEvent.tableId()).getSchema(),
                            schemaChangeEvent);
        }
        // Rebuild the key serializer for the (possibly changed) primary key columns.
        JsonRowDataSerializationSchema jsonSerializer = buildSerializationForPrimaryKey(schema);
        try {
            jsonSerializer.open(context);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        // Replace the cache entry; subsequent data events for this table use it.
        jsonSerializers.put(
                schemaChangeEvent.tableId(),
                new TableSchemaInfo(
                        schemaChangeEvent.tableId(), schema, jsonSerializer, zoneId));
        return null;
    }

    DataChangeEvent dataChangeEvent = (DataChangeEvent) event;
    // DELETE keys come from the pre-image; all other ops from the post-image.
    RecordData recordData =
            dataChangeEvent.op().equals(OperationType.DELETE)
                    ? dataChangeEvent.before()
                    : dataChangeEvent.after();
    TableSchemaInfo tableSchemaInfo = jsonSerializers.get(dataChangeEvent.tableId());
    return tableSchemaInfo
            .getSerializationSchema()
            .serialize(tableSchemaInfo.getRowDataFromRecordData(recordData, true));
}
// End-to-end check of the JSON key serialization schema: a CreateTableEvent
// yields null, and insert/delete/update events serialize only the primary-key
// column (col1) plus the table id.
@Test
public void testSerialize() throws Exception {
    ObjectMapper mapper =
            JacksonMapperFactory.createObjectMapper()
                    .configure(JsonGenerator.Feature.WRITE_BIGDECIMAL_AS_PLAIN, false);
    SerializationSchema<Event> serializationSchema =
            KeySerializationFactory.createSerializationSchema(
                    new Configuration(), KeyFormat.JSON, ZoneId.systemDefault());
    serializationSchema.open(new MockInitializationContext());

    // create table
    Schema schema =
            Schema.newBuilder()
                    .physicalColumn("col1", DataTypes.STRING())
                    .physicalColumn("col2", DataTypes.STRING())
                    .primaryKey("col1")
                    .build();
    CreateTableEvent createTableEvent = new CreateTableEvent(TABLE_1, schema);
    // Schema-change events produce no key payload.
    Assertions.assertNull(serializationSchema.serialize(createTableEvent));

    // insert
    BinaryRecordDataGenerator generator =
            new BinaryRecordDataGenerator(RowType.of(DataTypes.STRING(), DataTypes.STRING()));
    DataChangeEvent insertEvent1 =
            DataChangeEvent.insertEvent(
                    TABLE_1,
                    generator.generate(
                            new Object[] {
                                BinaryStringData.fromString("1"),
                                BinaryStringData.fromString("1")
                            }));
    JsonNode expected =
            mapper.readTree(
                    "{\"TableId\":\"default_namespace.default_schema.table1\",\"col1\":\"1\"}");
    JsonNode actual = mapper.readTree(serializationSchema.serialize(insertEvent1));
    Assertions.assertEquals(expected, actual);

    DataChangeEvent insertEvent2 =
            DataChangeEvent.insertEvent(
                    TABLE_1,
                    generator.generate(
                            new Object[] {
                                BinaryStringData.fromString("2"),
                                BinaryStringData.fromString("2")
                            }));
    expected =
            mapper.readTree(
                    "{\"TableId\":\"default_namespace.default_schema.table1\",\"col1\":\"2\"}");
    actual = mapper.readTree(serializationSchema.serialize(insertEvent2));
    Assertions.assertEquals(expected, actual);

    // delete: the key comes from the deleted row's pre-image.
    DataChangeEvent deleteEvent =
            DataChangeEvent.deleteEvent(
                    TABLE_1,
                    generator.generate(
                            new Object[] {
                                BinaryStringData.fromString("2"),
                                BinaryStringData.fromString("2")
                            }));
    expected =
            mapper.readTree(
                    "{\"TableId\":\"default_namespace.default_schema.table1\",\"col1\":\"2\"}");
    actual = mapper.readTree(serializationSchema.serialize(deleteEvent));
    Assertions.assertEquals(expected, actual);

    // update: the key comes from the post-image (unchanged primary key here).
    DataChangeEvent updateEvent =
            DataChangeEvent.updateEvent(
                    TABLE_1,
                    generator.generate(
                            new Object[] {
                                BinaryStringData.fromString("1"),
                                BinaryStringData.fromString("1")
                            }),
                    generator.generate(
                            new Object[] {
                                BinaryStringData.fromString("1"),
                                BinaryStringData.fromString("x")
                            }));
    expected =
            mapper.readTree(
                    "{\"TableId\":\"default_namespace.default_schema.table1\",\"col1\":\"1\"}");
    actual = mapper.readTree(serializationSchema.serialize(updateEvent));
    Assertions.assertEquals(expected, actual);
}
/**
 * Maps a SQL Server column type definition onto the engine's internal column model.
 *
 * <p>Each branch records the canonical SQL Server source type (with precision /
 * length / scale where applicable) alongside the internal data type, column
 * length, and scale.
 *
 * @param typeDefine the raw column definition read from SQL Server metadata
 * @return the converted column
 * @throws RuntimeException via {@code CommonError.convertToSeaTunnelTypeError}
 *         for unrecognized SQL Server types
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    PhysicalColumn.PhysicalColumnBuilder builder =
            PhysicalColumn.builder()
                    .name(typeDefine.getName())
                    .nullable(typeDefine.isNullable())
                    .defaultValue(typeDefine.getDefaultValue())
                    .comment(typeDefine.getComment());

    String sqlServerType = typeDefine.getDataType().toUpperCase();
    switch (sqlServerType) {
        case SQLSERVER_BIT:
            builder.sourceType(SQLSERVER_BIT);
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        case SQLSERVER_TINYINT:
        case SQLSERVER_TINYINT_IDENTITY:
            // TINYINT is unsigned in SQL Server, so SHORT is used to hold 0-255.
            builder.sourceType(SQLSERVER_TINYINT);
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case SQLSERVER_SMALLINT:
        case SQLSERVER_SMALLINT_IDENTITY:
            builder.sourceType(SQLSERVER_SMALLINT);
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case SQLSERVER_INTEGER:
        case SQLSERVER_INTEGER_IDENTITY:
        case SQLSERVER_INT:
        case SQLSERVER_INT_IDENTITY:
            builder.sourceType(SQLSERVER_INT);
            builder.dataType(BasicType.INT_TYPE);
            break;
        case SQLSERVER_BIGINT:
        case SQLSERVER_BIGINT_IDENTITY:
            builder.sourceType(SQLSERVER_BIGINT);
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case SQLSERVER_REAL:
            builder.sourceType(SQLSERVER_REAL);
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case SQLSERVER_FLOAT:
            // FLOAT(1..24) is stored as a 4-byte REAL by SQL Server.
            if (typeDefine.getPrecision() != null && typeDefine.getPrecision() <= 24) {
                builder.sourceType(SQLSERVER_REAL);
                builder.dataType(BasicType.FLOAT_TYPE);
            } else {
                builder.sourceType(SQLSERVER_FLOAT);
                builder.dataType(BasicType.DOUBLE_TYPE);
            }
            break;
        case SQLSERVER_DECIMAL:
        case SQLSERVER_NUMERIC:
            builder.sourceType(
                    String.format(
                            "%s(%s,%s)",
                            SQLSERVER_DECIMAL,
                            typeDefine.getPrecision(),
                            typeDefine.getScale()));
            builder.dataType(
                    new DecimalType(
                            typeDefine.getPrecision().intValue(), typeDefine.getScale()));
            builder.columnLength(typeDefine.getPrecision());
            builder.scale(typeDefine.getScale());
            break;
        case SQLSERVER_MONEY:
            builder.sourceType(SQLSERVER_MONEY);
            builder.dataType(
                    new DecimalType(
                            typeDefine.getPrecision().intValue(), typeDefine.getScale()));
            builder.columnLength(typeDefine.getPrecision());
            builder.scale(typeDefine.getScale());
            break;
        case SQLSERVER_SMALLMONEY:
            builder.sourceType(SQLSERVER_SMALLMONEY);
            builder.dataType(
                    new DecimalType(
                            typeDefine.getPrecision().intValue(), typeDefine.getScale()));
            builder.columnLength(typeDefine.getPrecision());
            builder.scale(typeDefine.getScale());
            break;
        case SQLSERVER_CHAR:
            builder.sourceType(String.format("%s(%s)", SQLSERVER_CHAR, typeDefine.getLength()));
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(
                    TypeDefineUtils.doubleByteTo4ByteLength(typeDefine.getLength()));
            break;
        case SQLSERVER_NCHAR:
            builder.sourceType(
                    String.format("%s(%s)", SQLSERVER_NCHAR, typeDefine.getLength()));
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(
                    TypeDefineUtils.doubleByteTo4ByteLength(typeDefine.getLength()));
            break;
        case SQLSERVER_VARCHAR:
            // Length -1 denotes VARCHAR(MAX).
            if (typeDefine.getLength() == -1) {
                builder.sourceType(MAX_VARCHAR);
                builder.columnLength(TypeDefineUtils.doubleByteTo4ByteLength(POWER_2_31 - 1));
            } else {
                builder.sourceType(
                        String.format("%s(%s)", SQLSERVER_VARCHAR, typeDefine.getLength()));
                builder.columnLength(
                        TypeDefineUtils.doubleByteTo4ByteLength(typeDefine.getLength()));
            }
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case SQLSERVER_NVARCHAR:
            // Length -1 denotes NVARCHAR(MAX).
            if (typeDefine.getLength() == -1) {
                builder.sourceType(MAX_NVARCHAR);
                builder.columnLength(TypeDefineUtils.doubleByteTo4ByteLength(POWER_2_31 - 1));
            } else {
                builder.sourceType(
                        String.format("%s(%s)", SQLSERVER_NVARCHAR, typeDefine.getLength()));
                builder.columnLength(
                        TypeDefineUtils.doubleByteTo4ByteLength(typeDefine.getLength()));
            }
            builder.dataType(BasicType.STRING_TYPE);
            break;
        case SQLSERVER_TEXT:
            builder.sourceType(SQLSERVER_TEXT);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(POWER_2_31 - 1);
            break;
        case SQLSERVER_NTEXT:
            builder.sourceType(SQLSERVER_NTEXT);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(POWER_2_30 - 1);
            break;
        case SQLSERVER_XML:
            builder.sourceType(SQLSERVER_XML);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(POWER_2_31 - 1);
            break;
        case SQLSERVER_UNIQUEIDENTIFIER:
            builder.sourceType(SQLSERVER_UNIQUEIDENTIFIER);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
            break;
        case SQLSERVER_SQLVARIANT:
            builder.sourceType(SQLSERVER_SQLVARIANT);
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(typeDefine.getLength());
            break;
        case SQLSERVER_BINARY:
            builder.sourceType(
                    String.format("%s(%s)", SQLSERVER_BINARY, typeDefine.getLength()));
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(typeDefine.getLength());
            break;
        case SQLSERVER_VARBINARY:
            // Length -1 denotes VARBINARY(MAX).
            if (typeDefine.getLength() == -1) {
                builder.sourceType(MAX_VARBINARY);
                builder.columnLength(POWER_2_31 - 1);
            } else {
                builder.sourceType(
                        String.format("%s(%s)", SQLSERVER_VARBINARY, typeDefine.getLength()));
                builder.columnLength(typeDefine.getLength());
            }
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            break;
        case SQLSERVER_IMAGE:
            builder.sourceType(SQLSERVER_IMAGE);
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(POWER_2_31 - 1);
            break;
        case SQLSERVER_TIMESTAMP:
            // SQL Server TIMESTAMP/ROWVERSION is an 8-byte binary, not a datetime.
            builder.sourceType(SQLSERVER_TIMESTAMP);
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(8L);
            break;
        case SQLSERVER_DATE:
            builder.sourceType(SQLSERVER_DATE);
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case SQLSERVER_TIME:
            builder.sourceType(String.format("%s(%s)", SQLSERVER_TIME, typeDefine.getScale()));
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        case SQLSERVER_DATETIME:
            builder.sourceType(SQLSERVER_DATETIME);
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            // DATETIME has a fixed millisecond (scale 3) resolution.
            builder.scale(3);
            break;
        case SQLSERVER_DATETIME2:
            builder.sourceType(
                    String.format("%s(%s)", SQLSERVER_DATETIME2, typeDefine.getScale()));
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        case SQLSERVER_DATETIMEOFFSET:
            builder.sourceType(
                    String.format("%s(%s)", SQLSERVER_DATETIMEOFFSET, typeDefine.getScale()));
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(typeDefine.getScale());
            break;
        case SQLSERVER_SMALLDATETIME:
            builder.sourceType(SQLSERVER_SMALLDATETIME);
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                    DatabaseIdentifier.SQLSERVER, sqlServerType, typeDefine.getName());
    }
    return builder.build();
}
// An unknown SQL Server data type must fail with a SeaTunnelRuntimeException
// (and nothing else).
@Test
public void testConvertUnsupported() {
    BasicTypeDefine<Object> typeDefine =
            BasicTypeDefine.builder().name("test").columnType("aaa").dataType("aaa").build();
    try {
        SqlServerTypeConverter.INSTANCE.convert(typeDefine);
        Assertions.fail();
    } catch (SeaTunnelRuntimeException e) {
        // ignore — this is the expected failure mode
    } catch (Throwable e) {
        Assertions.fail();
    }
}
/**
 * Closes the underlying data output stream.
 *
 * @throws IOException if the underlying stream fails to close
 */
@Override
public void close() throws IOException {
    dataOut.close();
}
// close() must delegate to the wrapped output stream exactly once.
@Test
public void testClose() throws Exception {
    dataOutputStream.close();
    verify(mockOutputStream).close();
}
/**
 * Releases session resources by delegating to the wrapped PostgreSQL frontend
 * engine (openGauss reuses the PostgreSQL wire protocol implementation).
 *
 * @param connectionSession the session being released
 */
@Override
public void release(final ConnectionSession connectionSession) {
    postgresqlFrontendEngine.release(connectionSession);
}
// release() must forward the session to the delegated PostgreSQL engine.
@Test
void assertRelease() {
    ConnectionSession connection = mock(ConnectionSession.class);
    openGaussFrontendEngine.release(connection);
    verify(mockPostgreSQLFrontendEngine).release(connection);
}
/**
 * Reports whether the given data id matches any pattern in the aggregation whitelist.
 *
 * @param dataId the data id to test; must not be {@code null}
 * @return {@code true} when at least one whitelist pattern fully matches
 * @throws IllegalArgumentException if {@code dataId} is {@code null}
 */
public static boolean isAggrDataId(String dataId) {
    if (null == dataId) {
        throw new IllegalArgumentException("dataId is null");
    }
    // Patterns are tried in order; the first full match wins.
    return AGGR_DATAID_WHITELIST.get().stream()
            .anyMatch(pattern -> pattern.matcher(dataId).matches());
}
// Compiles a small whitelist and checks both non-matching prefixes and
// matching wildcard expansions.
@Test
void testIsAggrDataId() {
    List<String> list = new ArrayList<String>();
    list.add("com.taobao.jiuren.*");
    list.add("NS_NACOS_SUBSCRIPTION_TOPIC_*");
    list.add("com.taobao.tae.AppListOnGrid-*");
    AggrWhitelist.compile(list);

    // Prefix-only strings must not match the wildcard patterns.
    assertFalse(AggrWhitelist.isAggrDataId("com.abc"));
    assertFalse(AggrWhitelist.isAggrDataId("com.taobao.jiuren"));
    assertFalse(AggrWhitelist.isAggrDataId("com.taobao.jiurenABC"));
    assertTrue(AggrWhitelist.isAggrDataId("com.taobao.jiuren.abc"));
    assertTrue(AggrWhitelist.isAggrDataId("NS_NACOS_SUBSCRIPTION_TOPIC_abc"));
    assertTrue(AggrWhitelist.isAggrDataId("com.taobao.tae.AppListOnGrid-abc"));
}
/**
 * Substitutes variable references in {@code val} using a single property
 * container; convenience overload that passes no secondary container.
 *
 * @param val the string containing variable references
 * @param pc1 the property container used for lookups
 * @return the string with variables substituted
 * @throws ScanException if the variable syntax is malformed
 */
public static String substVars(String val, PropertyContainer pc1) throws ScanException {
    return substVars(val, pc1, null);
}
// An undefined variable with an empty ":-" default substitutes to nothing.
@Test
public void emptyDefault() throws ScanException {
    String r = OptionHelper.substVars("a${undefinedX:-}b", context);
    assertEquals("ab", r);
}
@Override @DataPermission(enable = false) // 禁用数据权限,避免建立不正确的缓存 @Cacheable(cacheNames = RedisKeyConstants.DEPT_CHILDREN_ID_LIST, key = "#id") public Set<Long> getChildDeptIdListFromCache(Long id) { List<DeptDO> children = getChildDeptList(id); return convertSet(children, DeptDO::getId); }
// Builds a two-level department tree and checks that the cached child-id
// lookup returns all descendants of dept1's parent subtree.
@Test
public void testGetChildDeptListFromCache() {
    // mock data (level-1 child nodes)
    DeptDO dept1 = randomPojo(DeptDO.class, o -> o.setName("1"));
    deptMapper.insert(dept1);
    DeptDO dept2 = randomPojo(DeptDO.class, o -> o.setName("2"));
    deptMapper.insert(dept2);
    // mock data (level-2 child nodes)
    DeptDO dept1a = randomPojo(DeptDO.class, o -> o.setName("1-a").setParentId(dept1.getId()));
    deptMapper.insert(dept1a);
    DeptDO dept2a = randomPojo(DeptDO.class, o -> o.setName("2-a").setParentId(dept2.getId()));
    deptMapper.insert(dept2a);
    // prepare parameters
    Long id = dept1.getParentId();

    // invoke
    Set<Long> result = deptService.getChildDeptIdListFromCache(id);
    // assert: only dept1's subtree (dept1 + dept1a) is returned
    assertEquals(result.size(), 2);
    assertTrue(result.contains(dept1.getId()));
    assertTrue(result.contains(dept1a.getId()));
}
/**
 * Converts a call expression to a {@link RexNode} by trying each conversion
 * rule in order and returning the first successful conversion.
 *
 * @param call the call expression to convert
 * @return the converted RexNode
 * @throws RuntimeException if no rule can convert the expression
 */
@Override
public RexNode visit(CallExpression call) {
    boolean isBatchMode = unwrapContext(relBuilder).isBatchMode();
    // The rule chain is ordered; findFirst on a sequential ordered stream
    // preserves that ordering, so the first applicable rule wins.
    return getFunctionConvertChain(isBatchMode).stream()
            .map(rule -> rule.convert(call, newFunctionContext()))
            .filter(Optional::isPresent)
            .map(Optional::get)
            .findFirst()
            .orElseThrow(() -> new RuntimeException("Unknown call expression: " + call));
}
// A TIME(2) literal must be truncated to two fractional digits and carry the
// declared precision in its Rex type.
@Test
void testTimeLiteral() {
    RexNode rex =
            converter.visit(
                    valueLiteral(
                            LocalTime.parse("12:12:12.12345"), DataTypes.TIME(2).notNull()));
    assertThat(((RexLiteral) rex).getValueAs(TimeString.class))
            .isEqualTo(new TimeString("12:12:12.12"));
    assertThat(rex.getType().getSqlTypeName()).isEqualTo(SqlTypeName.TIME);
    assertThat(rex.getType().getPrecision()).isEqualTo(2);
}
/**
 * Returns the chunk at {@code index}, reading and caching every chunk up to and
 * including it on demand.
 *
 * <p>Reads are performed under the write lock so concurrent callers do not read
 * the same chunk twice; already-cached chunks are returned without locking.
 *
 * @param index zero-based chunk index
 * @return the cached chunk, or {@code null} when the index is past the end
 * @throws IOException if reading a chunk from the source fails
 */
@Nullable
public DataBuffer readChunk(int index) throws IOException {
    if (index >= mDataBuffers.length) {
        return null;
    }
    if (index >= mBufferCount.get()) {
        try (LockResource ignored = new LockResource(mBufferLocks.writeLock())) {
            // Re-check under the lock: another thread may have filled the gap.
            while (index >= mBufferCount.get()) {
                DataBuffer buffer = readChunk();
                mDataBuffers[mBufferCount.get()] = buffer;
                mBufferCount.incrementAndGet();
            }
        }
    }
    return mDataBuffers[index];
}
// Reading chunk N forces chunks 0..N to be read and cached; re-reading a lower
// index must come from the cache (read count unchanged).
@Test
public void testDataCache() throws Exception {
    int index = 2;
    DataBuffer buffer = mDataReader.readChunk(index);
    Assert.assertEquals(index + 1, mDataReader.getReadChunkNum());
    Assert.assertTrue(mDataReader.validateBuffer(index, buffer));

    // Cached: no additional reads.
    index = 1;
    buffer = mDataReader.readChunk(index);
    Assert.assertEquals(3, mDataReader.getReadChunkNum());
    Assert.assertTrue(mDataReader.validateBuffer(index, buffer));

    index = 0;
    buffer = mDataReader.readChunk(index);
    Assert.assertEquals(3, mDataReader.getReadChunkNum());
    Assert.assertTrue(mDataReader.validateBuffer(index, buffer));

    // Advancing past the cache triggers exactly one more read.
    index = 3;
    buffer = mDataReader.readChunk(index);
    Assert.assertEquals(index + 1, mDataReader.getReadChunkNum());
    Assert.assertTrue(mDataReader.validateBuffer(index, buffer));
}
/**
 * Creates a source that emits the sequence of longs from {@code from} to
 * {@code to} (both inclusive), with no watermarks.
 *
 * @param from first value of the sequence
 * @param to   last value of the sequence; must be {@code >= from}
 * @return the sequence source
 * @throws IllegalArgumentException if {@code from > to}
 */
public DataStreamSource<Long> fromSequence(long from, long to) {
    if (to < from) {
        throw new IllegalArgumentException(
                "Start of sequence must not be greater than the end");
    }
    NumberSequenceSource sequenceSource = new NumberSequenceSource(from, to);
    return fromSource(sequenceSource, WatermarkStrategy.noWatermarks(), "Sequence Source");
}
// fromSequence must produce a source typed as LONG.
@Test
void testFromSequence() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    DataStreamSource<Long> src = env.fromSequence(0, 2);
    assertThat(src.getType()).isEqualTo(BasicTypeInfo.LONG_TYPE_INFO);
}
/**
 * UDAF factory computing the sample standard deviation of a BIGINT column.
 *
 * <p>The aggregate struct tracks SUM, COUNT and M2 (sum of squared deviations);
 * the lambdas passed to {@code getStdDevImplementation} supply the per-step
 * SUM/M2 update terms.
 *
 * <p>NOTE(review): the exact algebra of the M2-update and merge lambdas cannot
 * be verified from this snippet alone (it depends on how
 * {@code getStdDevImplementation} combines them) — confirm against that helper
 * and the merge test expectations.
 */
@UdafFactory(description = "Compute sample standard deviation of column with type Long.",
    aggregateSchema = "STRUCT<SUM bigint, COUNT bigint, M2 double>")
public static TableUdaf<Long, Struct, Double> stdDevLong() {
    return getStdDevImplementation(
        0L,
        STRUCT_LONG,
        // new SUM after adding a value
        (agg, newValue) -> newValue + agg.getInt64(SUM),
        // M2 update term for a newly added value
        (agg, newValue) ->
            Double.valueOf(newValue * (agg.getInt64(COUNT) + 1)
                - (agg.getInt64(SUM) + newValue)),
        // mean-difference term used when merging two aggregates
        (agg1, agg2) ->
            Double.valueOf(
                agg1.getInt64(SUM) / agg1.getInt64(COUNT)
                    - agg2.getInt64(SUM) / agg2.getInt64(COUNT)),
        // merged SUM
        (agg1, agg2) -> agg1.getInt64(SUM) + agg2.getInt64(SUM),
        // SUM after retracting a value (table updates)
        (agg, valueToRemove) -> agg.getInt64(SUM) - valueToRemove);
}
// Merging two partial aggregates must combine COUNT, SUM and M2 correctly and
// map to the expected sample standard deviation.
@Test
public void shouldMergeLongs() {
    final TableUdaf<Long, Struct, Double> udaf = stdDevLong();
    Struct left = udaf.initialize();
    final Long[] leftValues = new Long[] {1L, 2L, 3L, 4L, 5L};
    for (final Long thisValue : leftValues) {
        left = udaf.aggregate(thisValue, left);
    }
    Struct right = udaf.initialize();
    final Long[] rightValues = new Long[] {2L, 2L, 1L};
    for (final Long thisValue : rightValues) {
        right = udaf.aggregate(thisValue, right);
    }
    final Struct merged = udaf.merge(left, right);
    assertThat(merged.getInt64(COUNT), equalTo(8L));
    assertThat(merged.getInt64(SUM), equalTo(20L));
    assertThat(merged.getFloat64(M2), equalTo(18.166666666666664));
    final double standardDev = udaf.map(merged);
    assertThat(standardDev, equalTo(2.595238095238095));
}
/**
 * Reports whether the trigger is installed, i.e. a trigger instance exists.
 *
 * @return {@code true} when a trigger has been set
 */
public boolean isInstalled() {
    return mTrigger != null;
}
// With no resolvable activities, the intent-based trigger must report
// "not installed".
@Test
public void testIntentNotInstalledWhenNoActivities() {
    Assert.assertFalse(IntentApiTrigger.isInstalled(mMockInputMethodService));
}
/**
 * Maps a binlog event type code to its human-readable name.
 *
 * @param type the numeric binlog event type
 * @return the event's display name, or {@code "Unknown type:" + type} for
 *         unrecognized codes
 */
public static String getTypeName(final int type) {
    switch (type) {
        case START_EVENT_V3:
            return "Start_v3";
        case STOP_EVENT:
            return "Stop";
        case QUERY_EVENT:
            return "Query";
        case ROTATE_EVENT:
            return "Rotate";
        case INTVAR_EVENT:
            return "Intvar";
        case LOAD_EVENT:
            return "Load";
        case NEW_LOAD_EVENT:
            return "New_load";
        case SLAVE_EVENT:
            return "Slave";
        case CREATE_FILE_EVENT:
            return "Create_file";
        case APPEND_BLOCK_EVENT:
            return "Append_block";
        case DELETE_FILE_EVENT:
            return "Delete_file";
        case EXEC_LOAD_EVENT:
            return "Exec_load";
        case RAND_EVENT:
            return "RAND";
        case XID_EVENT:
            return "Xid";
        case USER_VAR_EVENT:
            return "User var";
        case FORMAT_DESCRIPTION_EVENT:
            return "Format_desc";
        case TABLE_MAP_EVENT:
            return "Table_map";
        case PRE_GA_WRITE_ROWS_EVENT:
            return "Write_rows_event_old";
        case PRE_GA_UPDATE_ROWS_EVENT:
            return "Update_rows_event_old";
        case PRE_GA_DELETE_ROWS_EVENT:
            return "Delete_rows_event_old";
        case WRITE_ROWS_EVENT_V1:
            return "Write_rows_v1";
        case UPDATE_ROWS_EVENT_V1:
            return "Update_rows_v1";
        case DELETE_ROWS_EVENT_V1:
            return "Delete_rows_v1";
        case BEGIN_LOAD_QUERY_EVENT:
            return "Begin_load_query";
        case EXECUTE_LOAD_QUERY_EVENT:
            return "Execute_load_query";
        case INCIDENT_EVENT:
            return "Incident";
        // Both heartbeat variants share a single display name.
        case HEARTBEAT_LOG_EVENT:
        case HEARTBEAT_LOG_EVENT_V2:
            return "Heartbeat";
        case IGNORABLE_LOG_EVENT:
            return "Ignorable";
        case ROWS_QUERY_LOG_EVENT:
            return "Rows_query";
        case WRITE_ROWS_EVENT:
            return "Write_rows";
        case UPDATE_ROWS_EVENT:
            return "Update_rows";
        case DELETE_ROWS_EVENT:
            return "Delete_rows";
        case GTID_LOG_EVENT:
            return "Gtid";
        case ANONYMOUS_GTID_LOG_EVENT:
            return "Anonymous_Gtid";
        case PREVIOUS_GTIDS_LOG_EVENT:
            return "Previous_gtids";
        case PARTIAL_UPDATE_ROWS_EVENT:
            return "Update_rows_partial";
        case TRANSACTION_CONTEXT_EVENT :
            return "Transaction_context";
        case VIEW_CHANGE_EVENT :
            return "view_change";
        case XA_PREPARE_LOG_EVENT :
            return "Xa_prepare";
        case TRANSACTION_PAYLOAD_EVENT :
            return "transaction_payload";
        default:
            return "Unknown type:" + type;
    }
}
// Event code 20 (PRE_GA_WRITE_ROWS_EVENT) must map to "Write_rows_event_old".
@Test
public void getTypeNameInputPositiveOutputNotNull31() {
    // Arrange
    final int type = 20;

    // Act
    final String actual = LogEvent.getTypeName(type);

    // Assert result
    Assert.assertEquals("Write_rows_event_old", actual);
}
/**
 * Builds a generic record (key, value, timestamp) from column names and value
 * expressions for an INSERT VALUES-style statement.
 *
 * <p>Validation happens in order: column/expression arity, column existence
 * (including pseudo columns), disallowed insert targets, and — for tables —
 * presence of every primary-key value. ROWTIME, when supplied, overrides the
 * clock-provided timestamp.
 *
 * @param columnNames    explicit target columns; empty means "all implicit columns"
 * @param expressions    one value expression per target column
 * @param schema         the source's logical schema
 * @param dataSourceType stream or table; tables require all key values
 * @return the constructed record
 * @throws KsqlException on any of the validation failures above
 */
public KsqlGenericRecord build(
    final List<ColumnName> columnNames,
    final List<Expression> expressions,
    final LogicalSchema schema,
    final DataSourceType dataSourceType
) {
    // Empty column list means the statement targets the schema's implicit columns.
    final List<ColumnName> columns = columnNames.isEmpty()
        ? implicitColumns(schema)
        : columnNames;

    if (columns.size() != expressions.size()) {
        throw new KsqlException(
            "Expected a value for each column."
                + " Expected Columns: " + columnNames
                + ". Got " + expressions);
    }

    final LogicalSchema schemaWithPseudoColumns = withPseudoColumns(schema);
    for (ColumnName col : columns) {
        if (!schemaWithPseudoColumns.findColumn(col).isPresent()) {
            throw new KsqlException("Column name " + col + " does not exist.");
        }
        if (SystemColumns.isDisallowedForInsertValues(col)) {
            throw new KsqlException("Inserting into column " + col + " is not allowed.");
        }
    }

    // Evaluate each expression to a concrete value, coerced to its column type.
    final Map<ColumnName, Object> values = resolveValues(
        columns, expressions, schemaWithPseudoColumns, functionRegistry, config
    );

    if (dataSourceType == DataSourceType.KTABLE) {
        // Tables require a value for every primary-key column.
        final String noValue = schemaWithPseudoColumns.key().stream()
            .map(Column::name)
            .filter(colName -> !values.containsKey(colName))
            .map(ColumnName::text)
            .collect(Collectors.joining(", "));
        if (!noValue.isEmpty()) {
            throw new KsqlException("Value for primary key column(s) "
                + noValue + " is required for tables");
        }
    }

    // Explicit ROWTIME wins over the injected clock.
    final long ts = (long) values.getOrDefault(SystemColumns.ROWTIME_NAME, clock.getAsLong());

    final GenericKey key = buildKey(schema, values);
    final GenericRow value = buildValue(schema, values);

    return KsqlGenericRecord.of(key, value, ts);
}
// A function-call expression (CONCAT('a','b')) must be evaluated when building
// both key and value columns.
@Test
public void shouldBuildExpression() {
    // Given:
    final LogicalSchema schema = LogicalSchema.builder()
        .keyColumn(KEY, SqlTypes.STRING)
        .valueColumn(COL0, SqlTypes.STRING)
        .build();
    final List<ColumnName> names = ImmutableList.of(KEY, COL0);
    final Expression exp = new FunctionCall(
        FunctionName.of("CONCAT"),
        ImmutableList.of(new StringLiteral("a"), new StringLiteral("b"))
    );

    // When:
    final KsqlGenericRecord record = recordFactory.build(
        names, ImmutableList.of(exp, exp), schema, DataSourceType.KSTREAM
    );

    // Then:
    assertThat(record, is(KsqlGenericRecord.of(
        GenericKey.genericKey("ab"),
        GenericRow.genericRow("ab"),
        0
    )));
}
/**
 * Restarts per the run request, first adjusting a non-fresh, non-specific
 * restart so it targets the inline root, then delegating recursively. A
 * DELEGATED result is resolved by restarting directly.
 *
 * @param runRequest the restart request
 * @param blocking   whether a direct restart should block
 * @return the final run response
 */
public RunResponse restart(RunRequest runRequest, boolean blocking) {
    boolean needsInlineRootAdjustment =
        !runRequest.isFreshRun()
            && runRequest.getCurrentPolicy() != RunPolicy.RESTART_FROM_SPECIFIC;
    if (needsInlineRootAdjustment) {
        updateRunRequestForRestartFromInlineRoot(runRequest);
    }

    RunResponse recursiveResponse = actionHandler.restartRecursively(runRequest);
    return recursiveResponse.getStatus() == RunResponse.Status.DELEGATED
        ? restartDirectly(recursiveResponse, runRequest, blocking)
        : recursiveResponse;
}
// Restarting from an inline root in a non-foreach workflow must rewrite the
// restart path to target the root node (step id nulled) before delegating.
@Test
public void testRestartFromInlineRootWithinNonForeach() {
    when(instanceDao.getWorkflowInstance(
            "sample-minimal-wf", 1, Constants.LATEST_INSTANCE_RUN, true))
        .thenReturn(instance);
    when(instance.getStatus()).thenReturn(WorkflowInstance.Status.FAILED);
    when(instance.getInitiator()).thenReturn(new ManualInitiator());
    WorkflowInstanceAggregatedInfo aggregatedInfo = mock(WorkflowInstanceAggregatedInfo.class);
    when(instance.getAggregatedInfo()).thenReturn(aggregatedInfo);
    StepAggregatedView aggregatedView = mock(StepAggregatedView.class);
    when(aggregatedInfo.getStepAggregatedViews()).thenReturn(singletonMap("job1", aggregatedView));
    when(aggregatedView.getStatus()).thenReturn(StepInstance.Status.FATALLY_FAILED);
    when(actionHandler.restartRecursively(any()))
        .thenReturn(RunResponse.builder().status(RunResponse.Status.WORKFLOW_RUN_CREATED).build());
    RunRequest runRequest =
        RunRequest.builder()
            .requester(user)
            .currentPolicy(RunPolicy.RESTART_FROM_INCOMPLETE)
            .restartConfig(
                RestartConfig.builder()
                    .addRestartNode("sample-minimal-wf", 1, "job1")
                    .restartPolicy(RunPolicy.RESTART_FROM_INCOMPLETE)
                    .stepRestartParams(
                        Collections.singletonMap(
                            "job1",
                            Collections.singletonMap(
                                "param1",
                                ParamDefinition.buildParamDefinition("param1", "foo"))))
                    .build())
            .build();

    RunResponse response = stepActionHandler.restart(runRequest, true);

    // Capture what was delegated and verify the restart path was rewritten.
    ArgumentCaptor<RunRequest> requestCaptor = ArgumentCaptor.forClass(RunRequest.class);
    Mockito.verify(actionHandler, Mockito.times(1)).restartRecursively(requestCaptor.capture());
    RunRequest request = requestCaptor.getValue();
    assertEquals(runRequest, request);
    // The step id is dropped so the restart targets the workflow root.
    assertEquals(
        Collections.singletonList(new RestartConfig.RestartNode("sample-minimal-wf", 1, null)),
        request.getRestartConfig().getRestartPath());
    assertEquals(RunResponse.Status.WORKFLOW_RUN_CREATED, response.getStatus());
}
/**
 * Injects schema information into CREATE SOURCE / CREATE ... AS SELECT
 * statements; all other statement types pass through unchanged.
 *
 * <p>KsqlStatementExceptions are rethrown as-is; any other KsqlException is
 * wrapped with the statement text for context.
 *
 * @param statement the configured statement to process
 * @return the (possibly rewritten) statement
 * @throws KsqlStatementException on injection failure
 */
@SuppressWarnings("unchecked")
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
    final ConfiguredStatement<T> statement
) {
    if (!(statement.getStatement() instanceof CreateSource)
        && !(statement.getStatement() instanceof CreateAsSelect)) {
        // Not a statement this injector handles.
        return statement;
    }

    try {
        if (statement.getStatement() instanceof CreateSource) {
            final ConfiguredStatement<CreateSource> createStatement =
                (ConfiguredStatement<CreateSource>) statement;
            // Injection is optional: fall back to the original statement.
            return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement);
        } else {
            final ConfiguredStatement<CreateAsSelect> createStatement =
                (ConfiguredStatement<CreateAsSelect>) statement;
            return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse(
                createStatement);
        }
    } catch (final KsqlStatementException e) {
        // Already carries statement context — don't double-wrap.
        throw e;
    } catch (final KsqlException e) {
        throw new KsqlStatementException(
            ErrorMessageUtil.buildErrorMessage(e),
            statement.getMaskedStatementText(),
            e.getCause());
    }
}
// The injector must add KEY_SCHEMA_FULL_NAME (and keep KEY_SCHEMA_ID) in the
// rewritten CSAS statement's WITH clause.
@Test
public void shouldInjectKeyForCsas() {
    // Given:
    givenFormatsAndProps("avro", "delimited",
        ImmutableMap.of("KEY_SCHEMA_ID", new IntegerLiteral(42)));
    givenDDLSchemaAndFormats(LOGICAL_SCHEMA, "avro", "delimited",
        SerdeFeature.UNWRAP_SINGLES, SerdeFeature.WRAP_SINGLES);

    // When:
    final ConfiguredStatement<CreateStreamAsSelect> result = injector.inject(csasStatement);

    // Then:
    assertThat(result.getMaskedStatementText(), is(
        "CREATE STREAM `csas` "
            + "WITH (KAFKA_TOPIC='some-topic', KEY_FORMAT='avro', KEY_SCHEMA_FULL_NAME='myrecord', "
            + "KEY_SCHEMA_ID=42, VALUE_FORMAT='delimited') AS SELECT *\nFROM TABLE `sink`"
    ));
}
/**
 * Serializes an object to its JSON string representation, delegating to the
 * underlying JSON library.
 *
 * @param object the object to serialize
 * @return the JSON string
 */
public static String toJSONString(Object object) {
    return JSON.toJSONString(object);
}
// Spot-checks JSON serialization of scalars, empty containers and arrays.
@Test
public void toJSONString() throws Exception {
    Assert.assertEquals(JSONUtils.toJSONString("xxx"), "\"xxx\"");
    Assert.assertEquals(JSONUtils.toJSONString(1), "1");
    Assert.assertEquals(JSONUtils.toJSONString(2.2d), "2.2");
    Assert.assertEquals(JSONUtils.toJSONString(false), "false");
    Assert.assertEquals(JSONUtils.toJSONString(new HashMap()), "{}");
    Assert.assertEquals(JSONUtils.toJSONString(new ArrayList()), "[]");
    Assert.assertEquals(JSONUtils.toJSONString(new Object[0]), "[]");
}
/**
 * JWT authentication step: validates the request's token against the configured
 * secret and, on success, continues the plugin chain with the rule handle
 * applied to the decoded claims.
 *
 * <p>Checks happen in order: secret key must be configured, then the token
 * (from either the Authorization header or the legacy token header) must verify.
 *
 * @param exchange the current server exchange
 * @param chain    the plugin chain to continue on success
 * @param selector the matched selector (unused here)
 * @param rule     the matched rule, whose handle shapes the claim propagation
 * @return the completion signal, or an error response body on failure
 */
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain,
                               final SelectorData selector, final RuleData rule) {
    JwtConfig jwtConfig = Singleton.INST.get(JwtConfig.class);
    String authorization = exchange.getRequest().getHeaders().getFirst(HttpHeaders.AUTHORIZATION);
    String token = exchange.getRequest().getHeaders().getFirst(TOKEN);
    // check secreteKey
    if (StringUtils.isEmpty(jwtConfig.getSecretKey())) {
        Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.SECRET_KEY_MUST_BE_CONFIGURED);
        return WebFluxResultUtils.result(exchange, error);
    }
    // compatible processing — prefer one of the two token headers
    String finalAuthorization = compatible(token, authorization);
    Map<String, Object> jwtBody = checkAuthorization(finalAuthorization, jwtConfig.getSecretKey());
    if (Objects.isNull(jwtBody)) {
        // Verification failed (bad signature, malformed, or missing token).
        Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.ERROR_TOKEN);
        return WebFluxResultUtils.result(exchange, error);
    }
    return chain.execute(executeRuleHandle(rule, exchange, jwtBody));
}
// With a "custom" handle type, the JWT plugin must propagate the converted
// claim as a request header before continuing the chain.
@Test
public void testDoExecuteWithCustomHandleType() {
    ruleData.setHandle("{\"handleType\":\"custom\",\"customConvert\":\"customConvert\"}");
    jwtPluginDataHandlerUnderTest.handlerRule(ruleData);
    when(this.chain.execute(any())).thenReturn(Mono.empty());
    Mono<Void> mono = jwtPluginUnderTest.doExecute(exchange, chain, selectorData, ruleData);
    StepVerifier.create(mono).expectSubscription().verifyComplete();
    verify(chain)
        .execute(argThat(exchange -> hasHeader(exchange, "custom", "customConvert")));
}