focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Flushes every in-flight track: snapshots the keys of all tracks currently under
 * construction, then closes and publishes them. Runs inside the parallelism guard
 * so concurrent callers are detected.
 */
public void flushAllTracks() {
    parallelismDetector.run(() -> {
        ArrayList<String> allKnownKeys = newArrayList(tracksUnderConstruction.keySet());
        closeAndPublish(allKnownKeys);
    });
}
/**
 * Feeds two interleaved tracks (ids 3472 and 3473) into a TrackMaker, flushes, and
 * verifies both tracks (4 points total) were published and internal state is empty.
 */
@Test public void flushAllTracks() { String ONE = "[RH],STARS,D21_B,03/24/2018,14:42:00.130,N518SP,C172,,5256,032,110,186,042.92704,-083.70974,3472,5256,-14.5730,42.8527,1,Y,A,D21,,POL,ARB,1446,ARB,ACT,VFR,,01500,,,,,,S,1,,0,{RH}"; String TWO = "[RH],STARS,D21_B,03/24/2018,14:42:04.750,N518SP,C172,,5256,032,110,184,042.92457,-083.70999,3472,5256,-14.5847,42.7043,1,Y,A,D21,,POL,ARB,1446,ARB,ACT,VFR,,01500,,,,,,S,1,,0,{RH}"; //different Track ID (from 3472 to 3473) String A = "[RH],STARS,D21_B,03/24/2018,14:42:00.130,N518SP,C172,,5256,032,110,186,042.92704,-083.70974,3473,5256,-14.5730,42.8527,1,Y,A,D21,,POL,ARB,1446,ARB,ACT,VFR,,01500,,,,,,S,1,,0,{RH}"; String B = "[RH],STARS,D21_B,03/24/2018,14:42:04.750,N518SP,C172,,5256,032,110,184,042.92457,-083.70999,3473,5256,-14.5847,42.7043,1,Y,A,D21,,POL,ARB,1446,ARB,ACT,VFR,,01500,,,,,,S,1,,0,{RH}"; TestConsumer trackConsumer = new TestConsumer(); TrackMaker maker = new TrackMaker(trackConsumer); maker.accept(NopHit.from(ONE)); maker.accept(NopHit.from(A)); maker.accept(NopHit.from(TWO)); maker.accept(NopHit.from(B)); maker.flushAllTracks(); assertEquals(0, maker.currentPointCount()); assertEquals(0, maker.numTracksUnderConstruction()); assertEquals(4, maker.numPointsPublished()); assertEquals(2, maker.numTracksPublished()); assertEquals(2, trackConsumer.numCallsToAccept); }
/** Static factory for an end-of-stream (EOS) metadata block, which carries no payload. */
public static MetadataBlock newEos() {
    return new MetadataBlock();
}
/** Round-trips an empty (EOS) metadata block through serialization and back. */
@Test
public void emptyDataBlockCorrectness() throws IOException {
    testSerdeCorrectness(MetadataBlock.newEos());
}
/**
 * REST endpoint deleting all COMMON-group relations (both 'from' and 'to' directions)
 * for the given entity. Validates the id/type request params and the caller's WRITE
 * permission on the entity before delegating to the relation service.
 */
@ApiOperation(value = "Delete common relations (deleteCommonRelations)", notes = "Deletes all the relations ('from' and 'to' direction) for the specified entity and relation type group: 'COMMON'. " + SECURITY_CHECKS_ENTITY_DESCRIPTION) @PreAuthorize("hasAnyAuthority('SYS_ADMIN','TENANT_ADMIN', 'CUSTOMER_USER')") @RequestMapping(value = "/relations", method = RequestMethod.DELETE, params = {"entityId", "entityType"}) @ResponseStatus(value = HttpStatus.OK) public void deleteRelations(@Parameter(description = ENTITY_ID_PARAM_DESCRIPTION, required = true) @RequestParam("entityId") String strId, @Parameter(description = ENTITY_TYPE_PARAM_DESCRIPTION, required = true) @RequestParam("entityType") String strType) throws ThingsboardException { checkParameter("entityId", strId); checkParameter("entityType", strType); EntityId entityId = EntityIdFactory.getByTypeAndId(strType, strId); checkEntityId(entityId, Operation.WRITE); tbEntityRelationService.deleteCommonRelations(getTenantId(), getCurrentUser().getCustomerId(), entityId, getCurrentUser()); }
/**
 * Creates 30 'from' and 30 'to' relations around a main device, deletes all of them
 * through the DELETE /api/relations endpoint, and verifies both directions are empty
 * and that no edge notification was emitted.
 */
@Test public void testDeleteRelations() throws Exception { final int numOfDevices = 30; createDevicesByFrom(numOfDevices, BASE_DEVICE_NAME + " from"); createDevicesByTo(numOfDevices, BASE_DEVICE_NAME + " to"); String urlTo = String.format("/api/relations?toId=%s&toType=%s", mainDevice.getUuidId(), EntityType.DEVICE ); String urlFrom = String.format("/api/relations?fromId=%s&fromType=%s", mainDevice.getUuidId(), EntityType.DEVICE ); assertFoundList(urlTo, numOfDevices); assertFoundList(urlFrom, numOfDevices); String url = String.format("/api/relations?entityId=%s&entityType=%s", mainDevice.getUuidId(), EntityType.DEVICE ); Mockito.reset(tbClusterService, auditLogService); doDelete(url).andExpect(status().isOk()); testNotifyEntityOneTimeMsgToEdgeServiceNever(null, mainDevice.getId(), mainDevice.getId(), savedTenant.getId(), tenantAdmin.getCustomerId(), tenantAdmin.getId(), tenantAdmin.getEmail(), ActionType.RELATIONS_DELETED); Assert.assertTrue( "Performed deletion of all relations but some relations were found!", doGet(urlTo, List.class).isEmpty() ); Assert.assertTrue( "Performed deletion of all relations but some relations were found!", doGet(urlFrom, List.class).isEmpty() ); }
/**
 * Builds the serializable attribute map for this package material: its type,
 * plugin id, and the repository- and package-level configuration maps.
 *
 * @param addSecureFields whether secure (encrypted) configuration fields are included
 */
@Override
public Map<String, Object> getAttributes(boolean addSecureFields) {
    final Map<String, Object> attributes = new HashMap<>();
    attributes.put("type", "package");
    attributes.put("plugin-id", getPluginId());
    // Repository-level configuration, honoring the secure-field flag.
    attributes.put("repository-configuration",
            packageDefinition.getRepository().getConfiguration().getConfigurationAsMap(addSecureFields));
    // Package-level configuration, same secure-field handling.
    attributes.put("package-configuration",
            packageDefinition.getConfiguration().getConfigurationAsMap(addSecureFields));
    return attributes;
}
/** With secure fields enabled, all repo and package config values must appear in the attribute map. */
@Test void shouldGetAttributesWithSecureFields() { PackageMaterial material = createPackageMaterialWithSecureConfiguration(); Map<String, Object> attributes = material.getAttributes(true); assertThat(attributes.get("type")).isEqualTo("package"); assertThat(attributes.get("plugin-id")).isEqualTo("pluginid"); Map<String, Object> repositoryConfiguration = (Map<String, Object>) attributes.get("repository-configuration"); assertThat(repositoryConfiguration.get("k1")).isEqualTo("repo-v1"); assertThat(repositoryConfiguration.get("k2")).isEqualTo("repo-v2"); Map<String, Object> packageConfiguration = (Map<String, Object>) attributes.get("package-configuration"); assertThat(packageConfiguration.get("k3")).isEqualTo("package-v1"); assertThat(packageConfiguration.get("k4")).isEqualTo("package-v2"); }
/**
 * Total size in bytes of the backing payload array; 0 when no payload is set.
 */
@Override
public int totalSize() {
    if (payload == null) {
        return 0;
    }
    return payload.length;
}
/** A zero-length (but non-null) payload must report total size 0. */
@Test
public void totalSize_whenEmpty() {
    HeapData heapData = new HeapData(new byte[0]);
    assertEquals(0, heapData.totalSize());
}
/**
 * Writes a length-encoded binary protocol value into the MySQL packet payload.
 * Raw byte arrays are written as length-encoded bytes; any other value is written
 * as its length-encoded string form.
 */
@Override
public void write(final MySQLPacketPayload payload, final Object value) {
    if (!(value instanceof byte[])) {
        payload.writeStringLenenc(value.toString());
        return;
    }
    payload.writeBytesLenenc((byte[]) value);
}
/** Writing a byte[] must emit a length prefix (0x0d = 13) followed by the raw bytes. */
@Test void assertWrite() { byte[] input = {0x0a, 0x33, 0x18, 0x01, 0x4a, 0x08, 0x0a, (byte) 0x9a, 0x01, 0x18, 0x01, 0x4a, 0x6f}; byte[] expected = {0x0d, 0x0a, 0x33, 0x18, 0x01, 0x4a, 0x08, 0x0a, (byte) 0x9a, 0x01, 0x18, 0x01, 0x4a, 0x6f}; ByteBuf actual = Unpooled.wrappedBuffer(new byte[expected.length]).writerIndex(0); MySQLPacketPayload payload = new MySQLPacketPayload(actual, StandardCharsets.UTF_8); new MySQLByteLenencBinaryProtocolValue().write(payload, input); assertThat(ByteBufUtil.getBytes(actual), is(expected)); }
/**
 * Returns the element at {@code index} from a container of unknown type: Map
 * (iterates entries), List, Object[], Iterator, Collection, Enumeration, or any
 * array (via reflection).
 *
 * Fixes: the Iterator/Enumeration branches previously mutated {@code index} while
 * walking, so the "Entry does not exist" message reported a garbage value instead
 * of the index the caller asked for. Raw generic types are also replaced with
 * wildcards.
 *
 * @throws IndexOutOfBoundsException if the index is negative or past the end
 * @throws IllegalArgumentException  if the object type is unsupported or null
 */
public static Object get(final Object object, final int index) {
    if (index < 0) {
        throw new IndexOutOfBoundsException("Index cannot be negative: " + index);
    }
    if (object instanceof Map) {
        // Treat a Map as a sequence of its entries, in iteration order.
        return get(((Map<?, ?>) object).entrySet().iterator(), index);
    } else if (object instanceof List) {
        return ((List<?>) object).get(index);
    } else if (object instanceof Object[]) {
        return ((Object[]) object)[index];
    } else if (object instanceof Iterator) {
        final Iterator<?> it = (Iterator<?>) object;
        // Walk with a separate counter so the error message can report the original index.
        for (int remaining = index; it.hasNext(); remaining--) {
            final Object next = it.next();
            if (remaining == 0) {
                return next;
            }
        }
        throw new IndexOutOfBoundsException("Entry does not exist: " + index);
    } else if (object instanceof Collection) {
        return get(((Collection<?>) object).iterator(), index);
    } else if (object instanceof Enumeration) {
        final Enumeration<?> en = (Enumeration<?>) object;
        for (int remaining = index; en.hasMoreElements(); remaining--) {
            final Object next = en.nextElement();
            if (remaining == 0) {
                return next;
            }
        }
        throw new IndexOutOfBoundsException("Entry does not exist: " + index);
    } else if (object == null) {
        throw new IllegalArgumentException("Unsupported object type: null");
    } else {
        try {
            // Primitive arrays; ArrayIndexOutOfBoundsException propagates unchanged.
            return Array.get(object, index);
        } catch (IllegalArgumentException ex) {
            throw new IllegalArgumentException("Unsupported object type: " + object.getClass().getName());
        }
    }
}
/** Indexing into an empty primitive array must throw ArrayIndexOutOfBoundsException. */
@Test
void testGetArray4() {
    assertThrows(ArrayIndexOutOfBoundsException.class, () -> {
        CollectionUtils.get(new int[] {}, 0);
    });
}
/** Static factory wrapping a ConfigResponse in an HTTP response object. */
public static HttpConfigResponse createFromConfig(ConfigResponse config) {
    return new HttpConfigResponse(config);
}
/** The HTTP response rendered from a config payload must match the expected JSON. */
@Test public void require_that_response_is_created_from_config() throws IOException { long generation = 1L; ConfigPayload payload = ConfigPayload.fromInstance(new SimpletypesConfig(new SimpletypesConfig.Builder())); ConfigResponse configResponse = SlimeConfigResponse.fromConfigPayload(payload, generation, false, PayloadChecksums.from("", "595f44fec1e92a71d")); HttpConfigResponse response = HttpConfigResponse.createFromConfig(configResponse); assertEquals(SessionHandlerTest.getRenderedString(response), "{\"boolval\":false,\"doubleval\":0.0,\"enumval\":\"VAL1\",\"intval\":0,\"longval\":0,\"stringval\":\"s\"}"); }
public String getTopicName(AsyncMockDefinition definition, EventMessage eventMessage) { logger.debugf("AsyncAPI Operation {%s}", definition.getOperation().getName()); // Produce service name part of topic name. String serviceName = definition.getOwnerService().getName().replace(" ", ""); serviceName = serviceName.replace("-", ""); // Produce version name part of topic name. String versionName = definition.getOwnerService().getVersion().replace(" ", ""); versionName = versionName.replace(".", ""); // Produce operation name part of topic name. String operationName = ProducerManager.getDestinationOperationPart(definition.getOperation(), eventMessage); // Aggregate the 3 parts using '-' as delimiter. return serviceName + "-" + versionName + "-" + operationName.replace("/", "-"); }
/** "Streetlights API" v0.1.0 with a RECEIVE operation must yield "StreetlightsAPI-010-receiveLightMeasurement". */
@Test void testGetTopicName() { AmazonSNSProducerManager producerManager = new AmazonSNSProducerManager(); Service service = new Service(); service.setName("Streetlights API"); service.setVersion("0.1.0"); Operation operation = new Operation(); operation.setName("RECEIVE receiveLightMeasurement"); operation.setMethod("RECEIVE"); operation.setResourcePaths(Set.of("smartylighting.streetlights.1.0.event.lighting.measured")); service.addOperation(operation); EventMessage eventMessage = new EventMessage(); eventMessage.setName("Sample"); List<EventMessage> eventsMessages = List.of(eventMessage); AsyncMockDefinition definition = new AsyncMockDefinition(service, operation, eventsMessages); String queueName = producerManager.getTopicName(definition, eventMessage); assertEquals("StreetlightsAPI-010-receiveLightMeasurement", queueName); }
/**
 * Splits a SQL script into individual statements using ';' as the separator, while
 * ignoring separators that occur inside string literals ('...' or "..."), line
 * comments ("-- ...") or block comments. Blank fragments are dropped; a
 * trailing statement with no terminating ';' is still returned.
 */
public List<String> split( String script ) {
  if ( script == null ) {
    return Collections.emptyList();
  }
  List<String> result = new ArrayList<String>();
  MODE mode = MODE.SQL;
  // Quote character (' or ") that opened the string currently being scanned.
  char currentStringChar = 0;
  // Index of the first character of the statement currently being accumulated.
  int statementStart = 0;
  for ( int i = 0; i < script.length(); i++ ) {
    char ch = script.charAt( i );
    char nextCh = i < script.length() - 1 ? script.charAt( i + 1 ) : 0;
    switch ( mode ) {
      case SQL:
        switch ( ch ) {
          case '/':
            if ( nextCh == '*' ) {
              mode = MODE.BLOCK_COMMENT;
              i++;
            }
            break;
          case '-':
            if ( nextCh == '-' ) {
              mode = MODE.LINE_COMMENT;
              i++;
            }
            break;
          case '\'':
          case '"':
            mode = MODE.STRING;
            currentStringChar = ch;
            break;
          case ';':
            // Statement boundary: emit the accumulated fragment if non-blank.
            String st = script.substring( statementStart, i );
            if ( StringUtils.isNotBlank( st ) ) {
              result.add( st );
            }
            statementStart = i + 1;
            break;
        }
        break;
      case BLOCK_COMMENT:
        if ( ch == '*' ) {
          if ( nextCh == '/' ) {
            mode = MODE.SQL;
            i++;
          }
        }
        break;
      case LINE_COMMENT:
        if ( ch == '\n' || ch == '\r' ) {
          mode = MODE.SQL;
        }
        break;
      case STRING:
        if ( ch == '\\' && nextCh == '\\' ) {
          /*
           * The user is hard-coding a backslash into the string.
           * Pass the hard-coded backslash through, and skip over the real backslash on the next loop
           */
          i++;
        } else if ( ch == '\\' && nextCh == currentStringChar && usingBackslashAsEscapeCharForQuotation ) {
          /*
           * The user is hard-coding a quote character into the string.
           * Pass the hard-coded quote character through, and skip over the quote on next loop
           */
          /*
           * usingBackslashAsEscapeCharForQuotation
           * PDI-16224.
           *
           * ANSI standards specify that using the backslash character (\) to escape single (' ') or double (" ")
           * quotation marks is invalid. For example, the following attempt to find a quotation mark does not conform to ANSI standards:
           * where col1 = '\'';"
           * In any way a construction '\'|| is correct for Oracle but for others DBs (ex. MySQl) isn't correct.
           */
          i++;
        } else if ( ch == currentStringChar ) {
          mode = MODE.SQL;
        }
        break;
    }
  }
  // Emit the final statement when the script does not end with ';'.
  if ( statementStart < script.length() ) {
    String st = script.substring( statementStart );
    if ( StringUtils.isNotBlank( st ) ) {
      result.add( st );
    }
  }
  return result;
}
/**
 * Exercises statement splitting: null/blank input, ';' separators, quoted strings,
 * line/block comments, backslash-escaped quotes, and the Oracle-specific '\'||
 * behavior (PDI-16224) via the oracle parser variant.
 */
@Test public void testSplit() { assertEquals( Arrays.asList( new String[0] ), sqlScriptParser.split( null ) ); assertEquals( Arrays.asList( new String[0] ), sqlScriptParser.split( "" ) ); assertEquals( Arrays.asList( new String[0] ), sqlScriptParser.split( " " ) ); assertEquals( Arrays.asList( "SELECT 1", "SELECT 2" ), sqlScriptParser.split( "SELECT 1;SELECT 2" ) ); assertEquals( Collections.singletonList( "SELECT '1;2'" ), sqlScriptParser.split( "SELECT '1;2'" ) ); assertEquals( Collections.singletonList( "SELECT \"1;2\"" ), sqlScriptParser.split( "SELECT \"1;2\"" ) ); assertEquals( Collections.singletonList( "SELECT -- 1;2" ), sqlScriptParser.split( "SELECT -- 1;2" ) ); assertEquals( Collections.singletonList( "SELECT /*1;2*/" ), sqlScriptParser.split( "SELECT /*1;2*/" ) ); assertEquals( Arrays.asList( "SELECT /1", "2" ), sqlScriptParser.split( "SELECT /1;2" ) ); assertEquals( Arrays.asList( "SELECT /1", "2" ), sqlScriptParser.split( "SELECT /1;;;;2" ) ); assertEquals( Collections.singletonList( "SELECT /1" ), sqlScriptParser.split( "SELECT /1;\n \n" ) ); assertEquals( Collections.singletonList( "SELECT \"hello\\\"world\" FROM dual" ), sqlScriptParser.split( "SELECT \"hello\\\"world\" FROM dual" ) ); assertEquals( Collections.singletonList( "CREATE TABLE test1 (col1 STRING) TBLPROPERTIES (\"prop1\" = \"my\\\"value\")" ), sqlScriptParser.split( "CREATE TABLE test1 (col1 STRING) TBLPROPERTIES (\"prop1\" = \"my\\\"value\");" ) ); assertEquals( Collections.singletonList( "CREATE TABLE test1 (col1 STRING) TBLPROPERTIES ('prop1' = 'my\\\"value')" ), sqlScriptParser.split( "CREATE TABLE test1 (col1 STRING) TBLPROPERTIES ('prop1' = 'my\\\"value');" ) ); assertEquals( Collections.singletonList( "SELECT \"test\\\";SELECT 1" ), sqlScriptParser.split( "SELECT \"test\\\";SELECT 1" ) ); assertEquals( Collections.singletonList( "SELECT 'test\\';SELECT 1" ), sqlScriptParser.split( "SELECT 'test\\';SELECT 1" ) ); assertEquals( Arrays.asList( "create table pdi13654 (col1 string) TBLPROPERTIES (\"quoteChar\"=\"\\\"\", \"escapeChar\"=\"\\\\\")", "SELECT 1" ), sqlScriptParser.split( "create table pdi13654 (col1 string) TBLPROPERTIES (\"quoteChar\"=\"\\\"\", \"escapeChar\"=\"\\\\\");SELECT 1" ) ); //PDI-16224 assertEquals( Collections.singletonList( "SELECT 1 from test where t='\\'||t=a" ), oracleSqlScriptParser.split( "SELECT 1 from test where t='\\'||t=a;" ) ); }
/** Builds the access-denied (403) response for a Kafka authorization failure, formatting the message via the shared error-message helper. */
public EndpointResponse accessDeniedFromKafkaResponse(final Exception e) {
    return constructAccessDeniedFromKafkaResponse(errorMessages.kafkaAuthorizationErrorMessage(e));
}
/** A Kafka authorization failure must map to HTTP 403 with a KsqlErrorMessage body. */
@Test
public void shouldReturnForbiddenKafkaResponse() {
    final EndpointResponse response = errorHandler.accessDeniedFromKafkaResponse(exception);
    assertThat(response.getStatus(), is(403));
    assertThat(response.getEntity(), is(instanceOf(KsqlErrorMessage.class)));
    assertThat(((KsqlErrorMessage) response.getEntity()).getMessage(), is(SOME_KAFKA_ERROR));
}
/**
 * Creates a dict type after validating that both its name and its type string are
 * unique, then persists it.
 *
 * @return the generated id of the new dict type
 */
@Override
public Long createDictType(DictTypeSaveReqVO createReqVO) {
    // Validate that the dict type's name is unique
    validateDictTypeNameUnique(null, createReqVO.getName());
    // Validate that the dict type's type string is unique
    validateDictTypeUnique(null, createReqVO.getType());
    // Insert the dict type
    DictTypeDO dictType = BeanUtils.toBean(createReqVO, DictTypeDO.class);
    dictType.setDeletedTime(LocalDateTimeUtils.EMPTY); // unique index: avoid a null value
    dictTypeMapper.insert(dictType);
    return dictType.getId();
}
/** Happy path: creating a dict type returns an id and persists matching fields. */
@Test
public void testCreateDictType_success() {
    // Prepare parameters
    DictTypeSaveReqVO reqVO = randomPojo(DictTypeSaveReqVO.class, o -> o.setStatus(randomEle(CommonStatusEnum.values()).getStatus())) .setId(null); // avoid id being pre-assigned
    // Invoke
    Long dictTypeId = dictTypeService.createDictType(reqVO);
    // Assert
    assertNotNull(dictTypeId);
    // Verify the persisted record's fields are correct
    DictTypeDO dictType = dictTypeMapper.selectById(dictTypeId);
    assertPojoEquals(reqVO, dictType, "id");
}
static JarFileWithEntryClass findOnlyEntryClass(Iterable<File> jarFiles) throws IOException { List<JarFileWithEntryClass> jarsWithEntryClasses = new ArrayList<>(); for (File jarFile : jarFiles) { findEntryClass(jarFile) .ifPresent( entryClass -> jarsWithEntryClasses.add( new JarFileWithEntryClass(jarFile, entryClass))); } int size = jarsWithEntryClasses.size(); if (size == 0) { throw new NoSuchElementException("No JAR with manifest attribute for entry class"); } if (size == 1) { return jarsWithEntryClasses.get(0); } // else: size > 1 throw new IllegalArgumentException( "Multiple JARs with manifest attribute for entry class: " + jarsWithEntryClasses); }
/** A single JAR declaring an entry class must be returned with that entry class. */
@Test
void testFindOnlyEntryClassSingleJar() throws IOException {
    File jarFile = TestJob.getTestJobJar();
    JarManifestParser.JarFileWithEntryClass jarFileWithEntryClass = JarManifestParser.findOnlyEntryClass(ImmutableList.of(jarFile));
    assertThat(jarFileWithEntryClass.getEntryClass()) .isEqualTo(TestJob.class.getCanonicalName());
}
/** Sets the protocol buffer size and returns this builder for chaining. */
public ProtocolBuilder buffer(Integer buffer) {
    this.buffer = buffer;
    return getThis();
}
/** The buffer size set on the builder must carry through to the built config. */
@Test
void buffer() {
    ProtocolBuilder builder = new ProtocolBuilder();
    builder.buffer(1024);
    Assertions.assertEquals(1024, builder.build().getBuffer());
}
@Override public SelLong assignOps(SelOp op, SelType rhs) { SelTypeUtil.checkTypeMatch(this.type(), rhs.type()); long another = ((SelLong) rhs).val; switch (op) { case ASSIGN: this.val = another; // direct assignment return this; case ADD_ASSIGN: this.val += another; return this; case SUB_ASSIGN: this.val -= another; return this; case MUL_ASSIGN: this.val *= another; return this; case DIV_ASSIGN: this.val /= another; return this; case MOD_ASSIGN: this.val %= another; return this; default: throw new UnsupportedOperationException( "int/Integer/long/Long DO NOT support assignment operation " + op); } }
/** Verifies every assignment operator mutates in place and returns the same instance. */
@Test public void testAssignOps() { SelType obj = SelLong.of(2); SelType res = orig.assignOps(SelOp.ASSIGN, obj); assertEquals("LONG: 2", res.type() + ": " + res); res = orig.assignOps(SelOp.ASSIGN, SelLong.of(3)); assertEquals("LONG: 2", obj.type() + ": " + obj); assertEquals("LONG: 3", res.type() + ": " + res); res = orig.assignOps(SelOp.ADD_ASSIGN, obj); assertEquals("LONG: 5", res.type() + ": " + res); res = orig.assignOps(SelOp.SUB_ASSIGN, obj); assertEquals("LONG: 3", res.type() + ": " + res); res = orig.assignOps(SelOp.MUL_ASSIGN, obj); assertEquals("LONG: 6", res.type() + ": " + res); res = orig.assignOps(SelOp.DIV_ASSIGN, obj); assertEquals("LONG: 3", res.type() + ": " + res); res = orig.assignOps(SelOp.MOD_ASSIGN, obj); assertEquals("LONG: 1", res.type() + ": " + res); }
/** Debug representation including the application, collection period and counters. */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder(getClass().getSimpleName());
    sb.append("[application=").append(getApplication());
    sb.append(", periodMillis=").append(periodMillis);
    sb.append(", counters=").append(getCounters());
    sb.append(']');
    return sb.toString();
}
/** Smoke-tests toString() of every monitoring information class for non-emptiness. */
@Test public void testToString() throws SchedulerException { final Collector collector = createCollectorWithOneCounter(); assertToStringNotEmpty("collector", collector); assertToStringNotEmpty("java", new JavaInformations(null, false)); assertToStringNotEmpty("thread", new ThreadInformations(Thread.currentThread(), List.of(Thread.currentThread().getStackTrace()), 100, 1000, false, Parameters.getHostAddress())); assertToStringNotEmpty("session", new SessionInformations(new SessionTestImpl(true), true)); assertToStringNotEmpty("memory", new MemoryInformations()); CacheManager.getInstance().addCache("testToString"); try { assertToStringNotEmpty("cache", new CacheInformations( CacheManager.getInstance().getEhcache("testToString"), false)); } finally { CacheManager.getInstance().shutdown(); } final MutableConfiguration<Object, Object> conf = new MutableConfiguration<>(); conf.setManagementEnabled(true); conf.setStatisticsEnabled(true); Caching.getCachingProvider().getCacheManager().createCache("cache", conf); try { assertToStringNotEmpty("cache", new JCacheInformations("cache")); } finally { Caching.getCachingProvider().getCacheManager().close(); } final Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler(); final JobDetail job = JobBuilder.newJob(JobTestImpl.class).withIdentity("job").build(); assertToStringNotEmpty("job", new JobInformations(job, null, scheduler)); assertToStringNotEmpty("connectionInfos", new ConnectionInformations()); }
/**
 * Returns, without removing, the next element of this MPSC queue, or null when empty.
 * If the producer index indicates the queue is non-empty but the element is not yet
 * visible (an offer is in progress), spins until it appears. A JUMP marker means the
 * queue was resized, so the peek is retried against the next buffer.
 */
@Override @SuppressWarnings({"CastCanBeRemovedNarrowingVariableType", "unchecked"}) public E peek() { final E[] buffer = consumerBuffer; final long index = consumerIndex; final long mask = consumerMask; final long offset = modifiedCalcElementOffset(index, mask); Object e = lvElement(buffer, offset);// LoadLoad if (e == null && index != lvProducerIndex(this)) { // peek() == null iff queue is empty, null element is not strong enough indicator, so we must // check the producer index. If the queue is indeed not empty we spin until element is // visible. while ((e = lvElement(buffer, offset)) == null) { // retry } } if (e == JUMP) { return newBufferPeek(getNextBuffer(buffer, mask), index); } return (E) e; }
/** peek() on a populated queue returns an element without changing the size. */
@Test(dataProvider = "populated")
public void peek_whenPopulated(MpscGrowableArrayQueue<Integer> queue) {
    assertThat(queue.peek()).isNotNull();
    assertThat(queue).hasSize(POPULATED_SIZE);
}
/**
 * Returns a composite prefix-scan iterator over all underlying stores for this
 * store name. Each underlying store's iterator is opened lazily; if a store has
 * been closed/migrated mid-query, the InvalidStateStoreException is rewrapped with
 * a caller-friendly message.
 *
 * Fix: the rewrapped exception previously dropped the original exception, losing
 * the root-cause stack trace; it is now passed along as the cause.
 */
@Override
public <PS extends Serializer<P>, P> KeyValueIterator<K, V> prefixScan(final P prefix, final PS prefixKeySerializer) {
    Objects.requireNonNull(prefix);
    Objects.requireNonNull(prefixKeySerializer);
    final NextIteratorFunction<K, V, ReadOnlyKeyValueStore<K, V>> nextIteratorFunction = new NextIteratorFunction<K, V, ReadOnlyKeyValueStore<K, V>>() {
        @Override
        public KeyValueIterator<K, V> apply(final ReadOnlyKeyValueStore<K, V> store) {
            try {
                return store.prefixScan(prefix, prefixKeySerializer);
            } catch (final InvalidStateStoreException e) {
                // Preserve the original exception as the cause instead of discarding it.
                throw new InvalidStateStoreException("State store is not available anymore and may have been migrated to another instance; please re-discover its location from the state metadata.", e);
            }
        }
    };
    final List<ReadOnlyKeyValueStore<K, V>> stores = storeProvider.stores(storeName, storeType);
    return new DelegatingPeekingKeyValueIterator<>(
        storeName,
        new CompositeKeyValueIterator<>(stores.iterator(), nextIteratorFunction));
}
/** Calling next() past the single prefix match must throw NoSuchElementException. */
@Test
public void shouldThrowNoSuchElementExceptionWhileNextForPrefixScan() {
    stubOneUnderlying.put("a", "1");
    try (final KeyValueIterator<String, String> keyValueIterator = theStore.prefixScan("a", new StringSerializer())) {
        keyValueIterator.next();
        assertThrows(NoSuchElementException.class, keyValueIterator::next);
    }
}
/** Builds an IP address whose bytes form a network mask with the given prefix length. */
public static IpAddress makeMaskPrefix(Version version, int prefixLength) {
    byte[] mask = makeMaskPrefixArray(version, prefixLength);
    return new IpAddress(version, mask);
}
/** A prefix length of 33 is longer than an IPv4 address and must be rejected. */
@Test(expected = IllegalArgumentException.class)
public void testInvalidMakeTooLongMaskPrefixIPv4() {
    IpAddress ipAddress;
    ipAddress = IpAddress.makeMaskPrefix(IpAddress.Version.INET, 33);
}
/** Records the original file for a moved component in the per-analysis cache. */
@Override
public void setOriginalFile(Component file, OriginalFile originalFile) {
    storeOriginalFileInCache(originalFiles, file, originalFile);
}
/** Passing a null original file must fail fast with an explanatory NPE. */
@Test
public void setOriginalFile_throws_NPE_when_originalFile_is_null() {
    assertThatThrownBy(() -> underTest.setOriginalFile(SOME_FILE, null))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("originalFile can't be null");
}
/**
 * Registers an extension-point plugin under each of its ids, wrapped in a lazy
 * loader. The write lock guards the shared map against concurrent readers.
 */
public void addExtensionPoint( PluginInterface extensionPointPlugin ) {
    lock.writeLock().lock();
    try {
      for ( String id : extensionPointPlugin.getIds() ) {
        extensionPointPluginMap.put( extensionPointPlugin.getName(), id, createLazyLoader( extensionPointPlugin ) );
      }
    } finally {
      lock.writeLock().unlock();
    }
}
/** Registering a plugin exposes it by name/id, and repeated lookups reuse the cached lazy-loaded instance (loadClass called once). */
@Test
public void addExtensionPointTest() throws KettlePluginException {
    ExtensionPointMap.getInstance().addExtensionPoint( pluginInterface );
    assertEquals( ExtensionPointMap.getInstance().getTableValue( TEST_NAME, "testID" ), extensionPoint );
    // Verify cached instance
    assertEquals( ExtensionPointMap.getInstance().getTableValue( TEST_NAME, "testID" ), extensionPoint );
    verify( pluginInterface, times( 1 ) ).loadClass( any( Class.class ) );
}
/** Static factory for a synchronized-block UTree node (AutoValue-generated impl). */
public static USynchronized create(UExpression expression, UBlock block) {
    return new AutoValue_USynchronized(expression, block);
}
/** Nodes with equal expression/block are equal; different expressions are not. */
@Test
public void equality() {
    new EqualsTester()
        .addEqualityGroup(USynchronized.create(UFreeIdent.create("foo"), UBlock.create()))
        .addEqualityGroup(USynchronized.create(UFreeIdent.create("bar"), UBlock.create()))
        .testEquals();
}
/**
 * Compares dotted version strings numerically, part by part ("4.12" &lt; "4.22").
 * Missing parts count as 0.
 *
 * Fix: a part with a non-numeric suffix (e.g. "9-LTS" in "4.12.9-LTS") previously
 * crashed with NumberFormatException whenever the comparison actually reached that
 * part; such parts now compare by their leading digit run, so "4.12.9-LTS"
 * compares like "4.12.9". All previously-working inputs compare identically.
 */
@Override
public int compareTo(final Version that) {
    final String[] thisParts = version.split("\\.");
    final String[] thatParts = that.version.split("\\.");
    final int length = Math.max(thisParts.length, thatParts.length);
    for (int i = 0; i < length; i++) {
        final int thisPart = i < thisParts.length ? parseNumericPrefix(thisParts[i]) : 0;
        final int thatPart = i < thatParts.length ? parseNumericPrefix(thatParts[i]) : 0;
        final int result = Integer.compare(thisPart, thatPart);
        if (result != 0) {
            return result;
        }
    }
    return 0;
}

/** Parses the leading run of digits of a version part; returns 0 when there is none. */
private static int parseNumericPrefix(final String part) {
    int end = 0;
    while (end < part.length() && Character.isDigit(part.charAt(end))) {
        end++;
    }
    return end == 0 ? 0 : Integer.parseInt(part.substring(0, end));
}
/** Ordering of dotted versions, including shorter-vs-longer and "-LTS"-suffixed parts. */
@Test public void testCompare() { assertEquals(0, new Version("4").compareTo(new Version("4"))); assertEquals(0, new Version("4.1").compareTo(new Version("4.1"))); assertEquals(0, new Version("4.12").compareTo(new Version("4.12"))); assertEquals(-1, new Version("4.12").compareTo(new Version("4.22"))); assertEquals(-1, new Version("4.12.9").compareTo(new Version("4.22"))); assertEquals(-1, new Version("4.12.9-LTS").compareTo(new Version("4.22"))); assertEquals(1, new Version("4.22").compareTo(new Version("4.12"))); assertEquals(1, new Version("4.22").compareTo(new Version("4.12.9"))); assertEquals(1, new Version("4.22").compareTo(new Version("4.12.9-LTS"))); assertEquals(-1, new Version("4.20").compareTo(new Version("4.30"))); assertEquals(0, new Version("4.30").compareTo(new Version("4.30"))); }
/**
 * Looks up the broker serving the given topic, deduplicating concurrent lookups
 * for the same topic via {@code lookupInProgress}. Records success/failure latency
 * in the broker-lookup histogram, and deregisters the in-progress future once it
 * completes (only the call that created the future deregisters it).
 *
 * Fix: the {@code MutableObject} holding the newly created future used a raw
 * {@code CompletableFuture} type; it is now fully parameterized.
 */
public CompletableFuture<LookupTopicResult> getBroker(TopicName topicName) {
    final long startTime = System.nanoTime();
    // Holds the future created by THIS call; stays null when we join an
    // already-in-progress lookup for the same topic.
    final MutableObject<CompletableFuture<LookupTopicResult>> newFutureCreated = new MutableObject<>();
    try {
        return lookupInProgress.computeIfAbsent(topicName, tpName -> {
            CompletableFuture<LookupTopicResult> newFuture =
                    findBroker(serviceNameResolver.resolveHost(), false, topicName, 0);
            newFutureCreated.setValue(newFuture);
            newFuture.thenRun(() -> {
                histoGetBroker.recordSuccess(System.nanoTime() - startTime);
            }).exceptionally(x -> {
                histoGetBroker.recordFailure(System.nanoTime() - startTime);
                return null;
            });
            return newFuture;
        });
    } finally {
        if (newFutureCreated.getValue() != null) {
            // Remove only our own future from the dedup map once it settles.
            newFutureCreated.getValue().whenComplete((v, ex) -> {
                lookupInProgress.remove(topicName, newFutureCreated.getValue());
            });
        }
    }
}
/** With maxLookupRedirects forced to 1, a redirecting lookup must fail with LookupException("Too many redirects: 1"). */
@Test(invocationTimeOut = 3000) public void maxLookupRedirectsTest3() throws Exception { Field field = BinaryProtoLookupService.class.getDeclaredField("maxLookupRedirects"); field.setAccessible(true); field.set(lookup, 1); try { lookup.getBroker(topicName).get(); fail("should have thrown ExecutionException"); } catch (ExecutionException e) { Throwable cause = e.getCause(); assertTrue(cause instanceof LookupException); assertEquals(cause.getMessage(), "Too many redirects: 1"); } }
/** Returns a fluent assertion configured to ignore the order of repeated fields. */
public IterableOfProtosFluentAssertion<M> ignoringRepeatedFieldOrder() {
    return usingConfig(config.ignoringRepeatedFieldOrder());
}
/** Order-insensitive comparison must work across mixed message types in one iterable. */
@Test public void testCompareMultipleMessageTypes() { // Don't run this test twice. if (!testIsRunOnce()) { return; } expectThat( listOf( TestMessage2.newBuilder().addRString("foo").addRString("bar").build(), TestMessage3.newBuilder().addRString("baz").addRString("qux").build())) .ignoringRepeatedFieldOrder() .containsExactly( TestMessage3.newBuilder().addRString("qux").addRString("baz").build(), TestMessage2.newBuilder().addRString("bar").addRString("foo").build()); }
/** Reserves the next slot index in the InternalThreadLocalMap for this thread-local. */
public InternalThreadLocal() {
    index = InternalThreadLocalMap.nextVariableIndex();
}
/** Each thread must get its own initialValue() call — one per thread, THREADS total. */
@Test void testInternalThreadLocal() { final AtomicInteger index = new AtomicInteger(0); final InternalThreadLocal<Integer> internalThreadLocal = new InternalThreadLocal<Integer>() { @Override protected Integer initialValue() { Integer v = index.getAndIncrement(); System.out.println("thread : " + Thread.currentThread().getName() + " init value : " + v); return v; } }; for (int i = 0; i < THREADS; i++) { Thread t = new Thread(internalThreadLocal::get); t.start(); } await().until(index::get, is(THREADS)); }
/** Number of registered endpoints. */
public int size() {
    return endpoints.size();
}
/** One-entry and empty endpoint sets must report sizes 1 and 0 respectively. */
@Test
void testSize() {
    Map<ListenerName, InetSocketAddress> endpointMap = Utils.mkMap( Utils.mkEntry(testListener, testSocketAddress));
    assertEquals(1, Endpoints.fromInetSocketAddresses(endpointMap).size());
    assertEquals(0, Endpoints.empty().size());
}
/** Identifies this provider's table type as "kafka". */
@Override
public String getTableType() {
    return "kafka";
}
/** The provider must report the "kafka" table type. */
@Test
public void testGetTableType() {
    assertEquals("kafka", provider.getTableType());
}
/**
 * Returns true if the given type is composite (ROW or STRUCTURED_TYPE), first
 * unwrapping any chain of DistinctType wrappers down to the source type.
 */
public static boolean isCompositeType(LogicalType logicalType) {
    LogicalType unwrapped = logicalType;
    while (unwrapped instanceof DistinctType) {
        unwrapped = ((DistinctType) unwrapped).getSourceType();
    }
    final LogicalTypeRoot root = unwrapped.getTypeRoot();
    return root == ROW || root == STRUCTURED_TYPE;
}
/** A DistinctType wrapping a ROW must still be recognized as composite. */
@Test
void testIsCompositeTypeDistinctType() {
    DataType dataType = ROW(FIELD("f0", INT()), FIELD("f1", STRING()));
    DistinctType distinctType = DistinctType.newBuilder( ObjectIdentifier.of("catalog", "database", "type"), dataType.getLogicalType()) .build();
    assertThat(LogicalTypeChecks.isCompositeType(distinctType)).isTrue();
}
/**
 * Returns the live backing map, not a copy: mutations made by the caller are
 * visible to this instance (callers rely on this to append entries in order).
 */
public Map<String, String> asMap() {
    return pairs;
}
/** Entries added through the returned map must appear, preserving insertion order. */
@Test
public void asMap() {
    OrderedProperties pairs = createTestKeyValues();
    pairs.asMap().put("fifth", "5");
    assertKeyOrder(pairs, "first", "second", "third", "FOURTH", "fifth");
}
/**
 * Recursively collects the canonical paths of all files under {@code directory}
 * whose path matches {@code pattern}. Returns an empty set for a null directory
 * or one whose children cannot be listed.
 *
 * Fix: {@code directory.listFiles()} was invoked twice (once for the null check,
 * once — wrapped in requireNonNull — for the stream), doing redundant I/O and
 * racing against concurrent deletion between the two calls; the listing is now
 * captured once.
 *
 * @throws RuntimeException wrapping the IOException if a canonical path cannot be resolved
 */
public static Collection<String> getResourcesFromDirectory(File directory, Pattern pattern) {
    // listFiles() returns null for a null/missing/unreadable directory or a plain file.
    final File[] children = directory == null ? null : directory.listFiles();
    if (children == null) {
        return Collections.emptySet();
    }
    return Arrays.stream(children)
            .flatMap(child -> {
                if (child.isDirectory()) {
                    return getResourcesFromDirectory(child, pattern).stream();
                }
                try {
                    final String fileName = child.getCanonicalPath();
                    return pattern.matcher(fileName).matches() ? Stream.of(fileName) : Stream.<String>empty();
                } catch (final IOException e) {
                    throw new RuntimeException("Impossible to access to resources", e);
                }
            })
            .collect(Collectors.toSet());
}
/** Matching files in test-classes are found; null directory and non-matching patterns yield empty results. */
@Test
public void getResourcesFromDirectoryTest() {
    List<String> classPathElements = Arrays.asList(ResourceHelper.getClassPathElements());
    Optional<String> testFolder = classPathElements.stream().filter(elem -> elem.contains("test-classes")).findFirst();
    assertThat(testFolder).isPresent();
    File dir = new File(testFolder.get());
    String regex = ".*" + TEST_FILE;
    Collection<String> filesFound = getResourcesFromDirectory(dir, Pattern.compile(regex));
    assertThat(filesFound).hasSize(2);
    assertThat(getResourcesFromDirectory(null, null)).isEmpty();
    assertThat(getResourcesFromDirectory(dir, Pattern.compile("noMatch"))).isEmpty();
}
/** Deduplicate-values transform using the default time domain and default duration. */
public static <T> Deduplicate.Values<T> values() {
    return new Deduplicate.Values<>(DEFAULT_TIME_DOMAIN, DEFAULT_DURATION);
}
/**
 * Duplicates within the default processing-time window are dropped; an element
 * re-sent exactly at the window boundary may or may not be deduplicated.
 */
@Test @Category({NeedsRunner.class, UsesTestStreamWithProcessingTime.class}) public void testProcessingTime() { Instant base = new Instant(0); TestStream<String> values = TestStream.create(StringUtf8Coder.of()) .advanceWatermarkTo(base) .addElements( TimestampedValue.of("k1", base), TimestampedValue.of("k2", base.plus(Duration.standardSeconds(10))), TimestampedValue.of("k3", base.plus(Duration.standardSeconds(20))), TimestampedValue.of("maybedup", base.plus(Duration.standardSeconds(59)))) .advanceProcessingTime(Duration.standardMinutes(1)) .addElements( TimestampedValue.of("k1", base.plus(Duration.standardSeconds(30))), TimestampedValue.of("k2", base.plus(Duration.standardSeconds(40))), TimestampedValue.of("k3", base.plus(Duration.standardSeconds(50)))) .advanceProcessingTime(Deduplicate.DEFAULT_DURATION) .addElements(TimestampedValue.of("maybedup", base.plus(Duration.standardSeconds(59)))) .advanceWatermarkToInfinity(); PCollection<String> distinctValues = p.apply(values).apply(Deduplicate.values()); PAssert.that(distinctValues) .satisfies( (Iterable<String> input) -> { assertEquals(1, Iterables.frequency(input, "k1")); assertEquals(1, Iterables.frequency(input, "k2")); assertEquals(1, Iterables.frequency(input, "k3")); assertTrue( Iterables.frequency(input, "maybedup") == 1 || Iterables.frequency(input, "maybedup") == 2); return null; }); p.run(); }
/**
 * Returns the member epoch to send in the heartbeat when leaving the group
 * (the protocol's sentinel leave-group epoch).
 */
@Override
public int leaveGroupEpoch() {
    return ShareGroupHeartbeatRequest.LEAVE_GROUP_MEMBER_EPOCH;
}
@Test
public void testLeaveGroupEpoch() {
    // Member should leave the group with the sentinel leave epoch (-1).
    ShareMembershipManager membershipManager = createMemberInStableState();
    mockLeaveGroup();
    membershipManager.leaveGroup();
    assertEquals(MemberState.LEAVING, membershipManager.state());
    assertEquals(ShareGroupHeartbeatRequest.LEAVE_GROUP_MEMBER_EPOCH, membershipManager.memberEpoch());
}
/**
 * Initializes this processor node with the given context.
 * <p>
 * Must only be called while the node is closed; initializes whichever of the
 * two processor variants (keyed or fixed-key) is present, then marks the node
 * open. Any failure during user-processor init is wrapped in a StreamsException.
 *
 * @param context the processor context supplied by the task
 * @throws IllegalStateException if the node is not currently closed
 * @throws StreamsException if the underlying processor's init fails
 */
public void init(final InternalProcessorContext<KOut, VOut> context) {
    if (!closed)
        throw new IllegalStateException("The processor is not closed");

    try {
        threadId = Thread.currentThread().getName();
        internalProcessorContext = context;
        droppedRecordsSensor = TaskMetrics.droppedRecordsSensor(threadId,
            internalProcessorContext.taskId().toString(),
            internalProcessorContext.metrics());
        // Exactly one of these is expected to be non-null for a given node.
        if (processor != null) {
            processor.init(context);
        }
        if (fixedKeyProcessor != null) {
            // Safe per the node's construction: a fixed-key processor is always
            // paired with a context of this parameterization.
            @SuppressWarnings("unchecked") final FixedKeyProcessorContext<KIn, VOut> fixedKeyProcessorContext =
                (FixedKeyProcessorContext<KIn, VOut>) context;
            fixedKeyProcessor.init(fixedKeyProcessorContext);
        }
    } catch (final Exception e) {
        throw new StreamsException(String.format("failed to initialize processor %s", name), e);
    }

    // revived tasks could re-initialize the topology,
    // in which case we should reset the flag
    closed = false;
}
// A processor whose init throws must surface as a StreamsException from node.init.
// NOTE(review): test name says "DuringClose" but it exercises init — presumably a
// historical name; confirm intent.
@Test
public void shouldThrowStreamsExceptionIfExceptionCaughtDuringClose() {
    final ProcessorNode<Object, Object, Object, Object> node =
        new ProcessorNode<>(NAME, new ExceptionalProcessor(), Collections.emptySet());
    assertThrows(StreamsException.class, () -> node.init(null));
}
/**
 * Serializes the given {@link GeneratedResources} to its JSON representation.
 *
 * @param generatedResources resources to serialize
 * @return JSON string produced by the shared {@code objectMapper}
 * @throws JsonProcessingException if serialization fails
 */
public static String getGeneratedResourcesString(GeneratedResources generatedResources) throws JsonProcessingException {
    return objectMapper.writeValueAsString(generatedResources);
}
// Serializes a GeneratedResources holding one class resource and one executable
// resource, and checks both JSON fragments appear in the output (order-agnostic).
@Test
void getGeneratedResourcesString() throws JsonProcessingException {
    String fullClassName = "full.class.Name";
    GeneratedResource generatedIntermediateResource = new GeneratedClassResource(fullClassName);
    String model = "foo";
    LocalUri modelLocalUriId = new ReflectiveAppRoot(model)
            .get(ComponentRootB.class)
            .get("this", "is", "modelLocalUriId")
            .asLocalUri();
    ModelLocalUriId localUriId = new ModelLocalUriId(modelLocalUriId);
    GeneratedResource generatedFinalResource = new GeneratedExecutableResource(localUriId, Collections.singletonList(fullClassName));
    GeneratedResources generatedResources = new GeneratedResources();
    generatedResources.add(generatedIntermediateResource);
    generatedResources.add(generatedFinalResource);
    String retrieved = JSONUtils.getGeneratedResourcesString(generatedResources);
    String expected1 = String.format("{\"step-type\":\"class\",\"fullClassName\":\"%s\"}", fullClassName);
    String expected2 = String.format("{\"step-type\":\"executable\",\"modelLocalUriId\":%s,\"fullClassNames\":[\"%s\"]}",
            JSONUtils.getModelLocalUriIdString(localUriId), fullClassName);
    assertThat(retrieved).contains(expected1);
    assertThat(retrieved).contains(expected2);
}
/**
 * Imports service definitions from a repository URL, which may be remote
 * (http/https, downloaded to a temp file) or a local file path.
 *
 * @param repositoryUrl        http(s) URL or local file path of the artifact
 * @param repositorySecret     credentials for the download, may be null
 * @param disableSSLValidation whether to skip TLS certificate checks
 * @param mainArtifact         whether this artifact is the primary one
 * @return the imported services
 * @throws MockRepositoryImportException if the artifact cannot be downloaded or parsed
 */
public List<Service> importServiceDefinition(String repositoryUrl, Secret repositorySecret,
        boolean disableSSLValidation, boolean mainArtifact) throws MockRepositoryImportException {
    log.info("Importing service definitions from {}", repositoryUrl);

    File localFile = null;
    Map<String, List<String>> fileProperties = null;

    if (repositoryUrl.startsWith("http")) {
        try {
            // Remote artifact: download it and keep response headers for
            // later relative-reference resolution.
            HTTPDownloader.FileAndHeaders fileAndHeaders = HTTPDownloader
                    .handleHTTPDownloadToFileAndHeaders(repositoryUrl, repositorySecret, disableSSLValidation);
            localFile = fileAndHeaders.getLocalFile();
            fileProperties = fileAndHeaders.getResponseHeaders();
        } catch (IOException ioe) {
            throw new MockRepositoryImportException(repositoryUrl + " cannot be downloaded", ioe);
        }
    } else {
        // Simply build localFile from repository url.
        localFile = new File(repositoryUrl);
    }

    RelativeReferenceURLBuilder referenceURLBuilder = RelativeReferenceURLBuilderFactory
            .getRelativeReferenceURLBuilder(fileProperties);
    String artifactName = referenceURLBuilder.getFileName(repositoryUrl, fileProperties);

    // Initialize a reference resolver to the folder of this repositoryUrl.
    ReferenceResolver referenceResolver = new ReferenceResolver(repositoryUrl, repositorySecret,
            disableSSLValidation, referenceURLBuilder);
    return importServiceDefinition(localFile, referenceResolver, new ArtifactInfo(artifactName, mainArtifact));
}
@Test void testImportServiceDefinitionMainGrpcAndSecondaryPostman() { List<Service> services = null; try { File artifactFile = new File("target/test-classes/io/github/microcks/util/grpc/hello-v1.proto"); services = service.importServiceDefinition(artifactFile, null, new ArtifactInfo("hello-v1.proto", true)); } catch (MockRepositoryImportException mrie) { mrie.printStackTrace(); fail("No MockRepositoryImportException should have be thrown"); } assertNotNull(services); assertEquals(1, services.size()); // Inspect Service own attributes. Service importedSvc = services.get(0); assertEquals("io.github.microcks.grpc.hello.v1.HelloService", importedSvc.getName()); assertEquals("v1", importedSvc.getVersion()); assertEquals("hello-v1.proto", importedSvc.getSourceArtifact()); assertNotNull(importedSvc.getMetadata()); assertEquals(1, importedSvc.getOperations().size()); // As operation as only scalar type, it should be QUERY_ARGS dispatcher. assertEquals(DispatchStyles.QUERY_ARGS, importedSvc.getOperations().get(0).getDispatcher()); assertEquals("firstname && lastname", importedSvc.getOperations().get(0).getDispatcherRules()); // Inspect and check requests. List<Request> requests = requestRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(0, requests.size()); // Inspect and check responses. List<Response> responses = responseRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(0, responses.size()); try { File artifactFile = new File("target/test-classes/io/github/microcks/util/grpc/HelloService.postman.json"); services = service.importServiceDefinition(artifactFile, null, new ArtifactInfo("HelloService.postman.json", false)); } catch (MockRepositoryImportException mrie) { mrie.printStackTrace(); fail("No MockRepositoryImportException should have be thrown"); } // Inspect Service own attributes. 
importedSvc = services.get(0); assertEquals("io.github.microcks.grpc.hello.v1.HelloService", importedSvc.getName()); assertEquals("v1", importedSvc.getVersion()); assertEquals("hello-v1.proto", importedSvc.getSourceArtifact()); assertNotNull(importedSvc.getMetadata()); assertEquals(1, importedSvc.getOperations().size()); // Inspect and check requests. requests = requestRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(2, requests.size()); for (Request request : requests) { assertEquals("HelloService.postman.json", request.getSourceArtifact()); } // Inspect and check responses. responses = responseRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(2, requests.size()); for (Response response : responses) { assertEquals("HelloService.postman.json", response.getSourceArtifact()); if ("Laurent".equals(response.getName())) { assertEquals("?firstname=Laurent?lastname=Broudoux", response.getDispatchCriteria()); } else if ("Philippe".equals(response.getName())) { assertEquals("?firstname=Philippe?lastname=Huet", response.getDispatchCriteria()); } else { fail("Unexpected response name: " + response.getName()); } } }
/**
 * Creates or updates a link from the given description.
 * <p>
 * The update is applied locally only when this node masters the link's
 * destination device; otherwise the request is forwarded to the master
 * (workaround limited to the "cfg" and "null" provider schemes).
 *
 * @param providerId      provider supplying the description
 * @param linkDescription the link to create or update
 * @return the resulting event, or null if no change was made / not applicable
 */
@Override
public LinkEvent createOrUpdateLink(ProviderId providerId, LinkDescription linkDescription) {
    final DeviceId dstDeviceId = linkDescription.dst().deviceId();
    final NodeId dstNodeId = mastershipService.getMasterFor(dstDeviceId);

    // Process link update only if we're the master of the destination node,
    // otherwise signal the actual master.
    if (clusterService.getLocalNode().id().equals(dstNodeId)) {
        LinkKey linkKey = linkKey(linkDescription.src(), linkDescription.dst());
        Provided<LinkKey> internalLinkKey = getProvided(linkKey, providerId);
        if (internalLinkKey == null) {
            return null;
        }
        linkDescriptions.compute(internalLinkKey, (k, v) -> createOrUpdateLinkInternal(v, linkDescription));
        return refreshLinkCache(linkKey);
    } else {
        // Only forward for ConfigProvider or NullProvider
        // Forwarding was added as a workaround for ONOS-490
        if (!"cfg".equals(providerId.scheme()) && !"null".equals(providerId.scheme())) {
            return null;
        }
        // Temporary hack for NPE (ONOS-1171).
        // Proper fix is to implement forwarding to master on ConfigProvider
        if (dstNodeId == null) {
            return null;
        }
        // Synchronously forward to the destination's master and wait for its event.
        return Futures.getUnchecked(clusterCommunicator.sendAndReceive(new Provided<>(linkDescription, providerId),
                LINK_INJECT_MESSAGE,
                SERIALIZER::encode,
                SERIALIZER::decode,
                dstNodeId));
    }
}
// Verifies the create/update lifecycle: first description adds the link,
// a changed type updates it, and an identical re-submission yields no event.
@Test
public final void testCreateOrUpdateLink() {
    ConnectPoint src = new ConnectPoint(DID1, P1);
    ConnectPoint dst = new ConnectPoint(DID2, P2);

    final DefaultLinkDescription linkDescription = new DefaultLinkDescription(src, dst, INDIRECT);

    LinkEvent event = linkStore.createOrUpdateLink(PID, linkDescription);
    assertLink(DID1, P1, DID2, P2, INDIRECT, event.subject());
    assertEquals(LINK_ADDED, event.type());

    LinkEvent event2 = linkStore.createOrUpdateLink(PID, new DefaultLinkDescription(src, dst, DIRECT));
    assertLink(DID1, P1, DID2, P2, DIRECT, event2.subject());
    assertEquals(LINK_UPDATED, event2.type());

    // no change
    LinkEvent event3 = linkStore.createOrUpdateLink(PID, new DefaultLinkDescription(src, dst, DIRECT));
    assertNull("No change event expected", event3);
}
/**
 * Invokes the named method on the given object reflectively, making it
 * accessible first, and returns its result cast to the caller's expected type.
 *
 * @param obj        target instance
 * @param methodName name of the method to invoke
 * @param arguments  arguments to pass
 * @param <T>        expected return type (unchecked cast)
 * @throws RuntimeException if the method does not exist, or wrapping any
 *                          access/invocation failure
 */
@SuppressWarnings("unchecked")
public static <T> T invoke(Object obj, String methodName, Object... arguments) {
    try {
        Method method = ReflectUtil.getMethodByName(obj.getClass(), methodName);
        if (method == null) {
            // Fix: the original message concatenated without a space
            // ("<name>method not exists"), producing a garbled error.
            throw new RuntimeException(methodName + " method not exists");
        }
        ReflectUtil.setAccessible(method);
        return (T) method.invoke(obj, arguments);
    } catch (IllegalAccessException | InvocationTargetException e) {
        throw new RuntimeException(e);
    }
}
// Invokes a private getter reflectively and checks its returned value.
// NOTE(review): this exercises the invoke(Object, Method, ...) overload, not
// the name-based one — confirm which overload is under test.
@Test
public void invokeTest() {
    TestClass testClass = new TestClass();
    Method method = ReflectUtil.getMethodByName(TestClass.class, "getPrivateField");
    String invoke = ReflectUtil.invoke(testClass, method);
    Assert.assertEquals(invoke, "privateField");
}
/**
 * Two resource ids are equal when their scheme, bucket, and key all match.
 */
@Override
public boolean equals(@Nullable Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof S3ResourceId)) {
        return false;
    }
    S3ResourceId that = (S3ResourceId) obj;
    return scheme.equals(that.scheme)
        && bucket.equals(that.bucket)
        && key.equals(that.key);
}
// Exercises equals symmetry: identical components are equal; differing key
// (trailing slash), key path, bucket, or scheme each break equality.
@Test
public void testEquals() {
    S3ResourceId a = S3ResourceId.fromComponents("s3", "bucket", "a/b/c");
    S3ResourceId b = S3ResourceId.fromComponents("s3", "bucket", "a/b/c");
    assertEquals(a, b);
    assertEquals(b, a);

    b = S3ResourceId.fromComponents("s3", a.getBucket(), "a/b/c/");
    assertNotEquals(a, b);
    assertNotEquals(b, a);

    b = S3ResourceId.fromComponents("s3", a.getBucket(), "x/y/z");
    assertNotEquals(a, b);
    assertNotEquals(b, a);

    b = S3ResourceId.fromComponents("s3", "other-bucket", a.getKey());
    assertNotEquals(a, b);
    assertNotEquals(b, a);

    b = S3ResourceId.fromComponents("other", "bucket", "a/b/c");
    assertNotEquals(a, b);
    assertNotEquals(b, a);
}
/**
 * Builds the changelog topic name for a state store:
 * {@code <prefix>[-<namedTopology>]-<storeName><suffix>}.
 * The named-topology segment is only present when one is supplied.
 */
public static String storeChangelogTopic(final String prefix,
                                         final String storeName,
                                         final String namedTopology) {
    final StringBuilder topic = new StringBuilder(prefix);
    if (namedTopology != null) {
        topic.append('-').append(namedTopology);
    }
    return topic.append('-').append(storeName).append(STATE_CHANGELOG_TOPIC_SUFFIX).toString();
}
// With a named topology, the changelog topic name must embed the topology
// name between the application id and the store name.
@Test
public void shouldReturnDefaultChangelogTopicNameWithNamedTopology() {
    final String applicationId = "appId";
    final String namedTopology = "namedTopology";
    final String storeName = "store";

    assertThat(
        ProcessorStateManager.storeChangelogTopic(applicationId, storeName, namedTopology),
        is(applicationId + "-" + namedTopology + "-" + storeName + "-changelog")
    );
}
/**
 * Merges newly registered URIs into the selector's existing Tars upstream
 * handle and returns the resulting JSON handle.
 * <p>
 * A single DELETED event marks that upstream as down (status=false). New
 * upstreams and upstreams whose status changed are submitted; if the submit
 * is handled elsewhere (doSubmit returns true) no handle is returned.
 *
 * @param uriList    registered URI events for this selector
 * @param selectorDO the selector whose handle is being rebuilt
 * @return JSON of the merged upstream list, or null when doSubmit handled it
 */
@Override
protected String buildHandle(final List<URIRegisterDTO> uriList, final SelectorDO selectorDO) {
    List<TarsUpstream> addList = buildTarsUpstreamList(uriList);
    List<TarsUpstream> canAddList = new CopyOnWriteArrayList<>();

    // A lone DELETED event means this single upstream went offline.
    boolean isEventDeleted = uriList.size() == 1 && EventType.DELETED.equals(uriList.get(0).getEventType());
    if (isEventDeleted) {
        addList.get(0).setStatus(false);
    }

    List<TarsUpstream> existList = GsonUtils.getInstance().fromCurrentList(selectorDO.getHandle(), TarsUpstream.class);
    if (CollectionUtils.isEmpty(existList)) {
        canAddList = addList;
    } else {
        // Upstreams not yet known for this selector.
        List<TarsUpstream> diffList = addList.stream()
                .filter(upstream -> !existList.contains(upstream)).collect(Collectors.toList());
        if (CollectionUtils.isNotEmpty(diffList)) {
            canAddList.addAll(diffList);
            existList.addAll(diffList);
        }
        // Known upstreams whose status flipped (e.g. came back up / went down).
        List<TarsUpstream> diffStatusList = addList.stream()
                .filter(upstream -> !upstream.isStatus()
                        || existList.stream().anyMatch(e -> e.equals(upstream) && e.isStatus() != upstream.isStatus()))
                .collect(Collectors.toList());
        if (CollectionUtils.isNotEmpty(diffStatusList)) {
            canAddList.addAll(diffStatusList);
        }
    }

    if (doSubmit(selectorDO.getId(), canAddList)) {
        return null;
    }
    return GsonUtils.getInstance().toJson(CollectionUtils.isEmpty(existList) ? canAddList : existList);
}
// Covers three buildHandle scenarios against an existing two-upstream handle:
// (1) re-registering a known upstream keeps the handle unchanged,
// (2) a new upstream is merged in (2 -> 3 entries),
// (3) with no existing handle the registered upstream alone is returned.
@Test
public void testBuildHandle() {
    shenyuClientRegisterTarsService = spy(shenyuClientRegisterTarsService);

    final String returnStr = "[{upstreamUrl:'localhost:8090',weight:1,warmup:10,status:true,timestamp:1637826588267},"
            + "{upstreamUrl:'localhost:8091',weight:2,warmup:10,status:true,timestamp:1637826588267}]";
    final String expected = "[{\"weight\":1,\"warmup\":10,\"upstreamUrl\":\"localhost:8090\",\"status\":true,\"timestamp\":1637826588267},"
            + "{\"weight\":2,\"warmup\":10,\"upstreamUrl\":\"localhost:8091\",\"status\":true,\"timestamp\":1637826588267}]";

    // Scenario 1: known upstream, handle unchanged.
    List<URIRegisterDTO> list = new ArrayList<>();
    list.add(URIRegisterDTO.builder().appName("test1").rpcType(RpcTypeEnum.TARS.getName()).host("localhost").port(8090).build());
    SelectorDO selectorDO = mock(SelectorDO.class);
    when(selectorDO.getHandle()).thenReturn(returnStr);
    doReturn(false).when(shenyuClientRegisterTarsService).doSubmit(any(), any());
    String actual = shenyuClientRegisterTarsService.buildHandle(list, selectorDO);
    assertEquals(actual, expected);
    List<TarsUpstream> resultList = GsonUtils.getInstance().fromCurrentList(actual, TarsUpstream.class);
    assertEquals(resultList.size(), 2);

    // Scenario 2: new upstream port 8092 is appended to the existing two.
    list.clear();
    list.add(URIRegisterDTO.builder().appName("test1").rpcType(RpcTypeEnum.TARS.getName()).host("localhost").port(8092).build());
    selectorDO = mock(SelectorDO.class);
    when(selectorDO.getHandle()).thenReturn(returnStr);
    doReturn(false).when(shenyuClientRegisterTarsService).doSubmit(any(), any());
    actual = shenyuClientRegisterTarsService.buildHandle(list, selectorDO);
    resultList = GsonUtils.getInstance().fromCurrentList(actual, TarsUpstream.class);
    assertEquals(resultList.size(), 3);

    // Scenario 3: selector has no existing handle (mock returns null),
    // so only the newly registered upstream is present.
    list.clear();
    list.add(URIRegisterDTO.builder().appName("test1").rpcType(RpcTypeEnum.TARS.getName()).host("localhost").port(8090).build());
    doReturn(false).when(shenyuClientRegisterTarsService).doSubmit(any(), any());
    selectorDO = mock(SelectorDO.class);
    actual = shenyuClientRegisterTarsService.buildHandle(list, selectorDO);
    resultList = GsonUtils.getInstance().fromCurrentList(actual, TarsUpstream.class);
    assertEquals(resultList.size(), 1);
}
/**
 * Appends or bumps a trailing "-N" index on a name.
 * <p>
 * "a" -> "a-2"; "a-2" -> "a-3"; but "a-1" -> "a-1-2" (an increment that does
 * not exceed 2 is treated as no existing index). A trailing number too large
 * to parse as an int is likewise left in place and "-2" is appended.
 */
public static String addOrIncrementIndexInName(String name) {
    Matcher matcher = TRAILING_NUMBER_PATTERN.matcher(name);
    int nextIndex = 2;
    if (matcher.matches()) {
        try {
            int bumped = Integer.parseInt(matcher.group(2)) + 1;
            // Only strip the old suffix when the bumped index is a "real"
            // index, i.e. strictly greater than the starting value 2.
            if (bumped > 2) {
                nextIndex = bumped;
                name = matcher.group(1);
            }
        } catch (NumberFormatException ignored) {
            // Unparseable (e.g. overflowing) trailing number: keep the name
            // intact and just append "-2".
        }
    }
    return name + '-' + nextIndex;
}
// Pins the quirky indexing rules: plain names get "-2"; "-N" with N>=2 is
// incremented; N<=1, non-numeric tails, and overflowing numbers are kept and
// "-2" is appended instead.
@Test
public void test_addIndexToName() {
    assertEquals("a-2", addOrIncrementIndexInName("a"));
    assertEquals("a-3", addOrIncrementIndexInName("a-2"));
    assertEquals("a-26", addOrIncrementIndexInName("a-25"));
    assertEquals("a-25x-2", addOrIncrementIndexInName("a-25x"));
    // Number too large for int: NumberFormatException path.
    assertEquals("a-1351318168168168168168-2", addOrIncrementIndexInName("a-1351318168168168168168"));
    // Integer.MAX_VALUE + 1 overflows during parse+increment handling.
    assertEquals("a-" + Integer.MAX_VALUE + "-2", addOrIncrementIndexInName("a-" + Integer.MAX_VALUE));
    assertEquals("a-0-2", addOrIncrementIndexInName("a-0"));
    assertEquals("a-1-2", addOrIncrementIndexInName("a-1"));
    assertEquals("a-1-3", addOrIncrementIndexInName("a-1-2"));
    assertEquals("a--1-2", addOrIncrementIndexInName("a--1"));
}
/**
 * Imports service definitions from a repository URL, which may be remote
 * (http/https, downloaded to a temp file) or a local file path.
 *
 * @param repositoryUrl        http(s) URL or local file path of the artifact
 * @param repositorySecret     credentials for the download, may be null
 * @param disableSSLValidation whether to skip TLS certificate checks
 * @param mainArtifact         whether this artifact is the primary one
 * @return the imported services
 * @throws MockRepositoryImportException if the artifact cannot be downloaded or parsed
 */
public List<Service> importServiceDefinition(String repositoryUrl, Secret repositorySecret,
        boolean disableSSLValidation, boolean mainArtifact) throws MockRepositoryImportException {
    log.info("Importing service definitions from {}", repositoryUrl);

    File localFile = null;
    Map<String, List<String>> fileProperties = null;

    if (repositoryUrl.startsWith("http")) {
        try {
            // Remote artifact: download it and keep response headers for
            // later relative-reference resolution.
            HTTPDownloader.FileAndHeaders fileAndHeaders = HTTPDownloader
                    .handleHTTPDownloadToFileAndHeaders(repositoryUrl, repositorySecret, disableSSLValidation);
            localFile = fileAndHeaders.getLocalFile();
            fileProperties = fileAndHeaders.getResponseHeaders();
        } catch (IOException ioe) {
            throw new MockRepositoryImportException(repositoryUrl + " cannot be downloaded", ioe);
        }
    } else {
        // Simply build localFile from repository url.
        localFile = new File(repositoryUrl);
    }

    RelativeReferenceURLBuilder referenceURLBuilder = RelativeReferenceURLBuilderFactory
            .getRelativeReferenceURLBuilder(fileProperties);
    String artifactName = referenceURLBuilder.getFileName(repositoryUrl, fileProperties);

    // Initialize a reference resolver to the folder of this repositoryUrl.
    ReferenceResolver referenceResolver = new ReferenceResolver(repositoryUrl, repositorySecret,
            disableSSLValidation, referenceURLBuilder);
    return importServiceDefinition(localFile, referenceResolver, new ArtifactInfo(artifactName, mainArtifact));
}
@Test void testImportServiceDefinitionMainGraphQLAndSecondaryPostman() { List<Service> services = null; try { File artifactFile = new File("target/test-classes/io/github/microcks/util/graphql/films.graphql"); services = service.importServiceDefinition(artifactFile, null, new ArtifactInfo("films.graphql", true)); } catch (MockRepositoryImportException mrie) { mrie.printStackTrace(); fail("No MockRepositoryImportException should have be thrown"); } assertNotNull(services); assertEquals(1, services.size()); // Inspect Service own attributes. Service importedSvc = services.get(0); assertEquals("Movie Graph API", importedSvc.getName()); assertEquals("1.0", importedSvc.getVersion()); assertEquals("films.graphql", importedSvc.getSourceArtifact()); assertNotNull(importedSvc.getMetadata()); assertEquals(4, importedSvc.getOperations().size()); // Inspect and check requests. List<Request> requests = requestRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(0, requests.size()); // Inspect and check responses. List<Response> responses = responseRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(0, responses.size()); try { File artifactFile = new File("target/test-classes/io/github/microcks/util/graphql/films-postman.json"); services = service.importServiceDefinition(artifactFile, null, new ArtifactInfo("films-postman.json", false)); } catch (MockRepositoryImportException mrie) { mrie.printStackTrace(); fail("No MockRepositoryImportException should have be thrown"); } // Inspect Service own attributes. importedSvc = services.get(0); assertEquals("Movie Graph API", importedSvc.getName()); assertEquals("1.0", importedSvc.getVersion()); assertEquals("films.graphql", importedSvc.getSourceArtifact()); assertNotNull(importedSvc.getMetadata()); assertEquals(4, importedSvc.getOperations().size()); // Inspect and check requests. 
requests = requestRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(1, requests.size()); for (Request request : requests) { assertEquals("films-postman.json", request.getSourceArtifact()); } // Inspect and check responses. responses = responseRepository .findByOperationId(IdBuilder.buildOperationId(importedSvc, importedSvc.getOperations().get(0))); assertEquals(1, requests.size()); for (Response response : responses) { assertEquals("films-postman.json", response.getSourceArtifact()); } }
/**
 * Returns the maximum index length supported by this dialect.
 * Returns 0 — presumably meaning "no limit advertised"; confirm against the
 * interface contract.
 */
@Override
public int getMaxIndexLength() {
    return 0;
}
// The dialect metadata must report 0 as its maximum index length.
@Test
void assertGetMaxIndexLength() {
    assertThat(metaData.getMaxIndexLength(), is(0));
}
/**
 * Reads up to {@code n} deliverable characters, draining the pushback buffer
 * first and refilling it from the underlying stream as needed.
 *
 * @param n maximum number of characters to return
 * @return a builder holding at most n characters; shorter only at end of stream
 * @throws IOException if the underlying stream read fails
 */
private StringBuilder read(int n) throws IOException {
    // Input stream finished?
    boolean eof = false;
    // Read that many.
    final StringBuilder s = new StringBuilder(n);
    while (s.length() < n && !eof) {
        // Always get from the pushBack buffer.
        if (pushBack.length() == 0) {
            // Read something from the stream into pushBack.
            eof = readIntoPushBack();
        }
        // Pushback only contains deliverable codes.
        if (pushBack.length() > 0) {
            // Grab one character
            s.append(pushBack.charAt(0));
            // Remove it from pushBack
            pushBack.deleteCharAt(0);
        }
    }
    return s;
}
// read() on an empty stream yields -1 (EOF); on a one-character stream it
// yields that character's code point ('*' == 42).
@Test
public void testRead_0args() throws Exception {
    String data = "";
    InputStream stream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
    XmlInputStream instance = new XmlInputStream(stream);
    int expResult = -1;
    int result = instance.read();
    assertEquals(expResult, result);

    data = "*";
    stream = new ByteArrayInputStream(data.getBytes(StandardCharsets.UTF_8));
    instance = new XmlInputStream(stream);
    expResult = 42;
    result = instance.read();
    assertEquals(expResult, result);
}
/**
 * Scans the XML stream up to (and including) the root element and fills the
 * {@link XmlStreamInfo}: camel-k modelines from leading comments, root element
 * name/namespace, declared namespace mappings, and root-element attributes.
 * <p>
 * Stops as soon as the root element has been seen; any parsing failure or an
 * empty document is recorded in {@code information.problem} rather than thrown.
 *
 * @return the populated stream information (possibly carrying a problem)
 * @throws IOException declared for the underlying stream; parse errors are
 *                     captured in the result instead
 */
public XmlStreamInfo information() throws IOException {
    if (information.problem != null) {
        return information;
    }
    if (XMLStreamConstants.START_DOCUMENT != reader.getEventType()) {
        information.problem = new IllegalStateException("Expected START_DOCUMENT");
        return information;
    }
    // Only comments before the root element are scanned for modelines.
    boolean skipComments = false;
    try {
        while (reader.hasNext()) {
            int ev = reader.next();
            switch (ev) {
                case XMLStreamConstants.COMMENT:
                    if (!skipComments) {
                        // search for modelines
                        String comment = reader.getText();
                        if (comment != null) {
                            comment.lines().map(String::trim).forEach(l -> {
                                if (l.startsWith("camel-k:")) {
                                    information.modelines.add(l);
                                }
                            });
                        }
                    }
                    break;
                case XMLStreamConstants.START_ELEMENT:
                    if (information.rootElementName != null) {
                        // only root element is checked. No need to parse more
                        return information;
                    }
                    skipComments = true;
                    information.rootElementName = reader.getLocalName();
                    information.rootElementNamespace = reader.getNamespaceURI();
                    // Record every namespace declared on the root element;
                    // the default namespace is keyed by the empty string.
                    for (int ns = 0; ns < reader.getNamespaceCount(); ns++) {
                        String prefix = reader.getNamespacePrefix(ns);
                        information.namespaceMapping.put(prefix == null ? "" : prefix, reader.getNamespaceURI(ns));
                    }
                    for (int at = 0; at < reader.getAttributeCount(); at++) {
                        QName qn = reader.getAttributeName(at);
                        String prefix = qn.getPrefix() == null ? "" : qn.getPrefix().trim();
                        String nsURI = qn.getNamespaceURI() == null ? "" : qn.getNamespaceURI().trim();
                        String value = reader.getAttributeValue(at);
                        String localPart = qn.getLocalPart();
                        if (nsURI.isEmpty() || prefix.isEmpty()) {
                            // according to XML spec, this attribute is not namespaced, not in default namespace
                            // https://www.w3.org/TR/xml-names/#defaulting
                            // > The namespace name for an unprefixed attribute name always has no value.
                            information.attributes.put(localPart, value);
                        } else {
                            // Namespaced attribute: index it both by Clark
                            // notation ({uri}local) and by prefix:local.
                            information.attributes.put("{" + nsURI + "}" + localPart, value);
                            information.attributes.put(prefix + ":" + localPart, value);
                        }
                    }
                    break;
                case XMLStreamConstants.END_ELEMENT:
                case XMLStreamConstants.END_DOCUMENT:
                    if (information.rootElementName == null) {
                        information.problem = new IllegalArgumentException("XML Stream is empty");
                        return information;
                    }
                    break;
                default:
                    break;
            }
        }
    } catch (XMLStreamException e) {
        information.problem = e;
        return information;
    }
    return information;
}
// A missing resource yields a null input stream, so the detector's
// information must be reported as invalid rather than throwing.
@Test
public void nonExistingDocument() throws IOException {
    XmlStreamDetector detector = new XmlStreamDetector(getClass().getResourceAsStream("non-existing"));
    assertFalse(detector.information().isValid());
}
/**
 * Joins the first value with the remaining values using a comma separator.
 * When the varargs part is empty (or joins to an empty string), the first
 * value is returned unchanged.
 */
public static String toCommaDelimitedString(String one, String... others) {
    String rest = arrayToDelimitedString(others, COMMA_SEPARATOR);
    if (isEmpty(rest)) {
        return one;
    }
    return one + COMMA_SEPARATOR + rest;
}
// Pins pass-through behavior for null/empty first arguments and comma
// joining for one, two, and three values.
@Test
void testToCommaDelimitedString() {
    String value = toCommaDelimitedString(null);
    assertNull(value);

    value = toCommaDelimitedString(null, null);
    assertNull(value);

    value = toCommaDelimitedString("");
    assertEquals("", value);

    value = toCommaDelimitedString("one");
    assertEquals("one", value);

    value = toCommaDelimitedString("one", "two");
    assertEquals("one,two", value);

    value = toCommaDelimitedString("one", "two", "three");
    assertEquals("one,two,three", value);
}
/**
 * Parses a single SQL statement into a {@link ParsedCommand}, substituting
 * migration variables first.
 * <p>
 * Statements classified as plain STATEMENTs carry no AST; all others are
 * built into an AST via {@link AstBuilder}.
 *
 * @param sql       a single supported SQL statement
 * @param variables variable name -> value substitutions
 * @throws MigrationException if the statement cannot be parsed
 */
public static ParsedCommand parse(
    // CHECKSTYLE_RULES.ON: CyclomaticComplexity
    final String sql,
    final Map<String, String> variables) {
    validateSupportedStatementType(sql);
    final String substituted;
    try {
        substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables);
    } catch (ParseFailedException e) {
        throw new MigrationException(String.format(
            "Failed to parse the statement. Statement: %s. Reason: %s",
            sql, e.getMessage()));
    }
    // Re-parse after substitution to classify the (possibly changed) statement.
    final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted)
        .get(0).getStatement();
    final boolean isStatement = StatementType.get(statementContext.statement().getClass()) == StatementType.STATEMENT;
    return new ParsedCommand(substituted,
        isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY)
            .buildStatement(statementContext)));
}
// A CREATE STREAM command parses to a single ParsedCommand with no AST
// statement attached, and the command text is preserved verbatim.
@Test
public void shouldParseCreateStatement() {
    // When:
    List<CommandParser.ParsedCommand> commands = parse(
        "CREATE STREAM FOO (A STRING) WITH (KAFKA_TOPIC='FOO', PARTITIONS=1, VALUE_FORMAT='DELIMITED');");

    // Then:
    assertThat(commands.size(), is(1));
    assertThat(commands.get(0).getStatement().isPresent(), is (false));
    assertThat(commands.get(0).getCommand(),
        is("CREATE STREAM FOO (A STRING) WITH (KAFKA_TOPIC='FOO', PARTITIONS=1, VALUE_FORMAT='DELIMITED');"));
}
/**
 * Constructs the data container matching the cache configuration.
 * <p>
 * Dispatches on three axes: eviction strategy (unbounded/exception-based vs.
 * removal-based), storage (off-heap vs. on-heap), and segmentation (needed
 * when the cache mode requires state transfer). Bounded containers also get
 * a listener so runtime changes to max-size/max-count resize them.
 *
 * @param componentName name of the component being built (unused here)
 * @return a DataContainer implementation appropriate for the configuration
 */
@Override
public Object construct(String componentName) {
    ClusteringConfiguration clusteringConfiguration = configuration.clustering();
    boolean shouldSegment = clusteringConfiguration.cacheMode().needsStateTransfer();
    int level = configuration.locking().concurrencyLevel();
    MemoryConfiguration memoryConfiguration = configuration.memory();
    boolean offHeap = memoryConfiguration.isOffHeap();
    EvictionStrategy strategy = memoryConfiguration.whenFull();
    //handle case when < 0 value signifies unbounded container or when we are not removal based
    if (strategy.isExceptionBased() || !strategy.isEnabled()) {
        if (offHeap) {
            if (shouldSegment) {
                int segments = clusteringConfiguration.hash().numSegments();
                Supplier<PeekableTouchableMap<WrappedBytes, WrappedBytes>> mapSupplier =
                    this::createAndStartOffHeapConcurrentMap;
                // L1-enabled caches need the L1-aware segmented container.
                if (clusteringConfiguration.l1().enabled()) {
                    return new L1SegmentedDataContainer<>(mapSupplier, segments);
                }
                return new DefaultSegmentedDataContainer<>(mapSupplier, segments);
            } else {
                return new OffHeapDataContainer();
            }
        } else if (shouldSegment) {
            Supplier<PeekableTouchableMap<Object, Object>> mapSupplier = PeekableTouchableContainerMap::new;
            int segments = clusteringConfiguration.hash().numSegments();
            if (clusteringConfiguration.l1().enabled()) {
                return new L1SegmentedDataContainer<>(mapSupplier, segments);
            }
            return new DefaultSegmentedDataContainer<>(mapSupplier, segments);
        } else {
            return DefaultDataContainer.unBoundedDataContainer(level);
        }
    }

    // Removal-based eviction: container is bounded either by byte size or count.
    boolean sizeInBytes = memoryConfiguration.maxSize() != null;
    long thresholdSize = sizeInBytes ?
        memoryConfiguration.maxSizeBytes() : memoryConfiguration.maxCount();

    DataContainer<?, ?> dataContainer;
    if (offHeap) {
        if (shouldSegment) {
            int segments = clusteringConfiguration.hash().numSegments();
            dataContainer = new SegmentedBoundedOffHeapDataContainer(segments, thresholdSize,
                memoryConfiguration.evictionType());
        } else {
            dataContainer = new BoundedOffHeapDataContainer(thresholdSize, memoryConfiguration.evictionType());
        }
    } else if (shouldSegment) {
        int segments = clusteringConfiguration.hash().numSegments();
        dataContainer = new BoundedSegmentedDataContainer<>(segments, thresholdSize,
            memoryConfiguration.evictionType());
    } else {
        dataContainer = DefaultDataContainer.boundedDataContainer(level, thresholdSize,
            memoryConfiguration.evictionType());
    }
    // Resize the live container when the relevant memory attribute changes.
    if (sizeInBytes) {
        memoryConfiguration.attributes().attribute(MemoryConfiguration.MAX_SIZE)
            .addListener((newSize, old) -> dataContainer.resize(memoryConfiguration.maxSizeBytes()));
    } else {
        memoryConfiguration.attributes().attribute(MemoryConfiguration.MAX_COUNT)
            .addListener((newSize, old) -> dataContainer.resize(newSize.get()));
    }
    return dataContainer;
}
// REMOVE eviction with a size bound and no clustering/segmentation must
// produce the plain (bounded) DefaultDataContainer.
@Test
public void testEvictionRemoveNotSegmented() {
    dataContainerFactory.configuration = new ConfigurationBuilder().clustering()
        .memory().evictionStrategy(EvictionStrategy.REMOVE).size(1000).build();
    assertEquals(DefaultDataContainer.class, this.dataContainerFactory.construct(COMPONENT_NAME).getClass());
}
/**
 * Returns the registry stored under {@code name}, creating and registering a
 * new one if absent. If two threads race to create, the one that loses the
 * insertion race returns the winner's instance, so all callers observe the
 * same registry for a given name.
 */
public static HealthCheckRegistry getOrCreate(String name) {
    HealthCheckRegistry registry = REGISTRIES.get(name);
    if (registry != null) {
        return registry;
    }
    HealthCheckRegistry fresh = new HealthCheckRegistry();
    HealthCheckRegistry winner = add(name, fresh);
    // add() returns null when our instance was installed; otherwise it
    // returns the instance another thread installed first.
    return winner == null ? fresh : winner;
}
// Two lookups of the same name must return the identical registry instance.
@Test
public void savesCreatedRegistry() {
    final HealthCheckRegistry one = SharedHealthCheckRegistries.getOrCreate("db");
    final HealthCheckRegistry two = SharedHealthCheckRegistries.getOrCreate("db");
    assertThat(one).isSameAs(two);
}
/**
 * Validates the given plain-access config and builds the corresponding
 * {@link PlainAccessResource}, resolving a remote-address matching strategy
 * from the configured white remote address.
 *
 * @param plainAccessConfig ACL configuration entry to convert
 * @throws AclException if the config fails validation
 */
public PlainAccessResource buildPlainAccessResource(PlainAccessConfig plainAccessConfig) throws AclException {
    checkPlainAccessConfig(plainAccessConfig);
    return PlainAccessResource.build(plainAccessConfig, remoteAddressStrategyFactory.
        getRemoteAddressStrategy(plainAccessConfig.getWhiteRemoteAddress()));
}
// Incrementally builds up a PlainAccessConfig (keys, white address, admin
// flag, group perms, topic perms) and checks each addition is reflected in
// the built PlainAccessResource's fields and permission map.
@Test
public void buildPlainAccessResourceTest() {
    PlainAccessResource plainAccessResource = null;
    PlainAccessConfig plainAccess = new PlainAccessConfig();

    plainAccess.setAccessKey("RocketMQ");
    plainAccess.setSecretKey("12345678");
    plainAccessResource = plainPermissionManager.buildPlainAccessResource(plainAccess);
    Assert.assertEquals(plainAccessResource.getAccessKey(), "RocketMQ");
    Assert.assertEquals(plainAccessResource.getSecretKey(), "12345678");

    plainAccess.setWhiteRemoteAddress("127.0.0.1");
    plainAccessResource = plainPermissionManager.buildPlainAccessResource(plainAccess);
    Assert.assertEquals(plainAccessResource.getWhiteRemoteAddress(), "127.0.0.1");

    plainAccess.setAdmin(true);
    plainAccessResource = plainPermissionManager.buildPlainAccessResource(plainAccess);
    Assert.assertEquals(plainAccessResource.isAdmin(), true);

    // Group permissions are stored under their retry-topic form.
    List<String> groups = new ArrayList<>();
    groups.add("groupA=DENY");
    groups.add("groupB=PUB|SUB");
    groups.add("groupC=PUB");
    plainAccess.setGroupPerms(groups);
    plainAccessResource = plainPermissionManager.buildPlainAccessResource(plainAccess);
    Map<String, Byte> resourcePermMap = plainAccessResource.getResourcePermMap();
    Assert.assertEquals(resourcePermMap.size(), 3);
    Assert.assertEquals(resourcePermMap.get(PlainAccessResource.getRetryTopic("groupA")).byteValue(), Permission.DENY);
    Assert.assertEquals(resourcePermMap.get(PlainAccessResource.getRetryTopic("groupB")).byteValue(),
        Permission.PUB | Permission.SUB);
    Assert.assertEquals(resourcePermMap.get(PlainAccessResource.getRetryTopic("groupC")).byteValue(), Permission.PUB);

    // Topic permissions are stored under the plain topic name; map now holds
    // the 3 group entries plus these 3 topic entries.
    List<String> topics = new ArrayList<>();
    topics.add("topicA=DENY");
    topics.add("topicB=PUB|SUB");
    topics.add("topicC=PUB");
    plainAccess.setTopicPerms(topics);
    plainAccessResource = plainPermissionManager.buildPlainAccessResource(plainAccess);
    resourcePermMap = plainAccessResource.getResourcePermMap();
    Assert.assertEquals(resourcePermMap.size(), 6);
    Assert.assertEquals(resourcePermMap.get("topicA").byteValue(), Permission.DENY);
    Assert.assertEquals(resourcePermMap.get("topicB").byteValue(), Permission.PUB | Permission.SUB);
    Assert.assertEquals(resourcePermMap.get("topicC").byteValue(), Permission.PUB);
}
/**
 * Shuts down this service: closes the websocket connection, then stops the
 * background executor.
 * NOTE(review): if webSocketClient.close() throws, the executor is never
 * shut down — confirm that is acceptable to callers.
 */
@Override
public void close() {
    webSocketClient.close();
    executor.shutdown();
}
@Test public void testCloseWebSocketOnClose() throws Exception { service.close(); verify(webSocketClient).close(); verify(executorService).shutdown(); }
@Override public TenantPackageDO getTenantPackage(Long id) { return tenantPackageMapper.selectById(id); }
@Test public void testGetTenantPackage() { // mock 数据 TenantPackageDO dbTenantPackage = randomPojo(TenantPackageDO.class); tenantPackageMapper.insert(dbTenantPackage);// @Sql: 先插入出一条存在的数据 // 调用 TenantPackageDO result = tenantPackageService.getTenantPackage(dbTenantPackage.getId()); // 断言 assertPojoEquals(result, dbTenantPackage); }
void fetchPluginSettingsMetaData(GoPluginDescriptor pluginDescriptor) { String pluginId = pluginDescriptor.id(); List<ExtensionSettingsInfo> allMetadata = findSettingsAndViewOfAllExtensionsIn(pluginId); List<ExtensionSettingsInfo> validMetadata = allSettingsAndViewPairsWhichAreValid(allMetadata); if (validMetadata.isEmpty()) { LOGGER.warn("Failed to fetch plugin settings metadata for plugin {}. Maybe the plugin does not implement plugin settings and view?", pluginId); LOGGER.warn("Plugin: {} - Metadata load info: {}", pluginId, allMetadata); LOGGER.warn("Not all plugins are required to implement the request above. This error may be safe to ignore."); return; } if (validMetadata.size() > 1) { throw new RuntimeException(String.format("Plugin with ID: %s has more than one extension which supports plugin settings. " + "Only one extension should support it and respond to %s and %s.", pluginId, REQUEST_PLUGIN_SETTINGS_CONFIGURATION, REQUEST_PLUGIN_SETTINGS_VIEW)); } ExtensionSettingsInfo extensionSettingsInfo = validMetadata.get(0); metadataStore.addMetadataFor(pluginId, extensionSettingsInfo.extensionName, extensionSettingsInfo.configuration, extensionSettingsInfo.viewTemplate); }
@Test public void shouldNotFailWhenAPluginWithMultipleExtensionsHasMoreThanOneExtensionRespondingWithSettings_BUT_OneIsValidAndOtherThrowsException() { PluginSettingsConfiguration configuration = new PluginSettingsConfiguration(); configuration.add(new PluginSettingsProperty("k1").with(Property.REQUIRED, true).with(Property.SECURE, false)); String pluginId = "plugin-id"; GoPluginDescriptor pluginDescriptor = GoPluginDescriptor.builder().id(pluginId).build(); setupSettingsResponses(notificationExtension, pluginId, configuration, "view"); when(packageRepositoryExtension.canHandlePlugin(pluginId)).thenReturn(false); when(scmExtension.canHandlePlugin(pluginId)).thenReturn(true); when(scmExtension.getPluginSettingsConfiguration(pluginId)).thenThrow(new RuntimeException("Ouch!")); when(scmExtension.getPluginSettingsView(pluginId)).thenReturn("view"); metadataLoader.fetchPluginSettingsMetaData(pluginDescriptor); assertThat(PluginSettingsMetadataStore.getInstance().hasPlugin(pluginId)).isTrue(); verify(packageRepositoryExtension, never()).getPluginSettingsConfiguration(pluginId); verify(packageRepositoryExtension, never()).getPluginSettingsView(pluginId); }
@Override public List<URI> services(String protocol, String serviceId, String tag) { if(StringUtils.isBlank(serviceId)) { logger.debug("The serviceId cannot be blank"); return new ArrayList<>(); } // transform to a list of URIs return discovery(protocol, serviceId, tag).stream() .map(this::toUri) .collect(Collectors.toList()); }
@Test public void testServices() { List<URI> l = cluster.services("http", "com.networknt.apib-1.0.0", null); Assert.assertEquals(2, l.size()); }
@Override public AppResponse process(Flow flow, ActivateWithCodeRequest request) throws SharedServiceClientException { digidClient.remoteLog("1092", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId())); if (appAuthenticator.getCreatedAt().isBefore(ZonedDateTime.now().minusDays(Integer.parseInt(appAuthenticator.getGeldigheidstermijn())))) { digidClient.remoteLog("90", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId())); return new EnterActivationResponse("expired", Map.of(DAYS_VALID, Integer.valueOf(appAuthenticator.getGeldigheidstermijn()))); } if (correctActivationCode(request.getActivationCode()) && digidClient.activateAccount(appSession.getAccountId(), appAuthenticator.getIssuerType()).get(lowerUnderscore(STATUS)).equals("OK")) { ((ActivationFlow) flow).activateApp(appAuthenticator, appSession); attemptService.removeAttemptsForAppAuthenticator(appAuthenticator, "activation"); return new OkResponse(); } else if (attemptService.registerFailedAttempt(appAuthenticator, "activation")) { digidClient.remoteLog("87", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId())); if(appAuthenticator.getStatus().equals("pending")) appAuthenticatorService.destroyExistingAppsByInstanceId(appAuthenticator.getInstanceId()); appSession.setState("CANCELLED"); appSessionService.save(appSession); setValid(false); return new StatusResponse(BLOCKED); } else { digidClient.remoteLog("88", Map.of(lowerUnderscore(ACCOUNT_ID), appSession.getAccountId())); var letterSent = digidClient.letterSendDate((appSession.getRegistrationId())); return new EnterActivationResponse(INVALID, Map.of(REMAINING_ATTEMPTS, attemptService.remainingAttempts(appAuthenticator, "activation"), DATE_LETTER_SENT, letterSent.get("date"))); } }
@Test void activationCodeExpiredResponse() throws SharedServiceClientException { //given mockedAppAuthenticator.setCreatedAt(ZonedDateTime.parse("2021-10-14T19:31:00.044077+01:00[Europe/Amsterdam]")); mockedAppAuthenticator.setGeldigheidstermijn("5"); //when AppResponse appResponse = enterActivationCode.process(mockedFlow, activateWithCodeRequest); //then assertTrue(appResponse instanceof EnterActivationResponse); assertEquals("expired", ((StatusResponse) appResponse).getError()); }
public static void zipDirectory(File sourceDirectory, File zipFile) throws IOException { zipDirectory(sourceDirectory, zipFile, false); }
@Test public void testSubdirectoryWithContentsHasNoZipEntry() throws Exception { File zipDir = new File(tmpDir, "zip"); File subDirContent = new File(zipDir, "subdirContent"); assertTrue(subDirContent.mkdirs()); createFileWithContents(subDirContent, "myTextFile.txt", "Simple Text"); ZipFiles.zipDirectory(tmpDir, zipFile); assertZipOnlyContains("zip/subdirContent/myTextFile.txt"); }
@Override public ReadwriteSplittingRule build(final ReadwriteSplittingRuleConfiguration ruleConfig, final String databaseName, final DatabaseType protocolType, final ResourceMetaData resourceMetaData, final Collection<ShardingSphereRule> builtRules, final ComputeNodeInstanceContext computeNodeInstanceContext) { return new ReadwriteSplittingRule(databaseName, ruleConfig, computeNodeInstanceContext); }
@SuppressWarnings({"rawtypes", "unchecked"}) @Test void assertBuild() { ReadwriteSplittingRuleConfiguration ruleConfig = new ReadwriteSplittingRuleConfiguration(Collections.singleton( new ReadwriteSplittingDataSourceGroupRuleConfiguration("name", "writeDataSourceName", Collections.singletonList("readDataSourceName"), "loadBalancerName")), Collections.emptyMap()); DatabaseRuleBuilder builder = OrderedSPILoader.getServices(DatabaseRuleBuilder.class, Collections.singleton(ruleConfig)).get(ruleConfig); assertThat(builder.build(ruleConfig, "", new MockedDatabaseType(), mock(ResourceMetaData.class), Collections.emptyList(), mock(ComputeNodeInstanceContext.class)), instanceOf(ReadwriteSplittingRule.class)); }
static int inferParallelism( ReadableConfig readableConfig, long limitCount, Supplier<Integer> splitCountProvider) { int parallelism = readableConfig.get(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM); if (readableConfig.get(FlinkConfigOptions.TABLE_EXEC_ICEBERG_INFER_SOURCE_PARALLELISM)) { int maxInferParallelism = readableConfig.get(FlinkConfigOptions.TABLE_EXEC_ICEBERG_INFER_SOURCE_PARALLELISM_MAX); Preconditions.checkState( maxInferParallelism >= 1, FlinkConfigOptions.TABLE_EXEC_ICEBERG_INFER_SOURCE_PARALLELISM_MAX.key() + " cannot be less than 1"); parallelism = Math.min(splitCountProvider.get(), maxInferParallelism); } if (limitCount > 0) { int limit = limitCount >= Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) limitCount; parallelism = Math.min(parallelism, limit); } // parallelism must be positive. parallelism = Math.max(1, parallelism); return parallelism; }
@Test public void testInferedParallelism() throws IOException { Configuration configuration = new Configuration(); // Empty table, infer parallelism should be at least 1 int parallelism = SourceUtil.inferParallelism(configuration, -1L, () -> 0); assertThat(parallelism).isEqualTo(1); // 2 splits (max infer is the default value 100 , max > splits num), the parallelism is splits // num : 2 parallelism = SourceUtil.inferParallelism(configuration, -1L, () -> 2); assertThat(parallelism).isEqualTo(2); // 2 splits and limit is 1 , max infer parallelism is default 100, // which is greater than splits num and limit, the parallelism is the limit value : 1 parallelism = SourceUtil.inferParallelism(configuration, 1, () -> 2); assertThat(parallelism).isEqualTo(1); // 2 splits and max infer parallelism is 1 (max < splits num), the parallelism is 1 configuration.setInteger(FlinkConfigOptions.TABLE_EXEC_ICEBERG_INFER_SOURCE_PARALLELISM_MAX, 1); parallelism = SourceUtil.inferParallelism(configuration, -1L, () -> 2); assertThat(parallelism).isEqualTo(1); // 2 splits, max infer parallelism is 1, limit is 3, the parallelism is max infer parallelism : // 1 parallelism = SourceUtil.inferParallelism(configuration, 3, () -> 2); assertThat(parallelism).isEqualTo(1); // 2 splits, infer parallelism is disabled, the parallelism is flink default parallelism 1 configuration.setBoolean(FlinkConfigOptions.TABLE_EXEC_ICEBERG_INFER_SOURCE_PARALLELISM, false); parallelism = SourceUtil.inferParallelism(configuration, 3, () -> 2); assertThat(parallelism).isEqualTo(1); }
public void initialize(StorageProvider storageProvider, BackgroundJobServer backgroundJobServer) { storageProviderMetricsBinder = new StorageProviderMetricsBinder(storageProvider, meterRegistry); if (backgroundJobServer != null) { backgroundJobServerMetricsBinder = new BackgroundJobServerMetricsBinder(backgroundJobServer, meterRegistry); } }
@Test void testWithStorageProviderOnly() { // GIVEN JobRunrMicroMeterIntegration jobRunrMicroMeterIntegration = new JobRunrMicroMeterIntegration(meterRegistry); when(storageProvider.getJobStats()).thenReturn(JobStats.empty()); // WHEN jobRunrMicroMeterIntegration.initialize(storageProvider, null); // THEN verify(storageProvider).getJobStats(); verify(storageProvider).addJobStorageOnChangeListener(any(StorageProviderMetricsBinder.class)); verify(meterRegistry, times(9)).gauge(any(String.class), any(Iterable.class), any(AtomicLong.class)); // WHEN assertThatCode(jobRunrMicroMeterIntegration::close).doesNotThrowAnyException(); }
public static <T> RetryTransformer<T> of(Retry retry) { return new RetryTransformer<>(retry); }
@Test public void retryOnResultUsingMaybe() throws InterruptedException { RetryConfig config = RetryConfig.<String>custom() .retryOnResult("retry"::equals) .waitDuration(Duration.ofMillis(50)) .maxAttempts(3).build(); Retry retry = Retry.of("testName", config); given(helloWorldService.returnHelloWorld()) .willReturn("retry") .willReturn("success"); Maybe.fromCallable(helloWorldService::returnHelloWorld) .compose(RetryTransformer.of(retry)) .test() .await() .assertValueCount(1) .assertValue("success") .assertComplete() .assertSubscribed(); then(helloWorldService).should(times(2)).returnHelloWorld(); Retry.Metrics metrics = retry.getMetrics(); assertThat(metrics.getNumberOfFailedCallsWithoutRetryAttempt()).isZero(); assertThat(metrics.getNumberOfSuccessfulCallsWithRetryAttempt()).isEqualTo(1); }
public static DataMap getAnnotationsMap(Annotation[] as) { return annotationsToData(as, true); }
@Test(description = "RestSpecAnnotation annotation with unsupported members with overrides") public void unsupportedScalarMembersWithOverriddenValues() { @UnsupportedScalarMembers( charMember = UnsupportedScalarMembers.DEFAULT_CHAR_MEMBER + 1, shortMember = UnsupportedScalarMembers.DEFAULT_SHORT_MEMBER + 1 ) class LocalClass { } final Annotation[] annotations = LocalClass.class.getAnnotations(); final DataMap actual = ResourceModelAnnotation.getAnnotationsMap(annotations); Assert.assertNotNull(actual); Assert.assertTrue(actual.get(UNSUPPORTED_SCALAR_MEMBERS) instanceof DataMap); final DataMap dataMap = ((DataMap) actual.get(UNSUPPORTED_SCALAR_MEMBERS)); Assert.assertEquals(dataMap.size(), 0); }
@Override public final String path() { return delegate.getRequestURI(); }
@Test void path_getRequestURI() { when(request.getRequestURI()).thenReturn("/bar"); assertThat(wrapper.path()) .isEqualTo("/bar"); }
protected void addToRouteTotals(RouteStatistic routeStatistic) { routeTotalsStatistic.incrementTotalEips(routeStatistic.getTotalEips()); routeTotalsStatistic.incrementTotalEipsTested(routeStatistic.getTotalEipsTested()); routeTotalsStatistic.incrementTotalProcessingTime(routeStatistic.getTotalProcessingTime()); }
@Test public void testAddToRouteTotals() { }
public List<ShardingCondition> createShardingConditions(final InsertStatementContext sqlStatementContext, final List<Object> params) { List<ShardingCondition> result = null == sqlStatementContext.getInsertSelectContext() ? createShardingConditionsWithInsertValues(sqlStatementContext, params) : createShardingConditionsWithInsertSelect(sqlStatementContext, params); appendGeneratedKeyConditions(sqlStatementContext, result); return result; }
@Test void assertCreateShardingConditionsInsertStatement() { when(insertStatementContext.getGeneratedKeyContext()).thenReturn(Optional.empty()); List<ShardingCondition> shardingConditions = shardingConditionEngine.createShardingConditions(insertStatementContext, Collections.emptyList()); assertThat(shardingConditions.get(0).getStartIndex(), is(0)); assertTrue(shardingConditions.get(0).getValues().isEmpty()); }
// gRPC entry point for a consumer's long-poll receive. Computes an effective
// polling time (client-requested or derived from the deadline, clamped to the
// proxy's min/max), validates topic/group/invisible-time/filter, then pops
// messages asynchronously and streams the result back through the writer.
// All failure paths are reported via writer.writeAndComplete, never thrown.
public void receiveMessage(ProxyContext ctx, ReceiveMessageRequest request,
    StreamObserver<ReceiveMessageResponse> responseObserver) {
    ReceiveMessageResponseStreamWriter writer = createWriter(ctx, responseObserver);
    try {
        Settings settings = this.grpcClientSettingsManager.getClientSettings(ctx);
        Subscription subscription = settings.getSubscription();
        boolean fifo = subscription.getFifo();
        int maxAttempts = settings.getBackoffPolicy().getMaxAttempts();
        ProxyConfig config = ConfigurationManager.getProxyConfig();
        Long timeRemaining = ctx.getRemainingMs();
        long pollingTime;
        if (request.hasLongPollingTimeout()) {
            // Client supplied an explicit long-polling timeout.
            pollingTime = Durations.toMillis(request.getLongPollingTimeout());
        } else {
            // Otherwise leave half the request timeout as headroom before the deadline.
            pollingTime = timeRemaining - Durations.toMillis(settings.getRequestTimeout()) / 2;
        }
        // Clamp into the proxy's configured [min, max] long-polling window.
        if (pollingTime < config.getGrpcClientConsumerMinLongPollingTimeoutMillis()) {
            pollingTime = config.getGrpcClientConsumerMinLongPollingTimeoutMillis();
        }
        if (pollingTime > config.getGrpcClientConsumerMaxLongPollingTimeoutMillis()) {
            pollingTime = config.getGrpcClientConsumerMaxLongPollingTimeoutMillis();
        }
        if (pollingTime > timeRemaining) {
            if (timeRemaining >= config.getGrpcClientConsumerMinLongPollingTimeoutMillis()) {
                pollingTime = timeRemaining;
            } else {
                // Deadline too tight to poll at all. Older clients predate the
                // ILLEGAL_POLLING_TIME code, so they get BAD_REQUEST instead.
                final String clientVersion = ctx.getClientVersion();
                Code code = null == clientVersion || ILLEGAL_POLLING_TIME_INTRODUCED_CLIENT_VERSION.compareTo(clientVersion) > 0 ?
                    Code.BAD_REQUEST : Code.ILLEGAL_POLLING_TIME;
                writer.writeAndComplete(ctx, code, "The deadline time remaining is not enough" +
                    " for polling, please check network condition");
                return;
            }
        }
        validateTopicAndConsumerGroup(request.getMessageQueue().getTopic(), request.getGroup());
        String topic = request.getMessageQueue().getTopic().getName();
        String group = request.getGroup().getName();
        long actualInvisibleTime = Durations.toMillis(request.getInvisibleDuration());
        ProxyConfig proxyConfig = ConfigurationManager.getProxyConfig();
        if (proxyConfig.isEnableProxyAutoRenew() && request.getAutoRenew()) {
            // Proxy-managed renewal overrides the client-requested invisible time.
            actualInvisibleTime = proxyConfig.getDefaultInvisibleTimeMills();
        } else {
            validateInvisibleTime(actualInvisibleTime,
                ConfigurationManager.getProxyConfig().getMinInvisibleTimeMillsForRecv());
        }
        FilterExpression filterExpression = request.getFilterExpression();
        SubscriptionData subscriptionData;
        try {
            subscriptionData = FilterAPI.build(topic, filterExpression.getExpression(),
                GrpcConverter.getInstance().buildExpressionType(filterExpression.getType()));
        } catch (Exception e) {
            writer.writeAndComplete(ctx, Code.ILLEGAL_FILTER_EXPRESSION, e.getMessage());
            return;
        }
        this.messagingProcessor.popMessage(
                ctx,
                new ReceiveMessageQueueSelector(
                    request.getMessageQueue().getBroker().getName()
                ),
                group,
                topic,
                request.getBatchSize(),
                actualInvisibleTime,
                pollingTime,
                ConsumeInitMode.MAX,
                subscriptionData,
                fifo,
                new PopMessageResultFilterImpl(maxAttempts),
                request.hasAttemptId() ? request.getAttemptId() : null,
                timeRemaining
            ).thenAccept(popResult -> {
                if (proxyConfig.isEnableProxyAutoRenew() && request.getAutoRenew()) {
                    if (PopStatus.FOUND.equals(popResult.getPopStatus())) {
                        // Register each popped message's receipt handle so the proxy
                        // can renew its invisibility on behalf of the client.
                        List<MessageExt> messageExtList = popResult.getMsgFoundList();
                        for (MessageExt messageExt : messageExtList) {
                            String receiptHandle = messageExt.getProperty(MessageConst.PROPERTY_POP_CK);
                            if (receiptHandle != null) {
                                MessageReceiptHandle messageReceiptHandle =
                                    new MessageReceiptHandle(group, topic, messageExt.getQueueId(), receiptHandle, messageExt.getMsgId(),
                                        messageExt.getQueueOffset(), messageExt.getReconsumeTimes());
                                messagingProcessor.addReceiptHandle(ctx, grpcChannelManager.getChannel(ctx.getClientID()), group, messageExt.getMsgId(), messageReceiptHandle);
                            }
                        }
                    }
                }
                writer.writeAndComplete(ctx, request, popResult);
            })
            .exceptionally(t -> {
                writer.writeAndComplete(ctx, request, t);
                return null;
            });
    } catch (Throwable t) {
        writer.writeAndComplete(ctx, request, t);
    }
}
@Test public void testReceiveMessageIllegalInvisibleTimeTooSmall() { StreamObserver<ReceiveMessageResponse> receiveStreamObserver = mock(ServerCallStreamObserver.class); ArgumentCaptor<ReceiveMessageResponse> responseArgumentCaptor = ArgumentCaptor.forClass(ReceiveMessageResponse.class); doNothing().when(receiveStreamObserver).onNext(responseArgumentCaptor.capture()); when(this.grpcClientSettingsManager.getClientSettings(any())).thenReturn(Settings.newBuilder().getDefaultInstanceForType()); this.receiveMessageActivity.receiveMessage( createContext(), ReceiveMessageRequest.newBuilder() .setGroup(Resource.newBuilder().setName(CONSUMER_GROUP).build()) .setMessageQueue(MessageQueue.newBuilder().setTopic(Resource.newBuilder().setName(TOPIC).build()).build()) .setAutoRenew(false) .setInvisibleDuration(Durations.fromSeconds(0)) .build(), receiveStreamObserver ); assertEquals(Code.ILLEGAL_INVISIBLE_TIME, getResponseCodeFromReceiveMessageResponseList(responseArgumentCaptor.getAllValues())); }
public ConfigCheckResult checkConfig() { Optional<Long> appId = getAppId(); if (appId.isEmpty()) { return failedApplicationStatus(INVALID_APP_ID_STATUS); } GithubAppConfiguration githubAppConfiguration = new GithubAppConfiguration(appId.get(), gitHubSettings.privateKey(), gitHubSettings.apiURLOrDefault()); return checkConfig(githubAppConfiguration); }
@Test public void checkConfig_whenNoInstallationsAreReturned_shouldReturnFailedAppAutoProvisioningCheck() { mockGithubConfiguration(); ArgumentCaptor<GithubAppConfiguration> appConfigurationCaptor = ArgumentCaptor.forClass(GithubAppConfiguration.class); mockGithubAppWithValidConfig(appConfigurationCaptor); mockOrganizationsWithoutPermissions(appConfigurationCaptor); ConfigCheckResult checkResult = configValidator.checkConfig(); assertThat(checkResult.application().jit()).isEqualTo(ConfigStatus.failed(NO_INSTALLATIONS_STATUS)); assertThat(checkResult.application().autoProvisioning()).isEqualTo(ConfigStatus.failed(NO_INSTALLATIONS_STATUS)); assertThat(checkResult.installations()).isEmpty(); verifyAppConfiguration(appConfigurationCaptor.getValue()); }
@Override public boolean requiresDestruction(Object bean) { return bean instanceof Startable; }
@Test public void startable_and_autoCloseable_should_require_destruction(){ assertThat(underTest.requiresDestruction(mock(Startable.class))).isTrue(); assertThat(underTest.requiresDestruction(mock(org.sonar.api.Startable.class))).isTrue(); assertThat(underTest.requiresDestruction(mock(Object.class))).isFalse(); }
@Override public <T> Future<T> submit(Callable<T> task) { return delegate.submit(task); }
@Test public void submit_runnable_delegates_to_executorService() { underTest.submit(SOME_RUNNABLE); inOrder.verify(executorService).submit(SOME_RUNNABLE); inOrder.verifyNoMoreInteractions(); }
public TrackingResult track(Component component, Input<DefaultIssue> rawInput, @Nullable Input<DefaultIssue> targetInput) { if (analysisMetadataHolder.isPullRequest()) { return standardResult(pullRequestTracker.track(component, rawInput, targetInput)); } if (isFirstAnalysisSecondaryBranch()) { Tracking<DefaultIssue, DefaultIssue> tracking = referenceBranchTracker.track(component, rawInput); return new TrackingResult(tracking.getMatchedRaws(), emptyMap(), empty(), tracking.getUnmatchedRaws()); } return standardResult(tracker.track(component, rawInput)); }
@Test public void delegate_merge_tracker() { Branch branch = mock(Branch.class); when(branch.getType()).thenReturn(BranchType.BRANCH); when(branch.isMain()).thenReturn(false); when(analysisMetadataHolder.getBranch()).thenReturn(branch); when(analysisMetadataHolder.isFirstAnalysis()).thenReturn(true); underTest.track(component, rawInput, targetInput); verify(mergeBranchTracker).track(component, rawInput); verifyNoInteractions(tracker); verifyNoInteractions(prBranchTracker); }
@Override public Object getSmallintValue(final ResultSet resultSet, final int columnIndex) throws SQLException { return resultSet.getShort(columnIndex); }
@Test void assertGetSmallintValue() throws SQLException { when(resultSet.getShort(1)).thenReturn((short) 0); assertThat(dialectResultSetMapper.getSmallintValue(resultSet, 1), is((short) 0)); }
@Override public boolean getAutoCommit() { return false; }
@Test void assertGetAutoCommit() { assertFalse(connection.getAutoCommit()); }
@Override protected Mono<Boolean> doMatcher(final ServerWebExchange exchange, final WebFilterChain chain) { return Mono.just(paths.stream().anyMatch(path -> exchange.getRequest().getURI().getRawPath().startsWith(path))); }
@Test public void testDoMatcher() { Mono<Boolean> health = healthFilter.doMatcher(MockServerWebExchange .from(MockServerHttpRequest.post("http://localhost:8080/actuator/health")), webFilterChain); StepVerifier.create(health).expectNext(Boolean.TRUE).verifyComplete(); Mono<Boolean> healthCheck = healthFilter.doMatcher(MockServerWebExchange .from(MockServerHttpRequest.post("http://localhost:8080/health_check")), webFilterChain); StepVerifier.create(healthCheck).expectNext(Boolean.TRUE).verifyComplete(); Mono<Boolean> readiness = healthFilter.doMatcher(MockServerWebExchange .from(MockServerHttpRequest.post("http://localhost:8080/actuator/health/readiness")), webFilterChain); StepVerifier.create(readiness).expectNext(Boolean.TRUE).verifyComplete(); }
public Materialization create( final StreamsMaterialization delegate, final MaterializationInfo info, final QueryId queryId, final QueryContext.Stacker contextStacker ) { final TransformVisitor transformVisitor = new TransformVisitor(queryId, contextStacker); final List<Transform> transforms = info .getTransforms() .stream() .map(xform -> xform.visit(transformVisitor)) .collect(Collectors.toList()); return materializationFactory.create( delegate, info.getSchema(), transforms ); }
@Test public void shouldUseCorrectLoggerForPredicate() { // When: factory.create(materialization, info, queryId, new Stacker().push("filter")); // Then: verify(predicateInfo).getPredicate(loggerCaptor.capture()); assertThat( loggerCaptor.getValue().apply(new Stacker().getQueryContext()), is(filterProcessingLogger) ); }
@VisibleForTesting static boolean isBrokenPipe(IOException original) { Throwable exception = original; while (exception != null) { String message = exception.getMessage(); if (message != null && message.toLowerCase(Locale.US).contains("broken pipe")) { return true; } exception = exception.getCause(); if (exception == original) { // just in case if there's a circular chain return false; } } return false; }
@Test public void testIsBrokenPipe_notBrokenPipe() { Assert.assertFalse(RegistryEndpointCaller.isBrokenPipe(new IOException())); Assert.assertFalse(RegistryEndpointCaller.isBrokenPipe(new SocketException())); Assert.assertFalse(RegistryEndpointCaller.isBrokenPipe(new SSLException("mock"))); }
public Optional<YamlRuleConfiguration> swapToYamlRuleConfiguration(final Collection<RepositoryTuple> repositoryTuples, final Class<? extends YamlRuleConfiguration> toBeSwappedType) { RepositoryTupleEntity tupleEntity = toBeSwappedType.getAnnotation(RepositoryTupleEntity.class); if (null == tupleEntity) { return Optional.empty(); } return tupleEntity.leaf() ? swapToYamlRuleConfiguration(repositoryTuples, toBeSwappedType, tupleEntity) : swapToYamlRuleConfiguration(repositoryTuples, toBeSwappedType, getFields(toBeSwappedType)); }
@Test void assertSwapToYamlRuleConfigurationWithNodeYamlRuleConfiguration() { Optional<YamlRuleConfiguration> actual = new RepositoryTupleSwapperEngine().swapToYamlRuleConfiguration(Arrays.asList( new RepositoryTuple("/metadata/foo_db/rules/node/map_value/k/versions/0", "v"), new RepositoryTuple("/metadata/foo_db/rules/node/collection_value/versions/0", "- !LEAF" + System.lineSeparator() + " value: foo"), new RepositoryTuple("/metadata/foo_db/rules/node/string_value/versions/0", "str"), new RepositoryTuple("/metadata/foo_db/rules/node/boolean_value/versions/0", "true"), new RepositoryTuple("/metadata/foo_db/rules/node/integer_value/versions/0", "1"), new RepositoryTuple("/metadata/foo_db/rules/node/long_value/versions/0", "10"), new RepositoryTuple("/metadata/foo_db/rules/node/enum_value/versions/0", "FOO")), NodeYamlRuleConfiguration.class); assertTrue(actual.isPresent()); NodeYamlRuleConfiguration actualYamlConfig = (NodeYamlRuleConfiguration) actual.get(); assertThat(actualYamlConfig.getMapValue().size(), is(1)); assertThat(actualYamlConfig.getMapValue().get("k").getValue(), is("v")); assertThat(actualYamlConfig.getCollectionValue().size(), is(1)); assertThat(actualYamlConfig.getCollectionValue().iterator().next().getValue(), is("foo")); assertThat(actualYamlConfig.getStringValue(), is("str")); assertTrue(actualYamlConfig.getBooleanValue()); assertThat(actualYamlConfig.getIntegerValue(), is(1)); assertThat(actualYamlConfig.getLongValue(), is(10L)); assertThat(actualYamlConfig.getEnumValue(), is(NodeYamlRuleConfigurationEnum.FOO)); }
@Override public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay readerWay, IntsRef relationFlags) { String roadClassTag = readerWay.getTag("highway"); if (roadClassTag == null) return; RoadClass roadClass = RoadClass.find(roadClassTag); if (roadClass == OTHER && roadClassTag.endsWith("_link")) roadClass = RoadClass.find(roadClassTag.substring(0, roadClassTag.length() - 5)); if (roadClass != OTHER) roadClassEnc.setEnum(false, edgeId, edgeIntAccess, roadClass); }
@Test public void testNoNPE() { ReaderWay readerWay = new ReaderWay(1); ArrayEdgeIntAccess intAccess = new ArrayEdgeIntAccess(1); int edgeId = 0; parser.handleWayTags(edgeId, intAccess, readerWay, relFlags); assertEquals(RoadClass.OTHER, rcEnc.getEnum(false, edgeId, intAccess)); }
public static StringUtils.Pair parseVariableAndPath(String text) { Matcher matcher = VAR_AND_PATH_PATTERN.matcher(text); matcher.find(); String name = text.substring(0, matcher.end()); String path; if (matcher.end() == text.length()) { path = ""; } else { path = text.substring(matcher.end()).trim(); } if (isXmlPath(path) || isXmlPathFunction(path)) { // xml, don't prefix for json } else { path = "$" + path; } return StringUtils.pair(name, path); }
@Test void testParsingVariableAndJsonPath() { assertEquals(StringUtils.pair("foo", "$"), ScenarioEngine.parseVariableAndPath("foo")); assertEquals(StringUtils.pair("foo", "$.bar"), ScenarioEngine.parseVariableAndPath("foo.bar")); assertEquals(StringUtils.pair("foo", "$['bar']"), ScenarioEngine.parseVariableAndPath("foo['bar']")); assertEquals(StringUtils.pair("foo", "$[0]"), ScenarioEngine.parseVariableAndPath("foo[0]")); assertEquals(StringUtils.pair("foo", "$[0].bar"), ScenarioEngine.parseVariableAndPath("foo[0].bar")); assertEquals(StringUtils.pair("foo", "$[0]['bar']"), ScenarioEngine.parseVariableAndPath("foo[0]['bar']")); assertEquals(StringUtils.pair("foo", "/bar"), ScenarioEngine.parseVariableAndPath("foo/bar")); assertEquals(StringUtils.pair("foo", "/"), ScenarioEngine.parseVariableAndPath("foo/")); assertEquals(StringUtils.pair("foo", "/"), ScenarioEngine.parseVariableAndPath("foo /")); assertEquals(StringUtils.pair("foo", "/bar"), ScenarioEngine.parseVariableAndPath("foo /bar")); assertEquals(StringUtils.pair("foo", "/bar/baz[1]/ban"), ScenarioEngine.parseVariableAndPath("foo/bar/baz[1]/ban")); }
@Override public long approximateNumEntries() { final List<ReadOnlyKeyValueStore<K, V>> stores = storeProvider.stores(storeName, storeType); long total = 0; for (final ReadOnlyKeyValueStore<K, V> store : stores) { total += store.approximateNumEntries(); if (total < 0) { return Long.MAX_VALUE; } } return total; }
@Test public void shouldReturnLongMaxValueOnOverflow() { stubProviderTwo.addStore(storeName, new NoOpReadOnlyStore<Object, Object>() { @Override public long approximateNumEntries() { return Long.MAX_VALUE; } }); stubOneUnderlying.put("overflow", "me"); assertEquals(Long.MAX_VALUE, theStore.approximateNumEntries()); }
// Bulk-deletes files from Swift. Non-container paths are grouped per container
// (including any SLO/DLO segments collected before the manifest is removed),
// deleted in configurable-size partitions via the multi-delete API, and
// containers themselves are deleted last. If the server rejects bulk delete
// (interoperability error), falls back to single-object SwiftDeleteFeature.
@Override
public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException {
    final Map<Path, List<String>> containers = new HashMap<>();
    for(Path file : files.keySet()) {
        if(containerService.isContainer(file)) {
            // Containers are handled after their contents, below.
            continue;
        }
        callback.delete(file);
        final Path container = containerService.getContainer(file);
        if(containers.containsKey(container)) {
            containers.get(container).add(containerService.getKey(file));
        }
        else {
            final List<String> keys = new ArrayList<>();
            keys.add(containerService.getKey(file));
            // Collect a list of existing segments. Must do this before deleting the manifest file.
            for(Path segment : segmentService.list(file)) {
                keys.add(containerService.getKey(segment));
            }
            containers.put(container, keys);
        }
    }
    try {
        for(Map.Entry<Path, List<String>> container : containers.entrySet()) {
            final Region region = regionService.lookup(container.getKey());
            final List<String> keys = container.getValue();
            // Server-side bulk delete in partitions bounded by the preference value.
            for(List<String> partition : new Partition<>(keys, new HostPreferences(session.getHost()).getInteger("openstack.delete.multiple.partition"))) {
                session.getClient().deleteObjects(region, container.getKey().getName(), partition);
            }
        }
    }
    catch(GenericException e) {
        if(new SwiftExceptionMappingService().map(e) instanceof InteroperabilityException) {
            // Bulk delete unsupported by this endpoint: delete one object at a time.
            new SwiftDeleteFeature(session, regionService).delete(files, prompt, callback);
            return;
        }
        else {
            throw new SwiftExceptionMappingService().map("Cannot delete {0}", e, files.keySet().iterator().next());
        }
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map("Cannot delete {0}", e, files.keySet().iterator().next());
    }
    for(Path file : files.keySet()) {
        if(containerService.isContainer(file)) {
            callback.delete(file);
            // Finally delete bucket itself
            try {
                session.getClient().deleteContainer(regionService.lookup(file), containerService.getContainer(file).getName());
            }
            catch(GenericException e) {
                throw new SwiftExceptionMappingService().map("Cannot delete {0}", e, file);
            }
            catch(IOException e) {
                throw new DefaultIOExceptionMappingService().map("Cannot delete {0}", e, file);
            }
        }
    }
}
@Test public void testDeleteMultiple() throws Exception { final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume)); for(Location.Name region : new SwiftLocationFeature(session).getLocations()) { container.attributes().setRegion(region.getIdentifier()); new SwiftListService(session, new SwiftRegionService(session)).list(container, new ListProgressListener() { @Override public void chunk(final Path folder, final AttributedList<Path> list) { try { new SwiftMultipleDeleteFeature(session).delete(list.toList(), new DisabledLoginCallback(), new Delete.DisabledCallback()); } catch(BackgroundException e) { fail(e.getDetail()); } } @Override public ListProgressListener reset() { return this; } @Override public void message(final String message) { // } }); } }
public Node parse() throws ScanException { return E(); }
@Test public void testBasic() throws Exception { Parser<Object> p = new Parser<>("hello"); Node t = p.parse(); Assertions.assertEquals(Node.LITERAL, t.getType()); Assertions.assertEquals("hello", t.getValue()); }
public void setFatigue(Fatigue fatigue) { this.giant.setFatigue(fatigue); }
@Test void testSetFatigue() { final var model = mock(GiantModel.class); final var view = mock(GiantView.class); final var controller = new GiantController(model, view); verifyNoMoreInteractions(model, view); for (final var fatigue : Fatigue.values()) { controller.setFatigue(fatigue); verify(model).setFatigue(fatigue); verifyNoMoreInteractions(view); } controller.getFatigue(); //noinspection ResultOfMethodCallIgnored verify(model).getFatigue(); verifyNoMoreInteractions(model, view); }
String getSubstitutionVariable(String key) { return substitutionVariables.get(key); }
@Test public void shouldNotBeVerboseByDefault() { assertThat(new LoggingConfiguration(null) .getSubstitutionVariable(LoggingConfiguration.PROPERTY_ROOT_LOGGER_LEVEL)).isEqualTo(LoggingConfiguration.LEVEL_ROOT_DEFAULT); }
@Override public void process() { try { if (containers.length > 1) { throw new RuntimeException("This processor can only handle single containers"); } ContainerUnloader container = containers[0]; // Get config Configuration config = createConfiguration(container); //Workspace ProjectController pc = Lookup.getDefault().lookup(ProjectController.class); if (workspace == null) { workspace = pc.openNewWorkspace(config); } else if(!configurationMatchesExisting(config, workspace)) { // The configuration check failed, stop processing return; } processMeta(container, workspace); if (container.getSource() != null && !container.getSource().isEmpty()) { pc.setSource(workspace, container.getSource()); // Remove extensions pc.renameWorkspace(workspace, container.getSource().replaceAll("(?<!^)[.].*", "")); } Progress.start(progressTicket, calculateWorkUnits()); process(container, workspace); Progress.finish(progressTicket); } finally { clean(); } }
@Test public void testMeta() { ImportContainerImpl importContainer = new ImportContainerImpl(); importContainer.setMetadata(MetadataDraft.builder().description("foo").title("bar").build()); Workspace workspace = new WorkspaceImpl(null, 1); DefaultProcessor defaultProcessor = new DefaultProcessor(); defaultProcessor.setContainers(new ImportContainerImpl[] {importContainer}); defaultProcessor.setWorkspace(workspace); defaultProcessor.process(); WorkspaceMetaData workspaceMetaData = workspace.getWorkspaceMetadata(); Assert.assertEquals("foo", workspaceMetaData.getDescription()); Assert.assertEquals("bar", workspaceMetaData.getTitle()); }
public void close() { close(Long.MAX_VALUE, false); }
@Test public void shouldThrowOnNegativeTimeoutForCloseWithCloseOptionLeaveGroupTrue() throws Exception { prepareStreams(); prepareStreamThread(streamThreadOne, 1); prepareStreamThread(streamThreadTwo, 2); prepareTerminableThread(streamThreadOne); final MockClientSupplier mockClientSupplier = spy(MockClientSupplier.class); when(mockClientSupplier.getAdmin(any())).thenReturn(adminClient); final KafkaStreams.CloseOptions closeOptions = new KafkaStreams.CloseOptions(); closeOptions.timeout(Duration.ofMillis(-1L)); closeOptions.leaveGroup(true); try (final KafkaStreams streams = new KafkaStreams(getBuilderWithSource().build(), props, mockClientSupplier, time)) { assertThrows(IllegalArgumentException.class, () -> streams.close(closeOptions)); } }
public static String parsePath(String uri, Map<String, String> patterns) { if (uri == null) { return null; } else if (StringUtils.isBlank(uri)) { return String.valueOf(SLASH); } CharacterIterator ci = new StringCharacterIterator(uri); StringBuilder pathBuffer = new StringBuilder(); char c = ci.first(); if (c == CharacterIterator.DONE) { return String.valueOf(SLASH); } do { if (c == OPEN) { String regexBuffer = cutParameter(ci, patterns); if (regexBuffer == null) { LOGGER.warn("Operation path \"{}\" contains syntax error.", uri); return null; } pathBuffer.append(regexBuffer); } else { int length = pathBuffer.length(); if (!(c == SLASH && (length != 0 && pathBuffer.charAt(length - 1) == SLASH))) { pathBuffer.append(c); } } } while ((c = ci.next()) != CharacterIterator.DONE); return pathBuffer.toString(); }
@Test(description = "parse regex with slash inside it from issue 1153") public void parseRegexWithSlashInside() { final Map<String, String> regexMap = new HashMap<String, String>(); final String path = PathUtils.parsePath("/{itemId: [0-9]{4}/[0-9]{2}/[0-9]{2}/[0-9]{2}/[0-9]{2}/[0-9]{2}/[0-9]{3}/[A-Za-z0-9]+}", regexMap); assertEquals(path, "/{itemId}"); assertEquals(regexMap.get("itemId"), "[0-9]{4}/[0-9]{2}/[0-9]{2}/[0-9]{2}/[0-9]{2}/[0-9]{2}/[0-9]{3}/[A-Za-z0-9]+"); }
@Operation(summary = "queryAuthorizedNamespace", description = "QUERY_AUTHORIZED_NAMESPACE_NOTES") @Parameters({ @Parameter(name = "userId", description = "USER_ID", schema = @Schema(implementation = int.class, example = "100")) }) @GetMapping(value = "/authed-namespace") @ResponseStatus(HttpStatus.OK) @ApiException(QUERY_AUTHORIZED_NAMESPACE_ERROR) public Result queryAuthorizedNamespace(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam("userId") Integer userId) { Map<String, Object> result = k8sNamespaceService.queryAuthorizedNamespace(loginUser, userId); return returnDataList(result); }
@Test public void testQueryAuthorizedNamespace() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("userId", "1"); MvcResult mvcResult = mockMvc.perform(get("/k8s-namespace/authed-namespace") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); }
public static List<AclEntry> mergeAclEntries(List<AclEntry> existingAcl, List<AclEntry> inAclSpec) throws AclException { ValidatedAclSpec aclSpec = new ValidatedAclSpec(inAclSpec); ArrayList<AclEntry> aclBuilder = Lists.newArrayListWithCapacity(MAX_ENTRIES); List<AclEntry> foundAclSpecEntries = Lists.newArrayListWithCapacity(MAX_ENTRIES); EnumMap<AclEntryScope, AclEntry> providedMask = Maps.newEnumMap(AclEntryScope.class); EnumSet<AclEntryScope> maskDirty = EnumSet.noneOf(AclEntryScope.class); EnumSet<AclEntryScope> scopeDirty = EnumSet.noneOf(AclEntryScope.class); for (AclEntry existingEntry: existingAcl) { AclEntry aclSpecEntry = aclSpec.findByKey(existingEntry); if (aclSpecEntry != null) { foundAclSpecEntries.add(aclSpecEntry); scopeDirty.add(aclSpecEntry.getScope()); if (aclSpecEntry.getType() == MASK) { providedMask.put(aclSpecEntry.getScope(), aclSpecEntry); maskDirty.add(aclSpecEntry.getScope()); } else { aclBuilder.add(aclSpecEntry); } } else { if (existingEntry.getType() == MASK) { providedMask.put(existingEntry.getScope(), existingEntry); } else { aclBuilder.add(existingEntry); } } } // ACL spec entries that were not replacements are new additions. for (AclEntry newEntry: aclSpec) { if (Collections.binarySearch(foundAclSpecEntries, newEntry, ACL_ENTRY_COMPARATOR) < 0) { scopeDirty.add(newEntry.getScope()); if (newEntry.getType() == MASK) { providedMask.put(newEntry.getScope(), newEntry); maskDirty.add(newEntry.getScope()); } else { aclBuilder.add(newEntry); } } } copyDefaultsIfNeeded(aclBuilder); calculateMasks(aclBuilder, providedMask, maskDirty, scopeDirty); return buildAndValidateAcl(aclBuilder); }
@Test(expected=AclException.class) public void testMergeAclEntriesDuplicateEntries() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, "bruce", ALL), aclEntry(ACCESS, USER, "diana", READ_WRITE), aclEntry(ACCESS, USER, "clark", READ), aclEntry(ACCESS, USER, "bruce", READ_EXECUTE)); mergeAclEntries(existing, aclSpec); }
public static Map<String, String> mergeHeaders(Map<String, String> headers1, Map<String, String> headers2) { TreeMap<String, String> combinedHeaders = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); if (headers2 != null) { combinedHeaders.putAll(headers2); } if (headers1 != null) { for (Map.Entry<String, String> header : headers1.entrySet()) { String name = header.getKey(); if (!combinedHeaders.containsKey(name)) { combinedHeaders.put(name, header.getValue()); } } } return combinedHeaders; }
@Test public void testMergeHeader() { Map<String, String> headers1 = new HashMap<>(); Map<String, String> headers2 = new HashMap<>(); headers1.put("X-header1", "header1Value"); headers1.put("X-commonheader", "commonHeader1Value"); headers2.put("X-header2", "header2Value"); headers2.put("X-CommonHeader", "commonHeader2Value"); Map<String, String> combineHeaders = HeaderUtil.mergeHeaders(headers1, headers2); Assert.assertEquals(combineHeaders.size(), 3); Assert.assertEquals(combineHeaders.get("x-header1"), "header1Value"); Assert.assertEquals(combineHeaders.get("x-header2"), "header2Value"); Assert.assertEquals(combineHeaders.get("x-commonheader"), "commonHeader2Value"); }
@Override public String named() { return PluginEnum.RPC_PARAM_TRANSFORM.getName(); }
@Test public void testNamed() { String result = rpcParamTransformPlugin.named(); Assertions.assertEquals(PluginEnum.RPC_PARAM_TRANSFORM.getName(), result); }
@Override public void filter(ContainerRequestContext requestContext) throws IOException { final MultivaluedMap<String, String> headers = requestContext.getHeaders(); final List<String> authorization = headers.get(AUTHORIZATION_PROPERTY); if (authorization == null || authorization.isEmpty()) { abortRequestUnauthorized(requestContext, "You cannot access this resource, missing authorization header!"); return; } final String encodedUserPassword = authorization.get(0).replaceFirst(AUTHENTICATION_SCHEME + " ", ""); String usernameAndPassword = new String(Base64.decode(encodedUserPassword.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8); final String[] parts = usernameAndPassword.split(":"); if (parts.length != 2) { abortRequestUnauthorized(requestContext, "You cannot access this resource, invalid username/password combination!"); return; } final String username = parts[0]; final String password = parts[1]; if (!isUserMatching(username, password)) { abortRequestUnauthorized(requestContext, "You cannot access this resource, invalid username/password combination!"); } }
@Test void testMissingPassword() throws IOException { final BasicAuthFilter filter = new BasicAuthFilter("admin", DigestUtils.sha256Hex("admin"), "junit-test"); final ContainerRequest request = mockRequest("admin", ""); filter.filter(request); final ArgumentCaptor<Response> captor = ArgumentCaptor.forClass(Response.class); Mockito.verify(request, times(1)).abortWith(captor.capture()); final Response response = captor.getValue(); Assertions.assertThat(response.getStatusInfo().getStatusCode()).isEqualTo(401); Assertions.assertThat(response.getStatusInfo().getReasonPhrase()).isEqualTo("Unauthorized"); Assertions.assertThat(response.getEntity()).isEqualTo("You cannot access this resource, invalid username/password combination!"); }
@Override public final ChannelFuture close() { return close(newPromise()); }
@Test @Timeout(value = 2000, unit = TimeUnit.MILLISECONDS) public void testFireChannelInactiveAndUnregisteredOnClose() throws InterruptedException { testFireChannelInactiveAndUnregistered(new Action() { @Override public ChannelFuture doRun(Channel channel) { return channel.close(); } }); testFireChannelInactiveAndUnregistered(new Action() { @Override public ChannelFuture doRun(Channel channel) { return channel.close(channel.newPromise()); } }); }
public static FhirIOPatientEverything getPatientEverything() { return new FhirIOPatientEverything(); }
@Test public void test_FhirIO_failedPatientEverything() { PatientEverythingParameter input = PatientEverythingParameter.builder().setResourceName("bad-resource-name").build(); FhirIOPatientEverything.Result everythingResult = pipeline.apply(Create.of(input)).apply(FhirIO.getPatientEverything()); PCollection<HealthcareIOError<String>> failed = everythingResult.getFailedReads(); PCollection<String> failedEverything = failed.apply( MapElements.into(TypeDescriptors.strings()).via(HealthcareIOError::getDataResource)); PAssert.that(failedEverything).containsInAnyOrder(input.toString()); PAssert.that(everythingResult.getPatientCompartments()).empty(); pipeline.run(); }
public static GeneratorResult run(String resolverPath, String defaultPackage, final boolean generateImported, final boolean generateDataTemplates, RestliVersion version, RestliVersion deprecatedByVersion, String targetDirectoryPath, String[] sources) throws IOException { return run(resolverPath, defaultPackage, null, generateImported, generateDataTemplates, version, deprecatedByVersion, targetDirectoryPath, sources); }
@Test(dataProvider = "deprecatedByVersionDataProvider") public void testDeprecatedByVersion(String idlName, String buildersName, String substituteClassName) throws Exception { final String pegasusDir = moduleDir + FS + RESOURCES_DIR + FS + "pegasus"; final String outPath = outdir.getPath(); RestRequestBuilderGenerator.run(pegasusDir, null, moduleDir, true, false, RestliVersion.RESTLI_1_0_0, RestliVersion.RESTLI_2_0_0, outPath, new String[] { moduleDir + FS + RESOURCES_DIR + FS + "idls" + FS + idlName }); final File builderFile = new File(outPath + FS + buildersName); Assert.assertTrue(builderFile.exists()); final String fileContent = IOUtils.toString(new FileInputStream(builderFile)); final Pattern regex = Pattern.compile(".*@deprecated$.*\\{@link " + substituteClassName + "\\}.*^@Deprecated$\n^public class .*", Pattern.MULTILINE | Pattern.DOTALL); Assert.assertTrue(regex.matcher(fileContent).matches()); Assert.assertTrue(fileContent.contains("Generated from " + RESOURCES_DIR + FS + "idls" + FS + idlName)); }
public void triggerPartitionReplicaSync(int partitionId, Collection<ServiceNamespace> namespaces, int replicaIndex) { assert replicaIndex >= 0 && replicaIndex < InternalPartition.MAX_REPLICA_COUNT : "Invalid replica index! partitionId=" + partitionId + ", replicaIndex=" + replicaIndex; PartitionReplica target = checkAndGetPrimaryReplicaOwner(partitionId, replicaIndex); if (target == null) { return; } if (!partitionService.areMigrationTasksAllowed()) { logger.finest("Cannot send sync replica request for partitionId=" + partitionId + ", replicaIndex=" + replicaIndex + ", namespaces=" + namespaces + ". Sync is not allowed."); return; } InternalPartitionImpl partition = partitionStateManager.getPartitionImpl(partitionId); if (partition.isMigrating()) { logger.finest("Cannot send sync replica request for partitionId=" + partitionId + ", replicaIndex=" + replicaIndex + ", namespaces=" + namespaces + ". Partition is already migrating."); return; } sendSyncReplicaRequest(partitionId, namespaces, replicaIndex, target); }
@Test(expected = AssertionError.class) public void testTriggerPartitionReplicaSync_whenReplicaIndexTooLarge_thenThrowException() { Set<ServiceNamespace> namespaces = Collections.singleton(INSTANCE); manager.triggerPartitionReplicaSync(PARTITION_ID, namespaces, InternalPartition.MAX_REPLICA_COUNT + 1); }
public static Map<?, ?> convertToMap(Schema schema, Object value) { return convertToMapInternal(MAP_SELECTOR_SCHEMA, value); }
@Test public void shouldFailToParseStringOfMalformedMap() { assertThrows(DataException.class, () -> Values.convertToMap(Schema.STRING_SCHEMA, " { \"foo\" : 1234567890 , \"a\", \"bar\" : 0, \"baz\" : -987654321 } ")); }
public static Map<String, String> getTrimmedStringCollectionSplitByEquals( String str) { String[] trimmedList = getTrimmedStrings(str); Map<String, String> pairs = new HashMap<>(); for (String s : trimmedList) { if (s.isEmpty()) { continue; } String[] splitByKeyVal = getTrimmedStringsSplitByEquals(s); Preconditions.checkArgument( splitByKeyVal.length == 2, STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG + " Input: " + str); boolean emptyKey = org.apache.commons.lang3.StringUtils.isEmpty(splitByKeyVal[0]); boolean emptyVal = org.apache.commons.lang3.StringUtils.isEmpty(splitByKeyVal[1]); Preconditions.checkArgument( !emptyKey && !emptyVal, STRING_COLLECTION_SPLIT_EQUALS_INVALID_ARG + " Input: " + str); pairs.put(splitByKeyVal[0], splitByKeyVal[1]); } return pairs; }
// Covers empty/null input, single and multiple pairs, heavy whitespace/newlines,
// duplicate keys (later wins) and runs of empty comma-separated segments.
@Test public void testStringCollectionSplitByEqualsSuccess() { Map<String, String> splitMap = StringUtils.getTrimmedStringCollectionSplitByEquals(""); Assertions .assertThat(splitMap) .describedAs("Map of key value pairs split by equals(=) and comma(,)") .hasSize(0); splitMap = StringUtils.getTrimmedStringCollectionSplitByEquals(null); Assertions .assertThat(splitMap) .describedAs("Map of key value pairs split by equals(=) and comma(,)") .hasSize(0); splitMap = StringUtils.getTrimmedStringCollectionSplitByEquals( "element.first.key1 = element.first.val1"); Assertions .assertThat(splitMap) .describedAs("Map of key value pairs split by equals(=) and comma(,)") .hasSize(1) .containsEntry("element.first.key1", "element.first.val1"); splitMap = StringUtils.getTrimmedStringCollectionSplitByEquals( "element.xyz.key1 =element.abc.val1 , element.xyz.key2= element.abc.val2"); Assertions .assertThat(splitMap) .describedAs("Map of key value pairs split by equals(=) and comma(,)") .hasSize(2) .containsEntry("element.xyz.key1", "element.abc.val1") .containsEntry("element.xyz.key2", "element.abc.val2"); splitMap = StringUtils.getTrimmedStringCollectionSplitByEquals( "\nelement.xyz.key1 =element.abc.val1 \n" + ", element.xyz.key2=element.abc.val2,element.xyz.key3=element.abc.val3" + " , element.xyz.key4 =element.abc.val4,element.xyz.key5= " + "element.abc.val5 ,\n \n \n " + " element.xyz.key6 = element.abc.val6 \n , \n" + "element.xyz.key7=element.abc.val7,\n"); Assertions .assertThat(splitMap) .describedAs("Map of key value pairs split by equals(=) and comma(,)") .hasSize(7) .containsEntry("element.xyz.key1", "element.abc.val1") .containsEntry("element.xyz.key2", "element.abc.val2") .containsEntry("element.xyz.key3", "element.abc.val3") .containsEntry("element.xyz.key4", "element.abc.val4") .containsEntry("element.xyz.key5", "element.abc.val5") .containsEntry("element.xyz.key6", "element.abc.val6") .containsEntry("element.xyz.key7", "element.abc.val7"); splitMap = 
StringUtils.getTrimmedStringCollectionSplitByEquals( "element.first.key1 = element.first.val2 ,element.first.key1 =element.first.val1"); Assertions .assertThat(splitMap) .describedAs("Map of key value pairs split by equals(=) and comma(,)") .hasSize(1) .containsEntry("element.first.key1", "element.first.val1"); splitMap = StringUtils.getTrimmedStringCollectionSplitByEquals( ",,, , ,, ,element.first.key1 = element.first.val2 ," + "element.first.key1 = element.first.val1 , ,,, ,"); Assertions .assertThat(splitMap) .describedAs("Map of key value pairs split by equals(=) and comma(,)") .hasSize(1) .containsEntry("element.first.key1", "element.first.val1"); splitMap = StringUtils.getTrimmedStringCollectionSplitByEquals( ",, , , ,, ,"); Assertions .assertThat(splitMap) .describedAs("Map of key value pairs split by equals(=) and comma(,)") .hasSize(0); }
public void validateWriter(final Long memberId) { if (!this.writerId.equals(memberId)) { throw new WriterNotEqualsException(); } }
@Test void 작성자가_아니면_예외를_발생한다() { // given Board board = 게시글_생성_사진없음(); // when & then assertThatThrownBy(() -> board.validateWriter(board.getWriterId() + 1)) .isInstanceOf(WriterNotEqualsException.class); }
@Override public void setUpParameters(final List<Object> params) { AtomicInteger parametersOffset = new AtomicInteger(0); insertValueContexts = getInsertValueContexts(params, parametersOffset, valueExpressions); insertSelectContext = getInsertSelectContext(metaData, params, parametersOffset, currentDatabaseName).orElse(null); onDuplicateKeyUpdateValueContext = getOnDuplicateKeyUpdateValueContext(params, parametersOffset).orElse(null); ShardingSphereSchema schema = getSchema(metaData, currentDatabaseName); generatedKeyContext = new GeneratedKeyContextEngine(getSqlStatement(), schema).createGenerateKeyContext(insertColumnNames, insertValueContexts, params).orElse(null); }
@Test void assertInsertStatementContextWithoutColumnNames() { InsertStatement insertStatement = new MySQLInsertStatement(); insertStatement.setTable(new SimpleTableSegment(new TableNameSegment(0, 0, new IdentifierValue("tbl")))); setUpInsertValues(insertStatement); InsertStatementContext actual = createInsertStatementContext(Arrays.asList(1, "Tom", 2, "Jerry"), insertStatement); actual.setUpParameters(Arrays.asList(1, "Tom", 2, "Jerry")); assertInsertStatementContext(actual); }
public static void syncToFile(Collection<Member> members) { try { StringBuilder builder = new StringBuilder(); builder.append('#').append(LocalDateTime.now()).append(StringUtils.LF); for (String member : simpleMembers(members)) { builder.append(member).append(StringUtils.LF); } EnvUtil.writeClusterConf(builder.toString()); } catch (Throwable ex) { Loggers.CLUSTER.error("cluster member node persistence failed : {}", ExceptionUtil.getAllExceptionMsg(ex)); } }
@Test void testSyncToFile() throws IOException { File file = new File(EnvUtil.getClusterConfFilePath()); file.getParentFile().mkdirs(); assertTrue(file.createNewFile()); MemberUtil.syncToFile(Collections.singleton(originalMember)); try (BufferedReader reader = new BufferedReader(new FileReader(EnvUtil.getClusterConfFilePath()))) { String line = ""; while ((line = reader.readLine()) != null) { if (!line.startsWith("#")) { assertEquals(IP + ":" + PORT, line.trim()); return; } } fail("No found member info in cluster.conf"); } finally { file.delete(); } }