focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Looks up a rule by its key.
 *
 * @param key the rule key; must be valid per verifyKeyArgument
 * @return the cached rule, or Optional.empty() when no rule has that key
 */
@Override
public Optional<Rule> findByKey(RuleKey key) {
    // Reject invalid keys up front, then make sure the cache is populated.
    verifyKeyArgument(key);
    ensureInitialized();
    // An absent mapping is surfaced as Optional.empty() rather than null.
    Rule cached = rulesByKey.get(key);
    return Optional.ofNullable(cached);
}
// findByKey() must return the rule stored in the DB for AB_RULE's key.
@Test
public void findByKey_returns_Rule_if_it_exists_in_DB() {
    Optional<Rule> rule = underTest.findByKey(AB_RULE.getKey());
    assertIsABRule(rule.get());
}
/**
 * Reads a single byte from the (lazily opened/refreshed) UFS stream.
 *
 * @return the byte read, or -1 at end of file
 * @throws IOException if the underlying UFS stream fails
 */
@Override
public int read() throws IOException {
    if (mPosition == mLength) { // at end of file
        return -1;
    }
    updateStreamIfNeeded();
    int res = mUfsInStream.get().read();
    if (res == -1) {
        // Underlying stream reported EOF; position is not advanced.
        return -1;
    }
    mPosition++;
    // Account the byte in the UFS read metric.
    Metrics.BYTES_READ_FROM_UFS.inc(1);
    return res;
}
// read(ByteBuffer, off, len) must reject a length larger than the buffer.
@Test
public void readOverflowOffLenByteBuffer() throws IOException, AlluxioException {
    // TODO(lu) the read(ByteBuffer, offset, length) API does not make sense
    // reconsider the API before enabling this test for all in streams
    Assume.assumeFalse(mConf.getBoolean(PropertyKey.USER_CLIENT_CACHE_ENABLED));
    AlluxioURI ufsPath = getUfsPath();
    createFile(ufsPath, CHUNK_SIZE);
    try (FileInStream inStream = getStream(ufsPath)) {
        // Requested length (2 * CHUNK_SIZE) exceeds the buffer capacity.
        assertThrows(IllegalArgumentException.class,
            () -> inStream.read(ByteBuffer.allocate(CHUNK_SIZE), 0, CHUNK_SIZE * 2));
    }
}
/**
 * Applies a zoom-level delta.
 * Convenience overload that delegates to the two-argument variant with
 * the second argument fixed to {@code true}.
 *
 * @param zoomLevelDiff signed zoom-level change to apply
 */
@Override
public void zoom(byte zoomLevelDiff) {
    this.zoom(zoomLevelDiff, true);
}
// Exercises zoom() deltas. Per these assertions the zoom level saturates
// at 0 on the low end and at Byte.MAX_VALUE on the high end.
@Test
public void zoomTest() {
    MapViewPosition mapViewPosition = new MapViewPosition(new DisplayModel());
    Assert.assertEquals(0, mapViewPosition.getZoomLevel());
    mapViewPosition.zoom((byte) 1);
    Assert.assertEquals(1, mapViewPosition.getZoomLevel());
    mapViewPosition.zoom((byte) -1);
    Assert.assertEquals(0, mapViewPosition.getZoomLevel());
    mapViewPosition.zoom((byte) 5);
    Assert.assertEquals(5, mapViewPosition.getZoomLevel());
    mapViewPosition.zoom((byte) -2);
    Assert.assertEquals(3, mapViewPosition.getZoomLevel());
    // Extreme deltas clamp rather than wrap.
    mapViewPosition.zoom(Byte.MAX_VALUE);
    Assert.assertEquals(Byte.MAX_VALUE, mapViewPosition.getZoomLevel());
    mapViewPosition.zoom(Byte.MIN_VALUE);
    Assert.assertEquals(0, mapViewPosition.getZoomLevel());
}
/**
 * Synchronous facade: blocks on the asynchronous trySet and returns its
 * outcome.
 *
 * @param value value to set
 * @return result reported by the async operation
 */
@Override
public boolean trySet(V value) {
    return get(trySetAsync(value));
}
// Covers RJsonBucket set/setIfAbsent on nested JSON paths.
@Test
public void testTrySet() {
    RJsonBucket<TestType> al = redisson.getJsonBucket("test", new JacksonCodec<>(TestType.class));
    TestType t = new TestType();
    t.setName("name1");
    al.set(t);
    NestedType nt = new NestedType();
    nt.setValue(123);
    nt.setValues(Arrays.asList("t1", "t2"));
    al.set("$.type", nt);
    NestedType nt2 = new NestedType();
    nt2.setValue(124);
    nt2.setValues(Arrays.asList("t4", "t3"));
    // setIfAbsent must not overwrite the already-present "type" node,
    // whether addressed via JSONPath or a bare path...
    assertThat(al.setIfAbsent("$.type", nt2)).isFalse();
    assertThat(al.setIfAbsent("type", nt2)).isFalse();
    // ...so the originally stored value is still readable.
    Integer n2 = al.get(new JacksonCodec<>(Integer.class), "type.value");
    assertThat(n2).isEqualTo(123);
}
/**
 * Derives the model name from an index file name: the file-name suffix
 * with the trailing FINAL_SUFFIX removed.
 *
 * @param fileName file name to parse
 * @return the model portion of the suffix
 */
static String getModel(String fileName) {
    String suffix = FileNameUtils.getSuffix(fileName);
    return suffix.replace(FINAL_SUFFIX, "");
}
// getModel() should strip path, base name and the "_json" suffix from
// ".../file.model_json", leaving only "model".
@Test
void testGetModel() {
    String fileName = String.format("%1$sthis%1$sis%1$svalid%1$sfile.model_json", File.separator);
    String expected = "model";
    IndexFile indexFile = new IndexFile(fileName);
    assertThat(indexFile.getModel()).isEqualTo(expected);
}
/**
 * Renders the message body for dumping.
 *
 * @param message HTTP message to dump
 * @return two newlines followed by the dumped body, or "" when the
 *         message has no content
 */
public static String asContent(final HttpMessage message) {
    // Messages without a body dump as the empty string.
    if (!message.hasContent()) {
        return "";
    }
    // Two newlines separate the headers section from the body dump.
    return StringUtil.NEWLINE + StringUtil.NEWLINE + contentForDump(message);
}
// The body must still be parsed when only Content-Type (no Content-Length)
// is present on the message.
@Test
public void should_parse_content_when_content_length_not_set() {
    assertThat(asContent(messageWithHeaders(ImmutableMap.of(HttpHeaders.CONTENT_TYPE, "text/plain"))), is(EXPECTED_MESSAGE_BODY));
}
/**
 * Performs blocking I/O on the channel using {@code buf}, waiting via the
 * selector pool for readiness up to the configured timeout.
 *
 * @param buf buffer to read into / write from; must have bytes remaining
 * @param ops selector interest ops to wait for
 * @return bytes transferred by the first non-zero performIO call, or -1
 *         when the stream was closed while waiting
 * @throws SocketTimeoutException when the channel is not ready in time
 * @throws IOException on channel errors; the stream is marked closed when
 *         the underlying channel is no longer open
 */
int doIO(ByteBuffer buf, int ops) throws IOException {
    /* For now only one thread is allowed. If user want to read or write
     * from multiple threads, multiple streams could be created. In that
     * case multiple threads work as well as underlying channel supports it.
     */
    if (!buf.hasRemaining()) {
        throw new IllegalArgumentException("Buffer has no data left.");
        //or should we just return 0?
    }
    while (buf.hasRemaining()) {
        if (closed) {
            return -1;
        }
        try {
            int n = performIO(buf);
            if (n != 0) {
                // successful io or an error.
                return n;
            }
        } catch (IOException e) {
            // Channel died underneath us: remember so later calls fail fast.
            if (!channel.isOpen()) {
                closed = true;
            }
            throw e;
        }
        //now wait for socket to be ready.
        int count = 0;
        try {
            count = SelectorPool.select(channel, ops, timeout);
        } catch (IOException e) {
            //unexpected IOException.
            closed = true;
            throw e;
        }
        if (count == 0) {
            // Selector wait expired: surface as a socket timeout.
            throw new SocketTimeoutException(timeoutExceptionString(channel, timeout, ops));
        }
        // otherwise the socket should be ready for io.
    }
    return 0; // does not reach here.
}
// Stresses SocketInputStream/SocketOutputStream from 64 threads at once;
// the latch releases every task together to maximize contention.
@Test
public void testSocketIOWithTimeoutByMultiThread() throws Exception {
    CountDownLatch latch = new CountDownLatch(1);
    Runnable ioTask = () -> {
        try {
            Pipe pipe = Pipe.open();
            try (Pipe.SourceChannel source = pipe.source();
                InputStream in = new SocketInputStream(source, TIMEOUT);
                Pipe.SinkChannel sink = pipe.sink();
                OutputStream out = new SocketOutputStream(sink, TIMEOUT)) {
                byte[] writeBytes = TEST_STRING.getBytes();
                byte[] readBytes = new byte[writeBytes.length];
                // Park here until all threads are ready.
                latch.await();
                out.write(writeBytes);
                doIO(null, out, TIMEOUT);
                in.read(readBytes);
                assertArrayEquals(writeBytes, readBytes);
                doIO(in, null, TIMEOUT);
            }
        } catch (Exception e) {
            fail(e.getMessage());
        }
    };
    int threadCnt = 64;
    ExecutorService threadPool = Executors.newFixedThreadPool(threadCnt);
    for (int i = 0; i < threadCnt; ++i) {
        threadPool.submit(ioTask);
    }
    // Give every task time to reach the latch before releasing them.
    Thread.sleep(1000);
    latch.countDown();
    threadPool.shutdown();
    assertTrue(threadPool.awaitTermination(3, TimeUnit.SECONDS));
}
/**
 * Declares that this factory produces a single shared instance.
 *
 * @return always {@code true}
 */
@Override
public boolean isSingleton() {
    return true;
}
// The factory bean must report singleton semantics to the container.
@Test
public final void infinispanNamedEmbeddedCacheFactoryBeanShouldDeclareItselfToBeSingleton() {
    final InfinispanNamedEmbeddedCacheFactoryBean<Object, Object> objectUnderTest = new InfinispanNamedEmbeddedCacheFactoryBean<Object, Object>();
    assertTrue(
        "InfinispanNamedEmbeddedCacheFactoryBean should declare itself to produce a singleton. However, it didn't.",
        objectUnderTest.isSingleton());
}
/**
 * Builds CreateSourceProperties from the supplied property literals.
 * Configuration errors are re-worded in user-facing "property"
 * terminology before being rethrown as KsqlException.
 *
 * @param literals property name to literal value map
 * @return the parsed properties
 * @throws KsqlException when the underlying config validation fails
 */
public static CreateSourceProperties from(final Map<String, Literal> literals) {
    try {
        return new CreateSourceProperties(literals, DurationParser::parse, false);
    } catch (final ConfigException e) {
        // Users supply "properties", not "configuration" — adjust the wording.
        throw new KsqlException(e.getMessage().replace("configuration", "property"), e);
    }
}
// Supplying both the generic and the Avro-specific schema-name
// properties must be rejected with a clear message.
@Test
public void shouldThrowIfValueSchemaNameAndAvroSchemaNameProvided() {
    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> CreateSourceProperties.from(
            ImmutableMap.<String, Literal>builder()
                .putAll(MINIMUM_VALID_PROPS)
                .put(VALUE_SCHEMA_FULL_NAME, new StringLiteral("value_schema"))
                .put(VALUE_AVRO_SCHEMA_FULL_NAME, new StringLiteral("value_schema"))
                .build())
    );
    // Then:
    assertThat(e.getMessage(), is("Cannot supply both 'VALUE_AVRO_SCHEMA_FULL_NAME' "
        + "and 'VALUE_SCHEMA_FULL_NAME' properties. Please only set 'VALUE_SCHEMA_FULL_NAME'."));
}
/**
 * Writes values[1] (content) to the file named by values[0].
 * Optional values[2] is an append flag (default true) and values[3] a
 * charset name (default UTF-8).
 *
 * @return true on success, false on any validation or write failure
 * @throws IOException declared for charset/file API usage
 */
private boolean writeToFile() throws IOException {
    String fileName = ((CompoundVariable) values[0]).execute().trim();
    String content = ((CompoundVariable) values[1]).execute();
    boolean append = true;
    if (values.length >= 3) {
        String appendString = ((CompoundVariable) values[2]).execute().toLowerCase(Locale.ROOT).trim();
        if (!appendString.isEmpty()) {
            append = Boolean.parseBoolean(appendString);
        }
    }
    // Normalize embedded newline tokens to the platform line separator.
    content = NEW_LINE_PATTERN.matcher(content).replaceAll(System.lineSeparator());
    Charset charset = StandardCharsets.UTF_8;
    if (values.length == 4) {
        String charsetParamValue = ((CompoundVariable) values[3]).execute();
        if (StringUtils.isNotEmpty(charsetParamValue)) {
            charset = Charset.forName(charsetParamValue);
        }
    }
    if (fileName.isEmpty()) {
        log.error("File name '{}' is empty", fileName);
        return false;
    }
    log.debug("Writing {} to file {} with charset {} and append {}", content, fileName, charset, append);
    // One lock per file name serializes concurrent writers of the same file.
    Lock lock = lockMap.computeIfAbsent(fileName, key -> new ReentrantLock());
    lock.lock();
    try {
        File file = new File(fileName);
        File fileParent = file.getParentFile();
        // A null parent means a bare relative name in the working directory.
        if (fileParent == null || (fileParent.exists() && fileParent.isDirectory() && fileParent.canWrite())) {
            try {
                FileUtils.writeStringToFile(file, content, charset, append);
            } catch (IllegalArgumentException e) {
                log.error("The file {} can't be written to", file, e);
                return false;
            }
        } else {
            log.error("The parent file of {} doesn't exist or is not writable", file);
            return false;
        }
    } finally {
        lock.unlock();
    }
    return true;
}
// Happy path: writing to a fresh temp file should report success ("true").
@Test
public void testWriteToFile(@TempDir Path tempDir) throws Exception {
    String tempAbsolutePath = tempDir.resolve("output.txt").toAbsolutePath().toString();
    function.setParameters(functionParams(tempAbsolutePath, STRING_TO_WRITE, "true", ENCODING));
    String returnValue = function.execute(result, null);
    assertTrue(Boolean.parseBoolean(returnValue), "This method 'Stringtofile' should have successfully run");
}
/**
 * Atomically increments by one and returns the previous value.
 * Expressed as the general add primitive with a delta of one.
 *
 * @return the value before the increment
 */
@Override
public long getAndIncrement() {
    return this.getAndAdd(1L);
}
// getAndIncrement returns the pre-increment value (0); the stored value
// afterwards is 1.
@Test
public void testGetAndIncrement() {
    RAtomicLong al = redisson.getAtomicLong("test");
    Assertions.assertEquals(0, al.getAndIncrement());
    Assertions.assertEquals(1, al.get());
}
/**
 * Finds all records matching the given per-field query values.
 * Probes the open-addressed match hash table; on a hit, the select bucket
 * pointer/size are decoded into a result. The outer do/while re-reads
 * hashStateVolatile so that a concurrent state swap forces a retry
 * against the fresh state.
 *
 * @param query one value per indexed field; nulls are rejected
 * @return the matches, or null when nothing matches
 * @throws IllegalStateException when the index was never initialized
 * @throws IllegalArgumentException when any query value is null
 */
public HollowHashIndexResult findMatches(Object... query) {
    if (hashStateVolatile == null) {
        throw new IllegalStateException(this + " wasn't initialized");
    }
    // Combine per-field hashes with XOR into a single bucket hash.
    int hashCode = 0;
    for(int i=0;i<query.length;i++) {
        if(query[i] == null)
            throw new IllegalArgumentException("querying by null unsupported; i=" + i);
        hashCode ^= HashCodes.hashInt(keyHashCode(query[i], i));
    }
    HollowHashIndexResult result;
    HollowHashIndexState hashState;
    do {
        result = null;
        hashState = hashStateVolatile;
        long bucket = hashCode & hashState.getMatchHashMask();
        long hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
        boolean bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
        // Linear probing: walk successive buckets until an empty one ends the chain.
        while (!bucketIsEmpty) {
            if (matchIsEqual(hashState.getMatchHashTable(), hashBucketBit, query)) {
                int selectSize = (int) hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey(), hashState.getBitsPerSelectTableSize());
                long selectBucketPointer = hashState.getMatchHashTable().getElementValue(hashBucketBit + hashState.getBitsPerMatchHashKey() + hashState.getBitsPerSelectTableSize(), hashState.getBitsPerSelectTablePointer());
                result = new HollowHashIndexResult(hashState, selectBucketPointer, selectSize);
                break;
            }
            bucket = (bucket + 1) & hashState.getMatchHashMask();
            hashBucketBit = bucket * hashState.getBitsPerMatchHashEntry();
            bucketIsEmpty = hashState.getMatchHashTable().getElementValue(hashBucketBit, hashState.getBitsPerTraverserField()[0]) == 0;
        }
    } while (hashState != hashStateVolatile); // retry if the state changed mid-lookup
    return result;
}
// Exercises index lookups across unique and duplicate (a1, b1) pairs,
// including a miss (null result) and a multi-ordinal match (4, "four").
@Test
public void testBasicHashIndexFunctionality() throws Exception {
    mapper.add(new TypeA(1, 1.1d, new TypeB("one")));
    mapper.add(new TypeA(1, 1.1d, new TypeB("1")));
    mapper.add(new TypeA(2, 2.2d, new TypeB("two"), new TypeB("twenty"), new TypeB("two hundred")));
    mapper.add(new TypeA(3, 3.3d, new TypeB("three"), new TypeB("thirty"), new TypeB("three hundred")));
    mapper.add(new TypeA(4, 4.4d, new TypeB("four")));
    mapper.add(new TypeA(4, 4.5d, new TypeB("four"), new TypeB("forty")));
    roundTripSnapshot();
    HollowHashIndex index = new HollowHashIndex(readStateEngine, "TypeA", "a1", new String[]{"a1", "ab.element.b1.value"});
    Assert.assertNull("An entry that doesn't have any matches has a null iterator", index.findMatches(0, "notfound"));
    assertIteratorContainsAll(index.findMatches(1, "one").iterator(), 0);
    assertIteratorContainsAll(index.findMatches(1, "1").iterator(), 1);
    assertIteratorContainsAll(index.findMatches(2, "two").iterator(), 2);
    assertIteratorContainsAll(index.findMatches(2, "twenty").iterator(), 2);
    assertIteratorContainsAll(index.findMatches(2, "two hundred").iterator(), 2);
    assertIteratorContainsAll(index.findMatches(3, "three").iterator(), 3);
    assertIteratorContainsAll(index.findMatches(3, "thirty").iterator(), 3);
    assertIteratorContainsAll(index.findMatches(3, "three hundred").iterator(), 3);
    // "four" appears in two records: both ordinals must be returned.
    assertIteratorContainsAll(index.findMatches(4, "four").iterator(), 4, 5);
    assertIteratorContainsAll(index.findMatches(4, "forty").iterator(), 5);
}
/**
 * Serializes an org.json container into the request body.
 * Null objects are silently ignored; anything that is not a JSONArray or
 * JSONObject is rejected.
 *
 * @param object   body object; only JSONArray/JSONObject are supported
 * @param bodyType declared body type, used in the error message
 * @param template request to receive the serialized body
 * @throws EncodeException for unsupported body types
 */
@Override
public void encode(Object object, Type bodyType, RequestTemplate template) throws EncodeException {
    if (object == null) {
        return;
    }
    if (!(object instanceof JSONArray) && !(object instanceof JSONObject)) {
        throw new EncodeException(format("%s is not a type supported by this encoder.", bodyType));
    }
    // org.json types render themselves as JSON text via toString().
    template.body(object.toString());
}
// A JSONObject must be encoded verbatim into the request body.
@Test
void encodesObject() {
    new JsonEncoder().encode(jsonObject, JSONObject.class, requestTemplate);
    JSONAssert.assertEquals("{\"a\":\"b\",\"c\":1}", new String(requestTemplate.body(), UTF_8), false);
}
public static long getNumSector(String requestSize, String sectorSize) { Double memSize = Double.parseDouble(requestSize); Double sectorBytes = Double.parseDouble(sectorSize); Double nSectors = memSize / sectorBytes; Double memSizeKB = memSize / 1024; Double memSizeGB = memSize / (1024 * 1024 * 1024); Double memSize100GB = memSizeGB / 100; // allocation bitmap file: one bit per sector Double allocBitmapSize = nSectors / 8; // extend overflow file: 4MB, plus 4MB per 100GB Double extOverflowFileSize = memSize100GB * 1024 * 1024 * 4; // journal file: 8MB, plus 8MB per 100GB Double journalFileSize = memSize100GB * 1024 * 1024 * 8; // catalog file: 10bytes per KB Double catalogFileSize = memSizeKB * 10; // hot files: 5bytes per KB Double hotFileSize = memSizeKB * 5; // quota users file and quota groups file Double quotaUsersFileSize = (memSizeGB * 256 + 1) * 64; Double quotaGroupsFileSize = (memSizeGB * 32 + 1) * 64; Double metadataSize = allocBitmapSize + extOverflowFileSize + journalFileSize + catalogFileSize + hotFileSize + quotaUsersFileSize + quotaGroupsFileSize; Double allocSize = memSize + metadataSize; Double numSectors = allocSize / sectorBytes; System.out.println(numSectors.longValue() + 1); // round up return numSectors.longValue() + 1; }
// Smallest case: a 512-byte request on 512-byte sectors needs 2 sectors
// (one data sector plus metadata, rounded up).
@Test
public void getSectorTest512() {
    String testRequestSize = "512";
    String testSectorSize = "512";
    long result = HFSUtils.getNumSector(testRequestSize, testSectorSize);
    assertEquals(2L, result);
}
/**
 * Returns a stream over the CWE identifiers contained in this set.
 *
 * @return a stream of the stored CWE ids
 */
public Stream<String> stream() {
    return cwes.stream();
}
// stream() should yield exactly the CWE ids that were added.
@Test
public void testStream() {
    CweSet instance = new CweSet();
    instance.addCwe("79");
    String expResult = "79";
    String result = instance.stream().collect(Collectors.joining(" "));
    assertEquals(expResult, result);
}
/**
 * Tells whether the given string is an absolute URI carrying both a
 * scheme and a host (e.g. "http://example.com").
 *
 * Improvement: the commons-lang {@code StringUtils.isBlank} call was
 * replaced with an equivalent JDK-only blank check, removing the
 * third-party dependency from this method. Behavior is unchanged.
 *
 * @param potentialUri candidate string; may be null or blank
 * @return true only when the string parses as a URI with a non-null
 *         scheme and host; false for null/blank input or parse failures
 */
public static boolean isUri(String potentialUri) {
    // Blank input (null, empty, whitespace-only) is never a URI.
    if (potentialUri == null || potentialUri.trim().isEmpty()) {
        return false;
    }
    try {
        URI uri = new URI(potentialUri);
        // Require both parts: rejects relative references and opaque
        // URIs such as "mailto:user@host" (no host component).
        return uri.getScheme() != null && uri.getHost() != null;
    } catch (URISyntaxException e) {
        return false;
    }
}
// A URI providing both scheme and host must be accepted.
@Test
public void returns_true_when_uri_contains_scheme_and_host() {
    assertThat(UriValidator.isUri("http://127.0.0.1"), is(true));
}
/**
 * Creates an immutable, empty graph that inherits its configuration
 * (directedness, self-loop policy, ...) from the given template graph.
 *
 * @param graph template whose configuration is copied
 * @return an immutable graph with no nodes and no edges
 */
public static <N> ImmutableGraph<N> emptyGraph(Graph<N> graph) {
    // Build a fresh graph from the template's configuration, then freeze it.
    Graph<N> empty = GraphBuilder.from(graph).build();
    return ImmutableGraph.copyOf(empty);
}
/**
 * Verifies emptyGraph() copies the template configuration and is truly
 * empty.
 *
 * Fix: the original asserted {@code edges()} emptiness twice (an obvious
 * copy-paste duplicate); the second assertion now checks {@code nodes()}
 * so node-emptiness is verified as well.
 */
@Test
public void emptyGraphWithTemplate() {
    // Template fixes the configuration the empty graph must inherit.
    final MutableGraph<String> templateGraph = GraphBuilder
        .directed()
        .allowsSelfLoops(true)
        .build();
    final ImmutableGraph<String> emptyGraph = Graphs.emptyGraph(templateGraph);
    // Configuration copied from the template.
    assertThat(emptyGraph.isDirected()).isTrue();
    assertThat(emptyGraph.allowsSelfLoops()).isTrue();
    // The graph itself is empty: no edges and no nodes.
    assertThat(emptyGraph.edges()).isEmpty();
    assertThat(emptyGraph.nodes()).isEmpty();
}
/**
 * Finalizes SCM publishing: stops the progress report and, when the run
 * succeeded but some files have no blame data, logs each of them and
 * registers a single user-facing analysis warning with a documentation
 * link.
 *
 * @param success whether the SCM publishing step completed successfully
 */
public void finish(boolean success) {
    progressReport.stopAndLogTotalTime("SCM Publisher " + count + "/" + total + " " + pluralize(count) + " have been analyzed");
    // Only warn on success; a failed run reports its own errors elsewhere.
    if (success && !allFilesToBlame.isEmpty()) {
        LOG.warn("Missing blame information for the following files:");
        for (InputFile f : allFilesToBlame) {
            LOG.warn(" * " + f);
        }
        LOG.warn("This may lead to missing/broken features in SonarQube");
        String docUrl = documentationLinkGenerator.getDocumentationLink(SCM_INTEGRATION_DOCUMENTATION_SUFFIX);
        // Single deduplicated warning shown in the UI, with singular/plural wording.
        analysisWarnings.addUnique(format("Missing blame information for %d %s. This may lead to some features not working correctly. "
            + "Please check the analysis logs and refer to <a href=\"%s\" rel=\"noopener noreferrer\" target=\"_blank\">the documentation</a>.",
            allFilesToBlame.size(), allFilesToBlame.size() > 1 ? "files" : "file", docUrl));
    }
}
// finish(true) with one unblamed file must register exactly one analysis
// warning, singular-worded and containing the documentation link.
@Test
public void addWarningWithContextIfFiles() {
    when(documentationLinkGenerator.getDocumentationLink(SCM_INTEGRATION_DOCUMENTATION_SUFFIX)).thenReturn(DOC_LINK);
    InputFile file = new TestInputFileBuilder("foo", "src/main/java/Foo.java").setLines(10).build();
    when(documentationLinkGenerator.getDocumentationLink("suffix")).thenReturn("blababl");
    new DefaultBlameOutput(null, analysisWarnings, singletonList(file), documentationLinkGenerator).finish(true);
    assertThat(analysisWarnings.warnings()).extracting(DefaultAnalysisWarnings.Message::getText)
        .containsOnly("Missing blame information for 1 file. This may lead to some features not working correctly. "
            + "Please check the analysis logs and refer to <a href=\"" + DOC_LINK + "\" rel=\"noopener noreferrer\" target=\"_blank\">the documentation</a>.");
}
/**
 * Returns this function's name.
 *
 * @return the stored name
 */
@Override
public String getName() {
    return _name;
}
// md5 scalar transform: resolves to the scalar wrapper, reports the name
// "md5", and hashes each BYTES value like DigestUtils.md5Hex.
@Test
public void testMd5TransformFunction() {
    ExpressionContext expression = RequestContextUtils.getExpression(String.format("md5(%s)", BYTES_SV_COLUMN));
    TransformFunction transformFunction = TransformFunctionFactory.get(expression, _dataSourceMap);
    assertTrue(transformFunction instanceof ScalarTransformFunctionWrapper);
    assertEquals(transformFunction.getName(), "md5");
    String[] expectedValues = new String[NUM_ROWS];
    for (int i = 0; i < NUM_ROWS; i++) {
        expectedValues[i] = DigestUtils.md5Hex(_bytesSVValues[i]);
    }
    testTransformFunction(transformFunction, expectedValues);
}
/**
 * Returns the parameter names of the given method, resolved from the
 * debug information of its declaring class.
 *
 * Improvement: replaced the check-then-put cache population with
 * {@code Map.computeIfAbsent}, the idiomatic (and, on concurrent maps,
 * atomic) pattern. Lookup behavior is otherwise unchanged.
 *
 * @param originalMethod method whose parameter names are requested
 * @return the parameter names, or null when no debug info is available
 */
public String[] getParameterNames(Method originalMethod) {
    Class<?> declaringClass = originalMethod.getDeclaringClass();
    // Inspect the class once and cache the per-member name table.
    Map<Member, String[]> map =
        this.parameterNamesCache.computeIfAbsent(declaringClass, this::inspectClass);
    // NO_DEBUG_INFO_MAP is a sentinel: the class carries no debug info.
    if (map != NO_DEBUG_INFO_MAP) {
        return map.get(originalMethod);
    }
    return null;
}
// Parameter-name discovery for zero-, one- and multi-parameter methods.
@Test
public void test() {
    String[] parameterNames;
    parameterNames = DISCOVERER.getParameterNames(CLASS_NO_PARAM_METHOD);
    Assert.assertTrue(CommonUtils.isEmpty(parameterNames));
    parameterNames = DISCOVERER.getParameterNames(CLASS_ONE_PARAM_METHOD);
    Assert.assertEquals("a", parameterNames[0]);
    parameterNames = DISCOVERER.getParameterNames(CLASS_MULTI_PARAM_METHOD);
    Assert.assertEquals("a", parameterNames[0]);
    Assert.assertEquals("b", parameterNames[1]);
}
/**
 * Returns this frequency expressed in terahertz.
 * The cast forces floating-point division so sub-THz frequencies keep
 * their fractional part.
 *
 * @return the frequency in THz
 */
public double asTHz() {
    return (double) frequency / THZ;
}
// 10^12 Hz must convert to exactly 1.0 THz.
@Test
public void testasTHz() {
    Frequency frequency = Frequency.ofHz(1_000_000_000_000L);
    assertThat(frequency.asTHz(), is(1.0));
}
/**
 * Fetches an OAuth2 access token via the client-credentials grant.
 *
 * Fix: the caught exceptions are now attached as the cause of the thrown
 * IllegalStateException (previously only their toString() was kept in the
 * message), preserving the full stack trace. Messages are unchanged.
 *
 * @return the access token string
 * @throws IllegalStateException when the token cannot be obtained or the
 *         thread is interrupted while waiting
 */
public static String getAccessTokenFromClientCredentialsGrantFlow(String host, String clientId, String clientSecret, String tenant, String scope) {
    final OAuth20Service service = new ServiceBuilder(clientId)
        .apiSecret(clientSecret)
        .defaultScope(scope)
        .build(new ScribeMicrosoftOauth2Api(host, tenant));
    try {
        return service.getAccessTokenClientCredentialsGrant().getAccessToken();
    } catch (IOException | ExecutionException e) {
        throw new IllegalStateException("Unable to get a token: " + e, e);
    } catch (InterruptedException e) {
        // Restore the interrupt flag before translating the exception.
        Thread.currentThread().interrupt();
        throw new IllegalStateException("Interrupted while getting a token: " + e, e);
    }
}
// Against an unreachable endpoint the flow must surface an
// IllegalStateException whose message starts with the token-error prefix.
@Test
void getAccessTokenFromClientCredentialsGrantFlow_throwsException() {
    assertThatThrownBy(() -> OAuthMicrosoftRestClient.getAccessTokenFromClientCredentialsGrantFlow("https://localhost", "clientId", "clientSecret", "tenant", "scope"))
        .isInstanceOf(IllegalStateException.class)
        .hasMessageStartingWith("Unable to get a token: ");
}
/**
 * Feeds every stored item whose envelope intersects {@code query} to
 * {@code consumer}. Iterative depth-first traversal of the packed R-tree:
 * two parallel int stacks hold the pending node offset and its level
 * (level 0 = leaves).
 *
 * @param query    search rectangle
 * @param consumer receives each intersecting item
 */
public void findIntersections(Rectangle query, Consumer<T> consumer) {
    IntArrayList todoNodes = new IntArrayList(levelOffsets.length * degree);
    IntArrayList todoLevels = new IntArrayList(levelOffsets.length * degree);
    int rootLevel = levelOffsets.length - 1;
    int rootIndex = levelOffsets[rootLevel];
    // Seed the traversal with the root only if it can intersect at all.
    if (doesIntersect(query, rootIndex)) {
        todoNodes.push(rootIndex);
        todoLevels.push(rootLevel);
    }
    while (!todoNodes.isEmpty()) {
        int nodeIndex = todoNodes.popInt();
        int level = todoLevels.popInt();
        if (level == 0) {
            // This is a leaf node
            consumer.accept(items[nodeIndex / ENVELOPE_SIZE]);
        } else {
            // Interior node: push each intersecting child, one level down.
            int childrenOffset = getChildrenOffset(nodeIndex, level);
            for (int i = 0; i < degree; i++) {
                int childIndex = childrenOffset + ENVELOPE_SIZE * i;
                if (doesIntersect(query, childIndex)) {
                    todoNodes.push(childIndex);
                    todoLevels.push(level - 1);
                }
            }
        }
    }
}
// An empty index returns no intersections, even for an all-covering query.
@Test
public void testEmptyFlatbush() {
    Flatbush<Rectangle> rtree = new Flatbush<>(new Rectangle[] {});
    assertEquals(findIntersections(rtree, EVERYTHING), ImmutableList.of());
}
/**
 * Returns the virtual devices belonging to this network.
 *
 * @return the devices, materialized into a set
 */
@Override
public Iterable<Device> getDevices() {
    // Snapshot the manager's view of this network's devices.
    Iterable<Device> devices = manager.getVirtualDevices(this.networkId)
        .stream()
        .collect(Collectors.toSet());
    return devices;
}
// getDevices(null) on the virtual-network device service must throw NPE.
@Test(expected = NullPointerException.class)
public void testGetDeviceByNullType() {
    manager.registerTenantId(TenantId.tenantId(tenantIdValue1));
    VirtualNetwork virtualNetwork = manager.createVirtualNetwork(TenantId.tenantId(tenantIdValue1));
    DeviceService deviceService = manager.get(virtualNetwork.id(), DeviceService.class);
    // test the getDevices() method with null type value.
    deviceService.getDevices(null);
}
/**
 * Starts (or reuses) an in-flight FindCoordinator request.
 * Synchronized so at most one request is outstanding: concurrent callers
 * receive the same future. With no reachable broker, returns an
 * immediately failed future without sending anything.
 *
 * @return the shared lookup future, or a noBrokersAvailable failure
 */
protected synchronized RequestFuture<Void> lookupCoordinator() {
    if (findCoordinatorFuture == null) {
        // find a node to ask about the coordinator
        Node node = this.client.leastLoadedNode();
        if (node == null) {
            log.debug("No broker available to send FindCoordinator request");
            return RequestFuture.noBrokersAvailable();
        } else {
            findCoordinatorFuture = sendFindCoordinatorRequest(node);
        }
    }
    return findCoordinatorFuture;
}
// Coordinator lookup: fails fast with no brokers, deduplicates in-flight
// requests, and issues a fresh request after the previous one completes.
@Test
public void testLookupCoordinator() {
    setupCoordinator();
    mockClient.backoff(node, 50);
    RequestFuture<Void> noBrokersAvailableFuture = coordinator.lookupCoordinator();
    assertTrue(noBrokersAvailableFuture.failed(), "Failed future expected");
    mockTime.sleep(50);
    RequestFuture<Void> future = coordinator.lookupCoordinator();
    assertFalse(future.isDone(), "Request not sent");
    // While one request is pending, callers must get the same future back.
    assertSame(future, coordinator.lookupCoordinator(), "New request sent while one is in progress");
    mockClient.prepareResponse(groupCoordinatorResponse(node, Errors.NONE));
    coordinator.ensureCoordinatorReady(mockTime.timer(Long.MAX_VALUE));
    assertNotSame(future, coordinator.lookupCoordinator(), "New request not sent after previous completed");
}
/**
 * Loads all metrics from the paginated web service.
 *
 * @return a repository wrapping the fetched metrics
 * @throws IllegalStateException when any page cannot be fetched
 */
@Override
public MetricsRepository load() {
    // Accumulates metrics fetched page by page from the web service.
    final List<Metric> metricList = new ArrayList<>();
    try {
        loadFromPaginatedWs(metricList);
    } catch (Exception e) {
        // Wrap everything: callers only need to know the load failed.
        throw new IllegalStateException("Unable to load metrics", e);
    }
    return new MetricsRepository(metricList);
}
// Even when closing the WS reader itself fails, load() must wrap the
// problem in an IllegalStateException.
@Test
public void testCloseError() throws IOException {
    Reader reader = mock(Reader.class);
    when(reader.read(any(char[].class), anyInt(), anyInt())).thenReturn(-1);
    doThrow(new IOException()).when(reader).close();
    WsTestUtil.mockReader(wsClient, reader);
    assertThatThrownBy(() -> metricsRepositoryLoader.load())
        .isInstanceOf(IllegalStateException.class);
}
/**
 * Builds the assertion-consumer redirect URL for the SAML session matched
 * by artifact or transaction id. Cancelled/failed BVD statuses are
 * recorded on the session first; the session's resolve deadline is
 * extended and the session persisted before returning.
 *
 * @throws SamlSessionException when the supplied sessionId does not match
 *         the session's stored HTTP session id
 * @throws UnsupportedEncodingException declared for URLEncoder usage
 */
public String generateRedirectUrl(String artifact, String transactionId, String sessionId, BvdStatus status) throws SamlSessionException, UnsupportedEncodingException {
    final var samlSession = findSamlSessionByArtifactOrTransactionId(artifact, transactionId);
    if (CANCELLED.equals(status))
        samlSession.setBvdStatus(AdAuthenticationStatus.STATUS_CANCELED.label);
    if (ERROR.equals(status))
        samlSession.setBvdStatus(AdAuthenticationStatus.STATUS_FAILED.label);
    // Fall back to the artifact stored on the session when none was passed.
    if (artifact == null)
        artifact = samlSession.getArtifact();
    if (sessionId == null || !sessionId.equals(samlSession.getHttpSessionId()))
        throw new SamlSessionException("Saml session found with invalid sessionId for redirect_with_artifact");
    var url = new StringBuilder(samlSession.getAssertionConsumerServiceURL() + "?SAMLart=" + URLEncoder.encode(artifact, "UTF-8"));
    // append relay-state
    if (samlSession.getRelayState() != null)
        url.append("&RelayState=" + URLEncoder.encode(samlSession.getRelayState(), "UTF-8"));
    samlSession.setResolveBeforeTime(System.currentTimeMillis() + 1000 * 60 * minutesToResolve);
    samlSessionRepository.save(samlSession);
    return url.toString();
}
// Happy flow: with a matching session, the redirect URL carries the
// URL-encoded artifact as the SAMLart query parameter.
@Test
void redirectWithArtifactHappyFlow() throws SamlSessionException, UnsupportedEncodingException {
    when(samlSessionRepositoryMock.findByArtifact(anyString())).thenReturn(Optional.of(createSamlSession()));
    String url = assertionConsumerServiceUrlService.generateRedirectUrl("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS", null, "SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS", null);
    assertEquals("https://sso.afnemer.nl/sp/assertion_consumer_service?SAMLart=AAQAAEotn7wK9jsnzCpL6em5sCpDVvMWlkQ34i%2Fjc4CmqxKKDt4mJxh3%2FvY%3D", url);
}
/**
 * Computes, in a single fold over the RDD, the pair (total number of
 * inserted records, number of write statuses with at least one insert).
 * The second component drives the HBase put parallelism.
 *
 * @param writeStatusRDD write statuses produced by the commit
 * @return (sum of insert counts, count of insert-carrying statuses)
 */
Tuple2<Long, Integer> getHBasePutAccessParallelism(final JavaRDD<WriteStatus> writeStatusRDD) {
    // Keep only statuses that actually inserted records; map each to
    // (insertCount, 1) so a fold can sum both components at once.
    final JavaPairRDD<Long, Integer> insertOnlyWriteStatusRDD = writeStatusRDD
        .filter(w -> w.getStat().getNumInserts() > 0).mapToPair(w -> new Tuple2<>(w.getStat().getNumInserts(), 1));
    return insertOnlyWriteStatusRDD.fold(new Tuple2<>(0L, 0), (w, c) -> new Tuple2<>(w._1 + c._1, w._2 + c._2));
}
// Per the expected totals, getSampleWriteStatus's first argument is the
// insert count: inserts 0,2,4,6,8 sum to 20 puts, and 4 of the 5 statuses
// carry at least one insert.
@Test
public void testsHBasePutAccessParallelism() {
    HoodieWriteConfig config = getConfig();
    SparkHoodieHBaseIndex index = new SparkHoodieHBaseIndex(config);
    final JavaRDD<WriteStatus> writeStatusRDD = jsc().parallelize(
        Arrays.asList(
            getSampleWriteStatus(0, 2),
            getSampleWriteStatus(2, 3),
            getSampleWriteStatus(4, 3),
            getSampleWriteStatus(6, 3),
            getSampleWriteStatus(8, 0)),
        10);
    final Tuple2<Long, Integer> tuple = index.getHBasePutAccessParallelism(writeStatusRDD);
    final int hbasePutAccessParallelism = Integer.parseInt(tuple._2.toString());
    final int hbaseNumPuts = Integer.parseInt(tuple._1.toString());
    assertEquals(10, writeStatusRDD.getNumPartitions());
    assertEquals(4, hbasePutAccessParallelism);
    assertEquals(20, hbaseNumPuts);
}
/**
 * Builds the human-readable audit context for a role-change event.
 * With no "before" snapshot the message omits the diff; otherwise the
 * field-level contrast is appended.
 *
 * @return the formatted context string
 */
@Override
public String buildContext() {
    final RoleDO after = (RoleDO) getAfter();
    // The lower-cased event-type name appears in both message variants.
    final String typeName = StringUtils.lowerCase(getType().getType().toString());
    if (Objects.isNull(getBefore())) {
        return String.format("the role [%s] is %s", after.getRoleName(), typeName);
    }
    return String.format("the role [%s] is %s : %s", after.getRoleName(), typeName, contrast());
}
// buildContext() variants: name-only change, description-only change,
// both changed, no "before" snapshot, and no effective change.
@Test
public void testBuildContext() {
    String roleUpdateStr = StringUtils.lowerCase(EventTypeEnum.ROLE_UPDATE.getType().toString());
    String roleNameChangeStr = String.format("name[%s => %s] ", roleDO.getRoleName(), roleDORoleNameChange.getRoleName());
    String roleNameChangeExpectedStr = String.format("the role [%s] is %s : %s", roleDORoleNameChange.getRoleName(), roleUpdateStr, roleNameChangeStr);
    assertEquals(roleNameChangeExpectedStr, roleChangedEventRoleNameChangeTest.buildContext());
    String descriptionChangeStr = String.format("disc[%s => %s] ", roleDO.getDescription(), roleDODescriptionChange.getDescription());
    String descriptionChangeExpectedStr = String.format("the role [%s] is %s : %s", roleDODescriptionChange.getRoleName(), roleUpdateStr, descriptionChangeStr);
    assertEquals(descriptionChangeExpectedStr, roleChangedEventDescriptionChangeTest.buildContext());
    String changeStr = String.format("name[%s => %s] disc[%s => %s] ", roleDO.getRoleName(), roleDOChange.getRoleName(), roleDO.getDescription(), roleDOChange.getDescription());
    String changeExpectedStr = String.format("the role [%s] is %s : %s", roleDOChange.getRoleName(), roleUpdateStr, changeStr);
    assertEquals(changeExpectedStr, roleChangedEventTest.buildContext());
    // No "before" snapshot: message carries no diff suffix.
    String beforeNullExpectedStr = String.format("the role [%s] is %s", roleDO.getRoleName(), roleUpdateStr);
    assertEquals(beforeNullExpectedStr, roleChangedEventBeforeNullTest.buildContext());
    String withoutChangeExpectedStr = String.format("the role [%s] is %s : it no change", roleDO.getRoleName(), roleUpdateStr);
    assertEquals(withoutChangeExpectedStr, roleChangedEventWithoutChangeTest.buildContext());
}
/**
 * Starts a query: select all fields from all sources, filtered on the
 * given field.
 *
 * @param fieldName field to filter on
 * @return the field handle to continue building the condition
 */
public static Field p(String fieldName) {
    return SELECT_ALL_FROM_SOURCES_ALL.where(fieldName);
}
// fuzzy() with annotations must render prefixLength/maxEditDistance as an
// annotation map directly ahead of the fuzzy operator in the YQL.
@Test
void fuzzy_with_annotation() {
    String q = Q.p("f1").fuzzy(A.a("maxEditDistance", 3).append(A.a("prefixLength", 10)), "text to match").build();
    assertEquals("yql=select * from sources * where f1 contains ({\"prefixLength\":10,\"maxEditDistance\":3}fuzzy(\"text to match\"))", q);
}
/**
 * Closes this context, then rethrows any throwable recorded during the
 * operation: as-is when it is an UnavailableException, otherwise wrapped
 * in a RuntimeException.
 *
 * @throws UnavailableException when mThrown is an UnavailableException
 */
@Override
public void close() throws UnavailableException {
    // JournalContext is closed before block deletion context so that file system master changes
    // are written before block master changes. If a failure occurs between deleting an inode and
    // remove its blocks, it's better to have an orphaned block than an inode with a missing block.
    closeQuietly(mJournalContext);
    closeQuietly(mBlockDeletionContext);
    if (mThrown != null) {
        Throwables.propagateIfPossible(mThrown, UnavailableException.class);
        throw new RuntimeException(mThrown);
    }
}
// Closing a context with no recorded failure must not throw.
@Test
public void success() throws Throwable {
    mRpcContext.close();
}
/**
 * Translates a Hive SearchArgument into an Iceberg filter Expression.
 *
 * @param sarg the Hive predicate tree to translate
 * @return the equivalent Iceberg expression
 */
public static Expression generateFilterExpression(SearchArgument sarg) {
    return translate(sarg.getExpression(), sarg.getLeaves());
}
// equals(float, NaN) must be rewritten to Iceberg's isNaN predicate
// rather than a literal equality on NaN.
@Test
public void testEqualsOperandRewrite() {
    SearchArgument.Builder builder = SearchArgumentFactory.newBuilder();
    SearchArgument arg = builder.startAnd().equals("float", PredicateLeaf.Type.FLOAT, Double.NaN).end().build();
    UnboundPredicate expected = Expressions.isNaN("float");
    UnboundPredicate actual = (UnboundPredicate) HiveIcebergFilterFactory.generateFilterExpression(arg);
    assertPredicatesMatch(expected, actual);
}
private ExitStatus run() { try { init(); return new Processor().processNamespace().getExitStatus(); } catch (IllegalArgumentException e) { System.out.println(e + ". Exiting ..."); return ExitStatus.ILLEGAL_ARGUMENTS; } catch (IOException e) { System.out.println(e + ". Exiting ..."); LOG.error(e + ". Exiting ..."); return ExitStatus.IO_EXCEPTION; } finally { dispatcher.shutdownNow(); } }
// With every datanode reporting its blocks as pinned, the mover must give
// up and exit with NO_MOVE_BLOCK rather than retry forever.
@Test(timeout = 90000)
public void testMoverWithPinnedBlocks() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    initConf(conf);
    // Sets bigger retry max attempts value so that test case will timed out if
    // block pinning errors are not handled properly during block movement.
    conf.setInt(DFSConfigKeys.DFS_MOVER_RETRY_MAX_ATTEMPTS_KEY, 10000);
    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(3)
        .build();
    try {
        cluster.waitActive();
        final DistributedFileSystem dfs = cluster.getFileSystem();
        final String file = "/testMoverWithPinnedBlocks/file";
        Path dir = new Path("/testMoverWithPinnedBlocks");
        dfs.mkdirs(dir);
        // write to DISK
        dfs.setStoragePolicy(dir, "HOT");
        final FSDataOutputStream out = dfs.create(new Path(file));
        byte[] fileData = StripedFileTestUtil
            .generateBytes(DEFAULT_BLOCK_SIZE * 3);
        out.write(fileData);
        out.close();
        // verify before movement
        LocatedBlock lb = dfs.getClient().getLocatedBlocks(file, 0).get(0);
        StorageType[] storageTypes = lb.getStorageTypes();
        for (StorageType storageType : storageTypes) {
            Assert.assertTrue(StorageType.DISK == storageType);
        }
        // Adding one SSD based data node to the cluster.
        StorageType[][] newtypes = new StorageType[][] {{StorageType.SSD}};
        startAdditionalDNs(conf, 1, newtypes, cluster);
        // Mock FsDatasetSpi#getPinning to show that the block is pinned.
        for (int i = 0; i < cluster.getDataNodes().size(); i++) {
            DataNode dn = cluster.getDataNodes().get(i);
            LOG.info("Simulate block pinning in datanode {}", dn);
            InternalDataNodeTestUtils.mockDatanodeBlkPinning(dn, true);
        }
        // move file blocks to ONE_SSD policy
        dfs.setStoragePolicy(dir, "ONE_SSD");
        int rc = ToolRunner.run(conf, new Mover.Cli(), new String[] {"-p", dir.toString()});
        int exitcode = ExitStatus.NO_MOVE_BLOCK.getExitCode();
        Assert.assertEquals("Movement should fail", exitcode, rc);
    } finally {
        cluster.shutdown();
    }
}
/**
 * Builds the Kafka Streams stream-table join for the given plan step.
 * Constructs the left side's key/value serdes from its internal formats,
 * derives the joined output schema, and applies a LEFT or INNER join.
 *
 * @return the left holder re-wrapped around the joined stream/schema
 * @throws IllegalStateException for unsupported join types
 */
public static <K> KStreamHolder<K> build(
    final KStreamHolder<K> left,
    final KTableHolder<K> right,
    final StreamTableJoin<K> join,
    final RuntimeBuildContext buildContext,
    final JoinedFactory joinedFactory
) {
    final Formats leftFormats = join.getInternalFormats();
    final QueryContext queryContext = join.getProperties().getQueryContext();
    final QueryContext.Stacker stacker = QueryContext.Stacker.of(queryContext);
    final LogicalSchema leftSchema = left.getSchema();
    final PhysicalSchema leftPhysicalSchema = PhysicalSchema.from(
        leftSchema,
        leftFormats.getKeyFeatures(),
        leftFormats.getValueFeatures()
    );
    final Serde<GenericRow> leftSerde = buildContext.buildValueSerde(
        leftFormats.getValueFormat(),
        leftPhysicalSchema,
        stacker.push(SERDE_CTX).getQueryContext()
    );
    final Serde<K> keySerde = left.getExecutionKeyFactory().buildKeySerde(
        leftFormats.getKeyFormat(),
        leftPhysicalSchema,
        queryContext
    );
    // Right-side serde is null: the table is already materialized.
    final Joined<K, GenericRow, GenericRow> joined = joinedFactory.create(
        keySerde,
        leftSerde,
        null,
        StreamsUtil.buildOpName(queryContext)
    );
    final LogicalSchema rightSchema = right.getSchema();
    final JoinParams joinParams = JoinParamsFactory
        .create(join.getKeyColName(), leftSchema, rightSchema);
    final KStream<K, GenericRow> result;
    switch (join.getJoinType()) {
        case LEFT:
            result = left.getStream().leftJoin(right.getTable(), joinParams.getJoiner(), joined);
            break;
        case INNER:
            result = left.getStream().join(right.getTable(), joinParams.getJoiner(), joined);
            break;
        default:
            throw new IllegalStateException("invalid join type");
    }
    return left.withStream(result, joinParams.getSchema());
}
@Test
public void shouldReturnCorrectSchema() {
    // Given: an INNER join keyed on R_KEY.
    givenInnerJoin(R_KEY);

    // When:
    final KStreamHolder<Struct> result = join.build(planBuilder, planInfo);

    // Then: the holder's schema equals the schema JoinParamsFactory derives
    // for the same key column and left/right schemas.
    assertThat(
        result.getSchema(),
        is(JoinParamsFactory.create(R_KEY, LEFT_SCHEMA, RIGHT_SCHEMA).getSchema())
    );
}
/**
 * Replaces the element at {@code index} with {@code value}, supporting
 * negative indices counted from the end ({@code -1} is the last element).
 *
 * @param index position in {@code [-mSize, mSize)}; negative values are
 *              offset by {@code mSize}
 * @param value the element to store
 * @return the element previously stored at that position
 * @throws IndexOutOfBoundsException if {@code index} is outside
 *         {@code [-mSize, mSize)}
 */
public E set(int index, E value) {
    // Bug fix: the original check "index + mSize < 0" can overflow for index
    // values near Integer.MIN_VALUE, wrapping to a positive sum and letting an
    // invalid index slip past the bounds check. "index < -mSize" is the same
    // condition without the overflow (mSize is non-negative).
    if (index >= mSize || index < -mSize) {
        throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + mSize);
    }
    return mElements.set(index < 0 ? index + mSize : index, value);
}
@Test
void testIllegalSet() throws Exception {
    // Setting index 1 on an empty stack must be rejected as out of bounds.
    final Stack<String> stack = new Stack<>();
    Assertions.assertThrows(IndexOutOfBoundsException.class, () -> stack.set(1, "illegal"));
}
/**
 * Decides whether {@code path} is a directory. In a PFS (posix-like)
 * environment the decision is made from the object's metadata; otherwise the
 * generic object-store logic of the superclass is used.
 *
 * @param path the path to check
 * @return true if the path denotes a directory
 * @throws IOException if the superclass lookup fails
 */
@Override
public boolean isDirectory(String path) throws IOException {
    if (!isEnvironmentPFS()) {
        return super.isDirectory(path);
    }
    // The bucket root is always a directory.
    if (isRoot(path)) {
        return true;
    }
    String pathKey = stripPrefixIfPresent(path);
    try {
        ObjectMetadata meta = mClient.getObjectMetadata(mBucketName, pathKey);
        return meta != null && isDirectoryInPFS(meta);
    } catch (ObsException e) {
        // Treat lookup failures as "not a directory" but keep the evidence.
        LOG.warn("Failed to get Object {}", pathKey, e);
        return false;
    }
}
@Test
public void testIsDirectory() throws IOException {
    // The root path must always be reported as a directory.
    Assert.assertTrue(mOBSUnderFileSystem.isDirectory("/"));
}
/**
 * Admin endpoint that schedules a namespace rename. Requires an admin user;
 * authorization failures surface as an {@link ErrorResultException} and are
 * converted into the corresponding error response.
 */
@PostMapping(
    path = "/admin/change-namespace",
    consumes = MediaType.APPLICATION_JSON_VALUE,
    produces = MediaType.APPLICATION_JSON_VALUE
)
public ResponseEntity<ResultJson> changeNamespace(@RequestBody ChangeNamespaceJson json) {
    try {
        admins.checkAdminUser();
        admins.changeNamespace(json);
        // The change is asynchronous; tell the caller what was scheduled.
        String message = "Scheduled namespace change from '" + json.oldNamespace
            + "' to '" + json.newNamespace
            + "'.\nIt can take 15 minutes to a couple hours for the change to become visible.";
        return ResponseEntity.ok(ResultJson.success(message));
    } catch (ErrorResultException exc) {
        return exc.toResponseEntity();
    }
}
@Test
public void testChangeNamespace() throws Exception {
    // An admin posting a valid change-namespace request gets a success message
    // and a ChangeNamespaceJob is enqueued with the same payload.
    mockAdminUser();

    // 'foo' exists, 'bar' does not, so the rename is legal.
    var foo = new Namespace();
    foo.setName("foo");
    Mockito.when(repositories.findNamespace(foo.getName())).thenReturn(foo);
    var bar = new Namespace();
    bar.setName("bar");
    Mockito.when(repositories.findNamespace(bar.getName())).thenReturn(null);

    var content = "{" +
        "\"oldNamespace\": \"foo\", " +
        "\"newNamespace\": \"bar\", " +
        "\"removeOldNamespace\": false, " +
        "\"mergeIfNewNamespaceAlreadyExists\": true" +
        "}";

    // The JSON object the scheduler is expected to receive.
    var json = new ChangeNamespaceJson();
    json.oldNamespace = "foo";
    json.newNamespace = "bar";
    json.removeOldNamespace = false;
    json.mergeIfNewNamespaceAlreadyExists = true;

    mockMvc.perform(post("/admin/change-namespace")
            .with(user("admin_user").authorities(new SimpleGrantedAuthority(("ROLE_ADMIN"))))
            .with(csrf().asHeader())
            .content(content)
            .contentType(MediaType.APPLICATION_JSON))
        .andExpect(status().isOk())
        .andExpect(content().json(successJson("Scheduled namespace change from 'foo' to 'bar'.\nIt can take 15 minutes to a couple hours for the change to become visible.")))
        .andExpect(result -> Mockito.verify(scheduler).enqueue(new ChangeNamespaceJobRequest(json)));
}
/**
 * Reports whether the request's URL has been seen before, recording it as
 * seen (and bumping the counter) the first time it shows up. Backed by a
 * Bloom filter, so "duplicate" answers may include rare false positives.
 *
 * @param request the request whose URL is checked
 * @param task    the owning task (unused here)
 * @return true if the URL was (probably) seen before
 */
@Override
public boolean isDuplicate(Request request, Task task) {
    String url = getUrl(request);
    if (bloomFilter.mightContain(url)) {
        return true;
    }
    // First sighting: remember it and count it.
    bloomFilter.put(url);
    counter.incrementAndGet();
    return false;
}
@Test
public void testRemove() throws Exception {
    BloomFilterDuplicateRemover remover = new BloomFilterDuplicateRemover(10);
    // Every distinct URL is fresh on its first sighting and a duplicate on the
    // second; exercises the same a,a,b,b sequence as before.
    for (String url : new String[] {"a", "b"}) {
        assertThat(remover.isDuplicate(new Request(url), null)).isFalse();
        assertThat(remover.isDuplicate(new Request(url), null)).isTrue();
    }
}
/**
 * Chooses the span-finishing strategy for the request: client requests get a
 * {@code FinishClientSpan}, everything else a {@code FinishServerSpan}.
 *
 * @param filter  filter owning the client/server handlers
 * @param request the in-flight Dubbo request
 * @param result  the RPC result being finished
 * @param span    the span to finish
 * @return the appropriate finisher for this request side
 */
static FinishSpan create(TracingFilter filter, DubboRequest request, Result result, Span span) {
    if (!(request instanceof DubboClientRequest)) {
        return new FinishServerSpan(span, result, filter.serverHandler, (DubboServerRequest) request);
    }
    return new FinishClientSpan(span, result, filter.clientHandler, (DubboClientRequest) request);
}
@Test
void create_error_but_null_result_value_DubboClientRequest() {
    // Finishing a client span with an error and a null result value must still
    // report the span, with that error attached.
    Span span = tracing.tracer().nextSpan().kind(CLIENT).start();
    Throwable error = new RuntimeException("melted");
    FinishSpan.create(filter, clientRequest, mock(Result.class), span)
        .accept(null, error);

    testSpanHandler.takeRemoteSpanWithError(CLIENT, error);
}
public static boolean checkEmail(String email) { if (StringUtils.isBlank(email)) { return false; } EmailValidator emailValidator = new EmailValidator(); if (!emailValidator.isValid(email, null)) { return false; } // Email is at least a second-level domain name int indexDomain = email.lastIndexOf("@"); String domainString = email.substring(indexDomain); return domainString.contains("."); }
@Test
public void testCheckEmail() {
    // Addresses with at least a second-level domain are accepted.
    Assertions.assertTrue(CheckUtils.checkEmail("test01@gmail.com"));
    Assertions.assertTrue(CheckUtils.checkEmail("test01@gmail.edu.cn"));
    // A bare or trailing-dot domain is rejected.
    Assertions.assertFalse(CheckUtils.checkEmail("test01@gmail"));
    Assertions.assertFalse(CheckUtils.checkEmail("test01@gmail."));
}
/**
 * Returns the environment this filter operates in.
 *
 * @return the configured environment, never null
 * @throws IllegalStateException if the environment has not been set yet
 */
public Environment getEnvironment() {
    if (this.environment != null) {
        return this.environment;
    }
    // Fail fast: using the filter before configuration is a programming error.
    throw new IllegalStateException(
        "Environment has not yet been set. This should be done before this broker filter is used.");
}
@Test(expected=IllegalStateException.class)
public void testNullEnvironment() {
    // Reading the environment before it is set must fail fast.
    new EnvironmentFilter(){}.getEnvironment();
}
/**
 * Builds one result row per storage unit selected by the SHOW STORAGE UNITS
 * statement. Each row carries the unit name, storage type, connection info,
 * the standard pool properties and the remaining custom properties.
 *
 * @param sqlStatement   the SHOW STORAGE UNITS statement (may filter units)
 * @param contextManager the runtime context manager
 * @return one row per storage unit, in iteration order of the unit map
 */
@Override
public Collection<LocalDataQueryResultRow> getRows(final ShowStorageUnitsStatement sqlStatement, final ContextManager contextManager) {
    Collection<LocalDataQueryResultRow> result = new LinkedList<>();
    for (Entry<String, StorageUnit> entry : getToBeShownStorageUnits(sqlStatement).entrySet()) {
        ConnectionProperties connectionProps = entry.getValue().getConnectionProperties();
        DataSourcePoolProperties dataSourcePoolProps = getDataSourcePoolProperties(entry.getValue());
        // Pool settings use their standard (synonym-resolved) names; anything
        // left over is reported in the trailing custom-properties column.
        Map<String, Object> poolProps = dataSourcePoolProps.getPoolPropertySynonyms().getStandardProperties();
        Map<String, Object> customProps = getCustomProperties(dataSourcePoolProps.getCustomProperties().getProperties(), connectionProps.getQueryProperties());
        result.add(new LocalDataQueryResultRow(entry.getKey(),
                entry.getValue().getStorageType().getType(),
                connectionProps.getHostname(),
                connectionProps.getPort(),
                connectionProps.getCatalog(),
                getStandardProperty(poolProps, "connectionTimeoutMilliseconds"),
                getStandardProperty(poolProps, "idleTimeoutMilliseconds"),
                getStandardProperty(poolProps, "maxLifetimeMilliseconds"),
                getStandardProperty(poolProps, "maxPoolSize"),
                getStandardProperty(poolProps, "minPoolSize"),
                getStandardProperty(poolProps, "readOnly"),
                customProps));
    }
    return result;
}
@Test
void assertGetRowsWithAllStorageUnits() {
    // With no filter in the statement, all three storage units are returned.
    // nameMap pins the expected iteration order of the unit names.
    Map<Integer, String> nameMap = new HashMap<>(3, 1F);
    nameMap.put(0, "ds_2");
    nameMap.put(1, "ds_1");
    nameMap.put(2, "ds_0");
    Collection<LocalDataQueryResultRow> actual = executor.getRows(new ShowStorageUnitsStatement(mock(DatabaseSegment.class), null, null), mock(ContextManager.class));
    assertThat(actual.size(), is(3));
    Iterator<LocalDataQueryResultRow> iterator = actual.iterator();
    int index = 0;
    while (iterator.hasNext()) {
        LocalDataQueryResultRow row = iterator.next();
        // Cells: 1=name, 2=type, 3=host, 4=port, 5=catalog, 6-11=pool props,
        // 12=custom properties JSON.
        assertThat(row.getCell(1), is(nameMap.get(index)));
        assertThat(row.getCell(2), is("MySQL"));
        assertThat(row.getCell(3), is("localhost"));
        assertThat(row.getCell(4), is("3307"));
        assertThat(row.getCell(5), is(nameMap.get(index)));
        assertThat(row.getCell(6), is(""));
        assertThat(row.getCell(7), is(""));
        assertThat(row.getCell(8), is(""));
        assertThat(row.getCell(9), is("100"));
        assertThat(row.getCell(10), is("10"));
        assertThat(row.getCell(11), is(""));
        assertThat(row.getCell(12), is("{\"openedConnections\":[],\"closed\":false}"));
        index++;
    }
}
/**
 * Verifies that every parameter declared by the template is present (non-null)
 * in the supplied parameter map.
 *
 * @param template       the mail template declaring the required parameters
 * @param templateParams the actual parameters supplied by the caller
 * @throws the MAIL_SEND_TEMPLATE_PARAM_MISS service exception naming the
 *         first missing parameter
 */
@VisibleForTesting
void validateTemplateParams(MailTemplateDO template, Map<String, Object> templateParams) {
    for (String key : template.getParams()) {
        if (templateParams.get(key) == null) {
            throw exception(MAIL_SEND_TEMPLATE_PARAM_MISS, key);
        }
    }
}
@Test
public void testValidateTemplateParams_paramMiss() {
    // 准备参数: a template requiring "code", but an empty parameter map.
    MailTemplateDO template = randomPojo(MailTemplateDO.class, o -> o.setParams(Lists.newArrayList("code")));
    Map<String, Object> templateParams = new HashMap<>();
    // mock 方法: none needed.
    // 调用,并断言异常: the missing-parameter exception must name "code".
    assertServiceException(() -> mailSendService.validateTemplateParams(template, templateParams),
            MAIL_SEND_TEMPLATE_PARAM_MISS, "code");
}
/**
 * Returns the formatted stack trace of the given thread. Delegates to the
 * two-argument overload with 3 as the second argument — presumably the
 * number of top frames to skip so the utility's own frames are hidden;
 * confirm against the overload's contract.
 */
public static String getStackTrace(Thread thread) {
    return getStackTrace(thread, 3);
}
@Test
public void testGetStackTrace() {
    String stackTrace = ReflectionUtils.getStackTrace(Thread.currentThread());
    String[] stackTraceLines = stackTrace.split(System.lineSeparator());
    // Because the stack trace contains line numbers, and those are implementation specific (i.e. within the
    // ReflectionUtils or ReflectionUtilsTest classes, or even in the JVM), we can't use a typical assertion.
    // Instead assert the first and last elements look as expected.
    assertThat(stackTraceLines[0]).startsWith("com.hazelcast.jet.impl.util.ReflectionUtilsTest.testGetStackTrace");
    assertThat(stackTraceLines[stackTraceLines.length - 1]).startsWith("\tjava.base/java.lang.Thread.run");
}
/**
 * Builds the "Search Indexes" section of the system-info protobuf. If the
 * Elasticsearch attributes cannot be retrieved, the section degrades to a
 * single "Error" attribute instead of failing.
 */
@Override
public ProtobufSystemInfo.Section toProtobuf() {
    ProtobufSystemInfo.Section.Builder section = ProtobufSystemInfo.Section.newBuilder();
    section.setName("Search Indexes");
    try {
        completeIndexAttributes(section);
    } catch (Exception e) {
        LoggerFactory.getLogger(EsIndexesSection.class).warn("Failed to retrieve ES attributes. There will be only a single \"Error\" attribute.", e);
        // Prefer the underlying Elasticsearch message when the cause is an ES
        // exception, otherwise report the wrapper's own message.
        Throwable cause = e.getCause();
        String message = cause instanceof ElasticsearchException ? cause.getMessage() : e.getMessage();
        setAttribute(section, "Error", message);
    }
    return section.build();
}
@Test
public void index_attributes() {
    ProtobufSystemInfo.Section section = underTest.toProtobuf();

    // Only the "issues" index exists: zero docs, at least one shard, and a
    // non-null store size string.
    assertThat(attribute(section, "Index issues - Docs").getLongValue()).isZero();
    assertThat(attribute(section, "Index issues - Shards").getLongValue()).isPositive();
    assertThat(attribute(section, "Index issues - Store Size").getStringValue()).isNotNull();
}
/**
 * Restarts an existing workflow instance in place (same instance id). The
 * run request must not use the fresh-run policy, the latest run of the
 * instance must already be in a terminal state, and — when a restart config
 * is supplied — its identity must match the instance.
 *
 * @param instance   the workflow instance to restart
 * @param runRequest the restart request (policy, restart config, etc.)
 * @return the response describing the created run
 * @throws MaestroInvalidStatusException if the latest run is non-terminal
 */
public RunResponse restartDirectly(WorkflowInstance instance, RunRequest runRequest) {
    Checks.checkTrue(
        !runRequest.isFreshRun(),
        "Cannot restart a workflow instance %s using fresh run policy [%s]",
        instance.getIdentity(),
        runRequest.getCurrentPolicy());
    if (runRequest.getRestartConfig() != null) {
        runRequest.validateIdentity(instance);
    }
    // Only terminal runs may be restarted; check the latest run's status.
    WorkflowInstance.Status status =
        instanceDao.getLatestWorkflowInstanceStatus(
            instance.getWorkflowId(), instance.getWorkflowInstanceId());
    if (!status.isTerminal()) {
        throw new MaestroInvalidStatusException(
            "Cannot restart workflow instance [%s][%s] as the latest run status is [%s] (non-terminal).",
            instance.getWorkflowId(), instance.getWorkflowInstanceId(), status);
    }

    workflowHelper.updateWorkflowInstance(instance, runRequest);
    // Enqueue the updated instance under the workflow's run strategy; the
    // returned code feeds into the response status.
    int ret =
        runStrategyDao.startWithRunStrategy(
            instance, workflowDao.getRunStrategy(instance.getWorkflowId()));
    RunResponse runResponse = RunResponse.from(instance, ret);
    LOG.info("Restart a workflow instance with a response: [{}]", runResponse);
    return runResponse;
}
@Test
public void testRestartForeach() {
    // Restarting a foreach-iteration instance from the beginning should reuse
    // the instance identity (same instance id and run id) in the response.
    WorkflowInstance wfInstance = new WorkflowInstance();
    // Build a foreach initiator whose ancestor points at the parent workflow.
    ForeachInitiator initiator = new ForeachInitiator();
    UpstreamInitiator.Info info = new UpstreamInitiator.Info();
    info.setWorkflowId("foo");
    info.setInstanceId(123L);
    info.setRunId(2L);
    info.setStepId("bar");
    initiator.setAncestors(Collections.singletonList(info));
    wfInstance.setInitiator(initiator);
    wfInstance.setStatus(WorkflowInstance.Status.SUCCEEDED);
    wfInstance.setWorkflowInstanceId(10L);
    wfInstance.setWorkflowRunId(3L);
    wfInstance.setWorkflowId("test-workflow");
    wfInstance.setRuntimeWorkflow(Workflow.builder().build());
    RunRequest request =
        RunRequest.builder()
            .initiator(new ManualInitiator())
            .currentPolicy(RunPolicy.RESTART_FROM_BEGINNING)
            .restartConfig(
                RestartConfig.builder().addRestartNode("test-workflow", 10L, null).build())
            .build();
    // Latest run is terminal and the run-strategy start succeeds.
    when(runStrategyDao.startWithRunStrategy(any(), any())).thenReturn(1);
    when(instanceDao.getLatestWorkflowInstanceStatus(any(), anyLong()))
        .thenReturn(WorkflowInstance.Status.SUCCEEDED);
    doNothing().when(workflowHelper).updateWorkflowInstance(any(), any());

    RunResponse response = actionHandler.restartDirectly(wfInstance, request);
    assertEquals("test-workflow", response.getWorkflowId());
    assertEquals(10L, response.getWorkflowInstanceId());
    assertEquals(3L, response.getWorkflowRunId());
    assertEquals(RunResponse.Status.WORKFLOW_RUN_CREATED, response.getStatus());
    verify(workflowHelper, times(1)).updateWorkflowInstance(any(), any());
}
/**
 * Evaluates a user-supplied JEXL filter expression against a feed entry.
 * A blank filter matches everything. The entry's title/author/content/url/
 * categories are exposed to the expression lower-cased (title and content
 * stripped of HTML), plus the current year. Evaluation runs on a separate
 * executor with a timeout so hostile or pathological expressions cannot
 * block feed refresh.
 *
 * @param filter JEXL expression, may be blank
 * @param entry  the entry to test
 * @return the boolean result of the expression
 * @throws FeedEntryFilterException if the expression cannot be parsed, fails
 *         or times out during evaluation, or does not yield a boolean
 */
public boolean filterMatchesEntry(String filter, FeedEntry entry) throws FeedEntryFilterException {
    if (StringUtils.isBlank(filter)) {
        return true;
    }

    Script script;
    try {
        script = ENGINE.createScript(filter);
    } catch (JexlException e) {
        throw new FeedEntryFilterException("Exception while parsing expression " + filter, e);
    }

    // Expose normalized entry fields to the expression; HTML is stripped from
    // title and content via Jsoup, and all strings are lower-cased.
    JexlContext context = new MapContext();
    context.set("title", entry.getContent().getTitle() == null ? "" : Jsoup.parse(entry.getContent().getTitle()).text().toLowerCase());
    context.set("author", entry.getContent().getAuthor() == null ? "" : entry.getContent().getAuthor().toLowerCase());
    context.set("content", entry.getContent().getContent() == null ? "" : Jsoup.parse(entry.getContent().getContent()).text().toLowerCase());
    context.set("url", entry.getUrl() == null ? "" : entry.getUrl().toLowerCase());
    context.set("categories", entry.getContent().getCategories() == null ? "" : entry.getContent().getCategories().toLowerCase());
    context.set("year", Year.now().getValue());

    // Run with a timeout so a slow expression cannot stall the caller.
    Callable<Object> callable = script.callable(context);
    Future<Object> future = executor.submit(callable);
    Object result;
    try {
        result = future.get(config.feedRefresh().filteringExpressionEvaluationTimeout().toMillis(), TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        // Preserve the interrupt flag for the caller's thread.
        Thread.currentThread().interrupt();
        throw new FeedEntryFilterException("interrupted while evaluating expression " + filter, e);
    } catch (ExecutionException e) {
        throw new FeedEntryFilterException("Exception while evaluating expression " + filter, e);
    } catch (TimeoutException e) {
        throw new FeedEntryFilterException("Took too long evaluating expression " + filter, e);
    }

    try {
        return (boolean) result;
    } catch (ClassCastException e) {
        // The expression evaluated to something other than a boolean.
        throw new FeedEntryFilterException(e.getMessage(), e);
    }
}
@Test
void blankFilterMatchesFilter() throws FeedEntryFilterException {
    // A blank filter expression matches every entry.
    Assertions.assertTrue(service.filterMatchesEntry("", entry));
}
/**
 * Resolves the restart-backoff-time strategy factory with the following
 * precedence: job-level RestartStrategies configuration, then the job
 * Configuration, then the cluster Configuration, and finally a default
 * chosen by whether checkpointing is enabled.
 *
 * NOTE(review): {@code Optional.orElse} evaluates its argument eagerly, so
 * every fallback factory lookup runs even when an earlier level wins —
 * presumably harmless because the lookups are side-effect free; confirm
 * before relying on that.
 *
 * @param jobRestartStrategyConfiguration job-level restart strategy config
 * @param jobConfiguration                the job configuration
 * @param clusterConfiguration            the cluster configuration
 * @param isCheckpointingEnabled          whether checkpointing is enabled
 * @return the resolved factory, never null
 */
public static RestartBackoffTimeStrategy.Factory createRestartBackoffTimeStrategyFactory(
        final RestartStrategies.RestartStrategyConfiguration jobRestartStrategyConfiguration,
        final Configuration jobConfiguration,
        final Configuration clusterConfiguration,
        final boolean isCheckpointingEnabled) {

    checkNotNull(jobRestartStrategyConfiguration);
    checkNotNull(jobConfiguration);
    checkNotNull(clusterConfiguration);

    return getJobRestartStrategyFactory(jobRestartStrategyConfiguration)
        .orElse(
            getRestartStrategyFactoryFromConfig(jobConfiguration)
                .orElse(
                    (getRestartStrategyFactoryFromConfig(clusterConfiguration)
                        .orElse(
                            getDefaultRestartStrategyFactory(
                                isCheckpointingEnabled)))));
}
@Test
void testExponentialDelayStrategySpecifiedInJobConfig() {
    // The job configuration must take precedence over the cluster
    // configuration when both specify a restart strategy.
    final Configuration jobConf = new Configuration();
    jobConf.set(RestartStrategyOptions.RESTART_STRATEGY, EXPONENTIAL_DELAY.getMainValue());

    final Configuration clusterConf = new Configuration();
    clusterConf.set(RestartStrategyOptions.RESTART_STRATEGY, FAILURE_RATE.getMainValue());

    final RestartBackoffTimeStrategy.Factory factory =
        RestartBackoffTimeStrategyFactoryLoader.createRestartBackoffTimeStrategyFactory(
            DEFAULT_JOB_LEVEL_RESTART_CONFIGURATION, jobConf, clusterConf, false);

    assertThat(factory)
        .isInstanceOf(
            ExponentialDelayRestartBackoffTimeStrategy
                .ExponentialDelayRestartBackoffTimeStrategyFactory.class);
}
/**
 * Finishes the span for a completed RPC: client requests are reported via
 * the client handler's receive path, server requests via the server
 * handler's send path.
 *
 * @param filter  filter owning the client/server handlers
 * @param request the Dubbo request that completed
 * @param result  the RPC result, possibly null
 * @param error   the failure, possibly null
 * @param span    the span being finished
 */
static void finish(TracingFilter filter, DubboRequest request,
    @Nullable Result result, @Nullable Throwable error, Span span) {
    if (request instanceof RpcClientRequest) {
        DubboClientResponse response =
            new DubboClientResponse((DubboClientRequest) request, result, error);
        filter.clientHandler.handleReceive(response, span);
        return;
    }
    DubboServerResponse response =
        new DubboServerResponse((DubboServerRequest) request, result, error);
    filter.serverHandler.handleSend(response, span);
}
@Test
void finish_result_but_null_error_DubboClientRequest() {
    // Finishing a client span with a result and no error reports a clean span.
    Span span = tracing.tracer().nextSpan().kind(CLIENT).start();

    FinishSpan.finish(filter, clientRequest, mock(Result.class), null, span);

    testSpanHandler.takeRemoteSpan(CLIENT);
}
/**
 * Checks that an ISR is consistent with a replica set: an empty ISR is
 * always valid; otherwise every ISR member must be non-negative, unique,
 * and present in the replica set.
 *
 * @param replicas the replica ids (unsorted, not modified)
 * @param isr      the in-sync replica ids (unsorted, not modified)
 * @return true if the ISR is valid for the replica set
 */
public static boolean validateIsr(int[] replicas, int[] isr) {
    if (isr.length == 0) {
        return true;
    }
    if (replicas.length == 0) {
        return false;
    }
    // Sort copies so the inputs stay untouched, then merge-walk both arrays.
    int[] replicasSorted = Arrays.copyOf(replicas, replicas.length);
    Arrays.sort(replicasSorted);
    int[] isrSorted = Arrays.copyOf(isr, isr.length);
    Arrays.sort(isrSorted);
    // Smallest ISR member negative means some member is negative.
    if (isrSorted[0] < 0) {
        return false;
    }
    int replicaIdx = 0;
    int previous = -1;
    for (int member : isrSorted) {
        if (member == previous) {
            return false; // duplicate ISR entry
        }
        previous = member;
        // Advance through the sorted replicas looking for this member.
        boolean found = false;
        while (replicaIdx < replicasSorted.length) {
            if (replicasSorted[replicaIdx++] == member) {
                found = true;
                break;
            }
        }
        if (!found) {
            return false; // member not in the replica set
        }
    }
    return true;
}
@Test
public void testValidateIsr() {
    // Empty ISR is always valid, even with no replicas.
    assertTrue(Replicas.validateIsr(new int[] {}, new int[] {}));
    assertTrue(Replicas.validateIsr(new int[] {1, 2, 3}, new int[] {}));
    // An ISR equal to, or an unordered subset of, the replica set is valid.
    assertTrue(Replicas.validateIsr(new int[] {1, 2, 3}, new int[] {1, 2, 3}));
    assertTrue(Replicas.validateIsr(new int[] {3, 1, 2}, new int[] {2, 1}));
    // Members outside the replica set, or duplicated members, are invalid.
    assertFalse(Replicas.validateIsr(new int[] {3, 1, 2}, new int[] {4, 1}));
    assertFalse(Replicas.validateIsr(new int[] {1, 2, 4}, new int[] {4, 4}));
}
/**
 * Validates {@code indexName} as a database index identifier, matching SQL
 * reserved keywords case-insensitively, and returns the name unchanged so
 * the call can be chained. Validation rules (length limit, allowed
 * characters, null handling) are defined by {@code checkDbIdentifier}.
 */
public static String validateIndexNameIgnoreCase(@Nullable String indexName) {
    checkDbIdentifier(indexName, "Index name", INDEX_NAME_MAX_SIZE, true);
    return indexName;
}
@Test
public void fail_when_index_name_is_an_SQL_reserved_keyword_ignoring_case() {
    // Reserved keywords must be rejected regardless of letter case.
    assertThatThrownBy(() -> validateIndexNameIgnoreCase("VALUES"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Index name must not be an SQL reserved keyword, got 'VALUES'");

    assertThatThrownBy(() -> validateIndexNameIgnoreCase("values"))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("Index name must not be an SQL reserved keyword, got 'values'");
}
/**
 * Wraps a {@code Message} into a {@code MessageWrapper}, recording the
 * concrete message class and type, and flattening each nested message into a
 * generic map by round-tripping it through JSON.
 *
 * @param message the message to wrap
 * @return the populated wrapper
 */
public static MessageWrapper convert(Message message) {
    MessageWrapper wrapper = new MessageWrapper();
    wrapper.setResponseClass(message.getClass());
    wrapper.setMessageType(message.getMessageType());
    // JSON round-trip turns each nested message into an untyped map view.
    List<Map<String, Object>> contentParams = new ArrayList<>();
    for (Message each : message.getMessages()) {
        Map mapObj = JSONUtil.parseObject(JSONUtil.toJSONString(each), Map.class);
        contentParams.add(mapObj);
    }
    wrapper.setContentParams(contentParams);
    return wrapper;
}
@Test
public void testMessageWrapperConvertException() {
    // Converting a hand-built MessageWrapper (rather than a Message) through
    // MessageConvert.convert is expected to fail.
    Assertions.assertThrows(Exception.class, () -> {
        Map<String, Object> data1 = new HashMap<>();
        data1.put("key1", "value1");
        data1.put("key2", 123);
        Map<String, Object> data2 = new HashMap<>();
        data2.put("key3", true);
        data2.put("key4", 3.14);
        List<Map<String, Object>> contentParams = Arrays.asList(data1, data2);
        Class responseClass = String.class;
        MessageTypeEnum messageType = MessageTypeEnum.DEFAULT;
        MessageWrapper messageWrapper = new MessageWrapper(contentParams, responseClass, messageType);
        MessageConvert.convert(messageWrapper);
    });
}
/**
 * Handles one QoS HTTP request: decodes it into a command context, executes
 * the command, and writes back an HTTP response. The connection is always
 * closed after the response. Status mapping: 404 when the request cannot be
 * decoded or the command is unknown, 403 on permission denial, 500 on any
 * other execution failure, otherwise the code set on the command context.
 */
@Override
protected void channelRead0(ChannelHandlerContext ctx, HttpRequest msg) throws Exception {
    CommandContext commandContext = HttpCommandDecoder.decode(msg);
    // Return 404 when we fail to construct a command context from the URI.
    if (commandContext == null) {
        log.warn(QOS_UNEXPECTED_EXCEPTION, "", "", "can not found commandContext, url: " + msg.uri());
        FullHttpResponse response = http(404);
        ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE);
    } else {
        commandContext.setRemote(ctx.channel());
        commandContext.setQosConfiguration(qosConfiguration);
        try {
            String result = commandExecutor.execute(commandContext);
            // The executed command may have set a custom HTTP status code.
            int httpCode = commandContext.getHttpCode();
            FullHttpResponse response = http(httpCode, result);
            ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE);
        } catch (NoSuchCommandException ex) {
            log.error(QOS_COMMAND_NOT_FOUND, "", "", "can not find command: " + commandContext, ex);
            FullHttpResponse response = http(404);
            ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE);
        } catch (PermissionDenyException ex) {
            log.error(
                QOS_PERMISSION_DENY_EXCEPTION,
                "",
                "",
                "permission deny to access command: " + commandContext,
                ex);
            FullHttpResponse response = http(403);
            ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE);
        } catch (Exception qosEx) {
            log.error(
                QOS_UNEXPECTED_EXCEPTION,
                "",
                "",
                "execute commandContext: " + commandContext + " got exception",
                qosEx);
            FullHttpResponse response = http(500, qosEx.getMessage());
            ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE);
        }
    }
}
@Test
void test1() throws Exception {
    // An undecodable request URI ("test") must yield a 404 response and the
    // channel must be closed after the write.
    ChannelHandlerContext context = mock(ChannelHandlerContext.class);
    ChannelFuture future = mock(ChannelFuture.class);
    when(context.writeAndFlush(any(FullHttpResponse.class))).thenReturn(future);

    HttpRequest message = Mockito.mock(HttpRequest.class);
    when(message.uri()).thenReturn("test");

    HttpProcessHandler handler = new HttpProcessHandler(
        FrameworkModel.defaultModel(), QosConfiguration.builder().build());
    handler.channelRead0(context, message);

    verify(future).addListener(ChannelFutureListener.CLOSE);
    ArgumentCaptor<FullHttpResponse> captor = ArgumentCaptor.forClass(FullHttpResponse.class);
    verify(context).writeAndFlush(captor.capture());
    FullHttpResponse response = captor.getValue();
    assertThat(response.status().code(), equalTo(404));
}
/**
 * Returns the suppression rules collected so far.
 *
 * NOTE(review): this hands out the internal, mutable list — callers share
 * state with this object; confirm callers only read it before relying on
 * that.
 */
public List<SuppressionRule> getSuppressionRules() {
    return suppressionRules;
}
@Test
public void testHandler() throws Exception {
    // Parse the bundled suppressions.xml with a schema-validating SAX parser
    // and check that rules were collected, including some marked as "base".
    File file = BaseTest.getResourceAsFile(this, "suppressions.xml");
    InputStream schemaStream = BaseTest.getResourceAsStream(this, "schema/suppression.xsd");
    SuppressionHandler handler = new SuppressionHandler();
    SAXParser saxParser = XmlUtils.buildSecureSaxParser(schemaStream);
    XMLReader xmlReader = saxParser.getXMLReader();
    xmlReader.setErrorHandler(new SuppressionErrorHandler());
    xmlReader.setContentHandler(handler);

    InputStream inputStream = new FileInputStream(file);
    Reader reader = new InputStreamReader(inputStream, StandardCharsets.UTF_8);
    InputSource in = new InputSource(reader);
    xmlReader.parse(in);

    List<SuppressionRule> result = handler.getSuppressionRules();
    assertTrue(result.size() > 3);

    // At least one of the parsed rules must be a base rule.
    int baseCount = 0;
    for (SuppressionRule r : result) {
        if (r.isBase()) {
            baseCount++;
        }
    }
    assertTrue(baseCount > 0);
}
/**
 * Sends the given request through the underlying Bot API client and returns
 * its typed response.
 *
 * @param request the request to send
 * @return the response matching the request's declared response type
 */
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    return api.send(request);
}
@Test
public void promoteChatMember() {
    // Promote the member bot with every capability flag explicitly disabled
    // except canPromoteMembers; the API call must succeed.
    BaseResponse response = bot.execute(
        new PromoteChatMember(groupId, memberBot)
            .isAnonymous(false)
            .canChangeInfo(false)
            .canPostMessages(false)
            .canEditMessages(false)
            .canDeleteMessages(false)
            .canInviteUsers(false)
            .canRestrictMembers(false)
            .canPinMessages(false)
            .canManageChat(false)
            .canManageVoiceChats(false)
            .canManageVideoChats(false)
            .canManageTopics(false)
            .canPostStories(false)
            .canEditStories(false)
            .canDeleteStories(false)
            .canPromoteMembers(true));
    assertTrue(response.isOk());
}
/**
 * Returns the stored value widened to {@code float}.
 */
@Override
public float floatValue() {
    return value;
}
@Test
public void testFloatValue() {
    // Positive and negative milli-percent values widen to float unchanged.
    assertEquals(100f, MilliPct.ofMilliPct(100).floatValue(), 0.0001f);
    assertEquals(-100f, MilliPct.ofMilliPct(-100).floatValue(), 0.0001f);
}
@Benchmark @Threads(16) // Use several threads since we expect contention during bundle processing. public void testTinyBundle(TrivialTransform trivialTransform) throws Exception { Map<String, ? super Coder<WindowedValue<?>>> remoteOutputCoders = trivialTransform.descriptor.getRemoteOutputCoders(); Map<String, RemoteOutputReceiver<?>> outputReceivers = new HashMap<>(); AtomicInteger outputValuesCount = new AtomicInteger(); for (Entry<String, ? super Coder<WindowedValue<?>>> remoteOutputCoder : remoteOutputCoders.entrySet()) { outputReceivers.put( remoteOutputCoder.getKey(), RemoteOutputReceiver.of( (Coder) remoteOutputCoder.getValue(), (FnDataReceiver<? super WindowedValue<?>>) (WindowedValue<?> value) -> outputValuesCount.incrementAndGet())); } try (RemoteBundle bundle = trivialTransform.processor.newBundle(outputReceivers, BundleProgressHandler.ignored())) { Iterables.getOnlyElement(bundle.getInputReceivers().values()) .accept(valueInGlobalWindow(new byte[0])); } assertEquals(3, outputValuesCount.getAndSet(0)); }
@Test
public void testTinyBundle() throws Exception {
    // Run the benchmark body once as a plain test to catch regressions.
    TrivialTransform transform = new TrivialTransform();
    transform.elementsEmbedding = elementsEmbedding;
    new ProcessBundleBenchmark().testTinyBundle(transform);
    transform.tearDown();
}
/**
 * Computes cluster health from the Elasticsearch cluster state. The node
 * healths argument is not consulted here; when the ES health cannot be
 * fetched at all, a fixed RED "unavailable" health is returned.
 *
 * @param nodeHealths node healths (unused by this check)
 * @return the merged minimum-nodes and cluster-status health
 */
@Override
public Health check(Set<NodeHealth> nodeHealths) {
    ClusterHealthResponse response = this.getEsClusterHealth();
    if (response == null) {
        return RED_HEALTH_UNAVAILABLE;
    }
    return HealthReducer.merge(checkMinimumNodes(response), extractStatusHealth(response));
}
@Test
public void check_ignores_NodeHealth_arg_and_returns_RED_with_cause_if_an_exception_occurs_checking_ES_cluster_status() {
    // Even with GREEN node healths, a failure to query ES yields RED with an
    // explanatory cause.
    Set<NodeHealth> nodeHealths = ImmutableSet.of(newNodeHealth(NodeHealth.Status.GREEN));
    when(esClient.clusterHealth(any())).thenThrow(new RuntimeException("Faking an exception occurring while using the EsClient"));

    Health health = new EsStatusClusterCheck(esClient).check(nodeHealths);

    assertThat(health.getStatus()).isEqualTo(Health.Status.RED);
    assertThat(health.getCauses()).containsOnly("Elasticsearch status is RED (unavailable)");
}
/**
 * Serializes the metadata update to JSON. Delegates to the two-argument
 * overload with the boolean flag set to false (non-pretty output, per that
 * overload's contract).
 */
public static String toJson(MetadataUpdate metadataUpdate) {
    return toJson(metadataUpdate, false);
}
@Test
public void testSetDefaultPartitionSpecToJson() {
    // A SetDefaultPartitionSpec update serializes to {"action":..., "spec-id":...}.
    String action = MetadataUpdateParser.SET_DEFAULT_PARTITION_SPEC;
    int specId = 4;
    String expected = String.format("{\"action\":\"%s\",\"spec-id\":%d}", action, specId);
    MetadataUpdate update = new MetadataUpdate.SetDefaultPartitionSpec(specId);
    String actual = MetadataUpdateParser.toJson(update);
    assertThat(actual)
        .as("Set default partition spec should serialize to the correct JSON value")
        .isEqualTo(expected);
}
/**
 * Judges whether any of the given logic tables is a sharding table.
 *
 * @param logicTableNames logic table names to check
 * @return true if at least one name denotes a sharding table
 */
public boolean containsShardingTable(final Collection<String> logicTableNames) {
    return logicTableNames.stream().anyMatch(this::isShardingTable);
}
@Test
void assertNotContainsShardingTable() {
    // "table_0" is not configured as a sharding table in the minimum rule.
    assertFalse(createMinimumShardingRule().containsShardingTable(Collections.singleton("table_0")));
}
/**
 * Issues CONFIG RESETSTAT against the Redis client that owns the given
 * cluster node and blocks until the command completes.
 *
 * @param node the cluster node whose statistics are reset
 */
@Override
public void resetConfigStats(RedisClusterNode node) {
    RedisClient entry = getEntry(node);
    RFuture<Void> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_RESETSTAT);
    syncFuture(f);
}
@Test
public void testResetConfigStats() {
    // Resetting config stats on the first master must not raise.
    testInCluster(connection -> {
        RedisClusterNode master = getFirstMaster(connection);
        connection.resetConfigStats(master);
    });
}
/**
 * Evaluates a placement constraint against a node. A null constraint is
 * trivially satisfied. Single constraints are normalized via
 * {@code SingleConstraintTransformer} and then dispatched by concrete type
 * (single / AND / OR); other composite types are not yet supported.
 *
 * @param appId      application owning the constraint
 * @param constraint constraint to evaluate, may be null
 * @param node       candidate scheduler node
 * @param atm        allocation tags manager used for tag lookups
 * @param dcOpt      optional diagnostics collector
 * @return true if the node satisfies the constraint
 * @throws InvalidAllocationTagsQueryException on unsupported constraint types
 *         or invalid tag queries
 */
private static boolean canSatisfyConstraints(ApplicationId appId,
    PlacementConstraint constraint, SchedulerNode node,
    AllocationTagsManager atm,
    Optional<DiagnosticsCollector> dcOpt)
    throws InvalidAllocationTagsQueryException {
    if (constraint == null) {
        LOG.debug("Constraint is found empty during constraint validation for"
            + " app:{}", appId);
        return true;
    }

    // If this is a single constraint, transform to SingleConstraint.
    SingleConstraintTransformer singleTransformer =
        new SingleConstraintTransformer(constraint);
    constraint = singleTransformer.transform();
    AbstractConstraint sConstraintExpr = constraint.getConstraintExpr();

    // TODO handle other type of constraints, e.g CompositeConstraint
    if (sConstraintExpr instanceof SingleConstraint) {
        SingleConstraint single = (SingleConstraint) sConstraintExpr;
        return canSatisfySingleConstraint(appId, single, node, atm, dcOpt);
    } else if (sConstraintExpr instanceof And) {
        And and = (And) sConstraintExpr;
        return canSatisfyAndConstraint(appId, and, node, atm, dcOpt);
    } else if (sConstraintExpr instanceof Or) {
        Or or = (Or) sConstraintExpr;
        return canSatisfyOrConstraint(appId, or, node, atm, dcOpt);
    } else {
        throw new InvalidAllocationTagsQueryException(
            "Unsupported type of constraint: "
                + sConstraintExpr.getClass().getSimpleName());
    }
}
@Test
public void testGlobalAppConstraints()
    throws InvalidAllocationTagsQueryException {
    // Exercises the "all" applications namespace with anti-affinity, max
    // cardinality and min cardinality constraints against a fixed tag layout.
    AllocationTagsManager tm = new AllocationTagsManager(rmContext);
    PlacementConstraintManagerService pcm = new MemoryPlacementConstraintManager();
    rmContext.setAllocationTagsManager(tm);
    rmContext.setPlacementConstraintManager(pcm);

    long ts = System.currentTimeMillis();
    ApplicationId application1 = BuilderUtils.newApplicationId(ts, 100);
    ApplicationId application2 = BuilderUtils.newApplicationId(ts, 101);
    ApplicationId application3 = BuilderUtils.newApplicationId(ts, 102);

    // Register App1 with anti-affinity constraint map.
    RMNode n0r1 = rmNodes.get(0);
    RMNode n1r1 = rmNodes.get(1);
    RMNode n2r2 = rmNodes.get(2);
    RMNode n3r2 = rmNodes.get(3);

    /**
     * Place container:
     *  n0: app1/A(1), app2/A(1)
     *  n1: app3/A(3)
     *  n2: app1/A(2)
     *  n3: ""
     */
    tm.addContainer(n0r1.getNodeID(),
        newContainerId(application1, 0), ImmutableSet.of("A"));
    tm.addContainer(n0r1.getNodeID(),
        newContainerId(application2, 1), ImmutableSet.of("A"));
    tm.addContainer(n1r1.getNodeID(),
        newContainerId(application3, 2), ImmutableSet.of("A"));
    tm.addContainer(n1r1.getNodeID(),
        newContainerId(application3, 3), ImmutableSet.of("A"));
    tm.addContainer(n1r1.getNodeID(),
        newContainerId(application3, 4), ImmutableSet.of("A"));
    tm.addContainer(n2r2.getNodeID(),
        newContainerId(application1, 5), ImmutableSet.of("A"));
    tm.addContainer(n2r2.getNodeID(),
        newContainerId(application1, 6), ImmutableSet.of("A"));

    SchedulerNode schedulerNode0 = newSchedulerNode(n0r1.getHostName(),
        n0r1.getRackName(), n0r1.getNodeID());
    SchedulerNode schedulerNode1 = newSchedulerNode(n1r1.getHostName(),
        n1r1.getRackName(), n1r1.getNodeID());
    SchedulerNode schedulerNode2 = newSchedulerNode(n2r2.getHostName(),
        n2r2.getRackName(), n2r2.getNodeID());
    SchedulerNode schedulerNode3 = newSchedulerNode(n3r2.getHostName(),
        n3r2.getRackName(), n3r2.getNodeID());

    TargetApplicationsNamespace namespaceAll =
        new TargetApplicationsNamespace.All();

    //***************************
    // 1) all, anti-affinity
    //***************************
    // Anti-affinity with "A" from any application including itself: only the
    // tag-free node n3 qualifies.
    PlacementConstraint constraint1 = PlacementConstraints.targetNotIn(
        NODE, allocationTagWithNamespace(namespaceAll.toString(), "A"))
        .build();
    Map<Set<String>, PlacementConstraint> constraintMap = new HashMap<>();
    Set<String> srcTags1 = ImmutableSet.of("A");
    constraintMap.put(srcTags1, constraint1);
    pcm.registerApplication(application1, constraintMap);

    Assert.assertFalse(PlacementConstraintsUtil.canSatisfyConstraints(
        application1, createSchedulingRequest(srcTags1), schedulerNode0,
        pcm, tm));
    Assert.assertFalse(PlacementConstraintsUtil.canSatisfyConstraints(
        application1, createSchedulingRequest(srcTags1), schedulerNode1,
        pcm, tm));
    Assert.assertFalse(PlacementConstraintsUtil.canSatisfyConstraints(
        application1, createSchedulingRequest(srcTags1), schedulerNode2,
        pcm, tm));
    Assert.assertTrue(PlacementConstraintsUtil.canSatisfyConstraints(
        application1, createSchedulingRequest(srcTags1), schedulerNode3,
        pcm, tm));

    pcm.unregisterApplication(application1);

    //***************************
    // 2) all, max cardinality
    //***************************
    // At most 2 "A" tags from any application: only n1 (3 tags) is excluded.
    PlacementConstraint constraint2 = PlacementConstraints
        .maxCardinality(NODE, namespaceAll.toString(), 2, "A")
        .build();
    constraintMap.clear();
    Set<String> srcTags2 = ImmutableSet.of("foo");
    constraintMap.put(srcTags2, constraint2);
    pcm.registerApplication(application2, constraintMap);

    Assert.assertTrue(PlacementConstraintsUtil.canSatisfyConstraints(
        application2, createSchedulingRequest(srcTags2), schedulerNode0,
        pcm, tm));
    Assert.assertFalse(PlacementConstraintsUtil.canSatisfyConstraints(
        application2, createSchedulingRequest(srcTags2), schedulerNode1,
        pcm, tm));
    Assert.assertTrue(PlacementConstraintsUtil.canSatisfyConstraints(
        application2, createSchedulingRequest(srcTags2), schedulerNode2,
        pcm, tm));
    Assert.assertTrue(PlacementConstraintsUtil.canSatisfyConstraints(
        application2, createSchedulingRequest(srcTags2), schedulerNode3,
        pcm, tm));

    pcm.unregisterApplication(application2);

    //***************************
    // 3) all, min cardinality
    //***************************
    // At least 3 "A" tags from any application: only n1 qualifies.
    PlacementConstraint constraint3 = PlacementConstraints
        .minCardinality(NODE, namespaceAll.toString(), 3, "A")
        .build();
    constraintMap.clear();
    Set<String> srcTags3 = ImmutableSet.of("foo");
    constraintMap.put(srcTags3, constraint3);
    pcm.registerApplication(application3, constraintMap);

    Assert.assertFalse(PlacementConstraintsUtil.canSatisfyConstraints(
        application3, createSchedulingRequest(srcTags3), schedulerNode0,
        pcm, tm));
    Assert.assertTrue(PlacementConstraintsUtil.canSatisfyConstraints(
        application3, createSchedulingRequest(srcTags3), schedulerNode1,
        pcm, tm));
    Assert.assertFalse(PlacementConstraintsUtil.canSatisfyConstraints(
        application3, createSchedulingRequest(srcTags3), schedulerNode2,
        pcm, tm));
    Assert.assertFalse(PlacementConstraintsUtil.canSatisfyConstraints(
        application3, createSchedulingRequest(srcTags3), schedulerNode3,
        pcm, tm));

    pcm.unregisterApplication(application3);
}
public static boolean matches(MetricsFilter filter, MetricKey key) { if (filter == null) { return true; } @Nullable String stepName = key.stepName(); if (stepName == null) { if (!filter.steps().isEmpty()) { // The filter specifies steps, but the metric is not associated with a step. return false; } } else if (!matchesScope(stepName, filter.steps())) { // The filter specifies steps that differ from the metric's step return false; } // The filter's steps match the metric's step. return matchesName(key.metricName(), filter.names()); }
@Test public void testMatchClassNamespaceFilters() { // MetricsFilter with a Class-namespace + name filter. Without step filter. // Successful match. assertTrue( MetricFiltering.matches( MetricsFilter.builder() .addNameFilter(MetricNameFilter.named(MetricFilteringTest.class, "myMetricName")) .build(), MetricKey.create( "anyStep", MetricName.named(MetricFilteringTest.class, "myMetricName")))); // Unsuccessful match. assertFalse( MetricFiltering.matches( MetricsFilter.builder() .addNameFilter(MetricNameFilter.named(MetricFilteringTest.class, "myMetricName")) .build(), MetricKey.create("anyStep", MetricName.named(MetricFiltering.class, "myMetricName")))); }
@Override protected MemberData memberData(Subscription subscription) { // In ConsumerProtocolSubscription v2 or higher, we can take member data from fields directly if (subscription.generationId().isPresent()) { return new MemberData(subscription.ownedPartitions(), subscription.generationId()); } ByteBuffer buffer = subscription.userData(); Optional<Integer> encodedGeneration; if (buffer == null) { encodedGeneration = Optional.empty(); } else { try { Struct struct = COOPERATIVE_STICKY_ASSIGNOR_USER_DATA_V0.read(buffer); encodedGeneration = Optional.of(struct.getInt(GENERATION_KEY_NAME)); } catch (Exception e) { encodedGeneration = Optional.of(DEFAULT_GENERATION); } } return new MemberData(subscription.ownedPartitions(), encodedGeneration, subscription.rackId()); }
@Test public void testMemberDataWithEmptyPartitionsAndHigherGeneration() { List<String> topics = topics(topic); List<TopicPartition> ownedPartitions = partitions(tp(topic1, 0), tp(topic2, 1)); // subscription containing empty owned partitions and a higher generation id, and non-empty owned partition in user data, // member data should honor the one in subscription since generation id is higher Subscription subscription = new Subscription(topics, generateUserData(topics, ownedPartitions, generationId - 1), Collections.emptyList(), generationId, Optional.empty()); AbstractStickyAssignor.MemberData memberData = memberData(subscription); assertEquals(Collections.emptyList(), memberData.partitions, "subscription: " + subscription + " doesn't have expected owned partition"); assertEquals(generationId, memberData.generation.orElse(-1), "subscription: " + subscription + " doesn't have expected generation id"); }
public abstract int status(HttpServletResponse response);
@Test void servlet25_status_zeroOnException_cached() { servlet25.status(new ExceptionResponse()); assertThat(servlet25.status(new ExceptionResponse())) .isZero(); }
public WorkProcessor<Page> merge(List<Type> keyTypes, List<Type> allTypes, List<WorkProcessor<Page>> pages, DriverYieldSignal driverYieldSignal) { return merge(keyTypes, null, allTypes, pages, driverYieldSignal); }
@Test public void testBinaryMergeIteratorOverEmptyPageAndNonEmptyPage() { Page emptyPage = new Page(0, BIGINT.createFixedSizeBlockBuilder(0).build()); Page page = rowPagesBuilder(BIGINT).row(42).build().get(0); WorkProcessor<Page> mergedPage = new MergeHashSort(newSimpleAggregatedMemoryContext()).merge( ImmutableList.of(BIGINT), ImmutableList.of(BIGINT), ImmutableList.of(ImmutableList.of(emptyPage, page).iterator()).stream() .map(WorkProcessor::fromIterator) .collect(toImmutableList()), new DriverYieldSignal()); assertTrue(mergedPage.process()); Page actualPage = mergedPage.getResult(); assertEquals(actualPage.getPositionCount(), 1); assertEquals(actualPage.getChannelCount(), 1); assertEquals(actualPage.getBlock(0).getLong(0), 42); assertFinishes(mergedPage); }
public boolean offer(Serializable event) { if (queue == null) { throw new IllegalStateException("client has no event queue"); } return queue.offer(event); }
@Test public void testOfferEventAndRun() throws Exception { client.offer(TEST_EVENT); Thread thread = new Thread(client); thread.start(); // MockEventQueue will interrupt the thread when the queue is drained thread.join(1000); assertFalse(thread.isAlive()); ObjectInputStream ois = new ObjectInputStream( new ByteArrayInputStream(outputStream.toByteArray())); assertEquals(TEST_EVENT, ois.readObject()); }
@Override public Map<String, StepTransition> translate(WorkflowInstance workflowInstance) { WorkflowInstance instance = objectMapper.convertValue(workflowInstance, WorkflowInstance.class); if (instance.getRunConfig() != null) { if (instance.getRunConfig().getPolicy() == RunPolicy.RESTART_FROM_INCOMPLETE || instance.getRunConfig().getPolicy() == RunPolicy.RESTART_FROM_SPECIFIC) { Map<String, StepInstance.Status> statusMap = instance.getAggregatedInfo().getStepAggregatedViews().entrySet().stream() .collect( Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().getStatus())); if (!statusMap.isEmpty()) { instance .getRunConfig() .setStartStepIds( statusMap.entrySet().stream() .filter( entry -> !entry.getValue().isComplete() && (entry.getValue().isTerminal() || entry.getValue() == StepInstance.Status.NOT_CREATED)) .map(Map.Entry::getKey) .collect(Collectors.toList())); } // handle the special case of restarting from a completed step if (instance.getRunConfig().getPolicy() == RunPolicy.RESTART_FROM_SPECIFIC) { String restartStepId = RunRequest.getCurrentNode(instance.getRunConfig().getRestartConfig()).getStepId(); if (!instance.getRunConfig().getStartStepIds().contains(restartStepId)) { instance.getRunConfig().getStartStepIds().add(restartStepId); } } } else { if (workflowInstance.getRunConfig().getStartStepIds() != null) { instance .getRunConfig() .setStartStepIds(new ArrayList<>(workflowInstance.getRunConfig().getStartStepIds())); } if (workflowInstance.getRunConfig().getEndStepIds() != null) { instance .getRunConfig() .setEndStepIds(new ArrayList<>(workflowInstance.getRunConfig().getEndStepIds())); } } } List<String> startStepIds = instance.getRunConfig() != null && instance.getRunConfig().getStartStepIds() != null ? instance.getRunConfig().getStartStepIds() : null; List<String> endStepIds = instance.getRunConfig() != null && instance.getRunConfig().getEndStepIds() != null ? 
instance.getRunConfig().getEndStepIds() : null; return WorkflowGraph.computeDag(instance.getRuntimeWorkflow(), startStepIds, endStepIds); }
@Test public void testTranslateForRestartFromSpecificWithTwoBranches() { instance.getRuntimeWorkflow().getSteps().get(2).getTransition().getSuccessors().remove("job.2"); instance .getAggregatedInfo() .getStepAggregatedViews() .put("job.2", StepAggregatedView.builder().status(StepInstance.Status.STOPPED).build()); instance.getRunConfig().setPolicy(RunPolicy.RESTART_FROM_SPECIFIC); instance .getRunConfig() .setRestartConfig( RestartConfig.builder() .addRestartNode("sample-dag-test-3", 1, "job3") .restartPolicy(RunPolicy.RESTART_FROM_SPECIFIC) .build()); Map<String, StepTransition> dag = translator.translate(instance); Assert.assertEquals(new HashSet<>(Arrays.asList("job.2", "job3", "job4")), dag.keySet()); StepTransition jobTransition = new StepTransition(); jobTransition.setSuccessors(Collections.singletonMap("job4", "true")); Assert.assertEquals(jobTransition, dag.get("job.2")); jobTransition.setPredecessors(Collections.emptyList()); jobTransition.setSuccessors(Collections.singletonMap("job4", "true")); Assert.assertEquals(jobTransition, dag.get("job3")); jobTransition.setPredecessors(Arrays.asList("job3", "job.2")); jobTransition.setSuccessors(Collections.emptyMap()); Assert.assertEquals(jobTransition, dag.get("job4")); }
public static Object replace(Object root, DataIterator it, Object value) { return transform(root, it, Transforms.constantValue(value)); }
@Test public void testReplaceByNameNested() throws Exception { SimpleTestData data = IteratorTestData.createSimpleTestData(); Builder.create(data.getDataElement(), IterationOrder.PRE_ORDER) .filterBy(Predicates.nameEquals("foo")) .replace(new DataList()); assertEquals(Builder.create(data.getDataElement(), IterationOrder.PRE_ORDER) .filterBy(Predicates.pathMatchesPathSpec(new PathSpec("nested", "nested", "foo", PathSpec.WILDCARD))) .count(), 0); assertEquals(Builder.create(data.getDataElement(), IterationOrder.PRE_ORDER) .filterBy(Predicates.pathMatchesPathSpec(new PathSpec("foo", PathSpec.WILDCARD))) .count(), 0); }
public static List<TriStateSelection> forAgentsResources(Set<ResourceConfig> resourceConfigs, Agents agents) { return convert(resourceConfigs, agents, new Assigner<>() { @Override public boolean shouldAssociate(Agent agent, ResourceConfig resourceConfig) { return agent.getResourcesAsList().contains(resourceConfig.getName()); } @Override public String identifier(ResourceConfig resourceConfig) { return resourceConfig.getName(); } @Override public boolean shouldEnable(Agent agent, ResourceConfig resourceConfig) { return true; } }); }
@Test public void shouldHaveActionAddIfAllAgentsHaveThatResource() { resourceConfigs.add(new ResourceConfig("all")); agents.add(new Agent("uuid1", "host1", "127.0.0.1", List.of("all"))); agents.add(new Agent("uuid2", "host2", "127.0.0.2", List.of("all"))); List<TriStateSelection> selections = TriStateSelection.forAgentsResources(resourceConfigs, agents); assertThat(selections, hasItem(new TriStateSelection("all", TriStateSelection.Action.add))); }
public File getUniqueFilenameForClass(String className) throws IOException { //class names should be passed in the normal dalvik style, with a leading L, a trailing ;, and using //'/' as a separator. if (className.charAt(0) != 'L' || className.charAt(className.length()-1) != ';') { throw new RuntimeException("Not a valid dalvik class name"); } int packageElementCount = 1; for (int i=1; i<className.length()-1; i++) { if (className.charAt(i) == '/') { packageElementCount++; } } String[] packageElements = new String[packageElementCount]; int elementIndex = 0; int elementStart = 1; for (int i=1; i<className.length()-1; i++) { if (className.charAt(i) == '/') { //if the first char after the initial L is a '/', or if there are //two consecutive '/' if (i-elementStart==0) { throw new RuntimeException("Not a valid dalvik class name"); } packageElements[elementIndex++] = className.substring(elementStart, i); elementStart = ++i; } } //at this point, we have added all the package elements to packageElements, but still need to add //the final class name. elementStart should point to the beginning of the class name //this will be true if the class ends in a '/', i.e. Lsome/package/className/; if (elementStart >= className.length()-1) { throw new RuntimeException("Not a valid dalvik class name"); } packageElements[elementIndex] = className.substring(elementStart, className.length()-1); return addUniqueChild(top, packageElements, 0); }
@Test public void testCaseSensitiveFilesystem() throws IOException { File tempDir = Files.createTempDir().getCanonicalFile(); if (!PathUtil.testCaseSensitivity(tempDir)) { // Test can only be performed on case sensitive systems return; } ClassFileNameHandler handler = new ClassFileNameHandler(tempDir, ".smali", true, false); File file = handler.getUniqueFilenameForClass("La/b/c;"); checkFilename(tempDir, file, "a", "b", "c.smali"); file = handler.getUniqueFilenameForClass("La/b/C;"); checkFilename(tempDir, file, "a", "b", "C.smali"); file = handler.getUniqueFilenameForClass("La/B/c;"); checkFilename(tempDir, file, "a", "B", "c.smali"); }
@Override public void writeRecord(Tuple2<K, V> record) throws IOException { try { this.recordWriter.write(record.f0, record.f1); } catch (InterruptedException e) { throw new IOException("Could not write Record.", e); } }
@Test void testWriteRecord() throws Exception { RecordWriter<String, Long> recordWriter = mock(DummyRecordWriter.class); HadoopOutputFormat<String, Long> hadoopOutputFormat = setupHadoopOutputFormat( new DummyOutputFormat(), Job.getInstance(), recordWriter, null, new Configuration()); hadoopOutputFormat.writeRecord(new Tuple2<String, Long>()); verify(recordWriter, times(1)).write(nullable(String.class), nullable(Long.class)); }
@Override public void createSecurityGroup(SecurityGroup sg) { checkNotNull(sg, ERR_NULL_SG); checkArgument(!Strings.isNullOrEmpty(sg.getId()), ERR_NULL_SG_ID); osSecurityGroupStore.createSecurityGroup(sg); log.info(String.format(MSG_SG, sg.getId(), MSG_CREATED)); }
@Test(expected = NullPointerException.class) public void testCreateNullSecurityGroup() { target.createSecurityGroup(null); }
public Result runExtractor(String value) { final Matcher matcher = pattern.matcher(value); final boolean found = matcher.find(); if (!found) { return null; } final int start = matcher.groupCount() > 0 ? matcher.start(1) : -1; final int end = matcher.groupCount() > 0 ? matcher.end(1) : -1; final String s; try { s = replaceAll ? matcher.replaceAll(replacement) : matcher.replaceFirst(replacement); } catch (Exception e) { throw new RuntimeException("Error while trying to replace string", e); } return new Result(s, start, end); }
@Test public void testReplacementWithReplaceAll() throws Exception { final Message message = messageFactory.createMessage("Foobar 123 Foobaz 456", "source", Tools.nowUTC()); final RegexReplaceExtractor extractor = new RegexReplaceExtractor( metricRegistry, "id", "title", 0L, Extractor.CursorStrategy.COPY, "message", "message", ImmutableMap.<String, Object>of("regex", "(\\w+) (\\d+)", "replacement", "$2/$1", "replace_all", true), "user", Collections.<Converter>emptyList(), Extractor.ConditionType.NONE, null); extractor.runExtractor(message); assertThat(message.getMessage()).isEqualTo("123/Foobar 456/Foobaz"); }
@Override public ProductSpuDO getSpu(Long id) { return productSpuMapper.selectById(id); }
@Test void getSpu() { // 准备参数 ProductSpuDO createReqVO = randomPojo(ProductSpuDO.class,o->{ o.setCategoryId(generateId()); o.setBrandId(generateId()); o.setDeliveryTemplateId(generateId()); o.setSort(RandomUtil.randomInt(1,100)); // 限制排序范围 o.setGiveIntegral(generaInt()); // 限制范围为正整数 o.setVirtualSalesCount(generaInt()); // 限制范围为正整数 o.setPrice(generaInt()); // 限制范围为正整数 o.setMarketPrice(generaInt()); // 限制范围为正整数 o.setCostPrice(generaInt()); // 限制范围为正整数 o.setStock(generaInt()); // 限制范围为正整数 o.setGiveIntegral(generaInt()); // 限制范围为正整数 o.setSalesCount(generaInt()); // 限制范围为正整数 o.setBrowseCount(generaInt()); // 限制范围为正整数 }); productSpuMapper.insert(createReqVO); ProductSpuDO spu = productSpuService.getSpu(createReqVO.getId()); assertPojoEquals(createReqVO, spu); }
String storeSink() { return storeSink; }
@Test public void testStoreSinkIsEscaped() { Queries queries = new Queries(mappingEscape, idColumnEscape, columnMetadataEscape); String result = queries.storeSink(); assertEquals("SINK INTO \"my\"\"mapping\" (\"i\"\"d\", \"na\"\"me\", \"add\"\"ress\") VALUES (?, ?, ?)", result); }
@Override @Cacheable(cacheNames = RedisKeyConstants.SMS_TEMPLATE, key = "#code", unless = "#result == null") public SmsTemplateDO getSmsTemplateByCodeFromCache(String code) { return smsTemplateMapper.selectByCode(code); }
@Test public void testGetSmsTemplateByCodeFromCache() { // mock 数据 SmsTemplateDO dbSmsTemplate = randomSmsTemplateDO(); smsTemplateMapper.insert(dbSmsTemplate);// @Sql: 先插入出一条存在的数据 // 准备参数 String code = dbSmsTemplate.getCode(); // 调用 SmsTemplateDO smsTemplate = smsTemplateService.getSmsTemplateByCodeFromCache(code); // 校验 assertPojoEquals(dbSmsTemplate, smsTemplate); }
@Override public Map<String, List<TopicPartition>> assign(Map<String, Integer> partitionsPerTopic, Map<String, Subscription> subscriptions) { Map<String, List<TopicPartition>> assignment = new HashMap<>(); List<MemberInfo> memberInfoList = new ArrayList<>(); for (Map.Entry<String, Subscription> memberSubscription : subscriptions.entrySet()) { assignment.put(memberSubscription.getKey(), new ArrayList<>()); memberInfoList.add(new MemberInfo(memberSubscription.getKey(), memberSubscription.getValue().groupInstanceId())); } CircularIterator<MemberInfo> assigner = new CircularIterator<>(Utils.sorted(memberInfoList)); for (TopicPartition partition : allPartitionsSorted(partitionsPerTopic, subscriptions)) { final String topic = partition.topic(); while (!subscriptions.get(assigner.peek().memberId).topics().contains(topic)) assigner.next(); assignment.get(assigner.next().memberId).add(partition); } return assignment; }
@Test public void testTwoDynamicConsumersTwoTopicsSixPartitions() { String topic1 = "topic1"; String topic2 = "topic2"; String consumer1 = "consumer1"; String consumer2 = "consumer2"; Map<String, Integer> partitionsPerTopic = setupPartitionsPerTopicWithTwoTopics(3, 3); Map<String, Subscription> consumers = new HashMap<>(); consumers.put(consumer1, new Subscription(topics(topic1, topic2))); consumers.put(consumer2, new Subscription(topics(topic1, topic2))); Map<String, List<TopicPartition>> assignment = assignor.assign(partitionsPerTopic, consumers); assertEquals(partitions(tp(topic1, 0), tp(topic1, 2), tp(topic2, 1)), assignment.get(consumer1)); assertEquals(partitions(tp(topic1, 1), tp(topic2, 0), tp(topic2, 2)), assignment.get(consumer2)); }
public void cleanupLeakedQueries(final List<PersistentQueryMetadata> persistentQueries) { final Set<String> stateStoreNames = persistentQueries .stream() .flatMap(s -> { final List<String> doNotDelete = new ArrayList<>( Collections.singletonList(s.getQueryApplicationId())); if (s instanceof BinPackedPersistentQueryMetadataImpl) { doNotDelete.add(s.getQueryApplicationId() + "/__" + s.getQueryId().toString() + "__"); } return doNotDelete.stream(); }) .collect(Collectors.toSet()); final String[] stateDirFileNames = new File(stateDir).list(); if (stateDirFileNames == null) { LOG.info("No state stores to clean up"); } else { final Set<String> allStateStores = Arrays.stream(stateDirFileNames) .flatMap(f -> { final String[] fileNames = new File(stateDir + "/" + f).list(); if (null == fileNames) { return Stream.of(f); } else if (Arrays.stream(fileNames).anyMatch(t -> t.matches("__*__"))) { return Arrays.stream(fileNames) .filter(t -> t.matches("__*__")) .map(s -> f + "/" + s); } else { return Stream.of(f); } }) .collect(Collectors.toSet()); allStateStores.removeAll(stateStoreNames); allStateStores.forEach((storeName) -> queryCleanupService.addCleanupTask( new QueryCleanupService.QueryCleanupTask( serviceContext, storeName.split("/")[0], 1 < storeName.split("__").length ? Optional.of(storeName.split("__")[1]) : Optional.empty(), false, stateDir, ksqlConfig.getString(KsqlConfig.KSQL_SERVICE_ID_CONFIG), ksqlConfig.getString(KsqlConfig.KSQL_PERSISTENT_QUERY_NAME_PREFIX_CONFIG)))); } }
@Test public void shouldNotDeleteSharedRuntimesWhenTheyHaveAQuery() { // Given: when(binPackedPersistentQueryMetadata.getQueryApplicationId()).thenReturn("testQueryID"); File fakeStateStore = new File(tempFile.getAbsolutePath() + "testQueryID"); if (!fakeStateStore.exists()) { assertTrue(fakeStateStore.mkdirs()); } // When: cleanup.cleanupLeakedQueries(ImmutableList.of(binPackedPersistentQueryMetadata)); awaitCleanupComplete(); // Then: assertTrue(fakeStateStore.exists()); assertTrue(tempFile.exists()); }
public static ImmutableSet<HttpUrl> allSubPaths(String url) { return allSubPaths(HttpUrl.parse(url)); }
@Test public void allSubPaths_whenValidQueryParamsAndFragments_removesParamsAndFragments() { assertThat(allSubPaths("http://localhost/?param=value&param2=value2#abc")) .containsExactly(HttpUrl.parse("http://localhost/")); }
@Override public boolean replace(String key, V oldValue, V newValue) { return Map.super.replace(key.toLowerCase(), oldValue, newValue); }
@Test void testReplace() { Map<String, Object> map = new LowerCaseLinkHashMap<>(lowerCaseLinkHashMap); Object replace = map.replace("key", "replace"); Assertions.assertEquals("Value", replace); Assertions.assertEquals("replace",map.get("key")); boolean result = map.replace("key2", "value2", "replace"); Assertions.assertFalse(result); Assertions.assertEquals("Value2", map.get("key2")); result = map.replace("key2", "Value2", "replace"); Assertions.assertTrue(result); Assertions.assertEquals("replace", map.get("key2")); }
public int available() { if (position < end) { return end - position; } else { return 0; } }
@Test void testAvailable() throws Exception { byte[] bytes; DataInputDeserializer dis; bytes = new byte[] {}; dis = new DataInputDeserializer(bytes, 0, bytes.length); assertThat(dis.available()).isEqualTo(bytes.length); bytes = new byte[] {1, 2, 3}; dis = new DataInputDeserializer(bytes, 0, bytes.length); assertThat(dis.available()).isEqualTo(bytes.length); dis.readByte(); assertThat(dis.available()).isEqualTo(2); dis.readByte(); assertThat(dis.available()).isOne(); dis.readByte(); assertThat(dis.available()).isZero(); assertThatThrownBy(dis::readByte).isInstanceOf(IOException.class); assertThat(dis.available()).isZero(); }
public static Predicate parse(String expression) { final Stack<Predicate> predicateStack = new Stack<>(); final Stack<Character> operatorStack = new Stack<>(); final String trimmedExpression = TRIMMER_PATTERN.matcher(expression).replaceAll(""); final StringTokenizer tokenizer = new StringTokenizer(trimmedExpression, OPERATORS, true); boolean isTokenMode = true; while (true) { final Character operator; final String token; if (isTokenMode) { if (tokenizer.hasMoreTokens()) { token = tokenizer.nextToken(); } else { break; } if (OPERATORS.contains(token)) { operator = token.charAt(0); } else { operator = null; } } else { operator = operatorStack.pop(); token = null; } isTokenMode = true; if (operator == null) { try { predicateStack.push(Class.forName(token).asSubclass(Predicate.class).getDeclaredConstructor().newInstance()); } catch (ClassCastException e) { throw new RuntimeException(token + " must implement " + Predicate.class.getName(), e); } catch (Exception e) { throw new RuntimeException(e); } } else { if (operatorStack.empty() || operator == '(') { operatorStack.push(operator); } else if (operator == ')') { while (operatorStack.peek() != '(') { evaluate(predicateStack, operatorStack); } operatorStack.pop(); } else { if (OPERATOR_PRECEDENCE.get(operator) < OPERATOR_PRECEDENCE.get(operatorStack.peek())) { evaluate(predicateStack, operatorStack); isTokenMode = false; } operatorStack.push(operator); } } } while (!operatorStack.empty()) { evaluate(predicateStack, operatorStack); } if (predicateStack.size() > 1) { throw new RuntimeException("Invalid logical expression"); } return predicateStack.pop(); }
@Test public void testOrNot() { final Predicate parsed = PredicateExpressionParser.parse("com.linkedin.data.it.AlwaysTruePredicate | !com.linkedin.data.it.AlwaysFalsePredicate"); Assert.assertEquals(parsed.getClass(), OrPredicate.class); final List<Predicate> orChildren = ((OrPredicate) parsed).getChildPredicates(); Assert.assertEquals(orChildren.get(0).getClass(), AlwaysTruePredicate.class); Assert.assertEquals(orChildren.get(1).getClass(), NotPredicate.class); final Predicate notChild = ((NotPredicate) orChildren.get(1)).getChildPredicate(); Assert.assertEquals(notChild.getClass(), AlwaysFalsePredicate.class); }
@SuppressWarnings("unchecked") @Override public void configure(final Map<String, ?> configs, final boolean isKey) { final String windowedInnerClassSerdeConfig = (String) configs.get(StreamsConfig.WINDOWED_INNER_CLASS_SERDE); Serde<T> windowInnerClassSerde = null; if (windowedInnerClassSerdeConfig != null) { try { windowInnerClassSerde = Utils.newInstance(windowedInnerClassSerdeConfig, Serde.class); } catch (final ClassNotFoundException e) { throw new ConfigException(StreamsConfig.WINDOWED_INNER_CLASS_SERDE, windowedInnerClassSerdeConfig, "Serde class " + windowedInnerClassSerdeConfig + " could not be found."); } } if (inner != null && windowedInnerClassSerdeConfig != null) { if (!inner.getClass().getName().equals(windowInnerClassSerde.deserializer().getClass().getName())) { throw new IllegalArgumentException("Inner class deserializer set using constructor " + "(" + inner.getClass().getName() + ")" + " is different from the one set in windowed.inner.class.serde config " + "(" + windowInnerClassSerde.deserializer().getClass().getName() + ")."); } } else if (inner == null && windowedInnerClassSerdeConfig == null) { throw new IllegalArgumentException("Inner class deserializer should be set either via constructor " + "or via the windowed.inner.class.serde config"); } else if (inner == null) inner = windowInnerClassSerde.deserializer(); }
@Test public void shouldThrowErrorIfDeserialisersConflictInConstructorAndConfig() { props.put(StreamsConfig.WINDOWED_INNER_CLASS_SERDE, Serdes.ByteArraySerde.class.getName()); assertThrows(IllegalArgumentException.class, () -> sessionWindowedDeserializer.configure(props, false)); }
public Response get(URL url, Request request) throws IOException { return call(HttpMethods.GET, url, request); }
@Test public void testRetries() throws IOException { HttpServer server = HttpServer.create(new InetSocketAddress(0), 1); AtomicBoolean failed = new AtomicBoolean(); server .createContext("/") .setHandler( exchange -> exchange.sendResponseHeaders(failed.compareAndSet(false, true) ? 123 : 200, -1)); try { server.start(); int port = server.getAddress().getPort(); List<LogEvent> events = new ArrayList<>(); int returnCode = new FailoverHttpClient(true, true, events::add) .get(new URL("http://localhost:" + port), Request.builder().build()) .getStatusCode(); assertThat(returnCode).isEqualTo(200); assertThat(failed.get()).isTrue(); assertThat(events) .containsExactly( LogEvent.warn("GET http://localhost:" + port + " failed and will be retried")); } finally { server.stop(0); } }
@Override public void preflight(final Path workdir, final String filename) throws BackgroundException { if(workdir.isRoot() || new DeepboxPathContainerService(session).isDeepbox(workdir) || new DeepboxPathContainerService(session).isBox(workdir)) { throw new AccessDeniedException(MessageFormat.format(LocaleFactory.localizedString("Cannot create {0}", "Error"), filename)).withFile(workdir); } final Acl acl = workdir.attributes().getAcl(); if(Acl.EMPTY == acl) { // Missing initialization log.warn(String.format("Unknown ACLs on %s", workdir)); return; } if(!acl.get(new Acl.CanonicalUser()).contains(CANADDCHILDREN)) { if(log.isWarnEnabled()) { log.warn(String.format("ACL %s for %s does not include %s", acl, workdir, CANADDCHILDREN)); } throw new AccessDeniedException(MessageFormat.format(LocaleFactory.localizedString("Cannot create {0}", "Error"), filename)).withFile(workdir); } }
@Test public void testNoDuplicates() throws Exception { final DeepboxIdProvider fileid = new DeepboxIdProvider(session); final Path documents = new Path("/ORG 4 - DeepBox Desktop App/ORG3:Box1/Documents/", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path test = new DeepboxTouchFeature(session, fileid).touch(new Path(documents, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); new DeepboxTouchFeature(session, fileid).preflight(documents.withAttributes(new DeepboxAttributesFinderFeature(session, fileid).find(documents)), test.getName()); assertTrue(new DeepboxFindFeature(session, fileid).find(test)); new DeepboxDeleteFeature(session, fileid).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
public static InetSocketAddress getServerAddress(Object proxy) { return getConnectionIdForProxy(proxy).getAddress(); }
@Test public void testProxyAddress() throws Exception { Server server = null; TestRpcService proxy = null; try { server = setupTestServer(conf, -1); // create a client proxy = getClient(addr, conf); assertEquals(addr, RPC.getServerAddress(proxy)); } finally { stop(server, proxy); } }
public long earliestMsgStoreTime(MessageQueue mq) throws MQClientException { String brokerAddr = this.mQClientFactory.findBrokerAddressInPublish(this.mQClientFactory.getBrokerNameFromMessageQueue(mq)); if (null == brokerAddr) { this.mQClientFactory.updateTopicRouteInfoFromNameServer(mq.getTopic()); brokerAddr = this.mQClientFactory.findBrokerAddressInPublish(this.mQClientFactory.getBrokerNameFromMessageQueue(mq)); } if (brokerAddr != null) { try { return this.mQClientFactory.getMQClientAPIImpl().getEarliestMsgStoretime(brokerAddr, mq, timeoutMillis); } catch (Exception e) { throw new MQClientException("Invoke Broker[" + brokerAddr + "] exception", e); } } throw new MQClientException("The broker[" + mq.getBrokerName() + "] not exist", null); }
@Test public void assertEarliestMsgStoreTime() throws MQClientException { assertEquals(0, mqAdminImpl.earliestMsgStoreTime(new MessageQueue())); }
/**
 * Renders an AST back into SQL text.
 *
 * @param root the AST node to format
 * @return the formatted SQL with any trailing newlines removed
 */
public static String formatSql(final AstNode root) {
    final StringBuilder sql = new StringBuilder();
    new Formatter(sql).process(root, 0);
    return StringUtils.stripEnd(sql.toString(), "\n");
}
// ALTER STREAM statements round-trip through the formatter with one ADD COLUMN per line
// and canonical type names (INT is widened to INTEGER).
@Test
public void shouldFormatAlterStatement() {
    // Given:
    final String statementString = "ALTER STREAM FOO ADD COLUMN A STRING, ADD COLUMN B INT;";
    final Statement statement = parseSingle(statementString);
    // When:
    final String result = SqlFormatter.formatSql(statement);
    // Then:
    assertThat(result, is("ALTER STREAM FOO\n" + "ADD COLUMN A STRING,\n" + "ADD COLUMN B INTEGER;"));
}
public void begin(InterpretationContext ec, String localName, Attributes attributes) { if ("substitutionProperty".equals(localName)) { addWarn("[substitutionProperty] element has been deprecated. Please use the [property] element instead."); } String name = attributes.getValue(NAME_ATTRIBUTE); String value = attributes.getValue(VALUE_ATTRIBUTE); String scopeStr = attributes.getValue(SCOPE_ATTRIBUTE); Scope scope = ActionUtil.stringToScope(scopeStr); if (checkFileAttributeSanity(attributes)) { String file = attributes.getValue(FILE_ATTRIBUTE); file = ec.subst(file); try { FileInputStream istream = new FileInputStream(file); loadAndSetProperties(ec, istream, scope); } catch (FileNotFoundException e) { addError("Could not find properties file [" + file + "].", e); } catch (IOException e1) { addError("Could not read properties file [" + file + "].", e1); } } else if (checkResourceAttributeSanity(attributes)) { String resource = attributes.getValue(RESOURCE_ATTRIBUTE); resource = ec.subst(resource); URL resourceURL = Loader.getResourceBySelfClassLoader(resource); if (resourceURL == null) { addError("Could not find resource [" + resource + "]."); } else { try { InputStream istream = resourceURL.openStream(); loadAndSetProperties(ec, istream, scope); } catch (IOException e) { addError("Could not read resource file [" + resource + "].", e); } } } else if (checkValueNameAttributesSanity(attributes)) { value = RegularEscapeUtil.basicEscape(value); // now remove both leading and trailing spaces value = value.trim(); value = ec.subst(value); ActionUtil.setProperty(ec, name, value, scope); } else { addError(INVALID_ATTRIBUTES); } }
// A plain name/value pair is stored directly as a context property.
@Test
public void nameValuePair() {
    atts.setValue("name", "v1");
    atts.setValue("value", "work");
    propertyAction.begin(ec, null, atts);
    assertEquals("work", ec.getProperty("v1"));
}
/**
 * Sets the clock refresh period.
 *
 * @param clockRefreshPeriod the refresh period; must be zero or positive
 * @throws IllegalArgumentException if the value is negative
 */
public void setClockRefreshPeriod(int clockRefreshPeriod) {
    final int validated = checkNotNegative(clockRefreshPeriod, "clockRefreshPeriod");
    this.clockRefreshPeriod = validated;
}
// Negative refresh periods are rejected with IllegalArgumentException.
@Test
public void test_setClockRefreshPeriod_whenNegative() {
    ReactorBuilder builder = newBuilder();
    assertThrows(IllegalArgumentException.class, () -> builder.setClockRefreshPeriod(-1));
}
// Resets the counter to zero and clears its dirty flag so the cell reports as
// unchanged until the next increment.
@Override
public void reset() {
    value.set(0L);
    dirty.reset();
}
// After reset() the cumulative value returns to 0 and the dirty state matches a fresh one.
@Test
public void testReset() {
    CounterCell counterCell = new CounterCell(MetricName.named("namespace", "name"));
    counterCell.inc(1);
    // Incrementing marks the cell dirty.
    Assert.assertNotEquals(counterCell.getDirty(), new DirtyState());
    assertThat(counterCell.getCumulative(), equalTo(1L));
    counterCell.reset();
    assertThat(counterCell.getCumulative(), equalTo(0L));
    assertThat(counterCell.getDirty(), equalTo(new DirtyState()));
}
/**
 * Creates and persists a new MijnDigid session, copying account id and authentication
 * state from the referenced app session when it exists. When the app session is absent
 * the new session is saved unauthenticated.
 *
 * <p>Improvement: replaced the {@code isPresent()} + repeated {@code get()} pattern
 * with {@code Optional.ifPresent}, evaluating the optional only once.
 *
 * @param appSessionId id of the app session to copy state from (may not exist)
 * @return the newly created (and saved) session
 */
public MijnDigidSession createSession(String appSessionId) {
    // Session lifetime: 15 minutes, expressed in seconds.
    MijnDigidSession session = new MijnDigidSession(15 * 60);
    appClient.getAppSession(appSessionId).ifPresent(appSession -> {
        session.setAccountId(appSession.getAccountId());
        session.setAuthenticated(appSession.isAuthenticated());
    });
    mijnDigiDSessionRepository.save(session);
    return session;
}
// When the app session does not exist, a session is still created and saved,
// but it remains unauthenticated.
@Test
void testCreateNonExistingAppSession() {
    String appSessionId = "appSessionId";
    when(appClient.getAppSession(eq(appSessionId))).thenReturn(Optional.empty());
    MijnDigidSession session = mijnDigiDSessionService.createSession(appSessionId);
    verify(appClient, times(1)).getAppSession(eq(appSessionId));
    verify(mijnDigiDSessionRepository, times(1)).save(any());
    assertFalse(session.isAuthenticated());
}
public static String normalizeUri(String uri) throws URISyntaxException { // try to parse using the simpler and faster Camel URI parser String[] parts = CamelURIParser.fastParseUri(uri); if (parts != null) { // we optimized specially if an empty array is returned if (parts == URI_ALREADY_NORMALIZED) { return uri; } // use the faster and more simple normalizer return doFastNormalizeUri(parts); } else { // use the legacy normalizer as the uri is complex and may have unsafe URL characters return doComplexNormalizeUri(uri); } }
// Bracketed IPv6 literals with an explicit port must pass through normalization unchanged.
@Test
public void testNormalizeIPv6HttpEndpoint() throws Exception {
    String result = URISupport.normalizeUri("http://[2a00:8a00:6000:40::1413]:30300/test");
    assertEquals("http://[2a00:8a00:6000:40::1413]:30300/test", result);
}
/**
 * Checks whether the given string is null or empty.
 *
 * @param val the string to check
 * @return true when {@code val} is null or has length zero
 * @deprecated use {@code Utils.isEmpty(String)} instead; this is a thin delegate kept
 *             for backward compatibility
 */
@Deprecated
public static boolean isEmpty( String val ) {
    return Utils.isEmpty( val );
}
// The List overload of Const.isEmpty treats null and empty lists as empty,
// and any list with elements as non-empty.
@Test
public void testIsEmptyList() {
    assertTrue( Const.isEmpty( (List) null ) );
    assertTrue( Const.isEmpty( new ArrayList() ) );
    assertFalse( Const.isEmpty( Arrays.asList( "test", 1 ) ) );
}
@Override public List<URL> build(ServiceInstance serviceInstance) { Map<String, String> paramsMap = getMetadataServiceURLsParams(serviceInstance); String serviceName = serviceInstance.getServiceName(); String host = serviceInstance.getHost(); URL url; if (paramsMap.isEmpty()) { // ServiceInstance Metadata is empty. Happened when registry not support metadata write. url = generateUrlWithoutMetadata(serviceName, host, serviceInstance.getPort()); } else { url = generateWithMetadata(serviceName, host, paramsMap); } url = url.setScopeModel(serviceInstance.getApplicationModel().getInternalModule()); return Collections.singletonList(url); }
// Covers both URL-generation paths of StandardMetadataServiceURLBuilder:
// the no-metadata fallback (default dubbo URL on port 7001) and the
// metadata-driven URL (rest on port 20880 with parameters from the instance).
@Test
void testBuild() {
    ExtensionLoader<MetadataServiceURLBuilder> loader = ApplicationModel.defaultModel().getExtensionLoader(MetadataServiceURLBuilder.class);
    MetadataServiceURLBuilder builder = loader.getExtension(StandardMetadataServiceURLBuilder.NAME);
    // test generateUrlWithoutMetadata
    List<URL> urls = builder.build(new DefaultServiceInstance("test", "127.0.0.1", 8080, ApplicationModel.defaultModel()));
    assertEquals(1, urls.size());
    URL url = urls.get(0);
    assertEquals("dubbo", url.getProtocol());
    assertEquals("127.0.0.1", url.getHost());
    assertEquals(7001, url.getPort());
    assertEquals(MetadataService.class.getName(), url.getServiceInterface());
    assertEquals("test", url.getGroup());
    assertEquals("consumer", url.getSide());
    assertEquals("1.0.0", url.getVersion());
    // assertEquals(url.getParameters().get("getAndListenInstanceMetadata.1.callback"), "true");
    assertEquals("false", url.getParameters().get("reconnect"));
    assertEquals("5000", url.getParameters().get("timeout"));
    assertEquals(ApplicationModel.defaultModel(), url.getApplicationModel());
    // test generateWithMetadata
    urls = builder.build(serviceInstance);
    assertEquals(1, urls.size());
    url = urls.get(0);
    assertEquals("rest", url.getProtocol());
    assertEquals("127.0.0.1", url.getHost());
    assertEquals(20880, url.getPort());
    assertEquals(MetadataService.class.getName(), url.getServiceInterface());
    assertEquals("test", url.getGroup());
    assertEquals("consumer", url.getSide());
    assertEquals("1.0.0", url.getVersion());
    assertEquals("dubbo-provider-demo", url.getApplication());
    assertEquals("5000", url.getParameters().get("timeout"));
}
// Allocates a heap buffer with default initial capacity from the shared allocator.
public static ByteBuf buffer() {
    return ALLOC.heapBuffer();
}
// For each width (short/medium/int/long): a *LE write must read back unchanged via the
// *LE getter and byte-reversed via the big-endian getter.
@Test
public void littleEndianWriteOnDefaultBufferMustStoreLittleEndianValue() {
    ByteBuf b = buffer(1024);
    b.writeShortLE(0x0102);
    assertEquals((short) 0x0102, b.getShortLE(0));
    assertEquals((short) 0x0201, b.getShort(0));
    b.clear();
    b.writeMediumLE(0x010203);
    assertEquals(0x010203, b.getMediumLE(0));
    assertEquals(0x030201, b.getMedium(0));
    b.clear();
    b.writeIntLE(0x01020304);
    assertEquals(0x01020304, b.getIntLE(0));
    assertEquals(0x04030201, b.getInt(0));
    b.clear();
    b.writeLongLE(0x0102030405060708L);
    assertEquals(0x0102030405060708L, b.getLongLE(0));
    assertEquals(0x0807060504030201L, b.getLong(0));
}
/**
 * Writes a per-class summary of the given counter's requests into the PDF document.
 *
 * <p>Fix: {@code document.close()} was previously skipped when a
 * {@link DocumentException} occurred (it sat after the try/catch on the success path
 * only), leaving the document open; it now runs in a {@code finally} block on every
 * path.
 *
 * @param collector the collector owning the counter data
 * @param counter   the counter whose requests are summarized
 * @param requestId optional request id used to filter/aggregate by class name
 * @param range     the time range reported in the title
 * @throws IOException if the underlying PDF library reports a DocumentException
 */
public void writeCounterSummaryPerClass(Collector collector, Counter counter, String requestId, Range range) throws IOException {
    final List<CounterRequest> requestList = new CounterRequestAggregation(counter)
            .getRequestsAggregatedOrFilteredByClassName(requestId);
    try {
        document.open();
        final String counterLabel = getString(counter.getName() + "Label");
        final String title = getFormattedString("Statistiques_compteur", counterLabel) + " - " + range.getLabel();
        addParagraph(title, counter.getIconName());
        new PdfCounterReport(collector, counter, range, false, document)
                .writeRequests(counter.getChildCounterName(), requestList);
    } catch (final DocumentException e) {
        throw createIOException(e);
    } finally {
        // ensure the document is closed even when writing fails
        document.close();
    }
}
// Smoke test: writing the per-class counter summary produces non-empty PDF output.
@Test
public void testWriteCounterSummaryPerClass() throws IOException {
    final ByteArrayOutputStream output = new ByteArrayOutputStream();
    final PdfOtherReport pdfOtherReport = new PdfOtherReport(TEST_APP, output);
    final Counter counter = new Counter("services", null);
    final Collector collector = new Collector(TEST_APP, List.of(counter));
    // null requestId means: no per-request filtering, summarize everything.
    pdfOtherReport.writeCounterSummaryPerClass(collector, counter, null, Period.TOUT.getRange());
    assertNotEmptyAndClear(output);
}
/**
 * Creates one input split per sorted key-group range, honoring the operator's max
 * parallelism. The requested minimum split count is a hint passed to the range
 * partitioning.
 *
 * @param minNumSplits minimum number of splits desired
 * @return the input splits, one per key-group range, indexed in order
 * @throws IOException declared by the InputFormat contract
 */
@Override
public KeyGroupRangeInputSplit[] createInputSplits(int minNumSplits) throws IOException {
    final int maxParallelism = operatorState.getMaxParallelism();
    final List<KeyGroupRange> ranges = sortedKeyGroupRanges(minNumSplits, maxParallelism);
    return CollectionUtil.mapWithIndex(
                    ranges,
                    (range, index) ->
                            createKeyGroupRangeInputSplit(operatorState, maxParallelism, range, index))
            .toArray(KeyGroupRangeInputSplit[]::new);
}
// A reader function that misuses its RuntimeContext must surface as an IOException
// when the split is read; the explicit fail() guards against silent success.
@Test(expected = IOException.class)
public void testInvalidProcessReaderFunctionFails() throws Exception {
    OperatorID operatorID = OperatorIDGenerator.fromUid("uid");
    OperatorSubtaskState state = createOperatorSubtaskState(new StreamFlatMap<>(new StatefulFunction()));
    OperatorState operatorState = new OperatorState(operatorID, 1, 128);
    operatorState.putState(0, state);
    KeyedStateInputFormat<?, ?, ?> format = new KeyedStateInputFormat<>( operatorState, new MemoryStateBackend(), new Configuration(), new KeyedStateReaderOperator<>(new ReaderFunction(), Types.INT), new ExecutionConfig());
    KeyGroupRangeInputSplit split = format.createInputSplits(1)[0];
    KeyedStateReaderFunction<Integer, Integer> userFunction = new InvalidReaderFunction();
    readInputSplit(split, userFunction);
    Assert.fail("KeyedStateReaderFunction did not fail on invalid RuntimeContext use");
}
/**
 * Intercepts an outgoing RestTemplate request before execution. When the target host
 * matches the configured realm name and the resolved service is covered by the plugin
 * effect policy, the call is routed through the invoker service: an exceptional outcome
 * is logged and propagated via {@code setThrowableOut}, a successful outcome short-circuits
 * the original call via {@code context.skip}. Otherwise the context passes through
 * untouched.
 *
 * @param context the interception context; arguments[0] is the request URI,
 *                arguments[1] the HTTP method
 * @return the (possibly mutated) context
 * @throws Exception declared by the interceptor contract
 */
@Override
protected ExecuteContext doBefore(ExecuteContext context) throws Exception {
    LogUtils.printHttpRequestBeforePoint(context);
    final InvokerService invokerService = PluginServiceManager.getPluginService(InvokerService.class);
    final URI requestUri = (URI) context.getArguments()[0];
    final HttpMethod method = (HttpMethod) context.getArguments()[1];
    // Guard 1: only intercept hosts matching the configured realm name.
    if (!PlugEffectWhiteBlackUtils.isHostEqualRealmName(requestUri.getHost())) {
        return context;
    }
    final Map<String, String> hostAndPath = RequestInterceptorUtils.recoverHostAndPath(requestUri.getPath());
    final String serviceName = hostAndPath.get(HttpConstants.HTTP_URI_SERVICE);
    // Guard 2: only intercept services covered by the white/black-list policy.
    if (!PlugEffectWhiteBlackUtils.isPlugEffect(serviceName)) {
        return context;
    }
    RequestInterceptorUtils.printRequestLog("restTemplate", hostAndPath);
    final Optional<Object> invokeResult = invokerService.invoke(
            buildInvokerFunc(requestUri, hostAndPath, context, method),
            ex -> ex,
            serviceName);
    if (!invokeResult.isPresent()) {
        return context;
    }
    final Object outcome = invokeResult.get();
    if (outcome instanceof Exception) {
        LOGGER.log(Level.SEVERE, "request is error, uri is " + requestUri, (Exception) outcome);
        context.setThrowableOut((Exception) outcome);
        return context;
    }
    // Successful invoker result: skip the original RestTemplate call.
    context.skip(outcome);
    return context;
}
// Exercises doBefore twice: first with multiple realm names configured (host mismatch,
// URI untouched), then with the matching realm and an ALL-pass strategy where the
// invoker returns a result (URI argument still untouched on the context).
@Test
public void testRestTemplateInterceptor() throws Exception {
    ExecuteContext context = ExecuteContext.forMemberMethod(new Object(), null, arguments, null, null);
    URI uri = createURI(url);
    arguments[0] = uri;
    arguments[1] = HttpMethod.GET;
    //Contains domain names, sets multiple domain names, and does not set blacklist or whitelist
    discoveryPluginConfig.setRealmName(realmNames);
    interceptor.doBefore(context);
    URI uriNew = (URI) context.getArguments()[0];
    Assert.assertEquals(url, uriNew.toString());
    discoveryPluginConfig.setRealmName(realmName);
    //Contains domain names, and sets all through policies
    initStrategy(PlugEffectWhiteBlackConstants.STRATEGY_ALL, "zookeeper-provider-demo");
    Mockito.when(invokerService.invoke(null, null, "zookeeper-provider-demo")) .thenReturn(Optional.ofNullable(new Object()));
    interceptor.doBefore(context);
    uriNew = (URI) context.getArguments()[0];
    Assert.assertEquals(url, uriNew.toString());
}