focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Generates a QR code for {@code content} and writes it to {@code targetFile}, choosing the
 * output format from the file's extension: SVG and TXT (ASCII art) are written as UTF-8 text,
 * anything else is rendered as an image via {@link ImgUtil#write}.
 * NOTE(review): if the target file has no extension, {@code FileUtil.extName} may return
 * null/empty and the switch would NPE on null — TODO confirm intended behavior for such paths.
 *
 * @param content    the text to encode
 * @param width      QR code width in pixels
 * @param height     QR code height in pixels
 * @param targetFile destination file; its extension selects the format
 * @return the {@code targetFile} that was written
 */
public static File generate(String content, int width, int height, File targetFile) { String extName = FileUtil.extName(targetFile); switch (extName) { case QR_TYPE_SVG: String svg = generateAsSvg(content, new QrConfig(width, height)); FileUtil.writeString(svg, targetFile, StandardCharsets.UTF_8); break; case QR_TYPE_TXT: String txt = generateAsAsciiArt(content, new QrConfig(width, height)); FileUtil.writeString(txt, targetFile, StandardCharsets.UTF_8); break; default: final BufferedImage image = generate(content, width, height); ImgUtil.write(image, targetFile); break; } return targetFile; }
// Manual comparison test (disabled: writes to a hard-coded local d:/test path) that renders the
// same content to jpg, txt, png and svg so the per-extension output branches can be eyeballed.
@Test @Disabled public void comparePngAndSvgAndAsciiArtTest() { final QrConfig qrConfig = QrConfig.create() .setForeColor(null) .setBackColor(Color.WHITE) .setWidth(200) .setHeight(200).setMargin(1); QrCodeUtil.generate("https://hutool.cn", qrConfig, FileUtil.touch("d:/test/compare/config_null_color.jpg")); QrCodeUtil.generate("https://hutool.cn", qrConfig, FileUtil.touch("d:/test/compare/config_null_color.txt")); QrCodeUtil.generate("https://hutool.cn", qrConfig, FileUtil.touch("d:/test/compare/config_null_color.png")); QrCodeUtil.generate("https://hutool.cn", qrConfig, FileUtil.touch("d:/test/compare/config_null_color.svg")); }
/**
 * Request filter enforcing TPS (transactions-per-second) limits. Returns null to let the request
 * proceed; returns an error {@link Response} with {@code NacosException.OVER_THRESHOLD} only when
 * the handler method is annotated with {@code @TpsControl}, TPS control is enabled, and the check
 * fails. All internal failures (handler lookup, parser errors, response construction) are logged
 * and deliberately swallowed so that flow control never breaks request handling itself.
 */
@Override protected Response filter(Request request, RequestMeta meta, Class handlerClazz) { Method method; try { method = getHandleMethod(handlerClazz); } catch (NacosException e) { return null; } if (method.isAnnotationPresent(TpsControl.class) && TpsControlConfig.isTpsControlEnabled()) { try { TpsControl tpsControl = method.getAnnotation(TpsControl.class); String pointName = tpsControl.pointName(); TpsCheckRequest tpsCheckRequest = null; String parseName = StringUtils.isBlank(tpsControl.name()) ? pointName : tpsControl.name(); RemoteTpsCheckRequestParser parser = RemoteTpsCheckRequestParserRegistry.getParser(parseName); if (parser != null) { tpsCheckRequest = parser.parse(request, meta); } if (tpsCheckRequest == null) { tpsCheckRequest = new TpsCheckRequest(); } if (StringUtils.isBlank(tpsCheckRequest.getPointName())) { tpsCheckRequest.setPointName(pointName); } initTpsControlManager(); TpsCheckResponse check = tpsControlManager.check(tpsCheckRequest); if (!check.isSuccess()) { Response response; try { response = super.getDefaultResponseInstance(handlerClazz); response.setErrorInfo(NacosException.OVER_THRESHOLD, "Tps Flow restricted:" + check.getMessage()); return response; } catch (Exception e) { com.alibaba.nacos.plugin.control.Loggers.TPS.warn("Tps check fail , request: {},exception:{}", request.getClass().getSimpleName(), e); return null; } } } catch (Throwable throwable) { com.alibaba.nacos.plugin.control.Loggers.TPS.warn("Tps check exception , request: {},exception:{}", request.getClass().getSimpleName(), throwable); } } return null; }
// Verifies that when the TPS manager rejects a request, the filter returns a response carrying
// NacosException.OVER_THRESHOLD and the "Tps Flow restricted:" message from the check result.
@Test void testRejected() { HealthCheckRequest healthCheckRequest = new HealthCheckRequest(); RequestMeta requestMeta = new RequestMeta(); TpsCheckResponse tpsCheckResponse = new TpsCheckResponse(false, 5031, "rejected"); Mockito.when(tpsControlManager.check(any(TpsCheckRequest.class))).thenReturn(tpsCheckResponse); Response filterResponse = tpsControlRequestFilter.filter(healthCheckRequest, requestMeta, HealthCheckRequestHandler.class); assertNotNull(filterResponse); assertEquals(NacosException.OVER_THRESHOLD, filterResponse.getErrorCode()); assertEquals("Tps Flow restricted:" + tpsCheckResponse.getMessage(), filterResponse.getMessage()); }
/**
 * Lists the children of {@code directory} on the local filesystem. Throws a mapped
 * {@link BackgroundException} when the directory does not exist or the stream fails; entries
 * whose attributes cannot be read are logged and skipped rather than failing the whole listing.
 * The listener is notified incrementally with the accumulated chunk after each accepted entry.
 */
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { final AttributedList<ch.cyberduck.core.Path> paths = new AttributedList<>(); final java.nio.file.Path p = session.toPath(directory); if(!Files.exists(p)) { throw new LocalExceptionMappingService().map("Listing directory {0} failed", new NoSuchFileException(directory.getAbsolute()), directory); } try (DirectoryStream<java.nio.file.Path> stream = Files.newDirectoryStream(p)) { for(java.nio.file.Path n : stream) { if(null == n.getFileName()) { continue; } try { final PathAttributes attributes = feature.toAttributes(n); final EnumSet<Path.Type> type = EnumSet.noneOf(Path.Type.class); if(Files.isDirectory(n)) { type.add(Path.Type.directory); } else { type.add(Path.Type.file); } final Path file = new Path(directory, n.getFileName().toString(), type, attributes); if(this.post(n, file)) { paths.add(file); listener.chunk(directory, paths); } } catch(IOException e) { log.warn(String.format("Failure reading attributes for %s", n)); } } } catch(IOException ex) { throw new LocalExceptionMappingService().map("Listing directory {0} failed", ex, directory); } return paths; }
// Listing a random, non-existent directory must surface as NotfoundException (mapped from
// NoSuchFileException by the service) rather than returning an empty result.
@Test(expected = NotfoundException.class) public void testListNotfound() throws Exception { final LocalSession session = new LocalSession(new Host(new LocalProtocol(), new LocalProtocol().getDefaultHostname())); session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback()); session.login(new DisabledLoginCallback(), new DisabledCancelCallback()); final Path f = new Path(UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory)); final LocalListService service = new LocalListService(session); service.list(f, new DisabledListProgressListener()); }
/**
 * Determines whether the AWS SDK default credential provider chain should be used
 * instead of an explicitly configured access/secret key pair.
 *
 * @return {@code true} when the {@code KETTLE_USE_AWS_DEFAULT_CREDENTIALS} system property
 *         is set to a truthy value, or when no explicit access and secret keys are present;
 *         {@code false} otherwise
 */
public boolean getUseAwsDefaultCredentials() {
  final String systemFlag = Const.NVL( EnvUtil.getSystemProperty( Const.KETTLE_USE_AWS_DEFAULT_CREDENTIALS ), "N" );
  if ( ValueMetaBase.convertStringToBoolean( systemFlag ) ) {
    return true;
  }
  // Fall back to the default provider chain when no explicit key pair is configured.
  return StringUtil.isEmpty( awsAccessKey ) && StringUtil.isEmpty( awsSecretKey );
}
// A freshly constructed meta has neither access nor secret key, so the default AWS
// credential chain must be selected.
@Test public void getUseAwsDefaultCredentialsWithoutCredentials() { S3CsvInputMeta meta = new S3CsvInputMeta(); assertTrue( meta.getUseAwsDefaultCredentials() ); }
/**
 * Computes a join order over the join graph as a list of node indices (positions in the graph's
 * node list). Performs a priority-driven traversal starting from node 0: among reachable,
 * unvisited nodes it always expands the one with the smallest original index, which keeps the
 * order stable and close to the source order. When the graph is disconnected, traversal restarts
 * from the first not-yet-visited node so every node is emitted exactly once.
 */
public static List<Integer> getJoinOrder(JoinGraph graph) { ImmutableList.Builder<PlanNode> joinOrder = ImmutableList.builder(); Map<PlanNodeId, Integer> priorities = new HashMap<>(); for (int i = 0; i < graph.size(); i++) { priorities.put(graph.getNode(i).getId(), i); } PriorityQueue<PlanNode> nodesToVisit = new PriorityQueue<>( graph.size(), comparing(node -> priorities.get(node.getId()))); Set<PlanNode> visited = new HashSet<>(); nodesToVisit.add(graph.getNode(0)); while (!nodesToVisit.isEmpty()) { PlanNode node = nodesToVisit.poll(); if (!visited.contains(node)) { visited.add(node); joinOrder.add(node); for (JoinGraph.Edge edge : graph.getEdges(node)) { nodesToVisit.add(edge.getTargetNode()); } } if (nodesToVisit.isEmpty() && visited.size() < graph.size()) { // disconnected graph, find new starting point Optional<PlanNode> firstNotVisitedNode = graph.getNodes().stream() .filter(graphNode -> !visited.contains(graphNode)) .findFirst(); if (firstNotVisitedNode.isPresent()) { nodesToVisit.add(firstNotVisitedNode.get()); } } } checkState(visited.size() == graph.size()); return joinOrder.build().stream() .map(node -> priorities.get(node.getId())) .collect(toImmutableList()); }
// Cross joins must keep their original relative order: the computed join order for
// ((a join b) join c) is expected to be the identity permutation [0, 1, 2].
@Test public void testDoNotReorderCrossJoins() { PlanNode plan = joinNode( joinNode( values(variable("a")), values(variable("b"))), values(variable("c")), variable("c"), variable("b")); JoinGraph joinGraph = getOnlyElement(JoinGraph.buildFrom(plan)); assertEquals( getJoinOrder(joinGraph), ImmutableList.of(0, 1, 2)); }
/**
 * Builds a configured FEEL 1.1 ANTLR parser for {@code source}: installs a helper with the
 * additional functions defined in the built-in scope, replaces the default console error
 * listener with one that routes errors through {@code eventsManager}, pre-loads input
 * variables/types into the symbol table, and (when provided) attaches the type registry.
 * Note: the {@code profiles} parameter is currently unused in this method body.
 */
public static FEEL_1_1Parser parse(FEELEventListenersManager eventsManager, String source, Map<String, Type> inputVariableTypes, Map<String, Object> inputVariables, Collection<FEELFunction> additionalFunctions, List<FEELProfile> profiles, FEELTypeRegistry typeRegistry) { CharStream input = CharStreams.fromString(source); FEEL_1_1Lexer lexer = new FEEL_1_1Lexer( input ); CommonTokenStream tokens = new CommonTokenStream( lexer ); FEEL_1_1Parser parser = new FEEL_1_1Parser( tokens ); ParserHelper parserHelper = new ParserHelper(eventsManager); additionalFunctions.forEach(f -> parserHelper.getSymbolTable().getBuiltInScope().define(f.getSymbol())); parser.setHelper(parserHelper); parser.setErrorHandler( new FEELErrorHandler() ); parser.removeErrorListeners(); // removes the error listener that prints to the console parser.addErrorListener( new FEELParserErrorListener( eventsManager ) ); // pre-loads the parser with symbols defineVariables( inputVariableTypes, inputVariables, parser ); if (typeRegistry != null) { parserHelper.setTypeRegistry(typeRegistry); } return parser; }
// "+10.5" must parse as a SignedUnaryNode of NUMBER type with a POSITIVE sign wrapping a
// NumberNode whose text is the unsigned literal "10.5".
@Test void positiveFloatLiteral() { String inputExpression = "+10.5"; BaseNode number = parse( inputExpression ); assertThat( number).isInstanceOf(SignedUnaryNode.class); assertThat( number.getResultType()).isEqualTo(BuiltInType.NUMBER); assertLocation( inputExpression, number ); SignedUnaryNode sun = (SignedUnaryNode) number; assertThat( sun.getSign()).isEqualTo(SignedUnaryNode.Sign.POSITIVE); assertThat( sun.getExpression()).isInstanceOf(NumberNode.class); assertThat( sun.getExpression().getText()).isEqualTo("10.5"); }
/**
 * Reconstructs a {@link Host} bookmark from its serialized dictionary form. Resolves the
 * protocol from the "Protocol" (and optional "Provider") keys first and returns null — with a
 * warning — when the protocol key is missing or unknown. Every other key is optional; each is
 * copied onto the bookmark only when present. Several keys exist in both a legacy string form
 * and a newer dictionary form (private key, workdir, download folder); the dictionary form is
 * read after the legacy one and therefore wins when both are present. The "Maximum Connections"
 * key is a legacy fallback for "Transfer Connection".
 */
public Host deserialize(final T serialized) { final Deserializer<T> dict = factory.create(serialized); Object protocolObj = dict.stringForKey("Protocol"); if(protocolObj == null) { log.warn(String.format("Missing protocol key in %s", dict)); return null; } final Protocol protocol; final String identifier = protocolObj.toString(); final Object providerObj = dict.stringForKey("Provider"); if(null == providerObj) { protocol = protocols.forName(identifier); } else { protocol = protocols.forName(identifier, providerObj.toString()); } if(null != protocol) { final Host bookmark = new Host(protocol); final Object hostnameObj = dict.stringForKey("Hostname"); if(hostnameObj != null) { bookmark.setHostname(hostnameObj.toString()); } final Object uuidObj = dict.stringForKey("UUID"); if(uuidObj != null) { bookmark.setUuid(uuidObj.toString()); } final Object usernameObj = dict.stringForKey("Username"); if(usernameObj != null) { bookmark.getCredentials().setUsername(usernameObj.toString()); } final Object cdnCredentialsObj = dict.stringForKey("CDN Credentials"); if(cdnCredentialsObj != null) { bookmark.getCdnCredentials().setUsername(cdnCredentialsObj.toString()); } // Legacy final String keyObjDeprecated = dict.stringForKey("Private Key File"); if(keyObjDeprecated != null) { bookmark.getCredentials().setIdentity(LocalFactory.get(keyObjDeprecated)); } final T keyObj = dict.objectForKey("Private Key File Dictionary"); if(keyObj != null) { bookmark.getCredentials().setIdentity(new LocalDictionary<>(factory).deserialize(keyObj)); } final Object certObj = dict.stringForKey("Client Certificate"); if(certObj != null) { bookmark.getCredentials().setCertificate(certObj.toString()); } final Object portObj = dict.stringForKey("Port"); if(portObj != null) { bookmark.setPort(Integer.parseInt(portObj.toString())); } final Object pathObj = dict.stringForKey("Path"); if(pathObj != null) { bookmark.setDefaultPath(pathObj.toString()); } // Legacy final Object workdirObjDeprecated = 
dict.stringForKey("Workdir"); if(workdirObjDeprecated != null) { bookmark.setWorkdir(new Path(workdirObjDeprecated.toString(), EnumSet.of(Path.Type.directory))); } final T workdirObj = dict.objectForKey("Workdir Dictionary"); if(workdirObj != null) { bookmark.setWorkdir(new PathDictionary<>(factory).deserialize(workdirObj)); } final Object nicknameObj = dict.stringForKey("Nickname"); if(nicknameObj != null) { bookmark.setNickname(nicknameObj.toString()); } final Object encodingObj = dict.stringForKey("Encoding"); if(encodingObj != null) { bookmark.setEncoding(encodingObj.toString()); } final Object connectModeObj = dict.stringForKey("FTP Connect Mode"); if(connectModeObj != null) { bookmark.setFTPConnectMode(FTPConnectMode.valueOf(connectModeObj.toString())); } final Object transferObj = dict.stringForKey("Transfer Connection"); if(transferObj != null) { final Host.TransferType transfer = Host.TransferType.valueOf(transferObj.toString()); if(PreferencesFactory.get().getList("queue.transfer.type.enabled").contains(transfer.name())) { bookmark.setTransfer(transfer); } } else { // Legacy Object connObj = dict.stringForKey("Maximum Connections"); if(connObj != null) { if(1 == Integer.parseInt(connObj.toString())) { bookmark.setTransfer(Host.TransferType.browser); } } } // Legacy final Object downloadObjDeprecated = dict.stringForKey("Download Folder"); if(downloadObjDeprecated != null) { bookmark.setDownloadFolder(LocalFactory.get(downloadObjDeprecated.toString())); } final T downloadObj = dict.objectForKey("Download Folder Dictionary"); if(downloadObj != null) { bookmark.setDownloadFolder(new LocalDictionary<>(factory).deserialize(downloadObj)); } final T uploadObj = dict.objectForKey("Upload Folder Dictionary"); if(uploadObj != null) { bookmark.setUploadFolder(new LocalDictionary<>(factory).deserialize(uploadObj)); } final Object timezoneObj = dict.stringForKey("Timezone"); if(timezoneObj != null) { bookmark.setTimezone(TimeZone.getTimeZone(timezoneObj.toString())); 
} final Object commentObj = dict.stringForKey("Comment"); if(commentObj != null) { bookmark.setComment(commentObj.toString()); } final Object urlObj = dict.stringForKey("Web URL"); if(urlObj != null) { bookmark.setWebURL(urlObj.toString()); } final Object accessObj = dict.stringForKey("Access Timestamp"); if(accessObj != null) { bookmark.setTimestamp(new Date(Long.parseLong(accessObj.toString()))); } final Object volumeObj = dict.stringForKey("Volume"); if(volumeObj != null) { bookmark.setVolume(LocalFactory.get(volumeObj.toString())); } final Object readonlyObj = dict.stringForKey("Readonly"); if(readonlyObj != null) { bookmark.setReadonly(Boolean.valueOf(readonlyObj.toString())); } final Map customObj = dict.mapForKey("Custom"); if(customObj != null) { bookmark.setCustom(customObj); } final Object labelObj = dict.stringForKey("Labels"); if(labelObj != null) { bookmark.setLabels(new HashSet<>(dict.listForKey("Labels"))); } return bookmark; } else { log.warn(String.format("No protocol registered for identifier %s", protocolObj)); return null; } }
// Round-trips a serialized bookmark: an unknown provider must still resolve to the registered
// test protocol by identifier, and the "Labels" list must come back as a set.
@Test public void testDeserialize() { final Serializer<NSDictionary> dict = SerializerFactory.get(); dict.setStringForKey("test", "Protocol"); dict.setStringForKey("unknown provider", "Provider"); dict.setStringForKey("h", "Hostname"); dict.setStringListForKey(Arrays.asList("a", "b"), "Labels"); final Host host = new HostDictionary<>(new DeserializerFactory<>()).deserialize(dict.getSerialized()); assertEquals(new TestProtocol(), host.getProtocol()); assertEquals(new HashSet<>(Arrays.asList("a", "b")), host.getLabels()); }
/**
 * Populates the owner/group/other permission bits from a POSIX-style numeric mode
 * (e.g. {@code 0755}): bits 6-8 select the owner bits, 3-5 the group bits, 0-2 the other bits.
 *
 * @param n the numeric mode; only the lowest nine bits are read
 */
public void fromShort(short n) {
  final Bits[] bits = Bits.values();
  final int owner = (n >>> 6) & 7;
  final int group = (n >>> 3) & 7;
  final int other = n & 7;
  set(bits[owner], bits[group], bits[other]);
}
// Exercises the octal decoding for three common modes: 0777 (all/all/all),
// 0644 (rw/r/r) and 0755 (all/rx/rx).
@Test public void fromShort() { Mode mode = new Mode((short) 0777); assertEquals(Mode.Bits.ALL, mode.getOwnerBits()); assertEquals(Mode.Bits.ALL, mode.getGroupBits()); assertEquals(Mode.Bits.ALL, mode.getOtherBits()); mode = new Mode((short) 0644); assertEquals(Mode.Bits.READ_WRITE, mode.getOwnerBits()); assertEquals(Mode.Bits.READ, mode.getGroupBits()); assertEquals(Mode.Bits.READ, mode.getOtherBits()); mode = new Mode((short) 0755); assertEquals(Mode.Bits.ALL, mode.getOwnerBits()); assertEquals(Mode.Bits.READ_EXECUTE, mode.getGroupBits()); assertEquals(Mode.Bits.READ_EXECUTE, mode.getOtherBits()); }
/**
 * Populates the container launch environment's classpath for an MR job. The classpath variable
 * is APP_CLASSPATH when the job uses its own classloader, CLASSPATH otherwise. Order matters:
 * PWD first, then (unless user classes take precedence) the MR framework, then the job jar
 * entries and PWD/*, then non-jar distributed-cache files/archives, and finally — when user
 * classes DO take precedence — the framework is appended last so user classes win.
 */
@SuppressWarnings("deprecation") public static void setClasspath(Map<String, String> environment, Configuration conf) throws IOException { boolean userClassesTakesPrecedence = conf.getBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false); String classpathEnvVar = conf.getBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, false) ? Environment.APP_CLASSPATH.name() : Environment.CLASSPATH.name(); MRApps.addToEnvironment(environment, classpathEnvVar, crossPlatformifyMREnv(conf, Environment.PWD), conf); if (!userClassesTakesPrecedence) { MRApps.setMRFrameworkClasspath(environment, conf); } /* * We use "*" for the name of the JOB_JAR instead of MRJobConfig.JOB_JAR for * the case where the job jar is not necessarily named "job.jar". This can * happen, for example, when the job is leveraging a resource from the YARN * shared cache. */ MRApps.addToEnvironment( environment, classpathEnvVar, MRJobConfig.JOB_JAR + Path.SEPARATOR + "*", conf); MRApps.addToEnvironment( environment, classpathEnvVar, MRJobConfig.JOB_JAR + Path.SEPARATOR + "classes" + Path.SEPARATOR, conf); MRApps.addToEnvironment( environment, classpathEnvVar, MRJobConfig.JOB_JAR + Path.SEPARATOR + "lib" + Path.SEPARATOR + "*", conf); MRApps.addToEnvironment( environment, classpathEnvVar, crossPlatformifyMREnv(conf, Environment.PWD) + Path.SEPARATOR + "*", conf); // a * in the classpath will only find a .jar, so we need to filter out // all .jars and add everything else addToClasspathIfNotJar(JobContextImpl.getFileClassPaths(conf), JobContextImpl.getCacheFiles(conf), conf, environment, classpathEnvVar); addToClasspathIfNotJar(JobContextImpl.getArchiveClassPaths(conf), JobContextImpl.getCacheArchives(conf), conf, environment, classpathEnvVar); if (userClassesTakesPrecedence) { MRApps.setMRFrameworkClasspath(environment, conf); } }
// Covers the framework-path flow: setting a framework path without a matching classpath entry
// must fail; with a classpath it must appear before the standard entries by default and after
// them when user classpath precedence is enabled.
@Test @Timeout(3000000) public void testSetClasspathWithFramework() throws IOException { final String FRAMEWORK_NAME = "some-framework-name"; final String FRAMEWORK_PATH = "some-framework-path#" + FRAMEWORK_NAME; Configuration conf = new Configuration(); conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true); conf.set(MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH, FRAMEWORK_PATH); Map<String, String> env = new HashMap<String, String>(); try { MRApps.setClasspath(env, conf); fail("Failed to catch framework path set without classpath change"); } catch (IllegalArgumentException e) { assertTrue(e.getMessage().contains("Could not locate MapReduce framework name '" + FRAMEWORK_NAME + "'"), "Unexpected IllegalArgumentException"); } env.clear(); final String FRAMEWORK_CLASSPATH = FRAMEWORK_NAME + "/*.jar"; conf.set(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH, FRAMEWORK_CLASSPATH); MRApps.setClasspath(env, conf); final String stdClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, Arrays.asList("job.jar/*", "job.jar/classes/", "job.jar/lib/*", ApplicationConstants.Environment.PWD.$$() + "/*")); String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, Arrays.asList(ApplicationConstants.Environment.PWD.$$(), FRAMEWORK_CLASSPATH, stdClasspath)); assertEquals(expectedClasspath, env.get("CLASSPATH"), "Incorrect classpath with framework and no user precedence"); env.clear(); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); MRApps.setClasspath(env, conf); expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, Arrays.asList(ApplicationConstants.Environment.PWD.$$(), stdClasspath, FRAMEWORK_CLASSPATH)); assertEquals(expectedClasspath, env.get("CLASSPATH"), "Incorrect classpath with framework and user precedence"); }
/**
 * UDF variant for integer-valued maps: converts the map into an array of {K, V} structs,
 * delegating to the shared {@code entries} helper with the int struct schema.
 */
@Udf(schema = "ARRAY<STRUCT<K STRING, V INT>>") public List<Struct> entriesInt( @UdfParameter(description = "The map to create entries from") final Map<String, Integer> map, @UdfParameter(description = "If true then the resulting entries are sorted by key") final boolean sorted ) { return entries(map, INT_STRUCT_SCHEMA, sorted); }
// Delegates to the shared sorted-entries check with an identity-valued int map.
@Test public void shouldComputeIntEntriesSorted() { final Map<String, Integer> map = createMap(i -> i); shouldComputeEntriesSorted(map, () -> entriesUdf.entriesInt(map, true)); }
/**
 * Discovers Hazelcast member candidates on GCP: one {@link DiscoveryNode} per discovered
 * address per port in the configured port range. Any failure is logged and converted into an
 * empty result so discovery never aborts cluster startup.
 */
@Override public Iterable<DiscoveryNode> discoverNodes() { try { List<GcpAddress> gcpAddresses = gcpClient.getAddresses(); logGcpAddresses(gcpAddresses); List<DiscoveryNode> result = new ArrayList<>(); for (GcpAddress gcpAddress : gcpAddresses) { for (int port = portRange.getFromPort(); port <= portRange.getToPort(); port++) { result.add(createDiscoveryNode(gcpAddress, port)); } } return result; } catch (Exception e) { LOGGER.warning("Cannot discover nodes, returning empty list", e); return Collections.emptyList(); } }
// No GCP addresses means no discovered nodes (empty iterable, not null and not an error).
@Test public void discoverNodesEmpty() { // given given(gcpClient.getAddresses()).willReturn(new ArrayList<>()); // when Iterable<DiscoveryNode> nodes = gcpDiscoveryStrategy.discoverNodes(); // then assertFalse(nodes.iterator().hasNext()); }
/**
 * Returns the document icon for a file extension at the given size, serving from the cache
 * when possible; on a miss it asks the workspace for the file-type icon, converts it to the
 * requested size and caches the result under the extension/size key.
 */
@Override public NSImage documentIcon(final String extension, final Integer size) { NSImage image = this.load(extension, size); if(null == image) { return this.cache(extension, this.convert(extension, workspace.iconForFileType(extension), size), size); } return image; }
// A 64px "txt" document icon must be valid, non-template, exactly 64x64, and registered
// under the named-image key "txt (64px)" for reuse.
@Test public void testDocumentIcon() { final NSImage icon = new NSImageIconCache().documentIcon("txt", 64); assertNotNull(icon); assertTrue(icon.isValid()); assertFalse(icon.isTemplate()); assertEquals(64, icon.size().width.intValue()); assertEquals(64, icon.size().height.intValue()); assertNotNull(NSImage.imageNamed("txt (64px)")); }
/**
 * Stops tracking the given execution: removes it from the pending-deployment set and from the
 * per-host index, pruning the host's entry entirely once its execution set becomes empty.
 * Safe to call for executions that were never tracked.
 */
@Override
public void stopTrackingDeploymentOf(ExecutionAttemptID executionAttemptId) {
    pendingDeployments.remove(executionAttemptId);
    final ResourceID host = hostByExecution.remove(executionAttemptId);
    if (host == null) {
        // Unknown execution; nothing else to clean up.
        return;
    }
    // Drop the execution from its host bucket and prune the bucket once empty.
    executionsByHost.computeIfPresent(
            host,
            (resourceId, executions) -> {
                executions.remove(executionAttemptId);
                return executions.isEmpty() ? null : executions;
            });
}
// Untracking an execution that was never tracked must be a silent no-op.
@Test void testStopTrackingUnknownExecutionDoesNotThrowException() { final DefaultExecutionDeploymentTracker tracker = new DefaultExecutionDeploymentTracker(); final ExecutionAttemptID attemptId2 = createExecutionAttemptId(); tracker.stopTrackingDeploymentOf(attemptId2); }
/**
 * Adds the given string to every underlying filter.
 *
 * @param str the value to add
 * @return {@code true} if at least one underlying filter reported the value as newly added
 */
@Override
public boolean add(String str) {
    boolean added = false;
    for (BloomFilter filter : filters) {
        // Every filter must observe the value, so do not short-circuit once added is true.
        added = filter.add(str) || added;
    }
    return added;
}
// Manual inspection test (disabled: prints to stdout instead of asserting) exercising
// add/remove/contains over indices 0-31 of an IntMap.
@Test @Disabled public void testIntMap(){ IntMap intMap = new IntMap(); for (int i = 0 ; i < 32; i++) { intMap.add(i); } intMap.remove(30); for (int i = 0; i < 32; i++) { System.out.println(i + "是否存在-->" + intMap.contains(i)); } }
/**
 * Looks up a property value and fails fast when it is absent.
 *
 * @param key the property key
 * @return the non-null value associated with {@code key}
 * @throws IllegalArgumentException if no value is associated with {@code key}
 */
public String nonNullValue(String key) {
  final String value = value(key);
  if (value != null) {
    return value;
  }
  throw new IllegalArgumentException("Missing property: " + key);
}
// Data-driven over blank prefixes/suffixes: the raw property is stored with surrounding
// blanks but nonNullValue must yield the trimmed "bar".
@Test @UseDataProvider("beforeAndAfterBlanks") public void nonNullValue(String blankBefore, String blankAfter) { Properties p = new Properties(); p.setProperty("foo", blankBefore + "bar" + blankAfter); Props props = new Props(p); assertThat(props.nonNullValue("foo")).isEqualTo("bar"); }
/**
 * Convenience overload of spectral clustering with default hyperparameters:
 * 100 maximum iterations and a convergence tolerance of 1E-4.
 *
 * @param W the affinity (similarity) matrix
 * @param k the number of clusters
 * @return the fitted clustering model
 */
public static SpectralClustering fit(Matrix W, int k) { return fit(W, k, 100, 1E-4); }
// Clusters the USPS digits with a fixed RNG seed and pins the (adjusted) Rand index against
// the ground-truth labels to four decimals; the mutual-information prints are informational.
@Test public void testUSPS() throws Exception { System.out.println("USPS"); MathEx.setSeed(19650218); // to get repeatable results. double[][] x = USPS.x; int[] y = USPS.y; SpectralClustering model = SpectralClustering.fit(x, 10, 8.0); System.out.println(model); double r = RandIndex.of(y, model.y); double r2 = AdjustedRandIndex.of(y, model.y); System.out.format("Training rand index = %.2f%%\tadjusted rand index = %.2f%%%n", 100.0 * r, 100.0 * r2); assertEquals(0.9128, r, 1E-4); assertEquals(0.5371, r2, 1E-4); System.out.format("MI = %.2f%n", MutualInformation.of(y, model.y)); System.out.format("NMI.joint = %.2f%%%n", 100 * NormalizedMutualInformation.joint(y, model.y)); System.out.format("NMI.max = %.2f%%%n", 100 * NormalizedMutualInformation.max(y, model.y)); System.out.format("NMI.min = %.2f%%%n", 100 * NormalizedMutualInformation.min(y, model.y)); System.out.format("NMI.sum = %.2f%%%n", 100 * NormalizedMutualInformation.sum(y, model.y)); System.out.format("NMI.sqrt = %.2f%%%n", 100 * NormalizedMutualInformation.sqrt(y, model.y)); }
/**
 * Creates a token for the authenticated principal by delegating to the name-based overload.
 *
 * @param authentication the authentication whose name identifies the subject
 * @return the generated token
 * @throws AccessException if token creation is not permitted
 */
@Override public String createToken(Authentication authentication) throws AccessException { return createToken(authentication.getName()); }
// The Authentication-based overload must produce the same token as the name-based path.
@Test void testCreateToken1() throws AccessException { assertEquals("token", cachedJwtTokenManager.createToken(authentication)); }
/**
 * Strips assertion-framework frames from the throwable's stack trace in place, starting with
 * an empty identity set to guard against revisiting the same throwable via cause/suppressed
 * cycles.
 */
static void cleanStackTrace(Throwable throwable) { new StackTraceCleaner(throwable).clean(Sets.<Throwable>newIdentityHashSet()); }
// Every frame at or above the StandardSubjectBuilder frame must be removed, leaving only the
// caller's own frame (com.google.example.SomeClass).
@Test public void allFramesAboveStandardSubjectBuilderCleaned() { Throwable throwable = createThrowableWithStackTrace( "com.google.random.Package", "com.google.common.base.collection.ImmutableMap", "com.google.common.truth.StandardSubjectBuilder", "com.google.example.SomeClass"); StackTraceCleaner.cleanStackTrace(throwable); assertThat(throwable.getStackTrace()) .isEqualTo( new StackTraceElement[] { createStackTraceElement("com.google.example.SomeClass"), }); }
/**
 * Server-side copy of an S3 object. When the transfer status carries no explicit storage
 * class, encryption or ACL, the source object's settings are read and carried over (ACL only
 * when the target bucket permits ACL edits; access-denied/interoperability failures there are
 * logged and ignored). Metadata is copied from the source, and the returned target path carries
 * the source attributes with the new version id from the copy operation.
 */
@Override public Path copy(final Path source, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException { if(null == status.getStorageClass()) { // Keep same storage class status.setStorageClass(new S3StorageClassFeature(session, acl).getClass(source)); } if(Encryption.Algorithm.NONE == status.getEncryption()) { // Keep encryption setting status.setEncryption(new S3EncryptionFeature(session, acl).getEncryption(source)); } if(Acl.EMPTY == status.getAcl()) { // Apply non-standard ACL try { // Verify target bucket allows ACLs if(acl.getPermission(containerService.getContainer(target)).isEditable()) { status.setAcl(acl.getPermission(source)); } } catch(AccessDeniedException | InteroperabilityException e) { log.warn(String.format("Ignore failure %s", e)); } } final S3Object destination = new S3WriteFeature(session, acl).getDetails(target, status); destination.setAcl(acl.toAcl(status.getAcl())); final Path bucket = containerService.getContainer(target); destination.setBucketName(bucket.isRoot() ? StringUtils.EMPTY : bucket.getName()); destination.replaceAllMetadata(new HashMap<>(new S3MetadataFeature(session, acl).getMetadata(source))); final String versionId = this.copy(source, destination, status, listener); return target.withAttributes(new PathAttributes(source.attributes()).withVersionId(versionId)); }
// Integration test: copying an object must preserve its user metadata ("cyberduck"="m"),
// leave the source in place, and both source and copy must be deletable afterwards.
@Test public void testCopyFile() throws Exception { final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); final TransferStatus status = new TransferStatus(); status.setMetadata(Collections.singletonMap("cyberduck", "m")); final Path test = new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch(new Path(container, new AsciiRandomStringService().random(), EnumSet.of(Path.Type.file)), status); final Path copy = new S3CopyFeature(session, new S3AccessControlListFeature(session)).copy(test, new Path(container, new AsciiRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus(), new DisabledConnectionCallback(), new DisabledStreamListener()); assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(test)); assertNull(copy.attributes().getVersionId()); assertEquals("m", new S3MetadataFeature(session, new S3AccessControlListFeature(session)).getMetadata(copy).get("cyberduck")); new S3DefaultDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(copy)); new S3DefaultDeleteFeature(session).delete(Collections.singletonList(copy), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Interprets a multi-line statement by splitting it into non-empty lines and delegating to the
 * line-array overload.
 */
@Override public InterpreterResult interpret(String st, InterpreterContext context) { String[] lines = splitAndRemoveEmpty(st, "\n"); return interpret(lines, context); }
// The copyToLocal shell command must report the copied destination path and the local file
// must contain the original bytes (verified by fileReadTest).
@Test void copyToLocalTest() throws IOException { FileSystemTestUtils.createByteFile(fs, "/testFile", WritePType.MUST_CACHE, 10, 10); InterpreterResult output = alluxioInterpreter.interpret("copyToLocal /testFile " + mLocalAlluxioCluster.getAlluxioHome() + "/testFile", null); assertEquals( "Copied /testFile to file://" + mLocalAlluxioCluster.getAlluxioHome() + "/testFile\n\n", output.message().get(0).getData()); fileReadTest("/testFile", 10); }
/**
 * Verification mode matching requests hit at least {@code count} times.
 *
 * @param count the minimum number of hits; must be positive
 * @return the at-least verification mode
 * @throws IllegalArgumentException if {@code count} is not greater than zero
 */
public static VerificationMode atLeast(final int count) { checkArgument(count > 0, "Times count must be greater than zero"); return new AtLeastVerification(count); }
// Two real hits on /foo must satisfy an atLeast(1) verification without throwing.
@Test public void should_verify_expected_request_for_at_least() throws Exception { final HttpServer server = httpServer(port(), hit); server.get(by(uri("/foo"))).response("bar"); running(server, () -> { assertThat(helper.get(remoteUrl("/foo")), is("bar")); assertThat(helper.get(remoteUrl("/foo")), is("bar")); }); hit.verify(by(uri("/foo")), atLeast(1)); }
/**
 * Periodic saturation sampling pass: groups persistent queries by Kafka Streams application id,
 * measures each runtime's saturation, and reports the maximum across runtimes (0.0 when there
 * are no queries). Afterwards, per-app stats for application ids that no longer have any query
 * are cleaned up from the reporter and dropped from the local map. Runtime exceptions are
 * logged and rethrown so the scheduler can observe the failure.
 */
@Override public void run() { final Instant now = time.get(); try { final Collection<PersistentQueryMetadata> queries = engine.getPersistentQueries(); final Optional<Double> saturation = queries.stream() .collect(Collectors.groupingBy(PersistentQueryMetadata::getQueryApplicationId)) .entrySet() .stream() .map(e -> measure(now, e.getKey(), e.getValue())) .max(PersistentQuerySaturationMetrics::compareSaturation) .orElse(Optional.of(0.0)); saturation.ifPresent(s -> report(now, s)); final Set<String> appIds = queries.stream() .map(PersistentQueryMetadata::getQueryApplicationId) .collect(Collectors.toSet()); for (final String appId : Sets.difference(new HashSet<>(perKafkaStreamsStats.keySet()), appIds)) { perKafkaStreamsStats.get(appId).cleanup(reporter); perKafkaStreamsStats.remove(appId); } } catch (final RuntimeException e) { LOGGER.error("Error collecting saturation", e); throw e; } }
// After a runtime disappears between two collection passes, its per-query saturation metric
// must be removed from the reporter on the next run.
@Test public void shouldCleanupQueryMetricWhenRuntimeRemoved() { // Given: final Instant start = Instant.now(); when(clock.get()).thenReturn(start); givenMetrics(kafkaStreams1) .withThreadStartTime("t1", start) .withBlockedTime("t1", Duration.ofMinutes(0)); collector.run(); when(engine.getPersistentQueries()).thenReturn(ImmutableList.of(query2)); // When: collector.run(); // Then: verify(reporter).cleanup("node-query-saturation", ImmutableMap.of("query-id", "hootie")); }
/**
 * Writes a long in "small long as int" encoding, growing the buffer first to the worst-case
 * size (9 bytes: marker byte plus 8 payload bytes) before delegating to the unchecked writer.
 *
 * @param value the value to encode
 * @return the number of bytes written
 */
public int writeSliInt64(long value) { ensure(writerIndex + 9); return _unsafeWriteSliInt64(value); }
// Exercises the SLI encoding size boundaries across many initial buffer capacities:
// values fitting the compact form take 4 bytes, everything beyond takes the 9-byte form.
@Test public void testWriteSliInt64() { MemoryBuffer buf = MemoryUtils.buffer(8); checkSliInt64(buf, -1, 4); for (int i = 0; i < 10; i++) { for (int j = 0; j < i; j++) { checkSliInt64(buf(i), -1, 4); checkSliInt64(buf(i), 1, 4); checkSliInt64(buf(i), 1L << 6, 4); checkSliInt64(buf(i), 1L << 7, 4); checkSliInt64(buf(i), -(2 << 5), 4); checkSliInt64(buf(i), -(2 << 6), 4); checkSliInt64(buf(i), 1L << 28, 4); checkSliInt64(buf(i), Integer.MAX_VALUE / 2, 4); checkSliInt64(buf(i), Integer.MIN_VALUE / 2, 4); checkSliInt64(buf(i), -1L << 30, 4); checkSliInt64(buf(i), 1L << 30, 9); checkSliInt64(buf(i), Integer.MAX_VALUE, 9); checkSliInt64(buf(i), Integer.MIN_VALUE, 9); checkSliInt64(buf(i), -1L << 31, 9); checkSliInt64(buf(i), 1L << 31, 9); checkSliInt64(buf(i), -1L << 32, 9); checkSliInt64(buf(i), 1L << 32, 9); checkSliInt64(buf(i), Long.MAX_VALUE, 9); checkSliInt64(buf(i), Long.MIN_VALUE, 9); } } }
/**
 * Starts a media insertion request: disposes any in-flight local proxy, records the request id
 * and callback as the current request, and launches the media-picking activity for the given
 * mime types. Only one request is tracked at a time; a new call supersedes the previous one.
 */
@Override public void startMediaRequest( @NonNull String[] mimeTypes, int requestId, @NonNull InsertionRequestCallback callback) { mCurrentRunningLocalProxy.dispose(); mCurrentRequest = requestId; mCurrentCallback = callback; final Intent pickingIntent = getMediaInsertRequestIntent(mimeTypes, requestId); mContext.startActivity(pickingIntent); }
// A broadcast whose request id (2) does not match the in-flight request (123) must never
// reach the callback.
@Test
public void testIncorrectRequestBroadcast() {
  mUnderTest.startMediaRequest(new String[] {"media/png"}, 123, mCallback);
  mShadowApplication.getRegisteredReceivers().stream()
      .filter(
          wrapper ->
              wrapper.broadcastReceiver
                  instanceof RemoteInsertionImpl.MediaInsertionAvailableReceiver)
      .map(ShadowApplication.Wrapper::getBroadcastReceiver)
      .findFirst()
      .get()
      .onReceive(
          ApplicationProvider.getApplicationContext(),
          createReceiverIntent(null, new String[] {"media/png"}, 2));
  // verifyZeroInteractions is deprecated and removed in Mockito 4; verifyNoInteractions is
  // the drop-in replacement with identical semantics for a single mock.
  Mockito.verifyNoInteractions(mCallback);
}
/**
 * Splits the given text into its individual lines, without line terminators.
 *
 * @param text the text to split; must not be null
 * @return the lines of {@code text}; empty for an empty string, and a trailing
 *         newline does not produce a trailing empty element
 */
public static List<String> toStringLines(String text) {
    final List<String> lines = new ArrayList<>();
    final BufferedReader reader = new BufferedReader(new StringReader(text));
    try {
        String line;
        while ((line = reader.readLine()) != null) {
            lines.add(line);
        }
    } catch (IOException e) {
        // Cannot actually happen when reading from an in-memory string.
        throw new UncheckedIOException(e);
    }
    return lines;
}
// A trailing newline must not yield a trailing empty line.
@Test void testToStringLines() { List<String> expected = Arrays.asList("foo", "bar"); assertEquals(expected, StringUtils.toStringLines("foo\nbar\n")); }
/**
 * Accumulates a value into the wrapped aggregation, skipping non-null duplicates: a non-null
 * value already seen (set.add returns false) is dropped before reaching the delegate.
 * NOTE(review): null values bypass the seen-set entirely and are forwarded to the delegate on
 * every call — presumably the delegate ignores nulls per SQL aggregate semantics; confirm.
 */
@Override public void accumulate(Object value) { if (value != null && !values.add(value)) { return; } delegate.accumulate(value); }
// The duplicate "1" must be filtered out: the delegate sees each distinct value exactly once.
@Test public void test_accumulate() { SqlAggregation aggregation = new DistinctSqlAggregation(delegate); aggregation.accumulate("1"); aggregation.accumulate("2"); aggregation.accumulate("1"); verify(delegate).accumulate("1"); verify(delegate).accumulate("2"); verifyNoMoreInteractions(delegate); }
public static Object value(String strValue, Field field) { requireNonNull(field); // if field is not primitive type Type fieldType = field.getGenericType(); if (fieldType instanceof ParameterizedType) { Class<?> clazz = (Class<?>) ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[0]; if (field.getType().equals(List.class)) { // convert to list return stringToList(strValue, clazz); } else if (field.getType().equals(Set.class)) { // covert to set return stringToSet(strValue, clazz); } else if (field.getType().equals(Map.class)) { Class<?> valueClass = (Class<?>) ((ParameterizedType) field.getGenericType()).getActualTypeArguments()[1]; return stringToMap(strValue, clazz, valueClass); } else if (field.getType().equals(Optional.class)) { Type typeClazz = ((ParameterizedType) fieldType).getActualTypeArguments()[0]; if (typeClazz instanceof ParameterizedType) { throw new IllegalArgumentException(format("unsupported non-primitive Optional<%s> for %s", typeClazz.getClass(), field.getName())); } return Optional.ofNullable(convert(strValue, (Class) typeClazz)); } else { throw new IllegalArgumentException( format("unsupported field-type %s for %s", field.getType(), field.getName())); } } else { return convert(strValue, field.getType()); } }
// Checks FieldParser.value() with a null string (expected to yield null / Optional.empty())
// and with the literal string "null" for List, Set, Map and Optional field types.
@Test public void testNullStrValue() throws Exception { class TestMap { public List<String> list; public Set<String> set; public Map<String, String> map; public Optional<String> optional; } Field listField = TestMap.class.getField("list"); Object listValue = FieldParser.value(null, listField); assertNull(listValue); listValue = FieldParser.value("null", listField); assertTrue(listValue instanceof List); assertEquals(((List) listValue).size(), 1); assertEquals(((List) listValue).get(0), "null"); Field setField = TestMap.class.getField("set"); Object setValue = FieldParser.value(null, setField); assertNull(setValue); setValue = FieldParser.value("null", setField); assertTrue(setValue instanceof Set); assertEquals(((Set) setValue).size(), 1); assertEquals(((Set) setValue).iterator().next(), "null"); Field mapField = TestMap.class.getField("map"); Object mapValue = FieldParser.value(null, mapField); assertNull(mapValue); try { FieldParser.value("null", mapField); } catch (IllegalArgumentException iae) { assertTrue(iae.getMessage().contains("null map-value is not in correct format key1=value,key2=value2")); } Field optionalField = TestMap.class.getField("optional"); Object optionalValue = FieldParser.value(null, optionalField); assertEquals(optionalValue, Optional.empty()); }
// Splittable-DoFn element processor for one Kafka source descriptor: reports backlog/size
// metrics, honors the stop-reading predicate (claiming the final offset so checkDone passes),
// seeks a dedicated consumer to the restriction start, then polls in a loop — claiming each
// record's offset, deserializing key/value, computing the output timestamp (via the
// TimestampPolicy when configured, otherwise extractOutputTimestampFn), and emitting
// descriptor/record pairs. Empty polls resume (or stop if the partition no longer exists);
// deserialization failures are routed to the bad-record output instead of failing the bundle.
// NOTE(review): logic is order-sensitive (watermark updates vs. offset claims); kept verbatim.
@ProcessElement public ProcessContinuation processElement( @Element KafkaSourceDescriptor kafkaSourceDescriptor, RestrictionTracker<OffsetRange, Long> tracker, WatermarkEstimator<Instant> watermarkEstimator, MultiOutputReceiver receiver) throws Exception { final LoadingCache<TopicPartition, AverageRecordSize> avgRecordSize = Preconditions.checkStateNotNull(this.avgRecordSize); final Deserializer<K> keyDeserializerInstance = Preconditions.checkStateNotNull(this.keyDeserializerInstance); final Deserializer<V> valueDeserializerInstance = Preconditions.checkStateNotNull(this.valueDeserializerInstance); final Distribution rawSizes = Metrics.distribution( METRIC_NAMESPACE, RAW_SIZE_METRIC_PREFIX + kafkaSourceDescriptor.getTopicPartition().toString()); for (Map.Entry<String, Long> backlogSplit : perPartitionBacklogMetrics.entrySet()) { Gauge backlog = Metrics.gauge( METRIC_NAMESPACE, RAW_SIZE_METRIC_PREFIX + "backlogBytes_" + backlogSplit.getKey()); backlog.set(backlogSplit.getValue()); } // Stop processing current TopicPartition when it's time to stop. if (checkStopReadingFn != null && checkStopReadingFn.apply(kafkaSourceDescriptor.getTopicPartition())) { // Attempt to claim the last element in the restriction, such that the restriction tracker // doesn't throw an exception when checkDone is called tracker.tryClaim(tracker.currentRestriction().getTo() - 1); return ProcessContinuation.stop(); } Map<String, Object> updatedConsumerConfig = overrideBootstrapServersConfig(consumerConfig, kafkaSourceDescriptor); // If there is a timestampPolicyFactory, create the TimestampPolicy for current // TopicPartition. 
TimestampPolicy<K, V> timestampPolicy = null; if (timestampPolicyFactory != null) { timestampPolicy = timestampPolicyFactory.createTimestampPolicy( kafkaSourceDescriptor.getTopicPartition(), Optional.ofNullable(watermarkEstimator.currentWatermark())); } LOG.info( "Creating Kafka consumer for process continuation for {}", kafkaSourceDescriptor.getTopicPartition()); try (Consumer<byte[], byte[]> consumer = consumerFactoryFn.apply(updatedConsumerConfig)) { ConsumerSpEL.evaluateAssign( consumer, ImmutableList.of(kafkaSourceDescriptor.getTopicPartition())); long startOffset = tracker.currentRestriction().getFrom(); long expectedOffset = startOffset; consumer.seek(kafkaSourceDescriptor.getTopicPartition(), startOffset); ConsumerRecords<byte[], byte[]> rawRecords = ConsumerRecords.empty(); while (true) { rawRecords = poll(consumer, kafkaSourceDescriptor.getTopicPartition()); // When there are no records available for the current TopicPartition, self-checkpoint // and move to process the next element. if (rawRecords.isEmpty()) { if (!topicPartitionExists( kafkaSourceDescriptor.getTopicPartition(), consumer.listTopics())) { return ProcessContinuation.stop(); } if (timestampPolicy != null) { updateWatermarkManually(timestampPolicy, watermarkEstimator, tracker); } return ProcessContinuation.resume(); } for (ConsumerRecord<byte[], byte[]> rawRecord : rawRecords) { if (!tracker.tryClaim(rawRecord.offset())) { return ProcessContinuation.stop(); } try { KafkaRecord<K, V> kafkaRecord = new KafkaRecord<>( rawRecord.topic(), rawRecord.partition(), rawRecord.offset(), ConsumerSpEL.getRecordTimestamp(rawRecord), ConsumerSpEL.getRecordTimestampType(rawRecord), ConsumerSpEL.hasHeaders() ? rawRecord.headers() : null, ConsumerSpEL.deserializeKey(keyDeserializerInstance, rawRecord), ConsumerSpEL.deserializeValue(valueDeserializerInstance, rawRecord)); int recordSize = (rawRecord.key() == null ? 0 : rawRecord.key().length) + (rawRecord.value() == null ? 
0 : rawRecord.value().length); avgRecordSize .getUnchecked(kafkaSourceDescriptor.getTopicPartition()) .update(recordSize, rawRecord.offset() - expectedOffset); rawSizes.update(recordSize); expectedOffset = rawRecord.offset() + 1; Instant outputTimestamp; // The outputTimestamp and watermark will be computed by timestampPolicy, where the // WatermarkEstimator should be a manual one. if (timestampPolicy != null) { TimestampPolicyContext context = updateWatermarkManually(timestampPolicy, watermarkEstimator, tracker); outputTimestamp = timestampPolicy.getTimestampForRecord(context, kafkaRecord); } else { Preconditions.checkStateNotNull(this.extractOutputTimestampFn); outputTimestamp = extractOutputTimestampFn.apply(kafkaRecord); } receiver .get(recordTag) .outputWithTimestamp(KV.of(kafkaSourceDescriptor, kafkaRecord), outputTimestamp); } catch (SerializationException e) { // This exception should only occur during the key and value deserialization when // creating the Kafka Record badRecordRouter.route( receiver, rawRecord, null, e, "Failure deserializing Key or Value of Kakfa record reading from Kafka"); if (timestampPolicy != null) { updateWatermarkManually(timestampPolicy, watermarkEstimator, tracker); } } } } } }
// An empty poll (consumer configured to return no records) should self-checkpoint with
// resume() and emit nothing.
@Test public void testProcessElementWithEmptyPoll() throws Exception { MockMultiOutputReceiver receiver = new MockMultiOutputReceiver(); consumer.setNumOfRecordsPerPoll(-1); OffsetRangeTracker tracker = new OffsetRangeTracker(new OffsetRange(0L, Long.MAX_VALUE)); ProcessContinuation result = dofnInstance.processElement( KafkaSourceDescriptor.of(topicPartition, null, null, null, null, null), tracker, null, receiver); assertEquals(ProcessContinuation.resume(), result); assertTrue(receiver.getGoodRecords().isEmpty()); }
// Reads the "return_to" request parameter and sanitizes it as a redirect URL; absent or
// rejected values yield Optional.empty().
@Override public Optional<String> getReturnTo(HttpRequest request) { return getParameter(request, RETURN_TO_PARAMETER) .flatMap(OAuth2AuthenticationParametersImpl::sanitizeRedirectUrl); }
// With no cookies set, getReturnTo must be empty.
// NOTE(review): the test name says "cookie" but getReturnTo reads a request parameter;
// the cookie stubbing may be vestigial — confirm against the implementation under test.
@Test public void get_return_to_is_empty_when_no_cookie() { when(request.getCookies()).thenReturn(new Cookie[]{}); Optional<String> redirection = underTest.getReturnTo(request); assertThat(redirection).isEmpty(); }
// Validates a stream-management 'h' value from the client (must be in [0, 2^32-1]) against the
// last value we processed and the newest unacknowledged stanza, delegating to the static
// three-argument overload for the actual comparison (which handles 32-bit counter rollover).
private synchronized boolean validateClientAcknowledgement(long h) { if (h < 0) { throw new IllegalArgumentException("Argument 'h' cannot be negative, but was: " + h); } if (h > MASK) { throw new IllegalArgumentException("Argument 'h' cannot be larger than 2^32 -1, but was: " + h); } final long oldH = clientProcessedStanzas.get(); final Long lastUnackedX = unacknowledgedServerStanzas.isEmpty() ? null : unacknowledgedServerStanzas.getLast().x; return validateClientAcknowledgement(h, oldH, lastUnackedX); }
// Rollover edge case: h == oldH == 2^32-1 with nothing unacknowledged must validate.
@Test public void testValidateClientAcknowledgement_rollover_edgecase1() throws Exception { // Setup test fixture. final long MAX = new BigInteger( "2" ).pow( 32 ).longValue() - 1; final long h = MAX; final long oldH = MAX; final Long lastUnackedX = null; // Execute system under test. final boolean result = StreamManager.validateClientAcknowledgement(h, oldH, lastUnackedX); // Verify results. assertTrue(result); }
// Returns the message's candidate IP-address field names: internal (gl2_*-prefixed) fields are
// always excluded; when enforceGraylogSchema is set, only fields present in ipAddressFields
// are kept, otherwise every non-internal field qualifies.
@VisibleForTesting List<String> getIpAddressFields(Message message) { return message.getFieldNames() .stream() .filter(e -> (!enforceGraylogSchema || ipAddressFields.containsKey(e)) && !e.startsWith(Message.INTERNAL_FIELD_PREFIX)) .collect(Collectors.toList()); }
// With schema enforcement off, every field except the internal gl2_* one is returned (5 of 6).
@Test public void testGetIpAddressFieldsEnforceGraylogSchemaFalse() { GeoIpResolverConfig conf = config.toBuilder().enforceGraylogSchema(false).build(); final GeoIpResolverEngine engine = new GeoIpResolverEngine(geoIpVendorResolverService, conf, s3GeoIpFileService, metricRegistry); Map<String, Object> fields = new HashMap<>(); fields.put("_id", java.util.UUID.randomUUID().toString()); fields.put("source_ip", "127.0.0.1"); fields.put("src_ip", "127.0.0.1"); fields.put("destination_ip", "127.0.0.1"); fields.put("dest_ip", "127.0.0.1"); fields.put("gl2_test", "127.0.0.1"); Message message = messageFactory.createMessage(fields); List<String> ipFields = engine.getIpAddressFields(message); //without enforcing the Graylog Schema, all but the gl2_* fields should be returned. Assertions.assertEquals(5, ipFields.size()); }
// Escapes text for safe use in MVEL templates: first backslash-escapes curly brackets matched
// by CURLY_BRACKET_ESCAPE, then backslash-escapes URL-like sequences matched by URL_ESCAPE.
// Returns null for null input.
public static String escape(final String raw) { if (raw == null) { return null; } final String escapedCurlyBrackets = CURLY_BRACKET_ESCAPE.matcher(raw).replaceAll("\\\\$1\\}"); return URL_ESCAPE.matcher(escapedCurlyBrackets).replaceAll("\\\\$1"); }
// Degenerate inputs: null stays null, empty stays empty, whitespace is preserved.
@Test public void shouldBeRobustAtEscaping() { assertThat(MvelHelper.escape(null)).isNull(); assertThat(MvelHelper.escape("")).isEmpty(); assertThat(MvelHelper.escape(" ")).isEqualTo(" "); }
// Adapter pass-through to IMap.putTransient (TTL-bounded entry that bypasses MapStore).
@Override public void putTransient(K key, V value, long ttl, TimeUnit timeunit) { map.putTransient(key, value, ttl, timeunit); }
// Writes a transient entry with a 1s TTL, then checks it is readable before expiry and gone
// after. NOTE(review): if the entry expires before the first get(), the body is skipped and
// the test passes vacuously — presumably a deliberate guard against timing flakiness.
@Test public void testPutTransient() { adapter.putTransient(42, "value", 1000, TimeUnit.MILLISECONDS); String value = map.get(42); if (value != null) { assertEquals("value", value); sleepMillis(1100); assertNull(map.get(42)); } }
// Builds a dotted metric name from the base name plus any additional parts, delegating the
// joining/skip-empty rules to the append(...) helper; a null varargs array is tolerated.
public static String name(String name, String... names) { final StringBuilder builder = new StringBuilder(); append(builder, name); if (names != null) { for (String s : names) { append(builder, s); } } return builder.toString(); }
// The Class overload should expand to the fully-qualified class name plus the dotted suffixes.
@Test public void concatenatesClassNamesWithStringsToFormADottedName() { assertThat(name(MetricRegistryTest.class, "one", "two")) .isEqualTo("com.codahale.metrics.MetricRegistryTest.one.two"); }
// Scans a glue class for annotated methods, feeding each method/annotation pair to the
// consumer. Object itself and non-instantiable classes are skipped up front.
static void scan(Class<?> aClass, BiConsumer<Method, Annotation> consumer) { // prevent unnecessary checking of Object methods if (Object.class.equals(aClass)) { return; } if (!isInstantiable(aClass)) { return; } for (Method method : safelyGetMethods(aClass)) { scan(consumer, aClass, method); } }
// Extending a class that declares step definitions must be rejected with a descriptive error.
@Test void loadGlue_fails_when_class_is_not_method_declaring_class() { InvalidMethodException exception = assertThrows(InvalidMethodException.class, () -> MethodScanner.scan(ExtendedSteps.class, backend)); assertThat(exception.getMessage(), is( "You're not allowed to extend classes that define Step Definitions or hooks. " + "class io.cucumber.java.MethodScannerTest$ExtendedSteps extends class io.cucumber.java.MethodScannerTest$BaseSteps")); }
// Registers a client with the admin endpoint via an authenticated POST; a blank access token
// aborts early with an error log, and the textual response is compared against SUCCESS to
// decide between success and error logging. Failures are logged, not thrown.
public static void doRegister(final String json, final String url, final String type, final String accessToken) throws IOException { if (StringUtils.isBlank(accessToken)) { LOGGER.error("{} client register error accessToken is null, please check the config : {} ", type, json); return; } Headers headers = new Headers.Builder().add(Constants.X_ACCESS_TOKEN, accessToken).build(); String result = OkHttpTools.getInstance().post(url, json, headers); if (Objects.equals(SUCCESS, result)) { LOGGER.info("{} client register success: {} ", type, json); } else { LOGGER.error("{} client register error: {} ", type, json); } }
// Exercises the error-response path for both overloads, plus the blank-token early return
// (final call must not trigger another HTTP post).
@Test public void testDoRegisterWhenError() throws IOException { when(okHttpTools.post(url, json)).thenReturn("Error parameter!"); Headers headers = new Headers.Builder().add(Constants.X_ACCESS_TOKEN, accessToken).build(); when(okHttpTools.post(url, json, headers)).thenReturn("Error parameter!"); try (MockedStatic<OkHttpTools> okHttpToolsMockedStatic = mockStatic(OkHttpTools.class)) { okHttpToolsMockedStatic.when(OkHttpTools::getInstance).thenReturn(okHttpTools); RegisterUtils.doRegister(json, url, RegisterTypeEnum.DUBBO.getName()); verify(okHttpTools, times(1)).post(eq(url), eq(json)); RegisterUtils.doRegister(json, url, RegisterTypeEnum.DUBBO.getName(), accessToken); verify(okHttpTools, times(1)).post(eq(url), eq(json)); RegisterUtils.doRegister(json, url, RegisterTypeEnum.DUBBO.getName(), null); } }
@SuppressWarnings("unchecked") public static <S, F> S visit(final Schema schema, final Visitor<S, F> visitor) { final BiFunction<Visitor<?, ?>, Schema, Object> handler = HANDLER.get(schema.type()); if (handler == null) { throw new UnsupportedOperationException("Unsupported schema type: " + schema.type()); } return (S) handler.apply(visitor, schema); }
// A visitor that only overrides the primitive callback must hit the default (throwing)
// behavior for every structured schema type.
@Test public void shouldThrowByDefaultFromStructured() { // Given: visitor = new Visitor<String, Integer>() { @Override public String visitPrimitive(final Schema schema) { return null; } }; structuredSchemas().forEach(schema -> { try { // When: SchemaWalker.visit(schema, visitor); fail(); } catch (final UnsupportedOperationException e) { // Then: assertThat(e.getMessage(), is("Unsupported schema type: " + schema)); } }); }
// Static factory for a fresh, empty Builder.
public static Builder builder() { return new Builder(); }
// Builder validation: each missing-field combination must fail with a message that lists
// exactly the unset required fields.
@Test public void testBuilder_missingValues() throws CacheDirectoryCreationException { // Target image is missing try { BuildContext.builder() .setBaseImageConfiguration( ImageConfiguration.builder(Mockito.mock(ImageReference.class)).build()) .setBaseImageLayersCacheDirectory(Paths.get("ignored")) .setContainerConfiguration(ContainerConfiguration.builder().build()) .setApplicationLayersCacheDirectory(Paths.get("ignored")) .build(); Assert.fail("BuildContext should not be built with missing values"); } catch (IllegalStateException ex) { Assert.assertEquals("target image configuration is required but not set", ex.getMessage()); } // Two required fields missing try { BuildContext.builder() .setBaseImageLayersCacheDirectory(Paths.get("ignored")) .setApplicationLayersCacheDirectory(Paths.get("ignored")) .setContainerConfiguration(ContainerConfiguration.builder().build()) .build(); Assert.fail("BuildContext should not be built with missing values"); } catch (IllegalStateException ex) { Assert.assertEquals( "base image configuration and target image configuration are required but not set", ex.getMessage()); } // All required fields missing try { BuildContext.builder().build(); Assert.fail("BuildContext should not be built with missing values"); } catch (IllegalStateException ex) { Assert.assertEquals( "base image configuration, target image configuration, container configuration, base " + "image layers cache directory, and application layers cache directory are required " + "but not set", ex.getMessage()); } }
// Identifies which PMML model type this provider handles (constant PMML_MODEL_TYPE).
@Override public PMML_MODEL getPMMLModelType() { return PMML_MODEL_TYPE; }
// The provider must report the MINING_MODEL type.
@Test void getPMMLModelType() { assertThat(PROVIDER.getPMMLModelType()).isEqualTo(PMML_MODEL.MINING_MODEL); }
// JDBC getNString is equivalent to getString for this result set implementation.
@Override public String getNString(final int columnIndex) throws SQLException { return getString(columnIndex); }
// getNString by column index should surface the merged result set's String value.
@Test void assertGetNStringWithColumnIndex() throws SQLException { when(mergeResultSet.getValue(1, String.class)).thenReturn("value"); assertThat(shardingSphereResultSet.getNString(1), is("value")); }
// Returns the currently stored gauge value.
@Override public T getValue() { return value; }
// A freshly constructed settable gauge must report its constructor-supplied default.
@Test public void newSettableGaugeWithDefaultReturnsDefault() { DefaultSettableGauge<String> gauge = new DefaultSettableGauge<>("default"); assertThat(gauge.getValue()).isEqualTo("default"); }
/**
 * Returns the {@code index}-th chunk of at most {@code maxLength} bytes from {@code message},
 * or {@code null} when the requested chunk lies past the end of the message.
 */
@Nullable @Override public byte[] chunk(@NonNull final byte[] message, @IntRange(from = 0) final int index, @IntRange(from = 20) final int maxLength) {
    // Byte position at which this chunk starts within the full message.
    final int start = index * maxLength;
    // Nothing left to send once the start position reaches the end of the message.
    if (start >= message.length) return null;
    final int size = Math.min(maxLength, message.length - start);
    final byte[] chunkBytes = new byte[size];
    System.arraycopy(message, start, chunkBytes, 0, size);
    return chunkBytes;
}
// The second chunk (index 1) with MTU 23 must be bytes [20, 40) of the text.
@Test public void chunk_23() { final int MTU = 23; final DefaultMtuSplitter splitter = new DefaultMtuSplitter(); final byte[] result = splitter.chunk(text.getBytes(), 1, MTU - 3); assertArrayEquals(text.substring(MTU - 3, 2 * (MTU - 3)).getBytes(), result); }
// Parses an HTTP "Basic" Authorization header into username, device id and password.
// Rejects blank headers, non-Basic schemes, undecodable/empty credentials, credentials with
// no ':' separator, and blank username/password; the username component may carry a device
// id suffix which AccountAuthenticator splits off (defaulting handled there).
// NOTE(review): Base64.getDecoder().decode throws IllegalArgumentException (caught by the
// outer catch), not IndexOutOfBoundsException — the inner "Missing credentials" catch looks
// unreachable for decode failures; confirm which path it was meant to cover.
public static BasicAuthorizationHeader fromString(final String header) throws InvalidAuthorizationHeaderException { try { if (StringUtils.isBlank(header)) { throw new InvalidAuthorizationHeaderException("Blank header"); } final int spaceIndex = header.indexOf(' '); if (spaceIndex == -1) { throw new InvalidAuthorizationHeaderException("Invalid authorization header: " + header); } final String authorizationType = header.substring(0, spaceIndex); if (!"Basic".equals(authorizationType)) { throw new InvalidAuthorizationHeaderException("Unsupported authorization method: " + authorizationType); } final String credentials; try { credentials = new String(Base64.getDecoder().decode(header.substring(spaceIndex + 1))); } catch (final IndexOutOfBoundsException e) { throw new InvalidAuthorizationHeaderException("Missing credentials"); } if (StringUtils.isEmpty(credentials)) { throw new InvalidAuthorizationHeaderException("Bad decoded value: " + credentials); } final int credentialSeparatorIndex = credentials.indexOf(':'); if (credentialSeparatorIndex == -1) { throw new InvalidAuthorizationHeaderException("Badly-formatted credentials: " + credentials); } final String usernameComponent = credentials.substring(0, credentialSeparatorIndex); final String username; final byte deviceId; { final Pair<String, Byte> identifierAndDeviceId = AccountAuthenticator.getIdentifierAndDeviceId(usernameComponent); username = identifierAndDeviceId.first(); deviceId = identifierAndDeviceId.second(); } final String password = credentials.substring(credentialSeparatorIndex + 1); if (StringUtils.isAnyBlank(username, password)) { throw new InvalidAuthorizationHeaderException("Username or password were blank"); } return new BasicAuthorizationHeader(username, deviceId, password); } catch (final IllegalArgumentException | IndexOutOfBoundsException e) { throw new InvalidAuthorizationHeaderException(e); } }
// Parses a plain "user:pass" credential (primary device id) and a "username.7:password"
// credential carrying an explicit device id.
@Test void fromString() throws InvalidAuthorizationHeaderException { { final BasicAuthorizationHeader header = BasicAuthorizationHeader.fromString("Basic YWxhZGRpbjpvcGVuc2VzYW1l"); assertEquals("aladdin", header.getUsername()); assertEquals("opensesame", header.getPassword()); assertEquals(Device.PRIMARY_ID, header.getDeviceId()); } { final BasicAuthorizationHeader header = BasicAuthorizationHeader.fromString("Basic " + Base64.getEncoder().encodeToString("username.7:password".getBytes(StandardCharsets.UTF_8))); assertEquals("username", header.getUsername()); assertEquals("password", header.getPassword()); assertEquals(7, header.getDeviceId()); } }
// Converts a PEM-encoded private key string into a java.security.PrivateKey using
// BouncyCastle's PEM parser; parse failures surface as RuntimeException with the IOException
// (e.g. PEMException for malformed input) preserved as the cause.
public PrivateKey convertPrivateKey(final String privatePemKey) { StringReader keyReader = new StringReader(privatePemKey); try { PrivateKeyInfo privateKeyInfo = PrivateKeyInfo .getInstance(new PEMParser(keyReader).readObject()); return new JcaPEMKeyConverter().getPrivateKey(privateKeyInfo); } catch (IOException exception) { throw new RuntimeException(exception); } }
// A malformed PEM body must surface as RuntimeException caused by a PEMException, with the
// cause's text visible in the message.
@Test void givenMalformedPrivateKey_whenConvertPrivateKey_thenThrowRuntimeException() { // Given String malformedPrivatePemKey = "-----BEGIN PRIVATE KEY-----\n" + "malformedkey\n" + "-----END PRIVATE KEY-----"; // When & Then assertThatThrownBy(() -> KeyConverter.convertPrivateKey(malformedPrivatePemKey)) .isInstanceOf(RuntimeException.class) .hasCauseInstanceOf(PEMException.class) .hasMessageContaining("PEMException"); }
// Emits a WARNING validation message for every query token that looks like a lowercase
// AND/OR/NOT operator (as judged by isInvalidOperator), pointing at the token's position.
@Override public List<ValidationMessage> validate(ValidationContext context) { return context.query().tokens().stream() .filter(this::isInvalidOperator) .map(token -> { final String errorMessage = String.format(Locale.ROOT, "Query contains invalid operator \"%s\". All AND / OR / NOT operators have to be written uppercase", token.image()); return ValidationMessage.builder(ValidationStatus.WARNING, ValidationType.INVALID_OPERATOR) .errorMessage(errorMessage) .relatedProperty(token.image()) .position(QueryPosition.from(token)) .build(); }).collect(Collectors.toList()); }
// A lowercase "not(...)" must produce exactly one INVALID_OPERATOR warning tied to "not".
@Test void testLowercaseNegation() { final ValidationContext context = TestValidationContext.create("not(foo:bar)") .build(); final List<ValidationMessage> messages = sut.validate(context); assertThat(messages.size()).isEqualTo(1); final ValidationMessage message = messages.iterator().next(); assertThat(message.validationType()).isEqualTo(ValidationType.INVALID_OPERATOR); assertThat(message.relatedProperty()).hasValue("not"); }
// For selection-style model-combination methods the raw result passes through unchanged; for
// numeric aggregation methods the raw value must be a Number and is wrapped with its weight
// in a KiePMMLValueWeight. WEIGHTED_MAJORITY_VOTE is explicitly unimplemented; anything else
// is an unrecognized method.
Object getEventuallyWeightedResult(Object rawObject, MULTIPLE_MODEL_METHOD multipleModelMethod, double weight) { switch (multipleModelMethod) { case MAJORITY_VOTE: case MODEL_CHAIN: case SELECT_ALL: case SELECT_FIRST: return rawObject; case MAX: case SUM: case MEDIAN: case AVERAGE: case WEIGHTED_SUM: case WEIGHTED_MEDIAN: case WEIGHTED_AVERAGE: if (!(rawObject instanceof Number)) { throw new KiePMMLException("Expected a number, retrieved " + rawObject.getClass().getName()); } return new KiePMMLValueWeight(((Number) rawObject).doubleValue(), weight); case WEIGHTED_MAJORITY_VOTE: throw new KiePMMLException(multipleModelMethod + " not implemented, yet"); default: throw new KiePMMLException("Unrecognized MULTIPLE_MODEL_METHOD " + multipleModelMethod); } }
// Every value/weight combination method must wrap a numeric raw result into a
// KiePMMLValueWeight carrying the value and the given weight.
@Test void getEventuallyWeightedResultValueWeightNumber() { final Integer rawObject = 24; final double weight = 2.23; VALUE_WEIGHT_METHODS.forEach(multipleModelMethod -> { Object retrieved = evaluator.getEventuallyWeightedResult(rawObject, multipleModelMethod, weight); assertThat(retrieved).isNotNull(); assertThat(retrieved).isInstanceOf(KiePMMLValueWeight.class); KiePMMLValueWeight kiePMMLValueWeight = (KiePMMLValueWeight) retrieved; assertThat(kiePMMLValueWeight.getValue()).isCloseTo(rawObject.doubleValue(), Offset.offset(0.0)); assertThat(kiePMMLValueWeight.getWeight()).isCloseTo(weight, Offset.offset(0.0)); }); }
// Decodes a Bech32/Bech32m string: enforces length bounds (8..90), printable ASCII, no mixed
// case, a '1' separator with a non-empty human-readable part and >= 6 data characters; maps
// data characters through CHARSET_REV, verifies the checksum (distinguishing Bech32 vs
// Bech32m encodings), and returns the HRP plus data with the 6 checksum values stripped.
public static Bech32Data decode(final String str) throws AddressFormatException { boolean lower = false, upper = false; if (str.length() < 8) throw new AddressFormatException.InvalidDataLength("Input too short: " + str.length()); if (str.length() > 90) throw new AddressFormatException.InvalidDataLength("Input too long: " + str.length()); for (int i = 0; i < str.length(); ++i) { char c = str.charAt(i); if (c < 33 || c > 126) throw new AddressFormatException.InvalidCharacter(c, i); if (c >= 'a' && c <= 'z') { if (upper) throw new AddressFormatException.InvalidCharacter(c, i); lower = true; } if (c >= 'A' && c <= 'Z') { if (lower) throw new AddressFormatException.InvalidCharacter(c, i); upper = true; } } final int pos = str.lastIndexOf('1'); if (pos < 1) throw new AddressFormatException.InvalidPrefix("Missing human-readable part"); final int dataPartLength = str.length() - 1 - pos; if (dataPartLength < 6) throw new AddressFormatException.InvalidDataLength("Data part too short: " + dataPartLength); byte[] values = new byte[dataPartLength]; for (int i = 0; i < dataPartLength; ++i) { char c = str.charAt(i + pos + 1); if (CHARSET_REV[c] == -1) throw new AddressFormatException.InvalidCharacter(c, i + pos + 1); values[i] = CHARSET_REV[c]; } String hrp = str.substring(0, pos).toLowerCase(Locale.ROOT); Encoding encoding = verifyChecksum(hrp, values); if (encoding == null) throw new AddressFormatException.InvalidChecksum(); return new Bech32Data(encoding, hrp, Arrays.copyOfRange(values, 0, values.length - 6)); }
// A structurally valid string whose checksum does not verify must raise InvalidChecksum.
@Test(expected = AddressFormatException.InvalidChecksum.class) public void decode_invalidNetwork() { Bech32.decode("A12UEL5X"); }
// Truncates a string so its UTF-8 encoding fits within maxBytes, delegating to
// truncateByByteLength with a max-bytes-per-char of 4 (the UTF-8 maximum) and appending an
// ellipsis marker (final boolean flag) when truncation occurs.
public static String truncateUtf8(String str, int maxBytes) { Charset charset = StandardCharsets.UTF_8; //UTF-8编码单个字符最大长度4 — UTF-8 encodes a single character in at most 4 bytes return truncateByByteLength(str, charset, maxBytes, 4, true); }
// Mixed CJK/ASCII string: the byte budget includes the "..." suffix, and an ample budget
// returns the original string unchanged.
@Test public void truncateUtf8Test() { final String str = "这是This一段中英文"; String ret = StrUtil.truncateUtf8(str, 12); assertEquals("这是Thi...", ret); ret = StrUtil.truncateUtf8(str, 13); assertEquals("这是This...", ret); ret = StrUtil.truncateUtf8(str, 14); assertEquals("这是This...", ret); ret = StrUtil.truncateUtf8(str, 999); assertEquals(str, ret); }
// Factory for a predicate that accepts any value comparing >= the given reference value.
@PublicAPI(usage = ACCESS) public static <T extends Comparable<T>> DescribedPredicate<T> greaterThanOrEqualTo(T value) { return new GreaterThanOrEqualToPredicate<>(value); }
// Covers integers and enum ordering: accepts equal and greater, rejects smaller; also checks
// the generated description text.
@Test public void greaterThanOrEqualTo_works() { assertThat(greaterThanOrEqualTo(5)) .accepts(6) .hasDescription("greater than or equal to '5'") .accepts(5) .rejects(4); assertThat(greaterThanOrEqualTo(Foo.SECOND)) .rejects(Foo.FIRST) .accepts(Foo.SECOND) .accepts(Foo.THIRD); }
// Convenience overload: routes with no path parameters (empty map).
public RouteResult<T> route(HttpMethod method, String path) { return route(method, path, Collections.emptyMap()); }
// Leading/trailing (and doubled) slashes must not affect route matching.
@Test void testIgnoreSlashesAtBothEnds() { assertThat(router.route(GET, "articles").target()).isEqualTo("index"); assertThat(router.route(GET, "/articles").target()).isEqualTo("index"); assertThat(router.route(GET, "//articles").target()).isEqualTo("index"); assertThat(router.route(GET, "articles/").target()).isEqualTo("index"); assertThat(router.route(GET, "articles//").target()).isEqualTo("index"); assertThat(router.route(GET, "/articles/").target()).isEqualTo("index"); assertThat(router.route(GET, "//articles//").target()).isEqualTo("index"); }
// Builds an OkHttp Request.Builder for the given HTTP method: GET/HEAD encode the form into
// the URL's query string; PUT/DELETE/other (POST default) send the form as a request body.
public static Request.Builder buildRequestBuilder(final String url, final Map<String, ?> form, final HTTPMethod method) { switch (method) { case GET: return new Request.Builder() .url(buildHttpUrl(url, form)) .get(); case HEAD: return new Request.Builder() .url(buildHttpUrl(url, form)) .head(); case PUT: return new Request.Builder() .url(buildHttpUrl(url)) .put(buildFormBody(form)); case DELETE: return new Request.Builder() .url(buildHttpUrl(url)) .delete(buildFormBody(form)); default: return new Request.Builder() .url(buildHttpUrl(url)) .post(buildFormBody(form)); } }
// DELETE requests must carry the form as a body and leave the URL untouched.
@Test public void buildRequestBuilderForDELETETest() { Request.Builder builder = HttpUtils.buildRequestBuilder(TEST_URL, formMap, HttpUtils.HTTPMethod.DELETE); Assert.assertNotNull(builder); Assert.assertNotNull(builder.build().body()); Assert.assertEquals(builder.build().method(), HttpUtils.HTTPMethod.DELETE.value()); Assert.assertEquals(builder.build().url().toString(), TEST_URL); }
// Test-visible overload: inserts an entry without allowing overwrite (second arg false).
@VisibleForTesting void put(DirectoryEntry entry) { put(entry, false); }
// Inserting a second entry under an existing name must throw IllegalArgumentException.
@Test public void testPutEntryForExistingNameIsIllegal() { dir.put(entry("foo")); try { dir.put(entry("foo")); fail(); } catch (IllegalArgumentException expected) { } }
// Intentionally empty: this implementation ignores auto-track enablement requests
// (presumably the disabled/no-op variant of the SDK API — confirm against the class docs).
@Override public void enableAutoTrack(List<AutoTrackEventType> eventTypeList) { }
// Enabling auto-track on the no-op implementation must leave auto-tracking disabled.
@Test public void testEnableAutoTrack() { List<SensorsDataAPI.AutoTrackEventType> types = new ArrayList<>(); types.add(SensorsDataAPI.AutoTrackEventType.APP_START); types.add(SensorsDataAPI.AutoTrackEventType.APP_END); mSensorsAPI.enableAutoTrack(types); Assert.assertFalse(mSensorsAPI.isAutoTrackEnabled()); }
// Reconstructs a PojoSerializer from snapshot data: restores each field's serializer from its
// snapshot (requiring every snapshot to be present), then restores registered and
// non-registered subclass serializers, decomposing the registered ones into the
// class-to-index map plus serializer array form the PojoSerializer constructor expects.
@Override @SuppressWarnings("unchecked") public TypeSerializer<T> restoreSerializer() { final int numFields = snapshotData.getFieldSerializerSnapshots().size(); final ArrayList<Field> restoredFields = new ArrayList<>(numFields); final ArrayList<TypeSerializer<?>> restoredFieldSerializers = new ArrayList<>(numFields); snapshotData .getFieldSerializerSnapshots() .forEach( (fieldName, field, fieldSerializerSnapshot) -> { restoredFields.add(field); checkState( fieldSerializerSnapshot != null, "field serializer snapshots should be present."); restoredFieldSerializers.add( fieldSerializerSnapshot.restoreSerializer()); }); final LinkedHashMap<Class<?>, TypeSerializer<?>> registeredSubclassSerializers = restoreSerializers( snapshotData.getRegisteredSubclassSerializerSnapshots().unwrapOptionals()); final Tuple2<LinkedHashMap<Class<?>, Integer>, TypeSerializer<Object>[]> decomposedSubclassSerializerRegistry = decomposeSubclassSerializerRegistry(registeredSubclassSerializers); final LinkedHashMap<Class<?>, TypeSerializer<?>> nonRegisteredSubclassSerializers = restoreSerializers( snapshotData .getNonRegisteredSubclassSerializerSnapshots() .unwrapOptionals()); return new PojoSerializer<>( snapshotData.getPojoClass(), restoredFields.toArray(new Field[numFields]), restoredFieldSerializers.toArray(new TypeSerializer[numFields]), decomposedSubclassSerializerRegistry.f0, decomposedSubclassSerializerRegistry.f1, nonRegisteredSubclassSerializers, new SerializerConfigImpl()); }
// A snapshot containing only the HEIGHT field must restore a PojoSerializer with exactly that
// field and a DoubleSerializer for it.
@Test void testRestoreSerializerWithNewFields() { final PojoSerializerSnapshot<TestPojo> testSnapshot = buildTestSnapshot(Collections.singletonList(HEIGHT_FIELD)); final TypeSerializer<TestPojo> restoredSerializer = testSnapshot.restoreSerializer(); assertThat(restoredSerializer).isInstanceOf(PojoSerializer.class); final PojoSerializer<TestPojo> restoredPojoSerializer = (PojoSerializer<TestPojo>) restoredSerializer; final Field[] restoredFields = restoredPojoSerializer.getFields(); assertThat(restoredFields).containsExactly(HEIGHT_FIELD.field); final TypeSerializer<?>[] restoredFieldSerializers = restoredPojoSerializer.getFieldSerializers(); assertThat(restoredFieldSerializers).containsExactly(DoubleSerializer.INSTANCE); }
// Test hook: empties both the write-side and read-side start-time caches.
@VisibleForTesting void clearStartTimeCache() { startTimeWriteCache.clear(); startTimeReadCache.clear(); }
// Runs the inherited single-entity retrieval test twice, clearing the start-time cache and
// reloading data in between so the second pass exercises the cold-cache path.
@Test public void testGetSingleEntity() throws IOException { super.testGetSingleEntity(); ((LeveldbTimelineStore)store).clearStartTimeCache(); super.testGetSingleEntity(); loadTestEntityData(); }
// Handles corrupted tasks: locks all active tasks, splits the corrupted set into active vs.
// standby, cleans up corrupted standbys first (so a TaskMigrated during commit can't leave
// them behind), commits the remaining RUNNING non-corrupted tasks (folding any additional
// corruption discovered during commit into the cleanup set; a commit timeout instead closes
// the uncorrupted tasks dirty without marking changelogs corrupted), then cleans up the
// corrupted active tasks and releases the locks. Returns whether any active task was
// corrupted. NOTE(review): step ordering is load-bearing (commit must precede closing the
// corrupted active tasks to abort the ongoing txn); kept verbatim.
boolean handleCorruption(final Set<TaskId> corruptedTasks) { final Set<TaskId> activeTasks = new HashSet<>(tasks.activeTaskIds()); // We need to stop all processing, since we need to commit non-corrupted tasks as well. maybeLockTasks(activeTasks); final Set<Task> corruptedActiveTasks = new HashSet<>(); final Set<Task> corruptedStandbyTasks = new HashSet<>(); for (final TaskId taskId : corruptedTasks) { final Task task = tasks.task(taskId); if (task.isActive()) { corruptedActiveTasks.add(task); } else { corruptedStandbyTasks.add(task); } } // Make sure to clean up any corrupted standby tasks in their entirety before committing // since TaskMigrated can be thrown and the resulting handleLostAll will only clean up active tasks closeDirtyAndRevive(corruptedStandbyTasks, true); // We need to commit before closing the corrupted active tasks since this will force the ongoing txn to abort try { final Collection<Task> tasksToCommit = tasks.allTasksPerId() .values() .stream() .filter(t -> t.state() == Task.State.RUNNING) .filter(t -> !corruptedTasks.contains(t.id())) .collect(Collectors.toSet()); commitTasksAndMaybeUpdateCommittableOffsets(tasksToCommit, new HashMap<>()); } catch (final TaskCorruptedException e) { log.info("Some additional tasks were found corrupted while trying to commit, these will be added to the " + "tasks to clean and revive: {}", e.corruptedTasks()); corruptedActiveTasks.addAll(tasks.tasks(e.corruptedTasks())); } catch (final TimeoutException e) { log.info("Hit TimeoutException when committing all non-corrupted tasks, these will be closed and revived"); final Collection<Task> uncorruptedTasks = new HashSet<>(tasks.activeTasks()); uncorruptedTasks.removeAll(corruptedActiveTasks); // Those tasks which just timed out can just be closed dirty without marking changelogs as corrupted closeDirtyAndRevive(uncorruptedTasks, false); } closeDirtyAndRevive(corruptedActiveTasks, true); maybeUnlockTasks(activeTasks); return !corruptedActiveTasks.isEmpty(); }
// Verifies that when the state updater is enabled, handling a corrupted task
// does NOT trigger commits on a restoring active task or on a running standby
// task — only the corrupted task itself is processed.
@Test
public void shouldNotCommitNonCorruptedRestoringActiveTasksAndNotCommitRunningStandbyTasksWithStateUpdaterEnabled() {
    final StreamTask activeRestoringTask = statefulTask(taskId00, taskId00ChangelogPartitions)
        .withInputPartitions(taskId00Partitions)
        .inState(State.RESTORING).build();
    final StandbyTask standbyTask = standbyTask(taskId01, taskId01ChangelogPartitions)
        .withInputPartitions(taskId01Partitions)
        .inState(State.RUNNING).build();
    final StreamTask corruptedTask = statefulTask(taskId02, taskId02ChangelogPartitions)
        .withInputPartitions(taskId02Partitions)
        .inState(State.RUNNING).build();
    final TasksRegistry tasks = mock(TasksRegistry.class);
    // only the corrupted task is registered with the task registry
    when(tasks.allTasksPerId()).thenReturn(mkMap(mkEntry(taskId02, corruptedTask)));
    when(tasks.task(taskId02)).thenReturn(corruptedTask);
    final TaskManager taskManager = setUpTaskManager(ProcessingMode.AT_LEAST_ONCE, tasks, true);
    when(consumer.assignment()).thenReturn(intersection(HashSet::new, taskId00Partitions, taskId01Partitions, taskId02Partitions));

    taskManager.handleCorruption(mkSet(taskId02));

    // neither the restoring active task nor the standby task may be committed
    verify(activeRestoringTask, never()).commitNeeded();
    verify(activeRestoringTask, never()).prepareCommit();
    verify(activeRestoringTask, never()).postCommit(anyBoolean());
    verify(standbyTask, never()).commitNeeded();
    verify(standbyTask, never()).prepareCommit();
    verify(standbyTask, never()).postCommit(anyBoolean());
}
/**
 * Rewrites the target path of the given request to {@code destinationPath},
 * handling both proxy request flavors produced by this library as well as
 * wrapped requests that only expose the original event via attributes.
 *
 * Fix: the REST (v1) branches previously wrote the {@code dispatchTo} field
 * instead of the {@code destinationPath} parameter, so a caller-supplied
 * destination was silently ignored for those request types. All branches now
 * use the parameter consistently.
 *
 * @param req             the servlet request whose path should be redirected
 * @param destinationPath the new target path
 * @throws IllegalStateException if the request type is not recognized
 */
void setRequestPath(ServletRequest req, final String destinationPath) {
    if (req instanceof AwsProxyHttpServletRequest) {
        ((AwsProxyHttpServletRequest) req).getAwsProxyRequest().setPath(destinationPath);
        return;
    }

    if (req instanceof AwsHttpApiV2ProxyHttpServletRequest) {
        ((AwsHttpApiV2ProxyHttpServletRequest) req).getRequest().setRawPath(destinationPath);
        return;
    }

    // The request may have been wrapped (e.g. by a security filter); fall back to
    // the original Lambda event stored in the request attributes.
    log.debug("Request is not an proxy request generated by this library, attempting to extract the proxy event type from the request attributes");

    if (req.getAttribute(API_GATEWAY_EVENT_PROPERTY) != null && req.getAttribute(API_GATEWAY_EVENT_PROPERTY) instanceof AwsProxyRequest) {
        ((AwsProxyRequest) req.getAttribute(API_GATEWAY_EVENT_PROPERTY)).setPath(destinationPath);
        return;
    }
    if (req.getAttribute(HTTP_API_EVENT_PROPERTY) != null && req.getAttribute(HTTP_API_EVENT_PROPERTY) instanceof HttpApiV2ProxyRequest) {
        ((HttpApiV2ProxyRequest) req.getAttribute(HTTP_API_EVENT_PROPERTY)).setRawPath(destinationPath);
        return;
    }
    throw new IllegalStateException("Could not set new target path for the given ServletRequest object");
}
// Verifies that setRequestPath throws IllegalStateException when given a
// wrapped request that exposes neither a proxy request type nor the original
// gateway event via request attributes.
@Test
void setPathForWrappedRequestWithoutGatewayEvent_forwardByPath_throwsException() {
    AwsProxyRequest proxyRequest = new AwsProxyRequestBuilder("/hello", "GET").build();
    AwsProxyHttpServletRequest servletRequest = new AwsProxyHttpServletRequest(proxyRequest, new MockLambdaContext(), null);
    // wrap the request so it is no longer recognized as a library-generated type
    SecurityContextHolderAwareRequestWrapper springSecurityRequest = new SecurityContextHolderAwareRequestWrapper(servletRequest, "ADMIN");
    AwsProxyRequestDispatcher dispatcher = new AwsProxyRequestDispatcher(FORWARD_PATH, false, null);
    try {
        dispatcher.setRequestPath(springSecurityRequest, FORWARD_PATH);
    } catch (Exception e) {
        // expected failure path: the dispatcher cannot locate the underlying event
        assertTrue(e instanceof IllegalStateException);
        return;
    }
    fail();
}
/**
 * Pushes the table's schema metadata to DataHub and then un-soft-deletes the
 * dataset so the update becomes visible.
 *
 * NOTE(review): the {@code schema} and {@code schemaDifference} parameters are
 * not used here — the proposal is rebuilt from {@code tableName} only; confirm
 * this is intended.
 *
 * @throws HoodieDataHubSyncException if either emission fails
 */
@Override
public void updateTableSchema(String tableName, MessageType schema, SchemaDifference schemaDifference) {
    try (RestEmitter emitter = config.getRestEmitter()) {
        DatahubResponseLogger responseLogger = new DatahubResponseLogger();
        MetadataChangeProposalWrapper schemaChange = createSchemaMetadataUpdate(tableName);
        // block until the emission completes so failures surface here
        emitter.emit(schemaChange, responseLogger).get();

        // When updating an entity, it is necessary to set its soft-delete status to false, or else the update won't get
        // reflected in the UI.
        MetadataChangeProposalWrapper softDeleteUndoProposal = createUndoSoftDelete();
        emitter.emit(softDeleteUndoProposal, responseLogger).get();
    } catch (Exception e) {
        throw new HoodieDataHubSyncException("Fail to change schema for Dataset " + datasetUrn, e);
    }
}
// Verifies that updateTableSchema issues exactly two emissions through the
// REST emitter: the schema change proposal and the soft-delete undo proposal.
@Test
public void testUpdateTableSchemaInvokesRestEmitter() throws IOException {
    Properties props = new Properties();
    props.put(META_SYNC_PARTITION_EXTRACTOR_CLASS.key(), DummyPartitionValueExtractor.class.getName());
    props.put(META_SYNC_BASE_PATH.key(), tableBasePath);

    // make every emission succeed immediately
    Mockito.when(
            restEmitterMock.emit(any(MetadataChangeProposalWrapper.class), Mockito.any())
    ).thenReturn(
            CompletableFuture.completedFuture(MetadataWriteResponse.builder().build())
    );
    DatahubSyncConfigStub configStub = new DatahubSyncConfigStub(props, restEmitterMock);
    DataHubSyncClientStub dhClient = new DataHubSyncClientStub(configStub);
    dhClient.updateTableSchema("some_table", null, null);
    // one emit for the schema update, one for the soft-delete undo
    verify(restEmitterMock, times(2)).emit(any(MetadataChangeProposalWrapper.class), Mockito.any());
}
/**
 * Registers a custom string-to-value parser for the named field. The parser is
 * stored in the config's custom-processing map and applied when records are
 * converted.
 *
 * @param fieldName            the field the custom parser applies to
 * @param customRecordParsingFn converts the raw cell text into the field value
 * @return this transform, for chaining
 */
public <OutputT extends @NonNull Object> CsvIOParse<T> withCustomRecordParsing(
    String fieldName, SerializableFunction<String, OutputT> customRecordParsingFn) {

  Map<String, SerializableFunction<String, Object>> processingByField =
      getConfigBuilder().getOrCreateCustomProcessingMap();
  // widen OutputT to Object via a delegating lambda
  processingByField.put(fieldName, cell -> customRecordParsingFn.apply(cell));
  getConfigBuilder().setCustomProcessingMap(processingByField);
  return this;
}
// Verifies that a single custom parsing lambda registered for one field is
// applied when parsing CSV records into POJOs, producing the expected output
// and no errors.
@Test
public void givenSingleCustomParsingLambda_parsesPOJOs() {
    PCollection<String> records =
        csvRecords(
            pipeline,
            "instant,instantList",
            "2024-01-23T10:00:05.000Z,10-00-05-2024-01-23;12-59-59-2024-01-24");
    TimeContaining want =
        timeContaining(
            Instant.parse("2024-01-23T10:00:05.000Z"),
            Arrays.asList(
                Instant.parse("2024-01-23T10:00:05.000Z"),
                Instant.parse("2024-01-24T12:59:59.000Z")));
    // custom lambda parses the semicolon-separated list field
    CsvIOParse<TimeContaining> underTest =
        underTest(
            TIME_CONTAINING_SCHEMA,
            CSVFormat.DEFAULT
                .withHeader("instant", "instantList")
                .withAllowDuplicateHeaderNames(false),
            new HashMap<>(),
            timeContainingFromRowFn(),
            TIME_CONTAINING_CODER)
            .withCustomRecordParsing("instantList", instantListParsingLambda());
    CsvIOParseResult<TimeContaining> result = records.apply(underTest);
    PAssert.that(result.getOutput()).containsInAnyOrder(want);
    PAssert.that(result.getErrors()).empty();
    pipeline.run();
}
/**
 * Looks up an access token, preferring the Redis cache and falling back to
 * MySQL; a non-expired MySQL hit is written back into Redis.
 *
 * NOTE(review): an expired token found in MySQL is still returned to the
 * caller (it is only excluded from the Redis write-back) — confirm callers
 * handle expiry themselves.
 */
@Override
public OAuth2AccessTokenDO getAccessToken(String accessToken) {
    // try Redis first
    OAuth2AccessTokenDO accessTokenDO = oauth2AccessTokenRedisDAO.get(accessToken);
    if (accessTokenDO != null) {
        return accessTokenDO;
    }
    // cache miss: fall back to MySQL
    accessTokenDO = oauth2AccessTokenMapper.selectByAccessToken(accessToken);
    // if present in MySQL and not expired, warm the Redis cache
    if (accessTokenDO != null && !DateUtils.isExpired(accessTokenDO.getExpiresTime())) {
        oauth2AccessTokenRedisDAO.set(accessTokenDO);
    }
    return accessTokenDO;
}
// Verifies that a token stored only in MySQL is found on lookup and is then
// written through to the Redis cache.
@Test
public void testGetAccessToken() {
    // mock data (access token), stored only in MySQL
    OAuth2AccessTokenDO accessTokenDO = randomPojo(OAuth2AccessTokenDO.class)
            .setExpiresTime(LocalDateTime.now().plusDays(1));
    oauth2AccessTokenMapper.insert(accessTokenDO);
    // prepare parameters
    String accessToken = accessTokenDO.getAccessToken();

    // invoke
    OAuth2AccessTokenDO result = oauth2TokenService.getAccessToken(accessToken);
    // assert: result matches, and the token was written back to Redis
    assertPojoEquals(accessTokenDO, result, "createTime", "updateTime", "deleted",
            "creator", "updater");
    assertPojoEquals(accessTokenDO, oauth2AccessTokenRedisDAO.get(accessToken), "createTime", "updateTime", "deleted",
            "creator", "updater");
}
/**
 * Processes the symbols for one line. After the first failure the reader is
 * sticky: all subsequent lines are skipped and the same error is reported.
 *
 * @return the read error, if symbol processing has failed on this or any
 *         previous line; empty otherwise
 */
@Override
public Optional<ReadError> read(DbFileSources.Line.Builder lineBuilder) {
    // sticky failure: once an error occurred, stop processing entirely
    if (readError != null) {
        return Optional.of(readError);
    }
    try {
        processSymbols(lineBuilder);
    } catch (RangeOffsetConverter.RangeOffsetConverterException e) {
        readError = new ReadError(Data.SYMBOLS, lineBuilder.getLine());
        LOG.warn(format("Inconsistency detected in Symbols data. Symbols will be ignored for file '%s'", file.getKey()), e);
    }
    return Optional.ofNullable(readError);
}
// Verifies that symbol references are serialized in line order: the symbol
// declared on the earliest line gets the first id, regardless of the order the
// symbols were registered in.
@Test
public void symbol_declaration_should_be_sorted_by_line() {
    SymbolsLineReader symbolsLineReader = newReader(
        newSymbol(
            // this symbol starts on line 2 ...
            newSingleLineTextRangeWithExpectedLabel(LINE_2, OFFSET_0, OFFSET_1, RANGE_LABEL_1),
            newSingleLineTextRangeWithExpectedLabel(LINE_3, OFFSET_2, OFFSET_3, RANGE_LABEL_2)),
        newSymbol(
            // ... while this one starts on line 1, so it must be serialized first
            newSingleLineTextRangeWithExpectedLabel(LINE_1, OFFSET_0, OFFSET_1, RANGE_LABEL_1),
            newSingleLineTextRangeWithExpectedLabel(LINE_3, OFFSET_0, OFFSET_1, RANGE_LABEL_1)));

    assertThat(symbolsLineReader.read(line1)).isEmpty();
    symbolsLineReader.read(line2);
    symbolsLineReader.read(line3);

    assertThat(line1.getSymbols()).isEqualTo(RANGE_LABEL_1 + ",1");
    assertThat(line2.getSymbols()).isEqualTo(RANGE_LABEL_1 + ",2");
    assertThat(line3.getSymbols()).isEqualTo(RANGE_LABEL_1 + ",1;" + RANGE_LABEL_2 + ",2");
}
/**
 * Propagates a mastership role change to the OpenFlow controller, mapping the
 * ONOS role onto the corresponding OpenFlow role (STANDBY maps to EQUAL,
 * NONE maps to SLAVE). Unknown roles are logged as errors.
 */
@Override
public void roleChanged(DeviceId deviceId, MastershipRole newRole) {
    if (newRole == MastershipRole.MASTER) {
        controller.setRole(dpid(deviceId.uri()), RoleState.MASTER);
    } else if (newRole == MastershipRole.STANDBY) {
        controller.setRole(dpid(deviceId.uri()), RoleState.EQUAL);
    } else if (newRole == MastershipRole.NONE) {
        controller.setRole(dpid(deviceId.uri()), RoleState.SLAVE);
    } else {
        LOG.error("Unknown Mastership state : {}", newRole);
    }
    LOG.debug("Accepting mastership role change to {} for device {}", newRole, deviceId);
}
// Verifies the ONOS-to-OpenFlow role mapping: MASTER -> MASTER,
// STANDBY -> EQUAL, NONE -> SLAVE.
@Test
public void roleChanged() {
    provider.roleChanged(DID1, MASTER);
    assertEquals("Should be MASTER", RoleState.MASTER, controller.roleMap.get(DPID1));
    provider.roleChanged(DID1, STANDBY);
    assertEquals("Should be EQUAL", RoleState.EQUAL, controller.roleMap.get(DPID1));
    provider.roleChanged(DID1, NONE);
    assertEquals("Should be SLAVE", RoleState.SLAVE, controller.roleMap.get(DPID1));
}
/**
 * Checks whether {@code host}:{@code port} matches the given IP pattern.
 * The pattern may be a plain address, contain {@code *} wildcards, numeric
 * segment ranges like {@code 10-20}, and an optional {@code :port} suffix;
 * both IPv4 and IPv6 are supported.
 *
 * @throws IllegalArgumentException if pattern or host is null, or the
 *         pattern is malformed
 * @throws UnknownHostException     if the host cannot be resolved
 */
public static boolean matchIpRange(String pattern, String host, int port) throws UnknownHostException {
    if (pattern == null || host == null) {
        throw new IllegalArgumentException(
                "Illegal Argument pattern or hostName. Pattern:" + pattern + ", Host:" + host);
    }

    pattern = pattern.trim();
    // universal wildcards match everything
    if ("*.*.*.*".equals(pattern) || "*".equals(pattern)) {
        return true;
    }

    InetAddress inetAddress = InetAddress.getByName(host);
    boolean isIpv4 = isValidV4Address(inetAddress);
    // split an optional ":port" suffix off the pattern
    String[] hostAndPort = getPatternHostAndPort(pattern, isIpv4);
    if (hostAndPort[1] != null && !hostAndPort[1].equals(String.valueOf(port))) {
        return false;
    }
    pattern = hostAndPort[0];

    String splitCharacter = SPLIT_IPV4_CHARACTER;
    if (!isIpv4) {
        splitCharacter = SPLIT_IPV6_CHARACTER;
    }
    String[] mask = pattern.split(splitCharacter);
    // check format of pattern
    checkHostPattern(pattern, mask, isIpv4);

    host = inetAddress.getHostAddress();
    // exact textual match
    if (pattern.equals(host)) {
        return true;
    }
    // short name condition: no wildcard/range, so resolve and compare addresses
    if (!ipPatternContainExpression(pattern)) {
        InetAddress patternAddress = InetAddress.getByName(pattern);
        return patternAddress.getHostAddress().equals(host);
    }

    // compare segment by segment against the pattern
    String[] ipAddress = host.split(splitCharacter);
    for (int i = 0; i < mask.length; i++) {
        if ("*".equals(mask[i]) || mask[i].equals(ipAddress[i])) {
            continue;
        } else if (mask[i].contains("-")) {
            // numeric range segment, e.g. "10-20"
            String[] rangeNumStrs = StringUtils.split(mask[i], '-');
            if (rangeNumStrs.length != 2) {
                throw new IllegalArgumentException("There is wrong format of ip Address: " + mask[i]);
            }
            Integer min = getNumOfIpSegment(rangeNumStrs[0], isIpv4);
            Integer max = getNumOfIpSegment(rangeNumStrs[1], isIpv4);
            Integer ip = getNumOfIpSegment(ipAddress[i], isIpv4);
            if (ip < min || ip > max) {
                return false;
            }
        } else if ("0".equals(ipAddress[i]) && ("0".equals(mask[i]) || "00".equals(mask[i])
                || "000".equals(mask[i]) || "0000".equals(mask[i]))) {
            // zero segments may be written with leading zeros in the pattern
            continue;
        } else if (!mask[i].equals(ipAddress[i])) {
            return false;
        }
    }
    return true;
}
// Verifies IPv6 pattern matching with bracketed addresses, optional ports,
// segment ranges and wildcards, including the negative cases where the port
// or range does not match.
@Test
void testMatchIpv6WithIpPort() throws UnknownHostException {
    assertTrue(NetUtils.matchIpRange("[234e:0:4567::3d:ee]", "234e:0:4567::3d:ee", 8090));
    assertTrue(NetUtils.matchIpRange("[234e:0:4567:0:0:0:3d:ee]", "234e:0:4567::3d:ee", 8090));
    assertTrue(NetUtils.matchIpRange("[234e:0:4567:0:0:0:3d:ee]:8090", "234e:0:4567::3d:ee", 8090));
    assertTrue(NetUtils.matchIpRange("[234e:0:4567:0:0:0:3d:0-ee]:8090", "234e:0:4567::3d:ee", 8090));
    assertTrue(NetUtils.matchIpRange("[234e:0:4567:0:0:0:3d:ee-ff]:8090", "234e:0:4567::3d:ee", 8090));
    assertTrue(NetUtils.matchIpRange("[234e:0:4567:0:0:0:3d:*]:90", "234e:0:4567::3d:ff", 90));

    // mismatching port or out-of-range segment must not match
    assertFalse(NetUtils.matchIpRange("[234e:0:4567:0:0:0:3d:ee]:7289", "234e:0:4567::3d:ee", 8090));
    assertFalse(NetUtils.matchIpRange("[234e:0:4567:0:0:0:3d:ee-ff]:8090", "234e:0:4567::3d:ee", 9090));
}
/**
 * Registers a default cookie that is sent with every request, regardless of
 * the target domain.
 *
 * @param name  cookie name
 * @param value cookie value
 * @return this site, for chaining
 */
public Site addCookie(String name, String value) {
    defaultCookies.put(name, value);
    return this;
}
// Verifies that domain-scoped cookies are stored per domain (and independently
// of the default, domain-less cookie).
@Test
public void addCookieTest(){
    Site site=Site.me().setDefaultCharset(StandardCharsets.UTF_8.name());
    // a default cookie, not bound to any domain
    site.addCookie("cookieDefault","cookie-webmagicDefault");
    String firstDomain="example.com";
    String secondDomain="exampleCopy.com";
    site.addCookie(firstDomain, "cookie", "cookie-webmagic");
    site.addCookie(firstDomain, "cookieCopy", "cookie-webmagicCopy");
    site.addCookie(secondDomain, "cookie", "cookie-webmagic");
    Map<String, Map<String, String>> allCookies = site.getAllCookies();
    List<String> domains=new ArrayList<>();
    for(String key : allCookies.keySet()){
        domains.add(key);
    }
    assertEquals("cookie-webmagic", allCookies.get(firstDomain).get("cookie"));
    assertEquals("cookie-webmagicCopy", allCookies.get(firstDomain).get("cookieCopy"));
    assertEquals("cookie-webmagic", allCookies.get(secondDomain).get("cookie"));
    // only the two explicit domains are present; the default cookie has no domain entry
    assertEquals(2, domains.size());
}
/**
 * Converts a SARIF region into a SonarQube text range on the given file.
 * Missing end line defaults to the start line; a missing start column defaults
 * to 0; a missing end column defaults to the end of the end line. An empty
 * range degrades to selecting the whole start line.
 *
 * @return the mapped range, or empty if no region was provided
 */
Optional<TextRange> mapRegion(@Nullable Region region, InputFile file) {
    if (region == null) {
        return Optional.empty();
    }

    int startLine = Objects.requireNonNull(region.getStartLine(), "No start line defined for the region.");
    Integer declaredEndLine = region.getEndLine();
    int endLine = declaredEndLine == null ? startLine : declaredEndLine;

    // SARIF columns are 1-based; SonarQube offsets are 0-based
    Integer declaredStartColumn = region.getStartColumn();
    int startColumn = declaredStartColumn == null ? 0 : adjustSarifColumnIndexToSqIndex(declaredStartColumn);
    Integer declaredEndColumn = region.getEndColumn();
    int endColumn = declaredEndColumn == null
      ? file.selectLine(endLine).end().lineOffset()
      : adjustSarifColumnIndexToSqIndex(declaredEndColumn);

    if (rangeIsEmpty(startLine, endLine, startColumn, endColumn)) {
        return Optional.of(file.selectLine(startLine));
    }
    return Optional.of(file.newRange(startLine, startColumn, endLine, endColumn));
}
// Verifies the defaults applied when only start/end lines are defined: the
// start column falls back to 0 and the end column falls back to the line end.
@Test
public void mapRegion_whenStartEndLinesDefined() {
    Region fullRegion = mockRegion(null, null, 3, 8);

    Optional<TextRange> optTextRange = regionMapper.mapRegion(fullRegion, INPUT_FILE);

    assertThat(optTextRange).isPresent();
    TextRange textRange = optTextRange.get();
    assertThat(textRange.start().line()).isEqualTo(fullRegion.getStartLine());
    assertThat(textRange.start().lineOffset()).isZero();
    assertThat(textRange.end().line()).isEqualTo(fullRegion.getEndLine());
    assertThat(textRange.end().lineOffset()).isEqualTo(LINE_END_OFFSET);
}
/**
 * Checks the given password against this attendee's password, delegating to
 * the password value object, which raises an exception on mismatch.
 */
public void verifyPassword(AttendeePassword other) {
    this.password.verifyPassword(other);
}
// Verifies that verifying a wrong password raises MomoException with the
// PASSWORD_MISMATCHED error message.
@DisplayName("참가자의 비밀번호가 일치하지 않으면 예외를 발생시킨다.")
@Test
void throwsExceptionIfPasswordDoesNotMatch() {
    Meeting meeting = MeetingFixture.DINNER.create();
    Attendee attendee = new Attendee(meeting, "jazz", "1111", Role.GUEST);
    // a password different from the attendee's actual one
    AttendeePassword other = new AttendeePassword("1234");

    assertThatThrownBy(() -> attendee.verifyPassword(other))
            .isInstanceOf(MomoException.class)
            .hasMessage(AttendeeErrorCode.PASSWORD_MISMATCHED.message());
}
/**
 * Parses a restricted ("simple") conditional expression using the Janino
 * parser. On success the result carries the guessed variables and a converted
 * expression built from the visitor's replacements; anything that fails to
 * parse, leaves trailing input, or is rejected by the visitor yields
 * {@code ok == false}.
 */
static ParseResult parse(String expression, NameValidator validator, ClassHelper helper) {
    ParseResult result = new ParseResult();
    try {
        Parser parser = new Parser(new Scanner("ignore", new StringReader(expression)));
        Java.Atom atom = parser.parseConditionalExpression();
        // after parsing the expression the input should end (otherwise it is not "simple")
        if (parser.peek().type == TokenType.END_OF_INPUT) {
            result.guessedVariables = new LinkedHashSet<>();
            ConditionalExpressionVisitor visitor = new ConditionalExpressionVisitor(result, validator, helper);
            result.ok = atom.accept(visitor);
            result.invalidMessage = visitor.invalidMessage;
            if (result.ok) {
                // splice the visitor's replacements into the original expression
                result.converted = new StringBuilder(expression.length());
                int start = 0;
                for (Replacement replace : visitor.replacements.values()) {
                    result.converted.append(expression, start, replace.start).append(replace.newString);
                    start = replace.start + replace.oldLength;
                }
                result.converted.append(expression.substring(start));
            }
        }
    } catch (Exception ex) {
        // deliberately swallowed: any parse failure simply leaves result.ok == false
    }
    return result;
}
// Verifies that the parser rejects a range of unsafe or non-"simple"
// expressions (object creation, reflection, method chains, statements,
// lambdas, comments) when every name is reported invalid.
@Test
public void protectUsFromStuff() {
    // validator that rejects every identifier
    NameValidator allNamesInvalid = s -> false;
    for (String toParse : Arrays.asList(
            "",
            "new Object()",
            "java.lang.Object",
            "Test.class",
            "new Object(){}.toString().length",
            "{ 5}",
            "{ 5, 7 }",
            "Object.class",
            "System.out.println(\"\")",
            "something.newInstance()",
            "e.getClass ( )",
            "edge.getDistance()*7/*test",
            "edge.getDistance()//*test",
            "edge . getClass()",
            "(edge = edge) == edge",
            ") edge (",
            "in(area_blup(), edge)",
            "s -> truevalue")) {
        ParseResult res = parse(toParse, allNamesInvalid, k -> "");
        assertFalse(res.ok, "should not be simple condition: " + toParse);
        assertTrue(res.guessedVariables == null || res.guessedVariables.isEmpty());
    }

    assertFalse(parse("edge; getClass()", allNamesInvalid, k -> "").ok);
}
/**
 * Hands the exchange over to the dispatcher handler for further processing.
 */
@Override
protected Mono<Void> doFilter(final ServerWebExchange exchange) {
    return dispatcherHandler.handle(exchange);
}
// Verifies that the fallback filter's doFilter completes the reactive chain
// for a plain POST exchange.
@Test
public void testDoFilter() {
    ServerWebExchange webExchange = MockServerWebExchange.from(MockServerHttpRequest
            .post("http://localhost:8080/"));
    Mono<Void> filter = fallbackFilter.doFilter(webExchange);
    StepVerifier.create(filter).verifyComplete();
}
/**
 * Computes the SHA-256 digest of the whole byte array and returns it as a hex
 * string; convenience overload delegating with {@code data.length}.
 */
public static String calculateSha256Hex(@Nonnull byte[] data) throws NoSuchAlgorithmException {
    return calculateSha256Hex(data, data.length);
}
// Verifies that hashing a directory path (rather than a regular file) fails
// with an IOException.
@Test
public void test_exception_whenDirectory() {
    Path path = Paths.get("src", "test", "resources");

    assertThrows(IOException.class, () -> Sha256Util.calculateSha256Hex(path));
}
/**
 * Whether this protocol version carries a content length.
 *
 * @return always {@code true}; the previous per-version switch returned
 *     {@code true} from every branch (including the default), so it is
 *     collapsed into a single return.
 */
@Override
public boolean hasContentLength() {
    return true;
}
// Verifies that both the default and the current protocol versions report
// having a content length.
@Test
public void testHasContentLength() {
    assertTrue(verDefault.hasContentLength());
    assertTrue(verCurrent.hasContentLength());
}
/**
 * Allocates a buffer of this allocator's configured page size.
 */
@Override
public Buffer allocate() {
    return allocate(this.pageSize);
}
// Verifies that releasing the same pooled buffer twice does not raise an
// error (the second release is expected to be tolerated, merely logged).
@Test
public void testDoubleRelease() throws Exception {
    final PooledBufferAllocatorImpl allocator = new PooledBufferAllocatorImpl(4096);
    // request larger than a single page to exercise the pooled path
    final Buffer buffer = allocator.allocate(10000);
    buffer.release();
    buffer.release(); // To printStackTrace of the first release, but no errors.
}
/**
 * Looks up the type information for a single field by delegating to the
 * multi-field lookup with a singleton set.
 *
 * @return the field's types, or empty if the field is unknown
 */
@Override
public Optional<FieldTypes> get(final String fieldName) {
    final Map<String, FieldTypes> byName = get(ImmutableSet.of(fieldName));
    return Optional.ofNullable(byName.get(fieldName));
}
// Verifies that a multi-field lookup aggregates the physical types of each
// field across all indices: "yolo1" has two distinct types in different
// indices, while "timestamp" has one type present in every index.
@Test
public void getMultipleFields() {
    dbService.save(createDto("graylog_0", "abc", Collections.emptySet()));
    dbService.save(createDto("graylog_1", "xyz", Collections.emptySet()));
    dbService.save(createDto("graylog_2", "xyz", of(
            FieldTypeDTO.create("yolo1", "boolean")
    )));
    dbService.save(createDto("graylog_3", "xyz", of(
            FieldTypeDTO.create("yolo1", "text")
    )));

    final Map<String, FieldTypes> result = lookup.get(of("yolo1", "timestamp"));

    assertThat(result).containsOnlyKeys("yolo1", "timestamp");

    // "yolo1" resolves to two types, each attributed to its index
    assertThat(result.get("yolo1").fieldName()).isEqualTo("yolo1");
    assertThat(result.get("yolo1").types()).hasSize(2);
    assertThat(result.get("yolo1").types()).containsOnly(
            FieldTypes.Type.builder()
                    .type("string")
                    .properties(of("full-text-search"))
                    .indexNames(of("graylog_3"))
                    .build(),
            FieldTypes.Type.builder()
                    .type("boolean")
                    .properties(of("enumerable"))
                    .indexNames(of("graylog_2"))
                    .build()
    );

    // "timestamp" has a single type shared by all four indices
    assertThat(result.get("timestamp").fieldName()).isEqualTo("timestamp");
    assertThat(result.get("timestamp").types()).hasSize(1);
    assertThat(result.get("timestamp").types()).containsOnly(FieldTypes.Type.builder()
            .type("date")
            .properties(of("enumerable"))
            .indexNames(of("graylog_0", "graylog_1", "graylog_2", "graylog_3"))
            .build());
}
/**
 * Smokes the tobacco that was injected into this wizard.
 */
@Override
public void smoke() {
    tobacco.smoke(this);
}
// Verifies that for each tobacco type a Guice-built wizard smokes exactly
// that tobacco, producing one log line per type and nothing else.
@Test
void testSmokeEveryThingThroughInjectionFramework() {

    List<Class<? extends Tobacco>> tobaccos = List.of(
        OldTobyTobacco.class,
        RivendellTobacco.class,
        SecondBreakfastTobacco.class
    );

    // Configure the tobacco in the injection framework ...
    // ... and create a new wizard with it
    // Verify if the wizard is smoking the correct tobacco ...
    tobaccos.forEach(tobaccoClass -> {
        final var injector = Guice.createInjector(new AbstractModule() {
            @Override
            protected void configure() {
                bind(Tobacco.class).to(tobaccoClass);
            }
        });
        final var guiceWizard = injector.getInstance(GuiceWizard.class);
        guiceWizard.smoke();
        String lastMessage = appender.getLastMessage();
        assertEquals("GuiceWizard smoking " + tobaccoClass.getSimpleName(), lastMessage);
    });

    // ... and nothing else is happening.
    assertEquals(tobaccos.size(), appender.getLogSize());
}
/**
 * Parses labels from a comma-separated list of {@code key=value} pairs
 * (e.g. {@code "a=1,b=2"}). A null or empty input yields empty labels.
 *
 * Fix: each pair is now split with a limit of 2, so values that themselves
 * contain {@code '='} (e.g. base64 payloads) are kept whole instead of being
 * silently truncated at the second {@code '='}; a pair without {@code '='}
 * is rejected explicitly.
 *
 * @param stringLabels the comma-separated labels string, may be null or empty
 * @return the parsed labels
 * @throws IllegalArgumentException if the string cannot be parsed
 */
public static Labels fromString(String stringLabels) throws IllegalArgumentException {
    Map<String, String> labels = new HashMap<>();

    try {
        if (stringLabels != null && !stringLabels.isEmpty()) {
            String[] labelsArray = stringLabels.split(",");
            for (String label : labelsArray) {
                // limit = 2: keep everything after the first '=' as the value
                String[] fields = label.split("=", 2);
                if (fields.length != 2) {
                    throw new IllegalArgumentException("Labels must be key=value pairs but got: " + label);
                }
                labels.put(fields[0].trim(), fields[1].trim());
            }
        }
    } catch (Exception e) {
        throw new IllegalArgumentException("Failed to parse labels from string " + stringLabels, e);
    }

    return new Labels(labels);
}
// Verifies that parsing an empty string yields the empty labels singleton.
@Test
public void testParseEmptyLabels() {
    String validLabels = "";
    assertThat(Labels.fromString(validLabels), is(Labels.EMPTY));
}
/**
 * Returns an iterator over the specs of all registered common structs, in
 * the order their names were recorded.
 */
Iterator<StructSpec> commonStructs() {
    return commonStructNames.stream()
        .map(name -> structs.get(name).spec)
        .iterator();
}
// Verifies that a message spec's "commonStructs" section is registered under
// its name, that ordinary array fields are not mistaken for keyed struct
// arrays, and that the common-struct iterator yields the registered spec.
@Test
public void testCommonStructs() throws Exception {
    MessageSpec testMessageSpec = MessageGenerator.JSON_SERDE.readValue(String.join("", Arrays.asList(
        "{",
        "  \"type\": \"request\",",
        "  \"name\": \"LeaderAndIsrRequest\",",
        "  \"validVersions\": \"0-4\",",
        "  \"deprecatedVersions\": \"0-1\",",
        "  \"flexibleVersions\": \"0+\",",
        "  \"fields\": [",
        "    { \"name\": \"field1\", \"type\": \"int32\", \"versions\": \"0+\" },",
        "    { \"name\": \"field2\", \"type\": \"[]TestCommonStruct\", \"versions\": \"1+\" },",
        "    { \"name\": \"field3\", \"type\": \"[]TestInlineStruct\", \"versions\": \"0+\", ",
        "    \"fields\": [",
        "      { \"name\": \"inlineField1\", \"type\": \"int64\", \"versions\": \"0+\" }",
        "    ]}",
        "  ],",
        "  \"commonStructs\": [",
        "    { \"name\": \"TestCommonStruct\", \"versions\": \"0+\", \"fields\": [",
        "      { \"name\": \"commonField1\", \"type\": \"int64\", \"versions\": \"0+\" }",
        "    ]}",
        "  ]",
        "}")), MessageSpec.class);
    StructRegistry structRegistry = new StructRegistry();
    structRegistry.register(testMessageSpec);
    assertEquals(structRegistry.commonStructNames(), Collections.singleton("TestCommonStruct"));
    assertFalse(structRegistry.isStructArrayWithKeys(testMessageSpec.fields().get(1)));
    assertFalse(structRegistry.isStructArrayWithKeys(testMessageSpec.fields().get(2)));
    assertTrue(structRegistry.commonStructs().hasNext());
    assertEquals(structRegistry.commonStructs().next().name(), "TestCommonStruct");
}
/**
 * Extracts the RSA public key from a PEM body (certificate payload WITHOUT
 * the BEGIN/END markers — they are added here). A parse failure is reported
 * as a ServletException whose message hints at the most likely cause.
 *
 * @param pem certificate body without PEM header/footer lines
 * @return the certificate's RSA public key
 * @throws ServletException if the certificate cannot be parsed
 */
public static RSAPublicKey parseRSAPublicKey(String pem) throws ServletException {
    String fullPem = PEM_HEADER + pem + PEM_FOOTER;
    try {
        CertificateFactory factory = CertificateFactory.getInstance("X.509");
        X509Certificate certificate = (X509Certificate) factory.generateCertificate(
            new ByteArrayInputStream(fullPem.getBytes(StandardCharsets.UTF_8)));
        return (RSAPublicKey) certificate.getPublicKey();
    } catch (CertificateException ce) {
        final String message;
        if (pem.startsWith(PEM_HEADER)) {
            // the caller accidentally supplied a full PEM including the markers
            message = "CertificateException - be sure not to include PEM header "
                + "and footer in the PEM configuration element.";
        } else {
            message = "CertificateException - PEM may be corrupt";
        }
        throw new ServletException(message, ce);
    }
}
// Verifies that passing a PEM string WITH header/footer markers is rejected
// with a ServletException whose message points at the stray PEM header.
// Fix: the fail() message was inverted — this path runs when the expected
// exception was NOT thrown, so it must read "Should have thrown".
@Test
public void testInvalidPEMWithHeaderAndFooter() throws Exception {
    String pem = "-----BEGIN CERTIFICATE-----\n"
        + "MIICOjCCAaOgAwIBAgIJANXi/oWxvJNzMA0GCSqGSIb3DQEBBQUAMF8xCzAJBgNVBAYTAlVTMQ0w"
        + "CwYDVQQIEwRUZXN0MQ0wCwYDVQQHEwRUZXN0MQ8wDQYDVQQKEwZIYWRvb3AxDTALBgNVBAsTBFRl"
        + "c3QxEjAQBgNVBAMTCWxvY2FsaG9zdDAeFw0xNTAxMDIyMTE5MjRaFw0xNjAxMDIyMTE5MjRaMF8x"
        + "CzAJBgNVBAYTAlVTMQ0wCwYDVQQIEwRUZXN0MQ0wCwYDVQQHEwRUZXN0MQ8wDQYDVQQKEwZIYWRv"
        + "b3AxDTALBgNVBAsTBFRlc3QxEjAQBgNVBAMTCWxvY2FsaG9zdDCBnzANBgkqhkiG9w0BAQEFAAOB"
        + "jQAwgYkCgYEAwpfpLdi7dWTHNzETt+L7618/dWUQFb/C7o1jIxFgbKOVIB6d5YmvUbJck5PYxFkz"
        + "C25fmU5H71WGOI1Kle5TFDmIo+hqh5xqu1YNRZz9i6D94g+2AyYr9BpvH4ZfdHs7r9AU7c3kq68V"
        + "7OPuuaHb25J8isiOyA3RiWuJGQlXTdkCAwEAATANBgkqhkiG9w0BAQUFAAOBgQAdRUyCUqE9sdim"
        + "Fbll9BuZDKV16WXeWGq+kTd7ETe7l0fqXjq5EnrifOai0L/pXwVvS2jrFkKQRlRxRGUNaeEBZ2Wy"
        + "9aTyR+HGHCfvwoCegc9rAVw/DLaRriSO/jnEXzYK6XLVKH+hx5UXrJ7Oyc7JjZUc3g9kCWORThCX"
        + "Mzc1xA=="
        + "\n-----END CERTIFICATE-----";
    try {
        CertificateUtil.parseRSAPublicKey(pem);
        fail("Should have thrown ServletException");
    } catch (ServletException se) {
        assertTrue(se.getMessage().contains("PEM header"));
    }
}
/**
 * Converts a content-pack entity into a native notification. Only V1 entities
 * are supported; the acting user must exist so the decoded notification can be
 * attributed to it.
 *
 * @throws IllegalStateException    if the user cannot be loaded
 * @throws IllegalArgumentException for unsupported entity versions
 */
@Override
public NativeEntity<NotificationDto> createNativeEntity(Entity entity,
                                                        Map<String, ValueReference> parameters,
                                                        Map<EntityDescriptor, Object> nativeEntities,
                                                        String username) {
    if (!(entity instanceof EntityV1)) {
        throw new IllegalArgumentException("Unsupported entity version: " + entity.getClass());
    }
    final User user = userService.load(username);
    if (user == null) {
        throw new IllegalStateException("Cannot load user <" + username + "> from db");
    }
    return decode((EntityV1) entity, parameters, nativeEntities, user);
}
// Verifies that a V1 content-pack entity is decoded into a native
// notification with the expected title, description and config type when the
// acting user exists.
@Test
public void createNativeEntity() {
    final EntityV1 entityV1 = createTestEntity();
    final JobDefinitionDto jobDefinitionDto = mock(JobDefinitionDto.class);
    when(jobDefinitionService.save(any(JobDefinitionDto.class))).thenReturn(jobDefinitionDto);
    // the facade requires the acting user to be loadable
    final UserImpl kmerzUser = new UserImpl(
            mock(PasswordAlgorithmFactory.class),
            new Permissions(ImmutableSet.of()),
            mock(ClusterConfigService.class),
            ImmutableMap.of("username", "kmerz"));
    when(userService.load("kmerz")).thenReturn(kmerzUser);

    final NativeEntity<NotificationDto> nativeEntity = facade.createNativeEntity(
            entityV1, ImmutableMap.of(), ImmutableMap.of(), "kmerz");

    assertThat(nativeEntity).isNotNull();

    final NotificationDto notificationDto = nativeEntity.entity();
    assertThat(notificationDto.title()).isEqualTo("title");
    assertThat(notificationDto.description()).isEqualTo("descriptions");
    assertThat(notificationDto.config().type()).isEqualTo("http-notification-v1");
}
/**
 * Imports a version-1.2 configuration dump. The top-level JSON object is read
 * member by member, dispatching to the per-collection readers; unknown members
 * are offered to registered extensions before being skipped. After all data is
 * read, cross-object references are fixed up and the temporary id maps are
 * cleared.
 */
@Override
public void importData(JsonReader reader) throws IOException {

    logger.info("Reading configuration for 1.2");

    // this *HAS* to start as an object
    reader.beginObject();

    while (reader.hasNext()) {
        JsonToken tok = reader.peek();
        switch (tok) {
            case NAME:
                String name = reader.nextName();
                // find out which member it is
                if (name.equals(CLIENTS)) {
                    readClients(reader);
                } else if (name.equals(GRANTS)) {
                    readGrants(reader);
                } else if (name.equals(WHITELISTEDSITES)) {
                    readWhitelistedSites(reader);
                } else if (name.equals(BLACKLISTEDSITES)) {
                    readBlacklistedSites(reader);
                } else if (name.equals(AUTHENTICATIONHOLDERS)) {
                    readAuthenticationHolders(reader);
                } else if (name.equals(ACCESSTOKENS)) {
                    readAccessTokens(reader);
                } else if (name.equals(REFRESHTOKENS)) {
                    readRefreshTokens(reader);
                } else if (name.equals(SYSTEMSCOPES)) {
                    readSystemScopes(reader);
                } else {
                    // let the first extension that supports this version try the member
                    for (MITREidDataServiceExtension extension : extensions) {
                        if (extension.supportsVersion(THIS_VERSION)) {
                            extension.importExtensionData(name, reader);
                            break;
                        }
                    }
                    // unknown token, skip it
                    reader.skipValue();
                }
                break;
            case END_OBJECT:
                // the object ended, we're done here
                reader.endObject();
                continue;
            default:
                logger.debug("Found unexpected entry");
                reader.skipValue();
                continue;
        }
    }
    // resolve ids recorded during the read into real object references
    fixObjectReferences();
    for (MITREidDataServiceExtension extension : extensions) {
        if (extension.supportsVersion(THIS_VERSION)) {
            extension.fixExtensionObjectReferences(maps);
            break;
        }
    }
    maps.clearAll();
}
// Verifies that importing a 1.2 configuration containing two authentication
// holders persists both, assigning fresh ids via the mocked repository and
// preserving each holder's client id.
@Test
public void testImportAuthenticationHolders() throws IOException {
    OAuth2Request req1 = new OAuth2Request(new HashMap<String, String>(), "client1", new ArrayList<GrantedAuthority>(),
            true, new HashSet<String>(), new HashSet<String>(), "http://foo.com",
            new HashSet<String>(), null);
    Authentication mockAuth1 = mock(Authentication.class, withSettings().serializable());
    OAuth2Authentication auth1 = new OAuth2Authentication(req1, mockAuth1);

    AuthenticationHolderEntity holder1 = new AuthenticationHolderEntity();
    holder1.setId(1L);
    holder1.setAuthentication(auth1);

    OAuth2Request req2 = new OAuth2Request(new HashMap<String, String>(), "client2", new ArrayList<GrantedAuthority>(),
            true, new HashSet<String>(), new HashSet<String>(), "http://bar.com",
            new HashSet<String>(), null);
    Authentication mockAuth2 = mock(Authentication.class, withSettings().serializable());
    OAuth2Authentication auth2 = new OAuth2Authentication(req2, mockAuth2);

    AuthenticationHolderEntity holder2 = new AuthenticationHolderEntity();
    holder2.setId(2L);
    holder2.setAuthentication(auth2);

    // minimal 1.2 export: all collections empty except the two auth holders
    String configJson = "{" +
            "\"" + MITREidDataService.CLIENTS + "\": [], " +
            "\"" + MITREidDataService.ACCESSTOKENS + "\": [], " +
            "\"" + MITREidDataService.REFRESHTOKENS + "\": [], " +
            "\"" + MITREidDataService.GRANTS + "\": [], " +
            "\"" + MITREidDataService.WHITELISTEDSITES + "\": [], " +
            "\"" + MITREidDataService.BLACKLISTEDSITES + "\": [], " +
            "\"" + MITREidDataService.SYSTEMSCOPES + "\": [], " +
            "\"" + MITREidDataService.AUTHENTICATIONHOLDERS + "\": [" +
            "{\"id\":1,\"clientId\":\"client1\",\"redirectUri\":\"http://foo.com\"," +
            "\"savedUserAuthentication\":null}," +
            "{\"id\":2,\"clientId\":\"client2\",\"redirectUri\":\"http://bar.com\"," +
            "\"savedUserAuthentication\":null}" +
            "  ]" +
            "}";

    logger.debug(configJson);

    JsonReader reader = new JsonReader(new StringReader(configJson));

    final Map<Long, AuthenticationHolderEntity> fakeDb = new HashMap<>();
    // repository stub: assign a fresh id on first save and record in fakeDb
    when(authHolderRepository.save(isA(AuthenticationHolderEntity.class))).thenAnswer(new Answer<AuthenticationHolderEntity>() {
        Long id = 243L;
        @Override
        public AuthenticationHolderEntity answer(InvocationOnMock invocation) throws Throwable {
            AuthenticationHolderEntity _site = (AuthenticationHolderEntity) invocation.getArguments()[0];
            if(_site.getId() == null) {
                _site.setId(id++);
            }
            fakeDb.put(_site.getId(), _site);
            return _site;
        }
    });

    dataService.importData(reader);

    verify(authHolderRepository, times(2)).save(capturedAuthHolders.capture());

    List<AuthenticationHolderEntity> savedAuthHolders = capturedAuthHolders.getAllValues();

    assertThat(savedAuthHolders.size(), is(2));
    assertThat(savedAuthHolders.get(0).getAuthentication().getOAuth2Request().getClientId(), equalTo(holder1.getAuthentication().getOAuth2Request().getClientId()));
    assertThat(savedAuthHolders.get(1).getAuthentication().getOAuth2Request().getClientId(), equalTo(holder2.getAuthentication().getOAuth2Request().getClientId()));
}
/**
 * Sums the sizes of all sequences in this set by walking the linked list of
 * sequences from the head.
 *
 * @return the total number of values covered by all ranges
 */
public long rangeSize() {
    long total = 0;
    for (Sequence current = getHead(); current != null; current = current.getNext()) {
        total += current.range();
    }
    return total;
}
// Verifies that rangeSize counts each discrete value once and that clearing
// the set resets the total to zero.
@Test
public void testRangeSize() {
    SequenceSet set = new SequenceSet();

    set.add(1);
    assertEquals(1, set.rangeSize());
    // three disjoint single-value ranges
    set.add(10);
    set.add(20);
    assertEquals(3, set.rangeSize());
    set.clear();
    assertEquals(0, set.rangeSize());
}
static void enableStatisticManagementOnNodes(HazelcastClientInstanceImpl client, String cacheName, boolean statOrMan, boolean enabled) { Collection<Member> members = client.getClientClusterService().getMemberList(); Collection<Future> futures = new ArrayList<>(); for (Member member : members) { try { UUID uuid = member.getUuid(); ClientMessage request = CacheManagementConfigCodec.encodeRequest(cacheName, statOrMan, enabled, uuid); ClientInvocation clientInvocation = new ClientInvocation(client, request, cacheName, uuid); Future<ClientMessage> future = clientInvocation.invoke(); futures.add(future); } catch (Exception e) { sneakyThrow(e); } } // make sure all configs are created FutureUtil.waitWithDeadline(futures, CacheProxyUtil.AWAIT_COMPLETION_TIMEOUT_SECONDS, TimeUnit.SECONDS); }
// Verifies that an exception raised while preparing a member invocation is
// sneaky-thrown out of enableStatisticManagementOnNodes unchanged.
@Test(expected = IllegalArgumentException.class)
public void testEnableStatisticManagementOnNodes_sneakyThrowsException() {
    Member member = mock(Member.class);
    // getUuid() is the first per-member call, so failing it aborts the loop body
    when(member.getUuid()).thenThrow(new IllegalArgumentException("expected"));

    Collection<Member> members = singletonList(member);
    when(exceptionThrowingClient.getClientClusterService().getMemberList()).thenReturn(members);

    enableStatisticManagementOnNodes(exceptionThrowingClient, CACHE_NAME, false, false);
}
/**
 * Fetches all windows for {@code key} in the given partition whose start and end
 * instants fall inside the supplied ranges, using an IQv2 window-range query
 * against the backing state store.
 *
 * @param position optional position bound for consistent reads
 * @return the matching rows together with the store position
 * @throws MaterializationException on any query failure
 */
@Override
public KsMaterializedQueryResult<WindowedRow> get(
    final GenericKey key,
    final int partition,
    final Range<Instant> windowStart,
    final Range<Instant> windowEnd,
    final Optional<Position> position
) {
  try {
    final WindowRangeQuery<GenericKey, GenericRow> query = WindowRangeQuery.withKey(key);
    StateQueryRequest<KeyValueIterator<Windowed<GenericKey>, GenericRow>> request =
        inStore(stateStore.getStateStoreName()).withQuery(query);
    // Bound the query to a specific position when the caller requires consistency.
    if (position.isPresent()) {
      request = request.withPositionBound(PositionBound.at(position.get()));
    }
    final StateQueryResult<KeyValueIterator<Windowed<GenericKey>, GenericRow>> result =
        stateStore.getKafkaStreams().query(request);
    final QueryResult<KeyValueIterator<Windowed<GenericKey>, GenericRow>> queryResult =
        result.getPartitionResults().get(partition);
    if (queryResult.isFailure()) {
      throw failedQueryException(queryResult);
    }
    // The iterator must be closed to release the underlying store resources.
    try (KeyValueIterator<Windowed<GenericKey>, GenericRow> it = queryResult.getResult()) {
      final Builder<WindowedRow> builder = ImmutableList.builder();
      while (it.hasNext()) {
        final KeyValue<Windowed<GenericKey>, GenericRow> next = it.next();
        final Window wnd = next.key.window();
        // Post-filter: the range query returns all windows for the key, so
        // windows outside the requested start/end ranges are dropped here.
        if (!windowStart.contains(wnd.startTime())) {
          continue;
        }
        if (!windowEnd.contains(wnd.endTime())) {
          continue;
        }
        // Row time is taken from the window end (epoch millis).
        final long rowTime = wnd.end();
        final WindowedRow row = WindowedRow.of(
            stateStore.schema(),
            next.key,
            next.value,
            rowTime
        );
        builder.add(row);
      }
      return KsMaterializedQueryResult.rowIteratorWithPosition(
          builder.build().iterator(), queryResult.getPosition());
    }
  } catch (final NotUpToBoundException | MaterializationException e) {
    // Already-meaningful exceptions are rethrown unchanged.
    throw e;
  } catch (final Exception e) {
    throw new MaterializationException("Failed to get value from materialized table", e);
  }
}
@Test public void shouldReturnEmptyIfKeyNotPresent() { // When: final Iterator<WindowedRow> rowIterator = table.get( A_KEY, PARTITION, WINDOW_START_BOUNDS, WINDOW_END_BOUNDS).rowIterator; // Then: assertThat(rowIterator.hasNext(), is(false)); }
/**
 * Verifies that any container cluster using a secret store also declares an
 * Athenz identity provider. Only enforced for hosted, default-type applications.
 */
@Override
public void validate(Context context) {
    // Skip entirely for self-hosted deployments and non-default application types.
    if (!context.deployState().isHosted()) return;
    if (context.model().getAdmin().getApplicationType() != ApplicationType.DEFAULT) return;

    for (ContainerCluster<?> cluster : context.model().getContainerClusters().values()) {
        boolean usesSecretStore = cluster.getSecretStore().isPresent();
        if (usesSecretStore && !hasIdentityProvider(cluster)) {
            context.illegal(String.format(
                    "Container cluster '%s' uses a secret store, so an Athenz domain and an Athenz service"
                            + " must be declared in deployment.xml.",
                    cluster.getName()));
        }
    }
}
@Test
void app_without_athenz_in_deployment_fails_validation() throws Exception {
    // Building and validating a model with no Athenz declaration must fail.
    Throwable thrown = assertThrows(IllegalArgumentException.class, () -> {
        DeployState state = deployState(servicesXml(), deploymentXml(false));
        VespaModel model = new VespaModel(new NullConfigModelRegistry(), state);
        ValidationTester.validate(new SecretStoreValidator(), model, state);
    });
    assertTrue(thrown.getMessage().contains(
            "Container cluster 'default' uses a secret store, so an Athenz domain and"
                    + " an Athenz service must be declared in deployment.xml."));
}
/**
 * Parses a PyPA core-metadata file, read as UTF-8, into {@link Properties}.
 *
 * @param file the metadata file to read
 * @return the parsed header properties
 * @throws AnalysisException if the file cannot be read or parsed
 */
public static Properties getProperties(File file) throws AnalysisException {
    try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
        // Delegates to the Reader-based overload for the actual parsing.
        return getProperties(reader);
    } catch (IOException | IllegalArgumentException e) {
        throw new AnalysisException("Error parsing PyPA core-metadata file", e);
    }
}
@Test
public void getProperties_should_properly_parse_multiline_description() throws IOException {
    // Headers use RFC-822 style folding: continuation lines start with a space
    // and (here) a '|' marker; the body after the blank line must be ignored.
    String payload = "Metadata-Version: 1.0\r\n"
            + "Description: This is the first line\r\n"
            + " | and this the second\r\n"
            + " |\r\n"
            + " | and the fourth after an empty third\r\n"
            + "\r\n"
            + "This: is the body and it is ignored. It may contain an extensive description in various formats";

    Properties props = PyPACoreMetadataParser.getProperties(new BufferedReader(new StringReader(payload)));

    Assert.assertEquals("1.0", props.get("Metadata-Version"));
    // Folded lines are unfolded with '\n'; the '|' markers are stripped.
    Assert.assertEquals("This is the first line\n"
            + " and this the second\n"
            + "\n"
            + " and the fourth after an empty third", props.get("Description"));
    Assert.assertFalse("Body was parsed as a header", props.containsKey("This"));
}
/**
 * Intentional no-op: profile increments are silently discarded.
 * NOTE(review): presumably this is the disabled/empty SDK implementation —
 * confirm against the enclosing class before relying on this behavior.
 */
@Override
public void profileIncrement(Map<String, ? extends Number> properties) {
}
@Test
public void profileIncrement() {
    // Register a callback that fails the test if any track event is emitted:
    // the no-op implementation must never trigger tracking.
    mSensorsAPI.setTrackEventCallBack(new SensorsDataTrackEventCallBack() {
        @Override
        public boolean onTrackEvent(String eventName, JSONObject eventProperties) {
            Assert.fail();
            return false;
        }
    });
    mSensorsAPI.profileIncrement("abcde", 123);
}
/**
 * Fences the target service by opening an SSH session to its host and running
 * the fencing command there. Returns {@code false} (rather than throwing) on
 * session-creation, connection, or fencing failures so the caller can fall
 * through to the next fencing method.
 *
 * @param argsStr user/port configuration string, parsed by {@code Args}
 * @return true iff fencing was achieved on the remote host
 */
@Override
public boolean tryFence(HAServiceTarget target, String argsStr)
    throws BadFencingConfigurationException {

  Args args = new Args(argsStr);
  InetSocketAddress serviceAddr = target.getAddress();
  String host = serviceAddr.getHostName();

  Session session;
  try {
    session = createSession(serviceAddr.getHostName(), args);
  } catch (JSchException e) {
    LOG.warn("Unable to create SSH session", e);
    return false;
  }

  LOG.info("Connecting to " + host + "...");
  try {
    session.connect(getSshConnectTimeout());
  } catch (JSchException e) {
    LOG.warn("Unable to connect to " + host + " as user " + args.user, e);
    return false;
  }
  LOG.info("Connected to " + host);

  try {
    return doFence(session, serviceAddr);
  } catch (JSchException e) {
    LOG.warn("Unable to achieve fencing on remote host", e);
    return false;
  } finally {
    // Always tear down the SSH session, whether fencing succeeded or not.
    session.disconnect();
  }
}
@Test(timeout=20000)
public void testFence() throws BadFencingConfigurationException {
    // Skip unless an SSH fencing environment is actually configured.
    Assume.assumeTrue(isConfigured());

    Configuration conf = new Configuration();
    conf.set(SshFenceByTcpPort.CONF_IDENTITIES_KEY, TEST_KEYFILE);

    SshFenceByTcpPort fencer = new SshFenceByTcpPort();
    fencer.setConf(conf);
    assertTrue(fencer.tryFence(TEST_TARGET, null));
}
/**
 * Lists partition names of a table that match the filter in {@code args},
 * preferring a direct-SQL pushdown and falling back to JDO/ORM when the
 * filter cannot be pushed down.
 *
 * @throws NoSuchObjectException if the table does not exist
 */
@Override
public List<String> listPartitionNamesByFilter(String catName, String dbName, String tblName,
    GetPartitionsArgs args) throws MetaException, NoSuchObjectException {
  catName = normalizeIdentifier(catName);
  dbName = normalizeIdentifier(dbName);
  tblName = normalizeIdentifier(tblName);
  MTable mTable = ensureGetMTable(catName, dbName, tblName);
  List<FieldSchema> partitionKeys = convertToFieldSchemas(mTable.getPartitionKeys());
  String filter = args.getFilter();
  // An absent/empty filter matches everything via the shared EMPTY_TREE.
  final ExpressionTree tree = (filter != null && !filter.isEmpty())
      ? PartFilterExprUtil.parseFilterTree(filter) : ExpressionTree.EMPTY_TREE;
  return new GetListHelper<String>(catName, dbName, tblName, true, true) {
    // NOTE: this field intentionally shadows the outer String 'filter' —
    // inside the helper, 'filter' is the SQL pushdown holder.
    private final SqlFilterForPushdown filter = new SqlFilterForPushdown();

    @Override
    protected boolean canUseDirectSql(GetHelper<List<String>> ctx) throws MetaException {
      // Populates 'filter' as a side effect when pushdown is possible.
      return directSql.generateSqlFilterForPushdown(catName, dbName, tblName, partitionKeys,
          tree, null, filter);
    }

    @Override
    protected List<String> getSqlResult(GetHelper<List<String>> ctx) throws MetaException {
      return directSql.getPartitionNamesViaSql(filter, partitionKeys,
          getDefaultPartitionName(args.getDefaultPartName()), null, args.getMax());
    }

    @Override
    protected List<String> getJdoResult(GetHelper<List<String>> ctx)
        throws MetaException, NoSuchObjectException, InvalidObjectException {
      // ORM fallback path when direct SQL is unavailable or fails.
      return getPartitionNamesViaOrm(catName, dbName, tblName, tree, null, args.getMax(),
          true, partitionKeys);
    }
  }.run(false);
}
@Test
public void testListPartitionNamesByFilter() throws Exception {
  // Fixture: one database, one table partitioned by (Country, State),
  // with two partitions (US/CA and US/MA).
  Database db1 = new DatabaseBuilder()
      .setName(DB1)
      .setDescription("description")
      .setLocation("locationurl")
      .build(conf);
  try (AutoCloseable c = deadline()) {
    objectStore.createDatabase(db1);
  }
  StorageDescriptor sd = createFakeSd("location");
  HashMap<String, String> tableParams = new HashMap<>();
  tableParams.put("EXTERNAL", "false");
  FieldSchema partitionKey1 = new FieldSchema("Country", ColumnType.STRING_TYPE_NAME, "");
  FieldSchema partitionKey2 = new FieldSchema("State", ColumnType.STRING_TYPE_NAME, "");
  Table tbl1 = new Table(TABLE1, DB1, "owner", 1, 2, 3, sd,
      Arrays.asList(partitionKey1, partitionKey2), tableParams, null, null, "MANAGED_TABLE");
  try (AutoCloseable c = deadline()) {
    objectStore.createTable(tbl1);
  }
  HashMap<String, String> partitionParams = new HashMap<>();
  partitionParams.put("PARTITION_LEVEL_PRIVILEGE", "true");
  List<String> value1 = Arrays.asList("US", "CA");
  Partition part1 = new Partition(value1, DB1, TABLE1, 111, 111, sd, partitionParams);
  part1.setCatName(DEFAULT_CATALOG_NAME);
  try (AutoCloseable c = deadline()) {
    objectStore.addPartition(part1);
  }
  List<String> value2 = Arrays.asList("US", "MA");
  Partition part2 = new Partition(value2, DB1, TABLE1, 222, 222, sd, partitionParams);
  part2.setCatName(DEFAULT_CATALOG_NAME);
  try (AutoCloseable c = deadline()) {
    objectStore.addPartition(part2);
  }

  // Equality filter on the first partition key matches both partitions.
  List<String> partNames;
  try (AutoCloseable c = deadline()) {
    partNames = objectStore.listPartitionNamesByFilter(DEFAULT_CATALOG_NAME, DB1, TABLE1,
        new GetPartitionsArgs.GetPartitionsArgsBuilder().filter("Country = 'US'").build());
  }
  Assert.assertEquals(2, partNames.size());
  Assert.assertEquals("country=US/state=CA", partNames.get(0));
  Assert.assertEquals("country=US/state=MA", partNames.get(1));

  // Equality filter on the second partition key narrows to a single partition.
  try (AutoCloseable c = deadline()) {
    partNames = objectStore.listPartitionNamesByFilter(DEFAULT_CATALOG_NAME, DB1, TABLE1,
        new GetPartitionsArgs.GetPartitionsArgsBuilder().filter("State = 'MA'").build());
  }
  Assert.assertEquals(1, partNames.size());
  Assert.assertEquals("country=US/state=MA", partNames.get(0));

  // Conjunction of both keys also matches exactly one partition.
  try (AutoCloseable c = deadline()) {
    partNames = objectStore.listPartitionNamesByFilter(DEFAULT_CATALOG_NAME, DB1, TABLE1,
        new GetPartitionsArgs.GetPartitionsArgsBuilder().filter("Country = 'US' and State = 'MA'").build());
  }
  Assert.assertEquals(1, partNames.size());
  Assert.assertEquals("country=US/state=MA", partNames.get(0));

  // Inequality against the empty string matches every partition.
  try (AutoCloseable c = deadline()) {
    partNames = objectStore.listPartitionNamesByFilter(DEFAULT_CATALOG_NAME, DB1, TABLE1,
        new GetPartitionsArgs.GetPartitionsArgsBuilder().filter("Country != ''").build());
  }
  Assert.assertEquals(2, partNames.size());
  Assert.assertEquals("country=US/state=CA", partNames.get(0));
  Assert.assertEquals("country=US/state=MA", partNames.get(1));
}
/**
 * Publishes a rule-engine message to the given topic partition and bumps the
 * outbound-message counter.
 *
 * @param msgId    id wrapped with the payload so the consumer can correlate it
 * @param callback invoked by the producer when the send completes
 */
@Override
public void pushMsgToRuleEngine(TopicPartitionInfo tpi, UUID msgId, ToRuleEngineMsg msg, TbQueueCallback callback) {
    log.trace("PUSHING msg: {} to:{}", msg, tpi);
    producerProvider.getRuleEngineMsgProducer().send(tpi, new TbProtoQueueMsg<>(msgId, msg), callback);
    // Stats counter, incremented once the (async) send has been submitted.
    toRuleEngineMsgs.incrementAndGet();
}
@Test
public void testPushMsgToRuleEngineUseQueueFromMsgIsFalse() {
    // Mocks for the producer pipeline and completion callback.
    TbQueueProducer<TbProtoQueueMsg<TransportProtos.ToRuleEngineMsg>> tbREQueueProducer = mock(TbQueueProducer.class);
    TbQueueCallback callback = mock(TbQueueCallback.class);
    TenantId tenantId = TenantId.fromUUID(UUID.fromString("5377c8d0-26e5-4d81-84c6-4344043973c8"));
    DeviceId deviceId = new DeviceId(UUID.fromString("016c2abb-f46f-49f9-a83d-4d28b803cfe6"));
    // Device profile supplies the default queue name used when the msg's own queue is ignored.
    DeviceProfile deviceProfile = new DeviceProfile(new DeviceProfileId(UUID.fromString("dc5766e2-1a32-4022-859b-743050097ab7")));
    deviceProfile.setDefaultQueueName(DataConstants.MAIN_QUEUE_NAME);
    TbMsg requestMsg = TbMsg.newMsg(TbMsgType.REST_API_REQUEST, deviceId, TbMsgMetaData.EMPTY, TbMsg.EMPTY_JSON_OBJECT);
    when(deviceProfileCache.get(any(TenantId.class), any(DeviceId.class))).thenReturn(deviceProfile);
    when(producerProvider.getRuleEngineMsgProducer()).thenReturn(tbREQueueProducer);

    // useQueueFromMsg == false: queue name must come from the device profile instead.
    clusterService.pushMsgToRuleEngine(tenantId, deviceId, requestMsg, false, callback);

    verify(producerProvider).getRuleEngineMsgProducer();
    // The forwarded message should be the original transformed onto the profile's queue.
    TbMsg expectedMsg = TbMsg.transformMsgQueueName(requestMsg, DataConstants.MAIN_QUEUE_NAME);
    ArgumentCaptor<TbMsg> actualMsg = ArgumentCaptor.forClass(TbMsg.class);
    verify(ruleEngineProducerService).sendToRuleEngine(eq(tbREQueueProducer), eq(tenantId), actualMsg.capture(), eq(callback));
    // 'ctx' is runtime-only state and is excluded from the comparison.
    assertThat(actualMsg.getValue()).usingRecursiveComparison().ignoringFields("ctx").isEqualTo(expectedMsg);
}
/**
 * Looks up the instance port bound to the given MAC address.
 *
 * @return the matching port, or {@code null} if no port has that MAC
 */
@Override
public InstancePort instancePort(MacAddress macAddress) {
    checkNotNull(macAddress, ERR_NULL_MAC_ADDRESS);
    for (InstancePort candidate : instancePortStore.instancePorts()) {
        if (candidate.macAddress().equals(macAddress)) {
            return candidate;
        }
    }
    return null;
}
@Test
public void testGetInstancePortByIpAndNetId() {
    createBasicInstancePorts();
    // Lookup by IP + network id should resolve to the first seeded port.
    InstancePort found = target.instancePort(IP_ADDRESS_1, NETWORK_ID_1);
    assertEquals("Instance port did not match", found, instancePort1);
}
/**
 * A job is supported by this runner iff its details reference a static field.
 */
@Override
public boolean supports(Job job) {
    return job.getJobDetails().hasStaticFieldName();
}
@Test
void supportsJobIfJobIsStaticMethodCall() {
    // System.out.println(...) is invoked through a static field, so the
    // static-field runner must accept it.
    Job staticFieldJob = anEnqueuedJob()
            .withJobDetails(systemOutPrintLnJobDetails("This is a test"))
            .build();

    assertThat(backgroundStaticFieldJobWithoutIocRunner.supports(staticFieldJob)).isTrue();
}
/**
 * Stores the entry with a time-to-live, blocking until the asynchronous put
 * completes.
 *
 * @return the previous value mapped to {@code key}, or {@code null} if none
 */
@Override
public V put(K key, V value, long ttl, TimeUnit unit) {
    // Delegates to the async variant and blocks on its result.
    return get(putAsync(key, value, ttl, unit));
}
@Test
public void testEntryUpdate() throws InterruptedException {
    RMapCache<Integer, Integer> cache = redisson.getMapCache("simple");

    // An entry written with a 1s TTL is readable immediately.
    cache.put(1, 1, 1, TimeUnit.SECONDS);
    assertThat(cache.get(1)).isEqualTo(1);

    // After the TTL expires, a re-put (TTL 0 = no expiry) sees no previous value,
    // and the key is readable again afterwards.
    Thread.sleep(1000);
    assertThat(cache.put(1, 1, 0, TimeUnit.SECONDS)).isNull();
    assertThat(cache.get(1)).isEqualTo(1);
}
/**
 * Removes {@code task} from the assigned standby tasks.
 *
 * @throws IllegalArgumentException if the task is not currently assigned
 */
void unassignStandby(final TaskId task) {
    final Set<TaskId> assigned = assignedStandbyTasks.taskIds();
    // remove() doubles as the membership check: false means it was never assigned.
    if (!assigned.remove(task)) {
        throw new IllegalArgumentException("Tried to unassign standby task " + task
            + ", but it is not currently assigned: " + this);
    }
}
@Test
public void shouldRefuseToUnassignNotAssignedStandbyTask() {
    // A fresh client has no standby assignments, so unassigning must throw.
    final ClientState state = new ClientState(1);
    assertThrows(IllegalArgumentException.class, () -> state.unassignStandby(TASK_0_0));
}
/**
 * Applies {@code consumer} to every element of {@code data}.
 *
 * @param parallelism ignored by this engine — NOTE(review): execution is
 *                    sequential here; confirm that is intended for this context.
 */
@Override
public <I> void foreach(List<I> data, SerializableConsumer<I> consumer, int parallelism) {
    // List.forEach iterates directly; the intermediate sequential stream was redundant.
    data.forEach(throwingForeachWrapper(consumer));
}
@Test
public void testForeach() {
    List<Integer> input = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
    List<Integer> collected = new ArrayList<>(10);

    // Every element must be visited exactly once, regardless of parallelism hint.
    context.foreach(input, collected::add, 2);

    Assertions.assertEquals(collected.size(), input.size());
    Assertions.assertTrue(collected.containsAll(input));
}
/**
 * Removes the named load balancer, refusing while it is still in use.
 *
 * @throws IllegalStateException if the load balancer is referenced elsewhere
 */
@Override
public void removeLoadBalancer(String name) {
    checkArgument(name != null, ERR_NULL_LOAD_BALANCER_NAME);
    synchronized (this) {
        // NOTE(review): the in-use check is synchronized but the store removal
        // below runs outside the lock — presumably acceptable here; confirm no
        // race with a concurrent consumer taking the LB into use.
        if (isLoadBalancerInUse(name)) {
            final String error = String.format(MSG_LOAD_BALANCER, name, ERR_IN_USE);
            throw new IllegalStateException(error);
        }
    }
    KubevirtLoadBalancer lb = kubevirtLoadBalancerStore.removeLoadBalancer(name);
    // Only log when something was actually removed (null = name was unknown).
    if (lb != null) {
        log.info(String.format(MSG_LOAD_BALANCER, lb.name(), MSG_REMOVED));
    }
}
// A null load-balancer name must be rejected by the precondition check.
@Test(expected = IllegalArgumentException.class)
public void testRemoveLoadBalancerWithNull() {
    target.removeLoadBalancer(null);
}
/**
 * Determines whether the given S3 path exists: root always exists, buckets are
 * probed directly, files/placeholders via attribute lookup, and directories by
 * searching for a common prefix. Access-denied objects are treated as existing.
 */
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        return true;
    }
    try {
        if(containerService.isContainer(file)) {
            try {
                if(log.isDebugEnabled()) {
                    log.debug(String.format("Test if bucket %s is accessible", file));
                }
                return session.getClient().isBucketAccessible(containerService.getContainer(file).getName());
            }
            catch(ServiceException e) {
                throw new S3ExceptionMappingService().map("Failure to read attributes of {0}", e, file);
            }
        }
        if(file.isFile() || file.isPlaceholder()) {
            // Attribute lookup throws NotfoundException when the object is absent.
            attributes.find(file, listener);
            return true;
        }
        else {
            if(log.isDebugEnabled()) {
                log.debug(String.format("Search for common prefix %s", file));
            }
            // Check for common prefix
            try {
                // Limit to a single result: any hit proves the prefix exists.
                new S3ObjectListService(session, acl).list(file, new CancellingListProgressListener(), String.valueOf(Path.DELIMITER), 1);
                return true;
            }
            catch(ListCanceledException l) {
                // Found common prefix
                return true;
            }
            catch(NotfoundException e) {
                throw e;
            }
        }
    }
    catch(NotfoundException e) {
        return false;
    }
    catch(RetriableAccessDeniedException e) {
        // Must fail with server error
        throw e;
    }
    catch(AccessDeniedException e) {
        // Object is inaccessible to current user, but does exist.
        return true;
    }
}
@Test
public void testFindUnknownBucket() throws Exception {
    // A randomly named bucket should not exist on the account.
    final Path bucket = new Path(
        UUID.randomUUID().toString(), EnumSet.of(Path.Type.volume, Path.Type.directory));
    assertFalse(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(bucket));
}
/**
 * Renders the channelz page: a minimal HTML shell wrapping the top-channel dump.
 */
@Override
public void captureData(PrintWriter writer) {
    writer.println("<html>");
    writer.println("<h1>Channelz</h1>");
    // Page content is produced by the channel-walking helper.
    appendTopChannels(writer);
    writer.println("</html>");
}
@Test
public void testRendersOnlyWindmillChannels() throws UnsupportedEncodingException {
    String windmill1 = "WindmillHost1";
    String windmill2 = "WindmillHost2";
    String nonWindmill1 = "NonWindmillHost1";
    String someOtherHost1 = "SomeOtherHost2";
    // Channels are registered globally as a side effect of being built, which is
    // what the servlet enumerates; the references themselves are unused.
    ManagedChannel[] unusedChannels =
        new ManagedChannel[] {
          InProcessChannelBuilder.forName(windmill1).build(),
          InProcessChannelBuilder.forName(windmill2).build(),
          InProcessChannelBuilder.forName(nonWindmill1).build(),
          InProcessChannelBuilder.forName(someOtherHost1).build()
        };
    DataflowWorkerHarnessOptions options =
        PipelineOptionsFactory.create().as(DataflowWorkerHarnessOptions.class);
    FakeWindmillServer fakeWindmillServer =
        new FakeWindmillServer(new ErrorCollector(), s -> Optional.empty());
    // Only windmill1/windmill2 are registered as windmill service endpoints.
    fakeWindmillServer.setWindmillServiceEndpoints(
        ImmutableSet.of(HostAndPort.fromHost(windmill1), HostAndPort.fromHost(windmill2)));
    options.setChannelzShowOnlyWindmillServiceChannels(true);
    ChannelzServlet channelzServlet =
        new ChannelzServlet("/channelz", options, fakeWindmillServer::getWindmillServiceEndpoints);
    StringWriter stringWriter = new StringWriter();
    PrintWriter writer = new PrintWriter(stringWriter);
    channelzServlet.captureData(writer);
    writer.flush();
    String channelzData = stringWriter.toString();
    assertTrue(channelzData.contains(windmill1));
    assertTrue(channelzData.contains(windmill2));
    // The logic does a substring match on the target
    // NonWindmillHost1 matches since it contains WindmillHost1 which is a windmill host
    assertTrue(channelzData.contains(nonWindmill1));
    assertFalse(channelzData.contains(someOtherHost1));
}
/**
 * Adds all distinct elements of {@code block} to this set, recording (per block)
 * which positions were newly inserted so the set contents can be reconstructed.
 */
public void union(Block block) {
    currentBlockIndex++;
    ensureBlocksCapacity(currentBlockIndex + 1);
    blocks[currentBlockIndex] = block;

    int positionCount = block.getPositionCount();
    int[] positions = new int[positionCount];

    // Add the elements to the hash table. Since union can only increase the set size, there is no need to create a separate hashtable.
    int positionsIndex = 0;
    for (int i = 0; i < positionCount; i++) {
        int hashPosition = getInsertPosition(blockPositionByHash, getMaskedHash(hashPosition(elementType, block, i)), block, i);
        // INVALID_POSITION marks a duplicate: the element is already present and skipped.
        if (hashPosition != INVALID_POSITION) {
            // There is no need to test if adding element is successful since it's on the same hash table
            addElement(blockPositionByHash, hashPosition, block, i);
            positions[positionsIndex++] = i;
        }
    }

    // Only the first positionsIndex entries of 'positions' are valid.
    getPositionsForBlocks().add(positionsList(positions, 0, positionsIndex));
    size += positionsIndex;
}
@Test
public void testExceptWithDistinctValues() {
    OptimizedTypedSet typedSet =
        new OptimizedTypedSet(BIGINT, BIGINT_DISTINCT_METHOD_HANDLE, POSITIONS_PER_PAGE);

    // Seed the set with a long sequence plus a trailing null.
    Block seeded = createLongSequenceBlock(0, POSITIONS_PER_PAGE - 1).appendNull();
    typedSet.union(seeded);

    testExcept(typedSet, seeded, createEmptyBlock(BIGINT));
    testExcept(typedSet, seeded, seeded);
}
/**
 * Derives the final metric value as total divided by the window duration in minutes.
 * NOTE(review): assumes getDurationInMinute() is never zero — confirm upstream guarantees.
 */
@Override
public void calculate() {
    setValue(this.total / getDurationInMinute());
}
@Test
public void testCalculate() {
    long time1 = 1597113318673L;
    long time2 = 1597113447737L;
    // Both samples target the same service entity, so they accumulate into one sum.
    function.accept(MeterEntity.newService("sum_sync_time", Layer.GENERAL), time1);
    function.accept(MeterEntity.newService("sum_sync_time", Layer.GENERAL), time2);
    function.calculate();
    // With a single-minute window the value equals the plain sum of the inputs.
    assertThat(function.getValue()).isEqualTo(time1 + time2);
}
/**
 * Maps {@code o} into the write state using the type mapper registered for its
 * runtime class, and returns the assigned ordinal.
 */
public int add(Object o) {
    return getTypeMapper(o.getClass(), null, null).write(o);
}
@Test
public void testEnumAndInlineClass() throws IOException {
    HollowObjectMapper mapper = new HollowObjectMapper(writeStateEngine);
    mapper.add(TestEnum.ONE);
    mapper.add(TestEnum.TWO);
    mapper.add(TestEnum.THREE);

    roundTripSnapshot();

    // Resolve TWO by its enum name through the primary-key index.
    HollowPrimaryKeyIndex index =
        new HollowPrimaryKeyIndex(readStateEngine, new PrimaryKey("TestEnum", "_name"));
    int ordinalOfTwo = index.getMatchingOrdinal("TWO");

    GenericHollowObject two = new GenericHollowObject(readStateEngine, "TestEnum", ordinalOfTwo);
    Assert.assertEquals("TWO", two.getString("_name"));

    // The inline class attached to the enum constant round-trips as a sub-object.
    GenericHollowObject inline = two.getObject("testClass");
    Assert.assertEquals(2, inline.getInt("val1"));
    Assert.assertEquals(3, inline.getInt("val2"));
}
/**
 * Inspects the heartbeat result after the intercepted call and flips the
 * registry-availability flag accordingly: null result or zero beat interval
 * marks the registry unavailable, a positive interval marks it available.
 * Always returns the (possibly unchanged) context.
 */
@Override
public ExecuteContext doAfter(ExecuteContext context) {
    final Object result = context.getResult();
    if (isValidResult(result)) {
        if (result == null) {
            // No heartbeat response at all: registry is unreachable.
            RegisterContext.INSTANCE.compareAndSet(true, false);
            return context;
        }
        long beat;
        if (result instanceof ObjectNode) {
            // New version
            ObjectNode node = (ObjectNode) result;
            beat = node.get("clientBeatInterval").asLong();
        } else if (result instanceof Long) {
            // Older versions
            beat = (Long) result;
        } else {
            // Unknown result shape: leave the availability flag untouched.
            return context;
        }
        // If the heartbeat is 0 L,
        // the current instance cannot communicate with the Nacos registry
        // and the registry for the instance is invalid
        if (beat == 0L) {
            RegisterContext.INSTANCE.compareAndSet(true, false);
        } else if (beat > 0L) {
            RegisterContext.INSTANCE.compareAndSet(false, true);
        } else {
            // Negative beat: no state change.
            return context;
        }
    }
    return context;
}
@Test
public void doAfter() throws Exception {
    // Enable the migration/registration switches so the interceptor is active.
    REGISTER_CONFIG.setEnableSpringRegister(true);
    REGISTER_CONFIG.setOpenMigration(true);
    final ExecuteContext context = buildContext();

    // Null result (from buildContext) marks the registry unavailable.
    RegisterContext.INSTANCE.setAvailable(true);
    interceptor.after(context);
    Assert.assertFalse(RegisterContext.INSTANCE.isAvailable());

    // Heartbeats are available
    context.changeResult(1L);
    interceptor.after(context);
    Assert.assertTrue(RegisterContext.INSTANCE.isAvailable());

    // Heartbeat is not available
    context.changeResult(0L);
    interceptor.after(context);
    Assert.assertFalse(RegisterContext.INSTANCE.isAvailable());

    // ObjectNode available
    final ObjectNode node = Mockito.mock(ObjectNode.class);
    Mockito.when(node.get("clientBeatInterval")).thenReturn(new LongNode(1L));
    context.changeResult(node);
    interceptor.after(context);
    Assert.assertTrue(RegisterContext.INSTANCE.isAvailable());

    // ObjectNode not available
    Mockito.reset(node);
    Mockito.when(node.get("clientBeatInterval")).thenReturn(new LongNode(0L));
    context.changeResult(node);
    interceptor.after(context);
    Assert.assertFalse(RegisterContext.INSTANCE.isAvailable());

    // Restore global state so other tests are unaffected.
    REGISTER_CONFIG.setEnableSpringRegister(false);
    REGISTER_CONFIG.setOpenMigration(false);
    RegisterContext.INSTANCE.setAvailable(false);
}
/**
 * Sets the maximum allowed length of the HTTP initial line.
 *
 * @param value the new limit; must be strictly positive
 * @return this configuration, for chaining
 * @throws IllegalArgumentException if {@code value} is not strictly positive
 */
public T maxInitialLineLength(int value) {
    if (value < 1) {
        throw new IllegalArgumentException("maxInitialLineLength must be strictly positive");
    }
    this.maxInitialLineLength = value;
    return get();
}
@Test
void maxInitialLineLengthBadValues() {
    // Zero is rejected with the exact precondition message...
    assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> conf.maxInitialLineLength(0))
            .as("rejects 0")
            .withMessage("maxInitialLineLength must be strictly positive");
    // ...and so is any negative value.
    assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> conf.maxInitialLineLength(-1))
            .as("rejects negative")
            .withMessage("maxInitialLineLength must be strictly positive");
}