focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Builds a filesystem-safe file name from an arbitrary string.
 *
 * <p>Accents are stripped, whitespace runs are collapsed, and only characters
 * in {@code validChars} are kept. An empty result falls back to a random
 * 8-character name; an over-long result is truncated and suffixed with an MD5
 * digest of the full name to keep it unique.
 */
public static String generateFileName(String string) {
    // Fold accented characters to their ASCII base form first.
    string = StringUtils.stripAccents(string);
    StringBuilder sanitized = new StringBuilder();
    for (char c : string.toCharArray()) {
        // Collapse whitespace runs: skip a space when nothing has been emitted
        // yet or the previously emitted character is already a space.
        boolean lastIsSpace = sanitized.length() > 0
                && Character.isSpaceChar(sanitized.charAt(sanitized.length() - 1));
        if (Character.isSpaceChar(c) && (sanitized.length() == 0 || lastIsSpace)) {
            continue;
        }
        // Keep only whitelisted characters.
        if (ArrayUtils.contains(validChars, c)) {
            sanitized.append(c);
        }
    }
    String filename = sanitized.toString().trim();
    if (TextUtils.isEmpty(filename)) {
        // Nothing survived sanitisation: fall back to a random name.
        return randomString(8);
    }
    if (filename.length() >= MAX_FILENAME_LENGTH) {
        // Too long: truncate and append an MD5 suffix so distinct long names
        // stay distinct after truncation.
        return filename.substring(0, MAX_FILENAME_LENGTH - MD5_HEX_LENGTH - 1)
                + "_" + md5(filename);
    }
    return filename;
}
@Test
public void testGenerateFileName1() throws Exception {
    String result = FileNameGenerator.generateFileName("ab/c: <abc");
    // JUnit convention is assertEquals(expected, actual); the original had the
    // arguments reversed, which makes failure messages misleading.
    assertEquals("abc abc", result);
    createFiles(result);
}
// Encodes an Instruction into its JSON representation by delegating to the
// codec helper; a null instruction is rejected up front.
@Override public ObjectNode encode(Instruction instruction, CodecContext context) { checkNotNull(instruction, "Instruction cannot be null"); return new EncodeInstructionCodecHelper(instruction, context).encode(); }
// Round-trips a ModOchSignal L0 instruction through the codec and checks the
// resulting JSON matches the instruction via the custom matcher.
@Test public void modOchSignalInstructionTest() { L0ModificationInstruction.ModOchSignalInstruction instruction = (L0ModificationInstruction.ModOchSignalInstruction) Instructions.modL0Lambda(Lambda.ochSignal(GridType.DWDM, ChannelSpacing.CHL_100GHZ, 4, 8)); ObjectNode instructionJson = instructionCodec.encode(instruction, context); assertThat(instructionJson, matchesInstruction(instruction)); }
// Writes the HTTP/1.x request line: METHOD SP URI SP VERSION CRLF.
// An empty URI is emitted as " / " (RFC 2616 §5.1.2). For an absolute URI
// ("scheme://..."), a missing path component gets a "/" inserted before the
// query string, or appended at the end when there is no query — see
// https://github.com/netty/netty/issues/2732. Ordering of the writes is
// significant; code left byte-identical.
@Override protected void encodeInitialLine(ByteBuf buf, HttpRequest request) throws Exception { ByteBufUtil.copy(request.method().asciiName(), buf); String uri = request.uri(); if (uri.isEmpty()) { // Add " / " as absolute path if uri is not present. // See https://tools.ietf.org/html/rfc2616#section-5.1.2 ByteBufUtil.writeMediumBE(buf, SPACE_SLASH_AND_SPACE_MEDIUM); } else { CharSequence uriCharSequence = uri; boolean needSlash = false; int start = uri.indexOf("://"); if (start != -1 && uri.charAt(0) != SLASH) { start += 3; // Correctly handle query params. // See https://github.com/netty/netty/issues/2732 int index = uri.indexOf(QUESTION_MARK, start); if (index == -1) { if (uri.lastIndexOf(SLASH) < start) { needSlash = true; } } else { if (uri.lastIndexOf(SLASH, index) < start) { uriCharSequence = new StringBuilder(uri).insert(index, SLASH); } } } buf.writeByte(SP).writeCharSequence(uriCharSequence, CharsetUtil.UTF_8); if (needSlash) { // write "/ " after uri ByteBufUtil.writeShortBE(buf, SLASH_AND_SPACE_SHORT); } else { buf.writeByte(SP); } } request.protocolVersion().encode(buf); ByteBufUtil.writeShortBE(buf, CRLF_SHORT); }
// An absolute URI that already has a path ("/") and a query must be emitted
// unchanged in the request line, for every buffer implementation under test.
@Test public void testUriWithEmptyPath() throws Exception { for (ByteBuf buffer : getBuffers()) { HttpRequestEncoder encoder = new HttpRequestEncoder(); encoder.encodeInitialLine(buffer, new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "http://localhost:9999/?p1=v1")); String req = buffer.toString(Charset.forName("US-ASCII")); assertEquals("GET http://localhost:9999/?p1=v1 HTTP/1.1\r\n", req); buffer.release(); } }
/**
 * Produces a new {@code QueryConfiguration} with the given overrides applied.
 *
 * <p>Session properties start from the override set (OVERRIDE strategy) or
 * from this configuration's own properties, with the overrides layered on top
 * under SUBSTITUTE. Explicit removals always win. Scalar fields fall back to
 * this configuration's values when no override is present.
 */
public QueryConfiguration applyOverrides(QueryConfigurationOverrides overrides) {
    Map<String, String> mergedProperties;
    if (overrides.getSessionPropertiesOverrideStrategy() == OVERRIDE) {
        // Replace wholesale with the override set.
        mergedProperties = new HashMap<>(overrides.getSessionPropertiesOverride());
    } else {
        mergedProperties = new HashMap<>(this.sessionProperties);
        if (overrides.getSessionPropertiesOverrideStrategy() == SUBSTITUTE) {
            // Layer overrides on top of the existing properties.
            mergedProperties.putAll(overrides.getSessionPropertiesOverride());
        }
    }
    // Removals apply regardless of strategy.
    overrides.getSessionPropertiesToRemove().forEach(mergedProperties::remove);
    return new QueryConfiguration(
            overrides.getCatalogOverride().orElse(catalog),
            overrides.getSchemaOverride().orElse(schema),
            Optional.ofNullable(overrides.getUsernameOverride().orElse(username.orElse(null))),
            Optional.ofNullable(overrides.getPasswordOverride().orElse(password.orElse(null))),
            Optional.of(mergedProperties),
            isReusableTable,
            Optional.of(partitions));
}
// SUBSTITUTE strategy plus a removal: property_2 must be gone from the merged
// session properties.
// NOTE(review): the expected configuration passes Optional.of(CLIENT_TAGS) in
// the position applyOverrides fills with isReusableTable — confirm this
// matches an overloaded constructor rather than being an argument mix-up.
@Test public void testSessionPropertySubstituteAndRemove() { overrides.setSessionPropertiesToRemove("property_2"); overrides.setSessionPropertiesOverrideStrategy(SUBSTITUTE); QueryConfiguration removed = new QueryConfiguration( CATALOG_OVERRIDE, SCHEMA_OVERRIDE, Optional.of(USERNAME_OVERRIDE), Optional.of(PASSWORD_OVERRIDE), Optional.of(SESSION_PROPERTIES_OVERRIDE), Optional.of(CLIENT_TAGS), Optional.empty()); assertEquals(CONFIGURATION_1.applyOverrides(overrides), removed); }
// Splits a response buffer on newline boundaries and deserialises each chunk
// into a StreamedRow, applying addHostInfo to every row. Empty chunks (random
// newlines from the server) are skipped. Note the final iteration (i ==
// buff.length()) only parses a trailing chunk longer than one byte — code left
// byte-identical because the boundary condition is deliberate and delicate.
public static List<StreamedRow> toRows( final Buffer buff, final Function<StreamedRow, StreamedRow> addHostInfo ) { final List<StreamedRow> rows = new ArrayList<>(); int begin = 0; for (int i = 0; i <= buff.length(); i++) { if ((i == buff.length() && (i - begin > 1)) || (i < buff.length() && buff.getByte(i) == (byte) '\n')) { if (begin != i) { // Ignore random newlines - the server can send these final Buffer sliced = buff.slice(begin, i); final Buffer tidied = toJsonMsg(sliced, true); if (tidied.length() > 0) { final StreamedRow row = deserialize(tidied, StreamedRow.class); rows.add(addHostInfo.apply(row)); } } begin = i + 1; } } return rows; }
// A protobuf-bytes row truncated mid-stream (not at the end of the buffer)
// must surface as a KsqlRestClientException with a deserialisation message.
@Test public void toRows_errorParsingNotAtEndProto() { // When: final Exception e = assertThrows( KsqlRestClientException.class, () -> KsqlTargetUtil.toRows(Buffer.buffer("[{\"header\":{\"queryId\":\"queryId\"," + "\"schema\":\"`A` INTEGER KEY, `B` DOUBLE, `C` ARRAY<STRING>\"," + "\"protoSchema\":" + "\"syntax = \\\"proto3\\\";\\n" + "\\n" + "message ConnectDefault1 {\\n" + " int32 A = 1;\\n" + " double B = 2;\\n" + " repeated string C = 3;\\n" + "}\\n" + "\"}},\n" + "{\"row\":{\"protobufBytes\":\"CHsRAAAAAABAbUAaBWhlbGxv\"}},\n" + "{\"row\":{\"protobufBytes\":\"CMgDEQAA"), Functions.identity()) ); // Then: assertThat(e.getMessage(), is(("Failed to deserialise object"))); }
// Decodes a Bigtable cell's raw bytes into a Java value for the given Beam
// schema field type. Fixed-width numeric types validate the exact byte length
// first (big-endian decoding via Guava's Shorts/Ints/Longs); DATETIME and
// STRING decode the cell as UTF-8 text; logical types are explicitly
// unsupported; any other type is rejected.
Object getCellValue(Cell cell, Schema.FieldType type) { ByteString cellValue = cell.getValue(); int valueSize = cellValue.size(); switch (type.getTypeName()) { case BOOLEAN: checkArgument(valueSize == 1, message("Boolean", 1)); return cellValue.toByteArray()[0] != 0; case BYTE: checkArgument(valueSize == 1, message("Byte", 1)); return cellValue.toByteArray()[0]; case INT16: checkArgument(valueSize == 2, message("Int16", 2)); return Shorts.fromByteArray(cellValue.toByteArray()); case INT32: checkArgument(valueSize == 4, message("Int32", 4)); return Ints.fromByteArray(cellValue.toByteArray()); case INT64: checkArgument(valueSize == 8, message("Int64", 8)); return Longs.fromByteArray(cellValue.toByteArray()); case FLOAT: checkArgument(valueSize == 4, message("Float", 4)); return Float.intBitsToFloat(Ints.fromByteArray(cellValue.toByteArray())); case DOUBLE: checkArgument(valueSize == 8, message("Double", 8)); return Double.longBitsToDouble(Longs.fromByteArray(cellValue.toByteArray())); case DATETIME: return DateTime.parse(cellValue.toStringUtf8()); case STRING: return cellValue.toStringUtf8(); case BYTES: return cellValue.toByteArray(); case LOGICAL_TYPE: String identifier = checkArgumentNotNull(type.getLogicalType()).getIdentifier(); throw new IllegalStateException("Unsupported logical type: " + identifier); default: throw new IllegalArgumentException( String.format("Unsupported cell value type '%s'.", type.getTypeName())); } }
// Six bytes is too long for an Int32 cell: expect an IllegalArgumentException
// carrying the length-validation message.
@Test public void shouldFailParseInt32TypeTooLong() { byte[] value = new byte[6]; IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> PARSER.getCellValue(cell(value), INT32)); checkMessage(exception.getMessage(), "Int32 has to be 4-bytes long bytearray"); }
@Operation(summary = "signOut", description = "SIGN_OUT_NOTES") @PostMapping(value = "/signOut") @ApiException(SIGN_OUT_ERROR) public Result signOut(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, HttpServletRequest request) { String ip = getClientIpAddress(request); sessionService.expireSession(loginUser.getId()); // clear session request.removeAttribute(Constants.SESSION_USER); return success(); }
// Posts /signOut with a session header and expects an OK JSON response whose
// result code is SUCCESS.
@Test public void testSignOut() throws Exception { MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); MvcResult mvcResult = mockMvc.perform(post("/signOut") .header("sessionId", sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); }
// Converts an AwsProxyRequest into an AwsProxyHttpServletRequest. Rejects
// events lacking an HTTP method or request context (unsupported event types),
// strips the configured base path, normalises Content-Type to a single
// charset-qualified value, then stashes the gateway/ALB/lambda context objects
// as servlet request attributes for downstream handlers.
@Override public HttpServletRequest readRequest(AwsProxyRequest request, SecurityContext securityContext, Context lambdaContext, ContainerConfig config) throws InvalidRequestEventException { // Expect the HTTP method and context to be populated. If they are not, we are handling an // unsupported event type. if (request.getHttpMethod() == null || request.getHttpMethod().equals("") || request.getRequestContext() == null) { throw new InvalidRequestEventException(INVALID_REQUEST_ERROR); } request.setPath(stripBasePath(request.getPath(), config)); if (request.getMultiValueHeaders() != null && request.getMultiValueHeaders().getFirst(HttpHeaders.CONTENT_TYPE) != null) { String contentType = request.getMultiValueHeaders().getFirst(HttpHeaders.CONTENT_TYPE); // put single as we always expect to have one and only one content type in a request. request.getMultiValueHeaders().putSingle(HttpHeaders.CONTENT_TYPE, getContentTypeWithCharset(contentType, config)); } AwsProxyHttpServletRequest servletRequest = new AwsProxyHttpServletRequest(request, lambdaContext, securityContext, config); servletRequest.setServletContext(servletContext); servletRequest.setAttribute(API_GATEWAY_CONTEXT_PROPERTY, request.getRequestContext()); servletRequest.setAttribute(API_GATEWAY_STAGE_VARS_PROPERTY, request.getStageVariables()); servletRequest.setAttribute(API_GATEWAY_EVENT_PROPERTY, request); servletRequest.setAttribute(ALB_CONTEXT_PROPERTY, request.getRequestContext().getElb()); servletRequest.setAttribute(LAMBDA_CONTEXT_PROPERTY, lambdaContext); servletRequest.setAttribute(JAX_SECURITY_CONTEXT_PROPERTY, securityContext); return servletRequest; }
// The decoded path must be exposed via getPathInfo while getRequestURI keeps
// the raw, still-encoded form.
@Test void readRequest_urlDecode_expectDecodedPath() { AwsProxyRequest request = new AwsProxyRequestBuilder(ENCODED_REQUEST_PATH, "GET").build(); try { HttpServletRequest servletRequest = reader.readRequest(request, null, null, ContainerConfig.defaultConfig()); assertNotNull(servletRequest); assertEquals(DECODED_REQUEST_PATH, servletRequest.getPathInfo()); assertEquals(ENCODED_REQUEST_PATH, servletRequest.getRequestURI()); } catch (InvalidRequestEventException e) { e.printStackTrace(); fail("Could not read request"); } }
// Returns the configured "secure" flag for this registration.
@Override public boolean isSecure() { return isSecure; }
// The test fixture's registration is expected to report non-secure.
@Test public void testIsSecure() { assertThat(polarisRegistration1.isSecure()).isFalse(); }
/**
 * Fits an elastic-net linear model with hyper-parameters read from
 * {@code params}.
 *
 * <p>{@code smile.elastic_net.lambda1} and {@code smile.elastic_net.lambda2}
 * are mandatory; tolerance defaults to 1E-4 and iterations to 1000.
 *
 * @throws IllegalArgumentException if a mandatory property is missing
 */
public static LinearModel fit(Formula formula, DataFrame data, Properties params) {
    String lambda1Value = params.getProperty("smile.elastic_net.lambda1");
    String lambda2Value = params.getProperty("smile.elastic_net.lambda2");
    if (lambda1Value == null || lambda2Value == null) {
        // Fail with a clear message instead of the opaque NullPointerException
        // that Double.parseDouble(null) would raise.
        throw new IllegalArgumentException(
                "Both smile.elastic_net.lambda1 and smile.elastic_net.lambda2 must be set");
    }
    double lambda1 = Double.parseDouble(lambda1Value);
    double lambda2 = Double.parseDouble(lambda2Value);
    double tol = Double.parseDouble(params.getProperty("smile.elastic_net.tolerance", "1E-4"));
    int maxIter = Integer.parseInt(params.getProperty("smile.elastic_net.iterations", "1000"));
    return fit(formula, data, lambda1, lambda2, tol, maxIter);
}
// Elastic-net fit on the Prostate dataset; RMSE is pinned to four decimals.
// (Method name has a typo — "tesProstate" — left as-is since renaming could
// break suite references.)
@Test public void tesProstate() { System.out.println("Prostate"); RegressionValidation<LinearModel> result = RegressionValidation.of(Prostate.formula, Prostate.train, Prostate.test, (formula, data) -> ElasticNet.fit(formula, data, 0.8, 0.2)); System.out.println(result.model); System.out.println(result); assertEquals(0.7103, result.metrics.rmse, 1E-4); }
// Renders the tasks page for the current job. requireJob() failures are
// reported as plain text; an unparseable task type triggers badRequest.
// NOTE(review): render(tasksPage()) still executes after badRequest(...) —
// presumably badRequest only sets response state; confirm this is intended.
public void tasks() { try { requireJob(); } catch (Exception e) { renderText(e.getMessage()); return; } if (app.getJob() != null) { try { String tt = $(TASK_TYPE); tt = tt.isEmpty() ? "All" : StringUtils.capitalize( org.apache.hadoop.util.StringUtils.toLowerCase( MRApps.taskType(tt).toString())); setTitle(join(tt, " Tasks for ", $(JOB_ID))); } catch (Exception e) { LOG.error("Failed to render tasks page with task type : " + $(TASK_TYPE) + " for job id : " + $(JOB_ID), e); badRequest(e.getMessage()); } } render(tasksPage()); }
// Smoke test: invoking tasks() should select TasksPage as the rendered class.
@Test public void testTasks() { appController.tasks(); assertEquals(TasksPage.class, appController.getClazz()); }
// Parses cpanfile "requires" lines into Dependencies and registers them with
// the engine. Each line is split on ',' or '=>'; the module name is taken from
// offset 8 (after the "requires" keyword) with quotes stripped; the version is
// the first VERSION_PATTERN match in the second token, defaulting to "0".
// The "::"-separated name is split into namespace/name for a cpan purl; if
// purl construction fails, a generic identifier is used instead. The SHA-1 of
// the identifier value doubles as the HTML-report anchor.
protected void processFileContents(List<String> fileLines, String filePath, Engine engine) throws AnalysisException { fileLines.stream() .map(fileLine -> fileLine.split("(,|=>)")) .map(requires -> { //LOGGER.debug("perl scanning file:" + fileLine); final String fqName = requires[0].substring(8) .replace("'", "") .replace("\"", "") .trim(); final String version; if (requires.length == 1) { version = "0"; } else { final Matcher matcher = VERSION_PATTERN.matcher(requires[1]); if (matcher.find()) { version = matcher.group(1); } else { version = "0"; } } final int pos = fqName.lastIndexOf("::"); final String namespace; final String name; if (pos > 0) { namespace = fqName.substring(0, pos); name = fqName.substring(pos + 2); } else { namespace = null; name = fqName; } final Dependency dependency = new Dependency(true); final File f = new File(filePath); dependency.setFileName(f.getName()); dependency.setFilePath(filePath); dependency.setActualFilePath(filePath); dependency.setDisplayFileName("'" + fqName + "', '" + version + "'"); dependency.setEcosystem(Ecosystem.PERL); dependency.addEvidence(EvidenceType.VENDOR, "cpanfile", "requires", fqName, Confidence.HIGHEST); dependency.addEvidence(EvidenceType.PRODUCT, "cpanfile", "requires", fqName, Confidence.HIGHEST); dependency.addEvidence(EvidenceType.VERSION, "cpanfile", "requires", version, Confidence.HIGHEST); Identifier id = null; try { //note - namespace might be null and that's okay. 
final PackageURL purl = PackageURLBuilder.aPackageURL() .withType("cpan") .withNamespace(namespace) .withName(name) .withVersion(version) .build(); id = new PurlIdentifier(purl, Confidence.HIGH); } catch (MalformedPackageURLException ex) { LOGGER.debug("Error building package url for " + fqName + "; using generic identifier instead.", ex); id = new GenericIdentifier("cpan:" + fqName + "::" + version, Confidence.HIGH); } dependency.setVersion(version); dependency.setName(fqName); dependency.addSoftwareIdentifier(id); //sha1sum is used for anchor links in the HtML report dependency.setSha1sum(Checksum.getSHA1Checksum(id.getValue())); return dependency; }).forEachOrdered(engine::addDependency); }
// A single requires line with a version range: the analyzer keeps the lower
// bound (2.00) for display name, version, and purl.
@Test public void testProcessSingleFileContents() throws AnalysisException { Dependency d = new Dependency(); List<String> dependencyLines = Arrays.asList(new String[]{ "requires 'JSON', '>= 2.00, < 2.80'",}); PerlCpanfileAnalyzer instance = new PerlCpanfileAnalyzer(); Engine engine = new Engine(getSettings()); instance.processFileContents(dependencyLines, "./cpanfile", engine); assertEquals(1, engine.getDependencies().length); Dependency dep = engine.getDependencies()[0]; assertEquals("'JSON', '2.00'", dep.getDisplayFileName()); assertEquals("2.00", dep.getVersion()); assertEquals("pkg:cpan/JSON@2.00", dep.getSoftwareIdentifiers().iterator().next().getValue()); }
// Template method: validates the source/target paths, delegates listing
// creation to the subclass (doBuildListing), records listing metadata in the
// job configuration, then re-validates the final listing before logging the
// path count. Step order matters; code left byte-identical.
public final void buildListing(Path pathToListFile, DistCpContext distCpContext) throws IOException { validatePaths(distCpContext); doBuildListing(pathToListFile, distCpContext); Configuration config = getConf(); config.set(DistCpConstants.CONF_LABEL_LISTING_FILE_PATH, pathToListFile.toString()); config.setLong(DistCpConstants.CONF_LABEL_TOTAL_BYTES_TO_BE_COPIED, getBytesToCopy()); config.setLong(DistCpConstants.CONF_LABEL_TOTAL_NUMBER_OF_RECORDS, getNumberOfPaths()); validateFinalListing(pathToListFile, distCpContext); LOG.info("Number of paths in the copy list: " + this.getNumberOfPaths()); }
/**
 * A single-file copy listing must contain exactly one entry whose relative
 * path is empty (the file itself), ignoring the decoy in the target tree.
 */
@Test(timeout=10000)
public void testBuildListingForSingleFile() {
    FileSystem fs = null;
    String testRootString = "/singleFileListing";
    Path testRoot = new Path(testRootString);
    SequenceFile.Reader reader = null;
    try {
        fs = FileSystem.get(getConf());
        if (fs.exists(testRoot))
            TestDistCpUtils.delete(fs, testRootString);
        Path sourceFile = new Path(testRoot, "/source/foo/bar/source.txt");
        Path decoyFile = new Path(testRoot, "/target/moo/source.txt");
        Path targetFile = new Path(testRoot, "/target/moo/target.txt");
        TestDistCpUtils.createFile(fs, sourceFile.toString());
        TestDistCpUtils.createFile(fs, decoyFile.toString());
        TestDistCpUtils.createFile(fs, targetFile.toString());
        List<Path> srcPaths = new ArrayList<Path>();
        srcPaths.add(sourceFile);
        DistCpOptions options = new DistCpOptions.Builder(srcPaths, targetFile)
            .build();
        CopyListing listing = new SimpleCopyListing(getConf(), CREDENTIALS);
        final Path listFile = new Path(testRoot, "/tmp/fileList.seq");
        listing.buildListing(listFile, new DistCpContext(options));
        reader = new SequenceFile.Reader(getConf(), SequenceFile.Reader.file(listFile));
        CopyListingFileStatus fileStatus = new CopyListingFileStatus();
        Text relativePath = new Text();
        Assert.assertTrue(reader.next(relativePath, fileStatus));
        Assert.assertTrue(relativePath.toString().equals(""));
    } catch (Exception e) {
        // Log BEFORE failing: Assert.fail throws immediately, so the original
        // order left LOG.error unreachable and swallowed the exception details.
        LOG.error("Unexpected exception: ", e);
        Assert.fail("Unexpected exception encountered.");
    } finally {
        TestDistCpUtils.delete(fs, testRootString);
        IOUtils.closeStream(reader);
    }
}
// Synchronously POSTs the JSON payload to the configured telemetry server.
void uploadMetric(String json) throws IOException { Request request = buildHttpRequest(metricsServerUrl, json); execute(okHttpClient.newCall(request)); }
// With compression disabled, the upload must be a plain POST of the raw JSON
// body with the application/json media type to the telemetry URL.
@Test void uploadMetric() throws IOException { ArgumentCaptor<Request> requestCaptor = ArgumentCaptor.forClass(Request.class); settings.setProperty(SONAR_TELEMETRY_COMPRESSION.getKey(), false); underTest.start(); underTest.uploadMetric(JSON); verify(okHttpClient).newCall(requestCaptor.capture()); Request request = requestCaptor.getValue(); assertThat(request.method()).isEqualTo("POST"); assertThat(request.body().contentType()).isEqualTo(MediaType.parse("application/json; charset=utf-8")); Buffer body = new Buffer(); request.body().writeTo(body); assertThat(body.readUtf8()).isEqualTo(JSON); assertThat(request.url()).hasToString(METRICS_TELEMETRY_URL); }
/**
 * Converts a year/month pair into a total month count ({@code year * 12 + months}).
 *
 * @throws IllegalArgumentException if the result overflows an {@code int}
 */
public static int toMonths(int year, int months) {
    try {
        int yearAsMonths = multiplyExact(year, 12);
        return addExact(yearAsMonths, months);
    } catch (ArithmeticException e) {
        // Surface overflow as an argument problem, preserving the cause.
        throw new IllegalArgumentException(e);
    }
}
// Largest year value whose month count still fits in an int must convert
// without throwing (178956970 * 12 = 2147483640 < Integer.MAX_VALUE).
@Test public void testMaxYears() { int years = Integer.MAX_VALUE / 12; assertEquals(toMonths(years, 0), years * 12); }
// Looks up an interpreter setting by id; returns null when absent.
public InterpreterSetting get(String id) { return interpreterSettings.get(id); }
// With ZEPPELIN_INTERPRETER_INCLUDES=mock1 only that interpreter group is
// loaded; the system property is cleared in finally to avoid leaking state
// into other tests.
@Test void testInterpreterInclude() throws Exception { try { System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_INCLUDES.getVarName(), "mock1"); setUp(); assertEquals(1, interpreterSettingManager.get().size()); assertEquals("mock1", interpreterSettingManager.get().get(0).getGroup()); } finally { System.clearProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_INCLUDES.getVarName()); } }
/**
 * Starts an app session for the given request and builds the redirect URL
 * pointing at the DigiD app, with the deep-link payload URL-encoded into the
 * {@code data} query parameter.
 */
public String redirectWithSession(AuthenticateRequest params)
        throws IOException, ParseException, JOSEException, InvalidSignatureException, DienstencatalogusException {
    var session = startSessionFromApp(params);
    // Deep-link payload handed to the app; must be encoded as a whole.
    String deepLink = "digid-app-auth://app_session_id=" + session.get("app_session_id")
            + "&host=" + digidHost
            + "&browser=safari";
    String encodedData = URLEncoder.encode(deepLink, StandardCharsets.UTF_8);
    return protocol + "://" + appHost + "/digid-app?data=" + encodedData;
}
// Exercises redirectWithSession end-to-end with mocked DC metadata, signature
// verification, and app session start.
// NOTE(review): `response` is never asserted on — this only proves the call
// completes without throwing; consider asserting the redirect URL contents.
@Test void redirectWithSessionTest() throws InvalidSignatureException, IOException, ParseException, DienstencatalogusException, JOSEException { AuthenticateRequest authenticateRequest = new AuthenticateRequest(); authenticateRequest.setClientId(client.CLIENT_ID); authenticateRequest.setRequest(client.generateRequest()); authenticateRequest.setRedirectUri("redirect_uri"); mockDcMetadataResponse(); when(provider.verifySignature("test", authenticateRequest.getSignedJwt())).thenReturn(true); when(appClient.startAppSession(any(), any(), any(), any(), any(), any())).thenReturn(Map.of("id", "SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS")); String response = openIdService.redirectWithSession(authenticateRequest); }
// Samples a binomial(n, p) variate. Works with min(p, 1-p) and mirrors the
// result (x -> n - x) when p > 0.5, which is faster. Dispatches on n*p:
// Poisson approximation for extremely small np, chop-down inversion search
// (ModeSearch) for np < 55, ratio-of-uniforms (Patchwork) otherwise.
@Override public double rand() { // faster calculation by inversion boolean inv = p > 0.5; double np = n * Math.min(p, 1.0 - p); // Poisson's approximation for extremely low np int x; if (np < 1E-6) { x = PoissonDistribution.tinyLambdaRand(np); } else { RandomNumberGenerator rng; if (np < 55) { // inversion method, using chop-down search from 0 if (p <= 0.5) { rng = new ModeSearch(p); } else { rng = new ModeSearch(1.0 - p); // faster calculation by inversion } } else { // ratio of uniforms method if (p <= 0.5) { rng = new Patchwork(p); } else { rng = new Patchwork(1.0 - p); // faster calculation by inversion } } x = rng.rand(); } // undo inversion return inv ? n - x : x; }
// Deterministic under the fixed seed: the first binomial(1000, 0.99) draw is
// expected to be exactly 987.
@Test public void testRand() { System.out.println("rand"); MathEx.setSeed(19650218); BinomialDistribution instance = new BinomialDistribution(1000, 0.99); assertEquals(987, instance.rand(), 1E-7); }
// Writes the payload to the given file path. Returns false (after logging the
// cause) on IO failure rather than propagating; file name must be non-empty
// and data non-null.
@Override public boolean archive(String fileName, byte[] data) { checkArgument(!Strings.isNullOrEmpty(fileName)); checkNotNull(data); try { logger.atInfo().log("Archiving data to file system with filename '%s'.", fileName); Files.asByteSink(new File(fileName)).write(data); return true; } catch (IOException e) { logger.atWarning().withCause(e).log("Failed archiving data to file '%s'.", fileName); return false; } }
// Archiving to a directory path (the temp file's parent) must fail and must
// not touch the sibling file.
@Test public void archive_whenInvalidTargetFileAndCharSequenceData_returnsFalse() throws IOException { File tempFile = temporaryFolder.newFile(); String data = "file data"; RawFileArchiver rawFileArchiver = new RawFileArchiver(); assertThat(rawFileArchiver.archive(tempFile.getParent(), data)).isFalse(); assertThat(tempFile.length()).isEqualTo(0); }
/**
 * Returns the major Java version the project targets.
 *
 * <p>Defaults to the JVM running the build; overridden by the Java plugin's
 * {@code targetCompatibility} when the plugin is applied.
 */
@Override
public int getMajorJavaVersion() {
    JavaVersion version = JavaVersion.current();
    JavaPluginExtension javaPluginExtension =
            project.getExtensions().findByType(JavaPluginExtension.class);
    if (javaPluginExtension != null) {
        version = javaPluginExtension.getTargetCompatibility();
    }
    // parseInt avoids the needless Integer boxing/unboxing of Integer.valueOf.
    return Integer.parseInt(version.getMajorVersion());
}
// Maps legacy (1.x) and modern Gradle JavaVersion values to their major
// number: 1.3 -> 3, 11 -> 11, 1.9 -> 9.
@Test public void testGetMajorJavaVersion() { JavaPluginExtension extension = project.getExtensions().findByType(JavaPluginExtension.class); extension.setTargetCompatibility(JavaVersion.VERSION_1_3); assertThat(gradleProjectProperties.getMajorJavaVersion()).isEqualTo(3); extension.setTargetCompatibility(JavaVersion.VERSION_11); assertThat(gradleProjectProperties.getMajorJavaVersion()).isEqualTo(11); extension.setTargetCompatibility(JavaVersion.VERSION_1_9); assertThat(gradleProjectProperties.getMajorJavaVersion()).isEqualTo(9); }
// Merges overlapping index iteration pointers in place. All pointers must
// share the same direction (asserted); they are sorted direction-aware, then
// adjacent overlapping ranges are unioned, writing compacted results back into
// the list from the front. Overlap/union helpers expect their arguments in
// ascending order, hence the swapped argument order for descending pointers.
// Order of operations is critical for result ordering; code left byte-identical.
@Nonnull public static List<IndexIterationPointer> normalizePointers(@Nonnull List<IndexIterationPointer> result, boolean descending) { if (result.size() <= 1) { // single pointer, nothing to do return result; } // without the same ordering of pointers order of results would be unspecified assert result.stream().allMatch(r -> r.isDescending() == descending) : "All iteration pointers must have the same direction"; // order of ranges is critical for preserving ordering of the results Collections.sort(result, descending ? POINTER_COMPARATOR_REVERSED : POINTER_COMPARATOR); // loop until we processed the last remaining pair // // do the normalization in place without extra shifts in the array // we write normalized pointers from the beginning int writeIdx = 0; IndexIterationPointer currentMerged = result.get(0); for (int nextPointerIdx = 1; nextPointerIdx < result.size(); nextPointerIdx++) { // compare current pointer with next one and merge if they overlap // otherwise go to next pointer // pointers might be ordered in descending way but util methods expect ascending order of arguments IndexIterationPointer next = result.get(nextPointerIdx); if (!descending && overlapsOrdered(currentMerged, next, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR)) { // merge overlapping ranges currentMerged = union(currentMerged, next, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR); } else if (descending && overlapsOrdered(next, currentMerged, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR)) { // merge overlapping ranges currentMerged = union(next, currentMerged, OrderedIndexStore.SPECIAL_AWARE_COMPARATOR); } else { // write current pointer and advance result.set(writeIdx++, currentMerged); currentMerged = next; } } // write last remaining pointer result.set(writeIdx++, currentMerged); return result.subList(0, writeIdx); }
// Partial merging cases: disjoint singletons stay separate (sorted with
// IS_NULL first), IS_NOT_NULL absorbs every non-null pointer, and
// greaterThan(5) absorbs the overlapping singleton(6).
@Test void normalizePointersMany() { // some cases for partial reduction of number of pointers assertThat(normalizePointers(arrayListOf( pointer(singleton(6)), pointer(singleton(5)), IS_NULL), false)) .containsExactly(IS_NULL, pointer(singleton(5)), pointer(singleton(6))); assertThat(normalizePointers(arrayListOf( pointer(singleton(6)), pointer(singleton(5)), IS_NOT_NULL), false)) .containsExactly(IS_NOT_NULL); assertThat(normalizePointers(arrayListOf( pointer(greaterThan(5)), pointer(singleton(6)), IS_NULL), false)) .containsExactly(IS_NULL, pointer(greaterThan(5))); }
// Entry point: a fresh, empty Read transform builder to be configured via its
// with* methods.
public static <V> Read<V> read() { return new AutoValue_SparkReceiverIO_Read.Builder<V>().build(); }
// Builder round-trip: the configured offset function and receiver builder must
// be retrievable from the constructed Read transform.
@Test public void testReadBuildsCorrectly() { ReceiverBuilder<String, CustomReceiverWithOffset> receiverBuilder = new ReceiverBuilder<>(CustomReceiverWithOffset.class).withConstructorArgs(); SerializableFunction<String, Long> offsetFn = Long::valueOf; SerializableFunction<String, Instant> timestampFn = Instant::parse; SparkReceiverIO.Read<String> read = SparkReceiverIO.<String>read() .withGetOffsetFn(offsetFn) .withTimestampFn(timestampFn) .withPullFrequencySec(PULL_FREQUENCY_SEC) .withStartPollTimeoutSec(START_POLL_TIMEOUT_SEC) .withStartOffset(START_OFFSET) .withSparkReceiverBuilder(receiverBuilder); assertEquals(offsetFn, read.getGetOffsetFn()); assertEquals(receiverBuilder, read.getSparkReceiverBuilder()); }
// Notifies observers BEFORE delegating, so size() still reports the
// pre-insertion size — which is exactly the index the model is appended at.
// NOTE(review): if super.add ever threw, observers would have been notified of
// an insertion that never happened — presumably acceptable here.
@Override public boolean add(EpoxyModel<?> epoxyModel) { notifyInsertion(size(), 1); return super.add(epoxyModel); }
// Two appends must notify insertions at positions 3 and 4, implying the list
// is pre-populated with three models in setup — TODO confirm against fixture.
@Test public void testAdd() { modelList.add(new TestModel()); modelList.add(new TestModel()); verify(observer).onItemRangeInserted(3, 1); verify(observer).onItemRangeInserted(4, 1); }
/**
 * Converts an Iceberg {@code Schema} into an equivalent ORC
 * {@code TypeDescription} struct, mapping every top-level field in order.
 */
public static TypeDescription convert(Schema schema) {
    TypeDescription root = TypeDescription.createStruct();
    for (Types.NestedField field : schema.asStruct().asStructType().fields()) {
        // Per-field conversion tracks the field id and required-ness.
        root.addField(field.name(), convert(field.fieldId(), field.type(), field.isRequired()));
    }
    return root;
}
// ORC -> Iceberg conversion must skip columns lacking Iceberg ID attributes:
// an unannotated list element drops the whole listCol, an unannotated struct
// member (binaryCol) is dropped from the struct, and a map whose value type is
// unannotated (mapCol2) is dropped entirely.
@Test public void testSkipNonIcebergColumns() { TypeDescription schema = TypeDescription.createStruct(); TypeDescription intCol = TypeDescription.createInt(); intCol.setAttribute(ICEBERG_ID_ATTRIBUTE, "1"); intCol.setAttribute(ICEBERG_REQUIRED_ATTRIBUTE, "true"); TypeDescription listCol = TypeDescription.createList( TypeDescription.createMap( TypeDescription.createString(), TypeDescription.createDate())); listCol.setAttribute(ICEBERG_ID_ATTRIBUTE, "2"); schema.addField("intCol", intCol); schema.addField("listCol", listCol); TypeDescription stringKey = TypeDescription.createString(); stringKey.setAttribute(ICEBERG_ID_ATTRIBUTE, "3"); TypeDescription booleanVal = TypeDescription.createBoolean(); booleanVal.setAttribute(ICEBERG_ID_ATTRIBUTE, "4"); TypeDescription mapCol = TypeDescription.createMap(stringKey, booleanVal); mapCol.setAttribute(ICEBERG_ID_ATTRIBUTE, "5"); schema.addField("mapCol", mapCol); Schema icebergSchema = ORCSchemaUtil.convert(schema); Schema expectedSchema = new Schema( required(1, "intCol", Types.IntegerType.get()), // Skipped listCol since element has no Iceberg ID optional( 5, "mapCol", Types.MapType.ofOptional(3, 4, Types.StringType.get(), Types.BooleanType.get()))); assertThat(icebergSchema.asStruct()) .as("Schemas must match.") .isEqualTo(expectedSchema.asStruct()); TypeDescription structCol = TypeDescription.createStruct(); structCol.setAttribute(ICEBERG_ID_ATTRIBUTE, "7"); structCol.setAttribute(ICEBERG_REQUIRED_ATTRIBUTE, "true"); TypeDescription binaryCol = TypeDescription.createBinary(); TypeDescription doubleCol = TypeDescription.createDouble(); doubleCol.setAttribute(ICEBERG_ID_ATTRIBUTE, "6"); doubleCol.setAttribute(ICEBERG_REQUIRED_ATTRIBUTE, "true"); structCol.addField("binaryCol", binaryCol); structCol.addField("doubleCol", doubleCol); schema.addField("structCol", structCol); TypeDescription stringKey2 = TypeDescription.createString(); stringKey2.setAttribute(ICEBERG_ID_ATTRIBUTE, "8"); TypeDescription mapCol2 = 
TypeDescription.createMap(stringKey2, TypeDescription.createDate()); mapCol2.setAttribute(ICEBERG_ID_ATTRIBUTE, "10"); schema.addField("mapCol2", mapCol2); Schema icebergSchema2 = ORCSchemaUtil.convert(schema); Schema expectedSchema2 = new Schema( required(1, "intCol", Types.IntegerType.get()), optional( 5, "mapCol", Types.MapType.ofOptional(3, 4, Types.StringType.get(), Types.BooleanType.get())), required( 7, "structCol", Types.StructType.of( // Skipped binaryCol required(6, "doubleCol", Types.DoubleType.get()) // Skipped mapCol2 since value has no Iceberg ID ))); assertThat(icebergSchema2.asStruct()) .as("Schemas must match.") .isEqualTo(expectedSchema2.asStruct()); }
/**
 * Telnet "select" command: picks a method by 1-based index from the list left
 * by a previous "invoke", then re-runs the pending invocation with it.
 */
@Override
public String execute(CommandContext commandContext, String[] args) {
    if (ArrayUtils.isEmpty(args)) {
        return "Please input the index of the method you want to invoke, eg: \r\n select 1";
    }
    Channel channel = commandContext.getRemote();
    String message = args[0];
    List<Method> methodList = channel.attr(InvokeTelnet.INVOKE_METHOD_LIST_KEY).get();
    if (CollectionUtils.isEmpty(methodList)) {
        return "Please use the invoke command first.";
    }
    // Guard non-numeric input first, then parse ONCE (the original parsed the
    // same string three times).
    if (!StringUtils.isNumber(message)) {
        return "Illegal index ,please input select 1~" + methodList.size();
    }
    int index = Integer.parseInt(message);
    if (index < 1 || index > methodList.size()) {
        return "Illegal index ,please input select 1~" + methodList.size();
    }
    Method method = methodList.get(index - 1);
    channel.attr(SELECT_METHOD_KEY).set(method);
    channel.attr(SELECT_KEY).set(Boolean.TRUE);
    // Replay the invocation that was waiting on method selection.
    String invokeMessage = channel.attr(InvokeTelnet.INVOKE_MESSAGE_KEY).get();
    return invokeTelnet.execute(commandContext, new String[] {invokeMessage});
}
// Non-numeric ("index"), below-range ("0") and above-range ("1000") selections
// must all produce the "Illegal index" message; channel attributes are cleaned
// up afterwards.
@Test void testInvokeWithIllegalMessage() throws RemotingException { defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY).set(DemoService.class.getName()); defaultAttributeMap.attr(InvokeTelnet.INVOKE_METHOD_LIST_KEY).set(methods); given(mockChannel.attr(ChangeTelnet.SERVICE_KEY)) .willReturn(defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY)); given(mockChannel.attr(InvokeTelnet.INVOKE_METHOD_LIST_KEY)) .willReturn(defaultAttributeMap.attr(InvokeTelnet.INVOKE_METHOD_LIST_KEY)); registerProvider(DemoService.class.getName(), new DemoServiceImpl(), DemoService.class); String result = select.execute(mockCommandContext, new String[] {"index"}); assertTrue(result.contains("Illegal index ,please input select 1")); result = select.execute(mockCommandContext, new String[] {"0"}); assertTrue(result.contains("Illegal index ,please input select 1")); result = select.execute(mockCommandContext, new String[] {"1000"}); assertTrue(result.contains("Illegal index ,please input select 1")); defaultAttributeMap.attr(ChangeTelnet.SERVICE_KEY).remove(); defaultAttributeMap.attr(InvokeTelnet.INVOKE_METHOD_LIST_KEY).remove(); }
/**
 * Checks that {@code buf} holds exactly {@code len} bytes whose values
 * increase by one starting at {@code start}. A {@code null} buffer never
 * matches. Note: rewinds the buffer as a side effect.
 */
public static boolean equalIncreasingByteBuffer(int start, int len, ByteBuffer buf) {
    if (buf == null) {
        return false;
    }
    // Reset the position so the entire buffer content is compared.
    buf.rewind();
    if (buf.remaining() != len) {
        return false;
    }
    int expected = start;
    while (buf.hasRemaining()) {
        if (buf.get() != (byte) expected) {
            return false;
        }
        expected++;
    }
    return true;
}
// Table-driven cases for equalIncreasingByteBuffer covering null input, empty
// buffers, and length/content mismatches at starting offsets 0 and 3.
@Test public void equalIncreasingByteBuffer() { class TestCase { boolean mExpected; ByteBuffer mBuffer; int mLength; int mStart; public TestCase(boolean expected, ByteBuffer buffer, int length, int start) { mExpected = expected; mBuffer = buffer; mLength = length; mStart = start; } } ArrayList<TestCase> testCases = new ArrayList<>(); testCases.add(new TestCase(false, null, 0, 0)); testCases.add(new TestCase(true, ByteBuffer.wrap(new byte[] {}), 0, 0)); testCases.add(new TestCase(false, ByteBuffer.wrap(new byte[] {1}), 0, 0)); testCases.add(new TestCase(true, ByteBuffer.wrap(new byte[] {}), 0, 3)); testCases.add(new TestCase(false, ByteBuffer.wrap(new byte[] {1}), 0, 3)); testCases.add(new TestCase(true, ByteBuffer.wrap(new byte[] {0}), 1, 0)); testCases.add(new TestCase(false, ByteBuffer.wrap(new byte[] {1}), 1, 0)); testCases.add(new TestCase(true, ByteBuffer.wrap(new byte[] {0, 1, 2}), 3, 0)); testCases.add(new TestCase(false, ByteBuffer.wrap(new byte[] {0, 1, 2, (byte) 0xFF}), 3, 0)); testCases.add(new TestCase(false, ByteBuffer.wrap(new byte[] {1, 2, 3}), 3, 0)); testCases.add(new TestCase(true, ByteBuffer.wrap(new byte[] {3}), 1, 3)); testCases.add(new TestCase(false, ByteBuffer.wrap(new byte[] {2}), 1, 3)); testCases.add(new TestCase(true, ByteBuffer.wrap(new byte[] {3, 4, 5}), 3, 3)); testCases.add(new TestCase(false, ByteBuffer.wrap(new byte[] {3, 4, 5, (byte) 0xFF}), 3, 3)); testCases.add(new TestCase(false, ByteBuffer.wrap(new byte[] {2, 3, 4}), 3, 3)); for (TestCase testCase : testCases) { boolean result = BufferUtils.equalIncreasingByteBuffer(testCase.mStart, testCase.mLength, testCase.mBuffer); assertEquals(testCase.mExpected, result); } }
/**
 * Creates a new {@link Builder} for configuring a DLPInspectText transform.
 *
 * @return a fresh builder backed by the AutoValue-generated implementation
 */
public static Builder newBuilder() {
    return new AutoValue_DLPInspectText.Builder();
}
/**
 * Building a DLPInspectText with header columns but no column delimiter must fail
 * with an IllegalArgumentException.
 */
@Test
public void throwsExceptionWhenDelimiterIsNullAndHeadersAreSet() {
    PCollectionView<List<String>> header =
        testPipeline.apply(Create.of("header")).apply(View.asList());
    assertThrows(
        "Column delimiter should be set if headers are present.",
        IllegalArgumentException.class,
        () ->
            DLPInspectText.newBuilder()
                .setProjectId(PROJECT_ID)
                .setBatchSizeBytes(BATCH_SIZE_SMALL)
                .setInspectTemplateName(TEMPLATE_NAME)
                .setHeaderColumns(header)
                .build());
    // Run the pipeline so the applied Create/View transforms are executed.
    testPipeline.run().waitUntilFinish();
}
/**
 * Converts an octet-stream payload to text by re-encoding it from UTF-8 into the
 * destination media type's charset.
 *
 * @param source      raw bytes, assumed UTF-8 encoded; may be {@code null}
 * @param destination target media type supplying the output charset
 * @return the re-encoded bytes, or {@code null} when {@code source} is {@code null}
 * @deprecated scheduled for removal since 13.0
 */
@Deprecated(forRemoval = true, since = "13.0")
public static byte[] convertOctetStreamToText(byte[] source, MediaType destination) {
    return source == null ? null : convertCharset(source, UTF_8, destination.getCharset());
}
/**
 * Octet-stream bytes must be re-encoded into the destination media type's charset
 * (US-ASCII here), and arbitrary bytes must round-trip via the default charset.
 */
@Test
public void testOctetStreamToTextConversion() {
    String text = "Have you ever retired a human by mistake?";
    byte[] bytes1 = text.getBytes();
    byte[] bytes2 = new byte[]{1, 2, 3};
    byte[] result1 = StandardConversions.convertOctetStreamToText(bytes1, TEXT_PLAIN.withCharset(US_ASCII));
    byte[] result2 = StandardConversions.convertOctetStreamToText(bytes2, TEXT_PLAIN);
    assertArrayEquals(text.getBytes(US_ASCII), result1);
    assertArrayEquals(new String(bytes2).getBytes(UTF_8), result2);
}
/**
 * Resolves the externally reachable REST endpoint of the given cluster by looking up
 * its external service and delegating to the endpoint logic of the service's exposed
 * type. An absent service yields an absent endpoint.
 */
@Override
public Optional<Endpoint> getRestEndpoint(String clusterId) {
    return getService(ExternalServiceDecorator.getExternalServiceName(clusterId))
        .map(KubernetesService::getInternalResource)
        .flatMap(
            service ->
                ServiceType.classify(service)
                    .serviceType()
                    .getRestEndpoint(service, internalClient, nodePortAddressType));
}
/**
 * A LoadBalancer service exposing neither hostname nor IP must yield no REST endpoint.
 */
@Test
void testServiceLoadBalancerNullHostAndIP() {
    mockExpectedServiceFromServerSide(buildExternalServiceWithLoadBalancer(null, null));

    final Optional<Endpoint> resultEndpoint = flinkKubeClient.getRestEndpoint(CLUSTER_ID);
    assertThat(resultEndpoint).isNotPresent();
}
/**
 * Handles one pipelined RPC response for the replicator identified by {@code id}.
 * Responses may arrive out of order, so each is parked in the replicator's
 * pending-response priority queue and the queue is drained only while its head
 * matches the next required sequence number. The thread-id lock acquired here is
 * released on every early-return path, inside the per-type handlers on failure,
 * or by sendEntries() on success.
 */
@SuppressWarnings("ContinueOrBreakFromFinallyBlock")
static void onRpcReturned(final ThreadId id, final RequestType reqType, final Status status,
                          final Message request, final Message response, final int seq,
                          final int stateVersion, final long rpcSendTime) {
    if (id == null) {
        return;
    }
    final long startTimeMs = Utils.nowMs();
    Replicator r;
    if ((r = (Replicator) id.lock()) == null) {
        return;
    }
    // Drop responses produced before the replicator was reset to a newer version.
    if (stateVersion != r.version) {
        LOG.debug(
            "Replicator {} ignored old version response {}, current version is {}, request is {}\n, and response is {}\n, status is {}.",
            r, stateVersion, r.version, request, response, status);
        id.unlock();
        return;
    }
    final PriorityQueue<RpcResponse> holdingQueue = r.pendingResponses;
    holdingQueue.add(new RpcResponse(reqType, seq, status, request, response, rpcSendTime));
    // Too many buffered out-of-order responses: assume the pipeline is broken and re-probe.
    if (holdingQueue.size() > r.raftOptions.getMaxReplicatorInflightMsgs()) {
        LOG.warn("Too many pending responses {} for replicator {}, maxReplicatorInflightMsgs={}",
            holdingQueue.size(), r, r.raftOptions.getMaxReplicatorInflightMsgs());
        r.resetInflights();
        r.setState(State.Probe);
        r.sendProbeRequest();
        return;
    }
    boolean continueSendEntries = false;
    final boolean isLogDebugEnabled = LOG.isDebugEnabled();
    StringBuilder sb = null;
    if (isLogDebugEnabled) {
        sb = new StringBuilder("Replicator ") //
            .append(r) //
            .append(" is processing RPC responses, ");
    }
    try {
        int processed = 0;
        while (!holdingQueue.isEmpty()) {
            final RpcResponse queuedPipelinedResponse = holdingQueue.peek();
            // Sequence mismatch, waiting for next response.
            if (queuedPipelinedResponse.seq != r.requiredNextSeq) {
                if (processed > 0) {
                    if (isLogDebugEnabled) {
                        sb.append("has processed ") //
                            .append(processed) //
                            .append(" responses, ");
                    }
                    break;
                } else {
                    // Do not processed any responses, UNLOCK id and return.
                    continueSendEntries = false;
                    id.unlock();
                    return;
                }
            }
            holdingQueue.remove();
            processed++;
            final Inflight inflight = r.pollInflight();
            if (inflight == null) {
                // The previous in-flight requests were cleared.
                if (isLogDebugEnabled) {
                    sb.append("ignore response because request not found: ") //
                        .append(queuedPipelinedResponse) //
                        .append(",\n");
                }
                continue;
            }
            // In-flight record and response disagree on the sequence: reset and retry.
            if (inflight.seq != queuedPipelinedResponse.seq) {
                // reset state
                LOG.warn(
                    "Replicator {} response sequence out of order, expect {}, but it is {}, reset state to try again.",
                    r, inflight.seq, queuedPipelinedResponse.seq);
                r.resetInflights();
                r.setState(State.Probe);
                continueSendEntries = false;
                r.block(Utils.nowMs(), RaftError.EREQUEST.getNumber());
                return;
            }
            try {
                switch (queuedPipelinedResponse.requestType) {
                    case AppendEntries:
                        continueSendEntries = onAppendEntriesReturned(id, inflight, queuedPipelinedResponse.status,
                            (AppendEntriesRequest) queuedPipelinedResponse.request,
                            (AppendEntriesResponse) queuedPipelinedResponse.response, rpcSendTime, startTimeMs, r);
                        break;
                    case Snapshot:
                        continueSendEntries = onInstallSnapshotReturned(id, r, queuedPipelinedResponse.status,
                            (InstallSnapshotRequest) queuedPipelinedResponse.request,
                            (InstallSnapshotResponse) queuedPipelinedResponse.response);
                        break;
                }
            } finally {
                if (continueSendEntries) {
                    // Success, increase the response sequence.
                    r.getAndIncrementRequiredNextSeq();
                } else {
                    // The id is already unlocked in onAppendEntriesReturned/onInstallSnapshotReturned, we SHOULD break out.
                    break;
                }
            }
        }
    } finally {
        if (isLogDebugEnabled) {
            sb.append("after processed, continue to send entries: ") //
                .append(continueSendEntries);
            LOG.debug(sb.toString());
        }
        if (continueSendEntries) {
            // unlock in sendEntries.
            r.sendEntries();
        }
    }
}
/**
 * A response carrying a higher term (2) than ours must trigger a step-down via
 * increaseTermTo and destroy the replicator (its id becomes null).
 */
@Test
public void testOnRpcReturnedTermMismatch() {
    final Replicator r = getReplicator();
    final RpcRequests.AppendEntriesRequest request = createEmptyEntriesRequest();
    final RpcRequests.AppendEntriesResponse response = RpcRequests.AppendEntriesResponse.newBuilder() //
        .setSuccess(false) //
        .setLastLogIndex(12) //
        .setTerm(2) //
        .build();
    this.id.unlock();

    Replicator.onRpcReturned(this.id, Replicator.RequestType.AppendEntries, Status.OK(), request, response,
        0, 0, Utils.monotonicMs());
    Mockito.verify(this.node).increaseTermTo(
        2,
        new Status(RaftError.EHIGHERTERMRESPONSE,
            "Leader receives higher term heartbeat_response from peer:%s, group:%s",
            this.peerId, this.node.getGroupId()));
    // Replicator is destroyed after stepping down.
    assertNull(r.id);
}
public static boolean isUsableLocalPort(int port) { if (false == isValidPort(port)) { // 给定的IP未在指定端口范围中 return false; } // issue#765@Github, 某些绑定非127.0.0.1的端口无法被检测到 try (ServerSocket ss = new ServerSocket(port)) { ss.setReuseAddress(true); } catch (IOException ignored) { return false; } try (DatagramSocket ds = new DatagramSocket(port)) { ds.setReuseAddress(true); } catch (IOException ignored) { return false; } return true; }
/**
 * Disabled: availability of port 80 depends on the host environment and on
 * running with sufficient privileges.
 */
@Test
@Disabled
public void isUsableLocalPortTest(){
    assertTrue(NetUtil.isUsableLocalPort(80));
}
/**
 * Performs the deep analysis pass over an OAL metrics definition: resolves the
 * metrics implementation class, builds filter expressions, locates the @Entrance
 * method and wires its arguments from parameter annotations, collects the
 * persistent columns, and attaches the default source-derived columns.
 *
 * @param result the partially parsed analysis result; mutated in place
 * @return the same {@code result} instance, fully populated
 * @throws IllegalArgumentException when no @Entrance method exists, a parameter
 *         lacks a recognized annotation, or a required func expression is missing
 */
public AnalysisResult analysis(AnalysisResult result) {
    // 1. Set sub package name by source.metrics
    Class<? extends Metrics> metricsClass = MetricsHolder.find(result.getAggregationFuncStmt().getAggregationFunctionName());
    String metricsClassSimpleName = metricsClass.getSimpleName();
    result.setMetricsClassName(metricsClassSimpleName);

    // 2. Optional for filter: translate each parsed condition into a matcher-based expression.
    List<ConditionExpression> expressions = result.getFilters().getFilterExpressionsParserResult();
    if (expressions != null && expressions.size() > 0) {
        for (ConditionExpression expression : expressions) {
            final FilterMatchers.MatcherInfo matcherInfo = FilterMatchers.INSTANCE.find(
                expression.getExpressionType());

            // Boolean attributes are read via isXxx(), everything else via getXxx().
            final String getter = matcherInfo.isBooleanType()
                ? ClassMethodUtil.toIsMethod(expression.getAttributes())
                : ClassMethodUtil.toGetMethod(expression.getAttributes());

            final Expression filterExpression = new Expression();
            filterExpression.setExpressionObject(matcherInfo.getMatcher().getName());
            filterExpression.setLeft(TypeCastUtil.withCast(expression.getCastType(), "source." + getter));
            filterExpression.setRight(expression.getValue());
            result.getFilters().addFilterExpressions(filterExpression);
        }
    }

    // 3. Find Entrance method of this metrics
    Class<?> c = metricsClass;
    Method entranceMethod = null;
    SearchEntrance:
    while (!c.equals(Object.class)) {
        for (Method method : c.getMethods()) {
            Entrance annotation = method.getAnnotation(Entrance.class);
            if (annotation != null) {
                entranceMethod = method;
                break SearchEntrance;
            }
        }
        c = c.getSuperclass();
    }
    if (entranceMethod == null) {
        throw new IllegalArgumentException("Can't find Entrance method in class: " + metricsClass.getName());
    }
    EntryMethod entryMethod = new EntryMethod();
    result.setEntryMethod(entryMethod);
    entryMethod.setMethodName(entranceMethod.getName());

    // 4. Use parameter's annotation of entrance method to generate aggregation entrance.
    for (Parameter parameter : entranceMethod.getParameters()) {
        Class<?> parameterType = parameter.getType();
        Annotation[] parameterAnnotations = parameter.getAnnotations();
        if (parameterAnnotations == null || parameterAnnotations.length == 0) {
            throw new IllegalArgumentException(
                "Entrance method:" + entranceMethod + " doesn't include the annotation.");
        }
        // Only the first annotation on each parameter is considered.
        Annotation annotation = parameterAnnotations[0];
        if (annotation instanceof SourceFrom) {
            entryMethod.addArg(
                parameterType,
                TypeCastUtil.withCast(
                    result.getFrom().getSourceCastType(),
                    "source." + ClassMethodUtil.toGetMethod(result.getFrom().getSourceAttribute())
                )
            );
        } else if (annotation instanceof ConstOne) {
            entryMethod.addArg(parameterType, "1");
        } else if (annotation instanceof org.apache.skywalking.oap.server.core.analysis.metrics.annotation.Expression) {
            if (isNull(result.getAggregationFuncStmt().getFuncConditionExpressions())
                || result.getAggregationFuncStmt().getFuncConditionExpressions().isEmpty()) {
                throw new IllegalArgumentException(
                    "Entrance method:" + entranceMethod + " argument can't find funcParamExpression.");
            } else {
                ConditionExpression expression = result.getAggregationFuncStmt().getNextFuncConditionExpression();
                final FilterMatchers.MatcherInfo matcherInfo = FilterMatchers.INSTANCE.find(
                    expression.getExpressionType());

                final String getter = matcherInfo.isBooleanType()
                    ? ClassMethodUtil.toIsMethod(expression.getAttributes())
                    : ClassMethodUtil.toGetMethod(expression.getAttributes());

                final Expression argExpression = new Expression();
                argExpression.setRight(expression.getValue());
                argExpression.setExpressionObject(matcherInfo.getMatcher().getName());
                argExpression.setLeft(TypeCastUtil.withCast(expression.getCastType(), "source." + getter));

                entryMethod.addArg(argExpression);
            }
        } else if (annotation instanceof Arg) {
            entryMethod.addArg(parameterType, result.getAggregationFuncStmt().getNextFuncArg());
        } else {
            throw new IllegalArgumentException(
                "Entrance method:" + entranceMethod + " doesn't the expected annotation.");
        }
    }

    // 5. Get all column declared in MetricsHolder class.
    c = metricsClass;
    while (!c.equals(Object.class)) {
        for (Field field : c.getDeclaredFields()) {
            Column column = field.getAnnotation(Column.class);
            if (column != null) {
                result.addPersistentField(field.getName(), column.name(), field.getType());
            }
        }
        c = c.getSuperclass();
    }

    // 6. Based on Source, generate default columns
    List<SourceColumn> columns = SourceColumnsFactory.getColumns(result.getFrom().getSourceName());
    result.setFieldsFromSource(columns);
    result.generateSerializeFields();
    return result;
}
/**
 * Each filter expression type must be translated to its matcher class, and boolean
 * matchers must generate an isXxx() accessor while others generate getXxx().
 */
@Test
public void shouldUseCorrectMatcher() {
    AnalysisResult result = new AnalysisResult();
    result.getFrom().setSourceName("Endpoint");
    result.getFrom().getSourceAttribute().add("latency");
    result.setMetricsName("EndpointAvg");
    result.getAggregationFuncStmt().setAggregationFunctionName("longAvg");

    DeepAnalysis analysis = new DeepAnalysis();

    // booleanMatch -> BooleanMatch with an isXxx() accessor.
    result.getFilters().setFilterExpressions(null);
    result.getFilters().setFilterExpressionsParserResult(null);
    result.getFilters().addFilterExpressionsParserResult(new ConditionExpression("booleanMatch", "valid", ""));
    result = analysis.analysis(result);
    assertTrue(result.getFilters().getFilterExpressions().size() > 0);
    assertEquals(BooleanMatch.class.getName(), result.getFilters().getFilterExpressions().get(0).getExpressionObject());
    assertEquals("source.isValid()", result.getFilters().getFilterExpressions().get(0).getLeft());

    // stringMatch -> StringMatch with a getXxx() accessor.
    result.getFilters().setFilterExpressions(null);
    result.getFilters().setFilterExpressionsParserResult(null);
    result.getFilters().addFilterExpressionsParserResult(new ConditionExpression("stringMatch", "type", ""));
    result = analysis.analysis(result);
    assertTrue(result.getFilters().getFilterExpressions().size() > 0);
    assertEquals(StringMatch.class.getName(), result.getFilters().getFilterExpressions().get(0).getExpressionObject());
    assertEquals("source.getType()", result.getFilters().getFilterExpressions().get(0).getLeft());

    // notEqualMatch -> NotEqualMatch.
    result.getFilters().setFilterExpressions(null);
    result.getFilters().setFilterExpressionsParserResult(null);
    result.getFilters().addFilterExpressionsParserResult(new ConditionExpression("notEqualMatch", "type", ""));
    result = analysis.analysis(result);
    assertTrue(result.getFilters().getFilterExpressions().size() > 0);
    assertEquals(NotEqualMatch.class.getName(), result.getFilters().getFilterExpressions().get(0).getExpressionObject());
    assertEquals("source.getType()", result.getFilters().getFilterExpressions().get(0).getLeft());

    // booleanNotEqualMatch -> BooleanNotEqualMatch, boolean accessor again.
    result.getFilters().setFilterExpressions(null);
    result.getFilters().setFilterExpressionsParserResult(null);
    result.getFilters().addFilterExpressionsParserResult(new ConditionExpression("booleanNotEqualMatch", "type", ""));
    result = analysis.analysis(result);
    assertTrue(result.getFilters().getFilterExpressions().size() > 0);
    assertEquals(BooleanNotEqualMatch.class.getName(), result.getFilters().getFilterExpressions().get(0).getExpressionObject());
    assertEquals("source.isType()", result.getFilters().getFilterExpressions().get(0).getLeft());
}
/**
 * Expands {@code value} into hashed range features at successively larger
 * arity-sized levels plus a single edge feature, invoking the handlers for each.
 * Values outside [lowerBound, upperBound] produce nothing. Level expansion stops
 * early when the level start or level size would overflow a long.
 */
public void expand(String key, long value, RangeHandler rangeHandler, EdgeHandler edgeHandler) {
    if (value < lowerBound || value > upperBound) {
        // Value outside bounds -> expand to nothing.
        return;
    }
    // Negative values expand downward and may have a different level count.
    int maxLevels = value > 0 ? maxPositiveLevels : maxNegativeLevels;
    int sign = value > 0 ? 1 : -1;

    // Append key to feature string builder
    StringBuilder builder = new StringBuilder(128);
    builder.append(key).append('=');

    long levelSize = arity;
    // Edge feature: the arity-aligned interval containing value, plus the offset within it.
    long edgeInterval = (value / arity) * arity;
    edgeHandler.handleEdge(createEdgeFeatureHash(builder, edgeInterval), (int) Math.abs(value - edgeInterval));
    for (int i = 0; i < maxLevels; ++i) {
        long start = (value / levelSize) * levelSize;
        if (Math.abs(start) + levelSize - 1 < 0) {
            // overflow
            break;
        }
        rangeHandler.handleRange(createRangeFeatureHash(builder, start, start + sign * (levelSize - 1)));
        levelSize *= arity;
        if (levelSize <= 0 && levelSize != Long.MIN_VALUE) {
            //overflow
            break;
        }
    }
}
/**
 * Expanding a negative value must emit range features whose magnitude is limited by
 * the configured lower bound (-9999 allows four levels here) plus the matching
 * edge feature for the containing arity interval.
 */
@Test
void requireThatLowerBoundIsUsed() {
    PredicateRangeTermExpander expander = new PredicateRangeTermExpander(10, -9999, 99);
    Iterator<String> expectedLabels = List.of(
        "key=-49-40",
        "key=-99-0",
        "key=-999-0",
        "key=-9999-0").iterator();
    expander.expand("key", -42,
        range -> assertEquals(PredicateHash.hash64(expectedLabels.next()), range),
        (edge, value) -> {
            assertEquals(PredicateHash.hash64("key=-40"), edge);
            assertEquals(2, value);
        });
    // All expected range labels must have been consumed.
    assertFalse(expectedLabels.hasNext());
}
/**
 * Builds the delegation-token service name for a URI by resolving its authority
 * to a socket address (applying {@code defPort} when the URI has no port) and
 * formatting it as a token service string.
 *
 * @param uri     service URI; its authority supplies host and optional port
 * @param defPort port to use when the authority has none
 * @return the token service name, or {@code null} if the URI has no authority
 */
public static String buildDTServiceName(URI uri, int defPort) {
    String authority = uri.getAuthority();
    return authority == null
        ? null
        : buildTokenService(NetUtils.createSocketAddr(authority, defPort)).toString();
}
/**
 * With HADOOP_SECURITY_TOKEN_SERVICE_USE_IP enabled, hostnames (any case) must
 * resolve to the IP, and the default port applies only when the URI omits one.
 */
@Test
public void testBuildDTServiceName() {
    Configuration conf = new Configuration(false);
    conf.setBoolean(
        CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP, true);
    SecurityUtil.setConfiguration(conf);
    assertEquals("127.0.0.1:123",
        SecurityUtil.buildDTServiceName(URI.create("test://LocalHost"), 123)
    );
    assertEquals("127.0.0.1:123",
        SecurityUtil.buildDTServiceName(URI.create("test://LocalHost:123"), 456)
    );
    assertEquals("127.0.0.1:123",
        SecurityUtil.buildDTServiceName(URI.create("test://127.0.0.1"), 123)
    );
    assertEquals("127.0.0.1:123",
        SecurityUtil.buildDTServiceName(URI.create("test://127.0.0.1:123"), 456)
    );
}
/**
 * {@inheritDoc}
 *
 * <p>This implementation provides no task-aware plugins and always returns an
 * immutable empty list.
 */
@Override
public Collection<TaskAwarePlugin> getTaskAwarePluginList() {
    return Collections.emptyList();
}
/** The manager under test must expose no task-aware plugins. */
@Test
public void testGetTaskAwarePluginList() {
    Assert.assertEquals(Collections.emptyList(), manager.getTaskAwarePluginList());
}
/**
 * {@inheritDoc}
 *
 * <p>Always returns {@code null} — presumably this metadata implementation does
 * not report a catalog separator; confirm against the interface contract.
 */
@Override
public String getCatalogSeparator() {
    return null;
}
/** The catalog separator must be reported as null. */
@Test
void assertGetCatalogSeparator() {
    assertNull(metaData.getCatalogSeparator());
}
/**
 * Decompresses an Eddystone-URL compressed byte array back into a URL string.
 *
 * <p>The low nibble of the first byte selects the scheme prefix; subsequent bytes
 * are either literal URL characters or single-byte codes for well-known top-level
 * domains (resolved via {@code topLevelDomainForByte}). Two consecutive zero bytes
 * terminate the URL.
 *
 * @param compressedURL compressed URL bytes; must contain at least the scheme byte
 * @return the expanded URL string
 */
public static String uncompress(byte[] compressedURL) {
    // StringBuilder instead of StringBuffer: local buffer, no synchronization needed.
    StringBuilder url = new StringBuilder();
    switch (compressedURL[0] & 0x0f) {
        case EDDYSTONE_URL_PROTOCOL_HTTP_WWW:
            url.append(URL_PROTOCOL_HTTP_WWW_DOT);
            break;
        case EDDYSTONE_URL_PROTOCOL_HTTPS_WWW:
            url.append(URL_PROTOCOL_HTTPS_WWW_DOT);
            break;
        case EDDYSTONE_URL_PROTOCOL_HTTP:
            url.append(URL_PROTOCOL_HTTP_COLON_SLASH_SLASH);
            break;
        case EDDYSTONE_URL_PROTOCOL_HTTPS:
            url.append(URL_PROTOCOL_HTTPS_COLON_SLASH_SLASH);
            break;
        default:
            break;
    }
    byte lastByte = -1;
    for (int i = 1; i < compressedURL.length; i++) {
        byte b = compressedURL[i];
        // Two consecutive zero bytes mark the end of the compressed payload.
        if (lastByte == 0 && b == 0) {
            break;
        }
        lastByte = b;
        String tld = topLevelDomainForByte(b);
        if (tld != null) {
            url.append(tld);
        } else {
            url.append((char) b);
        }
    }
    return url.toString();
}
/**
 * Uncompresses a payload using the http://www. prefix code, a mid-array TLD code
 * (the 0x00 after the hostname expands to ".com/" here) and literal path bytes
 * containing slashes.
 */
@Test
public void testUncompressWithSubdomainsAndSlashesInPath() throws MalformedURLException {
    String testURL = "http://www.forums.google.com/123/456";
    byte[] testBytes = {0x00, 'f', 'o', 'r', 'u', 'm', 's', '.', 'g', 'o', 'o', 'g', 'l', 'e', 0x00, '1', '2', '3', '/', '4', '5', '6'};
    assertEquals(testURL, UrlBeaconUrlCompressor.uncompress(testBytes));
}
public static DeviceId toDeviceId(ResourceId path) { checkArgument(isUnderDeviceRootNode(path), path); // FIXME if we decide to drop any of intermediate nodes // "/" - "devices" - "device" int rootIdx = ResourceIds.startsWithRootNode(path) ? 0 : -1; return toDeviceId(path.nodeKeys().get(rootIdx + DEVICE_INDEX)); }
/**
 * toDeviceId must resolve the device id even from a deeper descendant path, and
 * the device node key itself must be a ListKey keyed by the device-id leaf.
 */
@Test
public void testToDeviceId() throws CloneNotSupportedException {
    // Path deeper than the device node; the extra segments must be ignored.
    ResourceId ridAchild = ridA.copyBuilder()
        .addBranchPointSchema("some", "ns")
        .addBranchPointSchema("random", "ns")
        .addBranchPointSchema("child", "ns")
        .build();
    assertEquals(DID_A, DeviceResourceIds.toDeviceId(ridAchild));

    NodeKey<?> nodeKey = ridA.nodeKeys().get(2);
    assertThat(nodeKey, is(instanceOf(ListKey.class)));
    assertThat(nodeKey.schemaId(), is(equalTo(new SchemaId(DEVICE_NAME, DCS_NAMESPACE))));
    ListKey listKey = (ListKey) nodeKey;
    assertThat(listKey.keyLeafs(), is(contains(new KeyLeaf(DEVICE_ID_KL_NAME, DCS_NAMESPACE, DID_A.toString()))));
}
/**
 * Searches the Solr backend for application store entries matching the keyword.
 *
 * @param keyword Solr query string; {@code null} or empty matches everything
 *                (a null keyword previously threw a NullPointerException)
 * @return up to 40 matching entries; empty if nothing matched or the query failed
 */
public List<AppStoreEntry> search(String keyword) {
    List<AppStoreEntry> apps = new ArrayList<AppStoreEntry>();
    SolrClient solr = getSolrClient();
    SolrQuery query = new SolrQuery();
    // Empty (or null) keyword means "match all documents".
    if (keyword == null || keyword.isEmpty()) {
        query.setQuery("*:*");
    } else {
        query.setQuery(keyword);
    }
    // Restrict results to app store entries regardless of the keyword.
    query.setFilterQueries("type_s:AppStoreEntry");
    query.setRows(40);
    QueryResponse response;
    try {
        response = solr.query(query);
        Iterator<SolrDocument> list = response.getResults().listIterator();
        while (list.hasNext()) {
            SolrDocument d = list.next();
            AppStoreEntry entry = new AppStoreEntry();
            entry.setId(d.get("id").toString());
            entry.setOrg(d.get("org_s").toString());
            entry.setName(d.get("name_s").toString());
            entry.setDesc(d.get("desc_s").toString());
            entry.setLike(Integer.parseInt(d.get("like_i").toString()));
            entry.setDownload(Integer.parseInt(d.get("download_i").toString()));
            apps.add(entry);
        }
    } catch (SolrServerException | IOException e) {
        // Query failure degrades to an empty result rather than propagating.
        LOG.error("Error in searching for applications: ", e);
    }
    return apps;
}
/** A registered application must be findable via a field-qualified Solr query on its name. */
@Test
void testSearch() throws Exception {
    Application example = new Application();
    example.setOrganization("jenkins-ci.org");
    example.setName("jenkins");
    example.setDescription("World leading open source automation system.");
    example.setIcon("/css/img/feather.png");
    spy.register(example);

    List<AppStoreEntry> results = spy.search("name_s:jenkins");
    int expected = 1;
    int actual = results.size();
    assertEquals(expected, actual);
}
/**
 * Builds a {@link Config} from the given properties, deriving the config name
 * from the properties object's identity hash.
 *
 * @param properties source properties to convert
 * @return the parsed configuration
 */
public static Config fromProperties(Properties properties) {
    return fromProperties("Properties@" + System.identityHashCode(properties), properties);
}
/**
 * Properties with dotted keys and bracketed indexes must parse into nested objects
 * and lists: sparse list indexes leave null gaps, and an indexed element may itself
 * be an object ("field1[4].field1").
 */
@Test
void testFromProperties() {
    var properties = new Properties();
    properties.setProperty("object1.field1", "1");
    properties.setProperty("object1.field2", "2");
    properties.setProperty("object2.field1[0]", "3");
    properties.setProperty("object2.field1[1]", "4");
    properties.setProperty("object2.field1[3]", "5");
    properties.setProperty("object2.field1[4].field1", "7");
    properties.setProperty("object3", "6");

    var config = MapConfigFactory.fromProperties(properties);

    assertThat(config.get("object1.field1"))
        .isInstanceOf(ConfigValue.StringValue.class)
        .hasFieldOrPropertyWithValue("value", "1");
    assertThat(config.get("object1.field2"))
        .isInstanceOf(ConfigValue.StringValue.class)
        .hasFieldOrPropertyWithValue("value", "2");
    assertThat(config.get("object2.field1"))
        .asInstanceOf(InstanceOfAssertFactories.iterable(ConfigValue.class))
        .map(v -> v == null ? null : v.value())
        .has(new Condition<>(v -> v.equals("3"), ""), Index.atIndex(0))
        .has(new Condition<>(v -> v.equals("4"), ""), Index.atIndex(1))
        // Index 2 was never set, so the list carries a null gap there.
        .has(new Condition<>(Objects::isNull, ""), Index.atIndex(2))
        .has(new Condition<>(v -> v.equals("5"), ""), Index.atIndex(3))
        .has(new Condition<>(v -> v instanceof Map<?, ?> map
            && map.size() == 1
            && map.get("field1") instanceof ConfigValue.StringValue str
            && str.value().equals("7"), ""), Index.atIndex(4));
    assertThat(config.get("object3"))
        .isInstanceOf(ConfigValue.StringValue.class)
        .hasFieldOrPropertyWithValue("value", "6");
}
/**
 * Fetches every requested document id synchronously over the message bus and
 * prints each reply, wrapping the output in a JSON array when JSON output is
 * requested (and ids-only printing is off).
 *
 * @throws DocumentRetrieverException presumably raised during cluster/route
 *         resolution — not visible in this method, confirm against callees
 */
public void retrieveDocuments() throws DocumentRetrieverException {
    boolean first = true;
    // An explicitly named cluster takes precedence over a raw route string.
    String route = params.cluster.isEmpty() ? params.route : resolveClusterRoute(params.cluster);
    MessageBusParams messageBusParams = createMessageBusParams(params.configId, params.timeout, route);
    documentAccess = documentAccessFactory.createDocumentAccess(messageBusParams);
    session = documentAccess.createSyncSession(new SyncParameters.Builder().build());
    int trace = params.traceLevel;
    if (trace > 0) {
        session.setTraceLevel(trace);
    }

    Iterator<String> iter = params.documentIds;
    if (params.jsonOutput && !params.printIdsOnly) {
        System.out.println('[');
    }
    while (iter.hasNext()) {
        if (params.jsonOutput && !params.printIdsOnly) {
            // Separate JSON array elements with commas (none before the first).
            if (!first) {
                System.out.println(',');
            } else {
                first = false;
            }
        }
        String docid = iter.next();
        Message msg = createDocumentRequest(docid);
        Reply reply = session.syncSend(msg);
        printReply(reply);
    }
    if (params.jsonOutput && !params.printIdsOnly) {
        System.out.println(']');
    }
}
/** Each requested document id must result in exactly one synchronous send, and every reply must be printed. */
@Test
void testMultipleMessages() throws DocumentRetrieverException {
    ClientParameters params = createParameters()
        .setDocumentIds(asIterator(DOC_ID_1, DOC_ID_2, DOC_ID_3))
        .build();
    when(mockedSession.syncSend(any())).thenReturn(
        createDocumentReply(DOC_ID_1),
        createDocumentReply(DOC_ID_2),
        createDocumentReply(DOC_ID_3));

    DocumentRetriever documentRetriever = createDocumentRetriever(params);
    documentRetriever.retrieveDocuments();

    verify(mockedSession, times(3)).syncSend(any());
    assertContainsDocument(DOC_ID_1);
    assertContainsDocument(DOC_ID_2);
    assertContainsDocument(DOC_ID_3);
}
/**
 * Loads an existing Logstash keystore at the path described by the secure config.
 * Fails with LoadException when the file is missing or is not a valid (Logstash)
 * keystore, with AccessException when it cannot be opened with the configured
 * password/permissions. The lock is always released and the config's plaintext
 * values are always cleared, even on failure.
 */
@Override
public JavaKeyStore load(SecureConfig config) {
    if (!exists(config)) {
        throw new SecretStoreException.LoadException(
            String.format("Can not find Logstash keystore at %s. Please verify this file exists and is a valid Logstash keystore.",
                config.getPlainText("keystore.file") == null ? "<undefined>" : new String(config.getPlainText("keystore.file"))));
    }
    try {
        init(config);
        lock.lock();
        try (final InputStream is = Files.newInputStream(keyStorePath)) {
            try {
                keyStore.load(is, this.keyStorePass);
            } catch (IOException ioe) {
                // An UnrecoverableKeyException cause means a password/permission problem;
                // any other IOException means the file is not a keystore at all.
                if (ioe.getCause() instanceof UnrecoverableKeyException) {
                    throw new SecretStoreException.AccessException(
                        String.format("Can not access Logstash keystore at %s. Please verify correct file permissions and keystore password.",
                            keyStorePath.toAbsolutePath()), ioe);
                } else {
                    throw new SecretStoreException.LoadException(String.format("Found a file at %s, but it is not a valid Logstash keystore.",
                        keyStorePath.toAbsolutePath().toString()), ioe);
                }
            }
            // The marker secret distinguishes a Logstash keystore from any other keystore file.
            byte[] marker = retrieveSecret(LOGSTASH_MARKER);
            if (marker == null) {
                throw new SecretStoreException.LoadException(String.format("Found a keystore at %s, but it is not a Logstash keystore.",
                    keyStorePath.toAbsolutePath().toString()));
            }
            LOGGER.debug("Using existing keystore at {}", keyStorePath.toAbsolutePath());
            return this;
        }
    } catch (SecretStoreException sse) {
        throw sse;
    } catch (Exception e) {
        //should never happen
        throw new SecretStoreException.UnknownException("Error while trying to load the Logstash keystore", e);
    } finally {
        releaseLock(lock);
        config.clearValues();
    }
}
/**
 * Launches a second real JVM that writes secrets to an alternate keystore, waits for
 * its magic-file signal, then loads and validates the keystore from this process.
 * Ignored: timed out on Windows (elastic/logstash#9916).
 */
@Ignore("This test timed out on Windows. Issue: https://github.com/elastic/logstash/issues/9916")
@Test
public void testWithRealSecondJvm() throws Exception {
    Path magicFile = folder.newFolder().toPath().resolve(EXTERNAL_TEST_FILE_LOCK);
    Path altPath = folder.newFolder().toPath().resolve("alt.logstash.keystore");
    String java = System.getProperty("java.home") + File.separator + "bin" + File.separator + "java";
    ProcessBuilder builder = new ProcessBuilder(java, "-cp", System.getProperty("java.class.path"),
        getClass().getCanonicalName(), EXTERNAL_TEST_WRITE,
        magicFile.toAbsolutePath().toString(), altPath.toAbsolutePath().toString());
    Future<Integer> future = Executors.newScheduledThreadPool(1).submit(() -> builder.start().waitFor());
    // Sleep-poll until the child creates the magic file, then spin until it exits.
    while (!future.isDone()) {
        try {
            Files.readAllBytes(magicFile);
        } catch (NoSuchFileException sfe) {
            Thread.sleep(100);
            continue;
        }
    }
    SecureConfig config = new SecureConfig();
    config.add("keystore.file", altPath.toAbsolutePath().toString().toCharArray());
    JavaKeyStore keyStore = new JavaKeyStore().load(config);
    validateAtoZ(keyStore);
}
/**
 * Shenyu gray-release aware invoker selection. When gray upstreams exist for the
 * selector, an upstream is picked by the rule's load balancer and the invokers are
 * narrowed by the upstream's url, then group, then version before the final load
 * balancing; otherwise selection falls back to plain load balancing over all invokers.
 */
@Override
public <T> Invoker<T> select(final List<Invoker<T>> invokers, final URL url, final Invocation invocation) throws RpcException {
    String shenyuSelectorId = invocation.getAttachment(Constants.DUBBO_SELECTOR_ID);
    String shenyuRuleId = invocation.getAttachment(Constants.DUBBO_RULE_ID);
    String remoteAddressIp = invocation.getAttachment(Constants.DUBBO_REMOTE_ADDRESS);
    List<DubboUpstream> dubboUpstreams = ApacheDubboPluginDataHandler.SELECTOR_CACHED_HANDLE.get().obtainHandle(shenyuSelectorId);
    DubboRuleHandle dubboRuleHandle = ApacheDubboPluginDataHandler.RULE_CACHED_HANDLE.get().obtainHandle(shenyuRuleId);
    // if gray list is not empty,just use load balance to choose one.
    if (CollectionUtils.isNotEmpty(dubboUpstreams)) {
        Upstream upstream = LoadBalancerFactory.selector(
            UpstreamCacheManager.getInstance().findUpstreamListBySelectorId(shenyuSelectorId),
            dubboRuleHandle.getLoadBalance(), remoteAddressIp);
        // An upstream without url/group/version carries no gray constraint at all.
        if (StringUtils.isBlank(upstream.getUrl()) && StringUtils.isBlank(upstream.getGroup()) && StringUtils.isBlank(upstream.getVersion())) {
            return select(invokers, url, invocation, dubboRuleHandle.getLoadBalance());
        }
        // url is the first level, then is group, the version is the lowest.
        final List<Invoker<T>> invokerGrays = invokers.stream().filter(each -> {
            if (StringUtils.isNotBlank(upstream.getUrl())) {
                URL eachUrl = each.getUrl();
                return eachUrl.getAddress().equals(upstream.getUrl());
            }
            return true;
        }).filter(each -> {
            if (StringUtils.isNotBlank(upstream.getGroup())) {
                final URL eachUrl = each.getUrl();
                return upstream.getGroup().equals(eachUrl.getParameter(Constants.GROUP));
            }
            return true;
        }).filter(each -> {
            if (StringUtils.isNotBlank(upstream.getVersion())) {
                final URL eachUrl = each.getUrl();
                return upstream.getVersion().equals(eachUrl.getParameter(Constants.VERSION));
            }
            return true;
        }).collect(Collectors.toList());
        // No invoker matched the gray constraints: fall back to the full invoker list.
        if (CollectionUtils.isEmpty(invokerGrays)) {
            return select(invokers, url, invocation, dubboRuleHandle.getLoadBalance());
        }
        return select(invokerGrays, url, invocation, dubboRuleHandle.getLoadBalance());
    }
    return select(invokers, url, invocation, dubboRuleHandle.getLoadBalance());
}
/**
 * Gray load balancing must still yield the mocked invoker when filtering by url,
 * group or version, and must fall back to plain load balancing once the selector
 * cache entry is removed.
 */
@Test
public void apacheDubboLoadBalanceTest() {
    assertEquals(apacheDubboGrayLoadBalance.select(invokers, url, invocation), mockInvoker);
    when(upstream.getUrl()).thenReturn("localhost");
    assertEquals(apacheDubboGrayLoadBalance.select(invokers, url, invocation), mockInvoker);
    when(upstream.getUrl()).thenReturn(null);
    when(upstream.getGroup()).thenReturn("group");
    assertEquals(apacheDubboGrayLoadBalance.select(invokers, url, invocation), mockInvoker);
    when(upstream.getGroup()).thenReturn(null);
    when(upstream.getVersion()).thenReturn("version");
    assertEquals(apacheDubboGrayLoadBalance.select(invokers, url, invocation), mockInvoker);
    // Without a cached selector, selection degrades to plain load balancing.
    ApacheDubboPluginDataHandler.SELECTOR_CACHED_HANDLE.get()
        .removeHandle(Constants.DUBBO_SELECTOR_ID);
    assertEquals(apacheDubboGrayLoadBalance.select(invokers, url, invocation), mockInvoker);
}
/**
 * Recursively computes the total size in bytes of all regular files under
 * {@code path}. Symbolic links are skipped (counted as 0), presumably to avoid
 * cycles and double counting.
 *
 * @param path file or directory to measure
 * @return total size in bytes; 0 for a missing path or symlink; -1 on any error
 */
public static long calculateFileSizeInPath(File path) {
    long size = 0;
    try {
        if (!path.exists() || Files.isSymbolicLink(path.toPath())) {
            return 0;
        }
        if (path.isFile()) {
            return path.length();
        }
        if (path.isDirectory()) {
            // listFiles() returns null on I/O error or when the path vanished.
            File[] files = path.listFiles();
            if (files != null) {
                for (File file : files) {
                    long fileSize = calculateFileSizeInPath(file);
                    if (fileSize == -1) {
                        // Propagate the failure instead of returning a partial sum.
                        return -1;
                    }
                    size += fileSize;
                }
            }
        }
    } catch (Exception e) {
        log.error("calculate all file size in: {} error", path.getAbsolutePath(), e);
        return -1;
    }
    return size;
}
/**
 * calculateFileSizeInPath must sum regular file sizes recursively across nested
 * directories and contribute nothing for empty directories.
 */
@Test
public void testCalculateFileSizeInPath() throws Exception {
    /**
     * testCalculateFileSizeInPath
     * - file_0
     * - dir_1
     *   - file_1_0
     *   - file_1_1
     *   - dir_1_2
     *     - file_1_2_0
     * - dir_2 (empty)
     */
    File baseFile = tempDir.getRoot();
    // test empty path
    assertEquals(0, UtilAll.calculateFileSizeInPath(baseFile));

    File file0 = new File(baseFile, "file_0");
    assertTrue(file0.createNewFile());
    writeFixedBytesToFile(file0, 1313);
    assertEquals(1313, UtilAll.calculateFileSizeInPath(baseFile));

    // build a file tree like above
    File dir1 = new File(baseFile, "dir_1");
    dir1.mkdirs();
    File file10 = new File(dir1, "file_1_0");
    File file11 = new File(dir1, "file_1_1");
    File dir12 = new File(dir1, "dir_1_2");
    dir12.mkdirs();
    File file120 = new File(dir12, "file_1_2_0");
    File dir2 = new File(baseFile, "dir_2");
    dir2.mkdirs();

    // write all file with 1313 bytes data
    assertTrue(file10.createNewFile());
    writeFixedBytesToFile(file10, 1313);
    assertTrue(file11.createNewFile());
    writeFixedBytesToFile(file11, 1313);
    assertTrue(file120.createNewFile());
    writeFixedBytesToFile(file120, 1313);

    assertEquals(1313 * 4, UtilAll.calculateFileSizeInPath(baseFile));
}
/**
 * Asynchronously requests removal of the task with the given id. The request is
 * enqueued for the state-updater thread under the shared lock and the returned
 * future completes once the removal has been processed.
 *
 * @param taskId id of the task to remove
 * @return future completed with the removal result by the state-updater thread
 */
@Override
public CompletableFuture<RemovedTaskResult> remove(final TaskId taskId) {
    final CompletableFuture<RemovedTaskResult> future = new CompletableFuture<>();
    tasksAndActionsLock.lock();
    try {
        tasksAndActions.add(TaskAndAction.createRemoveTask(taskId, future));
        // Wake the state-updater thread so it picks up the new action promptly.
        tasksAndActionsCondition.signalAll();
    } finally {
        tasksAndActionsLock.unlock();
    }
    return future;
}
/**
 * When removing a restoring stateful task fails with a RuntimeException, the
 * failure must surface through the future returned by remove() (asserted by the
 * shared verifyRemovingUpdatingStatefulTaskFails helper).
 */
@Test
public void shouldThrowIfRemovingUpdatingActiveTaskFailsWithRuntimeException() throws Exception {
    final StreamTask task = statefulTask(TASK_0_0, mkSet(TOPIC_PARTITION_A_0)).inState(State.RESTORING).build();
    final RuntimeException runtimeException = new RuntimeException("Something happened");
    setupShouldThrowIfRemovingUpdatingStatefulTaskFailsWithException(task, runtimeException);

    final CompletableFuture<StateUpdater.RemovedTaskResult> future = stateUpdater.remove(task.id());

    verifyRemovingUpdatingStatefulTaskFails(future, task, runtimeException, false);
}
/**
 * Renders the statistic as its simple class name followed by the row count,
 * e.g. {@code HazelcastTableStatistic{rowCount=100}}.
 */
@Override
public String toString() {
    StringBuilder text = new StringBuilder(getClass().getSimpleName());
    text.append("{rowCount=").append(rowCount).append('}');
    return text.toString();
}
/** toString must render the simple class name followed by the row count. */
@Test
public void testToString() {
    HazelcastTableStatistic statistic = new HazelcastTableStatistic(100L);

    assertEquals("HazelcastTableStatistic{rowCount=100}", statistic.toString());
}
/**
 * Lazily creates and caches the EurekaClientConfig, scoped to the optional
 * namespace. Synchronized so concurrent callers observe a single shared instance.
 */
@Override
public synchronized EurekaClientConfig get() {
    if (config == null) {
        config = (namespace == null)
            ? new DefaultEurekaClientConfig()
            : new DefaultEurekaClientConfig(namespace);

        // TODO: Remove this when DiscoveryManager is finally no longer used
        DiscoveryManager.getInstance().setEurekaClientConfig(config);
    }
    return config;
}
// Verifies that a Guice-bound @EurekaNamespace prefix is honoured when the client config
// resolves its service URLs from configuration properties.
@Test
public void testNameSpaceInjection() throws Exception {
    ConfigurationManager.getConfigInstance().setProperty("testnamespace.serviceUrl.default", SERVICE_URI);
    Injector injector = LifecycleInjector.builder()
            .withBootstrapModule(new BootstrapModule() {
                @Override
                public void configure(BootstrapBinder binder) {
                    binder.bind(String.class).annotatedWith(EurekaNamespace.class).toInstance("testnamespace.");
                }
            })
            .build()
            .createInjector();
    DefaultEurekaClientConfig clientConfig = injector.getInstance(DefaultEurekaClientConfig.class);
    List<String> serviceUrls = clientConfig.getEurekaServerServiceUrls("default");
    assertThat(serviceUrls.get(0), is(equalTo(SERVICE_URI)));
}
/**
 * Returns the main (non-side) inputs of the given transform application: every input
 * whose tag is not registered among the transform's additional inputs.
 *
 * @throws IllegalArgumentException if the application has inputs but none are main inputs
 */
public static Collection<PValue> nonAdditionalInputs(AppliedPTransform<?, ?, ?> application) {
  ImmutableList.Builder<PValue> mainInputs = ImmutableList.builder();
  PTransform<?, ?> transform = application.getTransform();
  for (Map.Entry<TupleTag<?>, PCollection<?>> input : application.getInputs().entrySet()) {
    if (!transform.getAdditionalInputs().containsKey(input.getKey())) {
      mainInputs.add(input.getValue());
    }
  }
  // Build the immutable list once and reuse it; the original called build() three times,
  // materializing a fresh list on each call.
  ImmutableList<PValue> built = mainInputs.build();
  checkArgument(
      !built.isEmpty() || application.getInputs().isEmpty(),
      "Expected at least one main input if any inputs exist");
  return built;
}
// Verifies that a transform with exactly one (main) input reports that input.
@Test
public void nonAdditionalInputsWithOneMainInputSucceeds() {
    PCollection<Long> input = pipeline.apply(GenerateSequence.from(1L));
    AppliedPTransform<PInput, POutput, TestTransform> transform =
        AppliedPTransform.of(
            "input-single",
            Collections.singletonMap(new TupleTag<Long>() {}, input),
            Collections.emptyMap(),
            new TestTransform(),
            ResourceHints.create(),
            pipeline);
    assertThat(TransformInputs.nonAdditionalInputs(transform), Matchers.containsInAnyOrder(input));
}
/**
 * Instantiates the configured plugin, wrapping environment failures
 * (I/O and malformed URI) in a {@link CucumberException}.
 */
Plugin create(Options.Plugin plugin) {
    try {
        return instantiate(plugin.pluginString(), plugin.pluginClass(), plugin.argument());
    } catch (IOException e) {
        throw new CucumberException(e);
    } catch (URISyntaxException e) {
        throw new CucumberException(e);
    }
}
// Verifies that a plugin option of the form "Class:file" passes the file argument through.
@Test
void instantiates_custom_file_plugin() {
    PluginOption option = parse(WantsFile.class.getName() + ":halp.txt");
    WantsFile plugin = (WantsFile) fc.create(option);
    assertThat(plugin.out, is(equalTo(new File("halp.txt"))));
}
/**
 * Unsupported: a LazySet is read-only, so every mutator fails uniformly.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public void clear() {
    final String message = "LazySet is not modifiable";
    throw new UnsupportedOperationException(message);
}
// Verifies that clear() on the immutable set is rejected.
@Test(expected = UnsupportedOperationException.class)
public void testClear_throwsException() {
    set.clear();
}
// Splits the file names into delete batches of at most MAX_REQUESTS_PER_BATCH entries,
// enqueueing one delete request per file into each freshly supplied batch.
List<BatchInterface> makeRemoveBatches(Collection<String> filenames) throws IOException {
    List<BatchInterface> batches = new ArrayList<>();
    for (List<String> filesToDelete : Lists.partition(Lists.newArrayList(filenames), MAX_REQUESTS_PER_BATCH)) {
        BatchInterface batch = batchRequestSupplier.get();
        for (String file : filesToDelete) {
            enqueueDelete(GcsPath.fromUri(file), batch);
        }
        batches.add(batch);
    }
    return batches;
}
// Verifies batch partitioning: 3 and 100 files fit in one batch; 501 files need 6,
// while the total enqueued request count is always preserved.
@Test
public void testMakeRemoveBatches() throws IOException {
    GcsUtil gcsUtil = gcsOptionsWithTestCredential().getGcsUtil();
    // Small number of files fits in 1 batch
    List<BatchInterface> batches = gcsUtil.makeRemoveBatches(makeStrings("s", 3));
    assertThat(batches.size(), equalTo(1));
    assertThat(sumBatchSizes(batches), equalTo(3));
    // 1 batch of files fits in 1 batch
    batches = gcsUtil.makeRemoveBatches(makeStrings("s", 100));
    assertThat(batches.size(), equalTo(1));
    assertThat(sumBatchSizes(batches), equalTo(100));
    // A little more than 5 batches of files fits in 6 batches
    batches = gcsUtil.makeRemoveBatches(makeStrings("s", 501));
    assertThat(batches.size(), equalTo(6));
    assertThat(sumBatchSizes(batches), equalTo(501));
}
/**
 * Case-insensitive substring match of every search term against the bookmark's display
 * name, username, comment, hostname and labels. Returns true as soon as any term
 * matches any field.
 */
@Override
public boolean accept(final Host bookmark) {
    final String[] elements = StringUtils.split(StringUtils.lowerCase(searchString, Locale.ROOT));
    // Hoist the per-bookmark lowercasing out of the loop; the original recomputed each
    // field for every search term. StringUtils.lowerCase is null-safe (null in, null out).
    final String name = StringUtils.lowerCase(BookmarkNameProvider.toString(bookmark), Locale.ROOT);
    final String username = StringUtils.lowerCase(bookmark.getCredentials().getUsername(), Locale.ROOT);
    final String comment = StringUtils.lowerCase(bookmark.getComment(), Locale.ROOT);
    final String hostname = StringUtils.lowerCase(bookmark.getHostname(), Locale.ROOT);
    for(String search : elements) {
        if(name.contains(search)) {
            return true;
        }
        if(null != username) {
            if(username.contains(search)) {
                return true;
            }
        }
        if(null != comment) {
            if(comment.contains(search)) {
                return true;
            }
        }
        if(hostname.contains(search)) {
            return true;
        }
        for(String label : bookmark.getLabels()) {
            if(StringUtils.lowerCase(label, Locale.ROOT).contains(search)) {
                return true;
            }
        }
    }
    return false;
}
// Verifies matching against hostname, protocol-derived name, nickname and labels,
// and that only full substring matches of each term are accepted.
@Test
public void accept() {
    final Host bookmark = new Host(new TestProtocol(Scheme.http), "a");
    assertTrue(new BookmarkSearchFilter("b a").accept(bookmark));
    assertFalse(new BookmarkSearchFilter("b testa").accept(bookmark));
    assertFalse(new BookmarkSearchFilter("b b").accept(bookmark));
    assertTrue(new BookmarkSearchFilter("HTTP").accept(bookmark));
    bookmark.setNickname("t");
    assertTrue(new BookmarkSearchFilter("t").accept(bookmark));
    assertFalse(new BookmarkSearchFilter("t2").accept(bookmark));
    bookmark.setLabels(Collections.singleton("l"));
    assertTrue(new BookmarkSearchFilter("l").accept(bookmark));
    assertFalse(new BookmarkSearchFilter("l2").accept(bookmark));
}
// Maps a Calcite RelDataType to the engine's ColumnDataType. ARRAY types are unwrapped
// to their component type and mapped to the corresponding *_ARRAY variant; NULL maps to
// UNKNOWN; OTHER/ANY map to OBJECT; unrecognised scalar types fall back to OBJECT with
// a warning, while unrecognised collection types are rejected.
public static ColumnDataType convertToColumnDataType(RelDataType relDataType) {
    SqlTypeName sqlTypeName = relDataType.getSqlTypeName();
    if (sqlTypeName == SqlTypeName.NULL) {
        return ColumnDataType.UNKNOWN;
    }
    boolean isArray = (sqlTypeName == SqlTypeName.ARRAY);
    if (isArray) {
        // For arrays, classify by the element type.
        assert relDataType.getComponentType() != null;
        sqlTypeName = relDataType.getComponentType().getSqlTypeName();
    }
    switch (sqlTypeName) {
        case BOOLEAN:
            return isArray ? ColumnDataType.BOOLEAN_ARRAY : ColumnDataType.BOOLEAN;
        case TINYINT:
        case SMALLINT:
        case INTEGER:
            return isArray ? ColumnDataType.INT_ARRAY : ColumnDataType.INT;
        case BIGINT:
            return isArray ? ColumnDataType.LONG_ARRAY : ColumnDataType.LONG;
        case DECIMAL:
            // Decimal precision/scale decides the concrete numeric type.
            return resolveDecimal(relDataType, isArray);
        case FLOAT:
        case REAL:
            return isArray ? ColumnDataType.FLOAT_ARRAY : ColumnDataType.FLOAT;
        case DOUBLE:
            return isArray ? ColumnDataType.DOUBLE_ARRAY : ColumnDataType.DOUBLE;
        case DATE:
        case TIME:
        case TIMESTAMP:
            return isArray ? ColumnDataType.TIMESTAMP_ARRAY : ColumnDataType.TIMESTAMP;
        case CHAR:
        case VARCHAR:
            return isArray ? ColumnDataType.STRING_ARRAY : ColumnDataType.STRING;
        case BINARY:
        case VARBINARY:
            return isArray ? ColumnDataType.BYTES_ARRAY : ColumnDataType.BYTES;
        case OTHER:
        case ANY:
            return ColumnDataType.OBJECT;
        default:
            if (relDataType.getComponentType() != null) {
                throw new IllegalArgumentException("Unsupported collection type: " + relDataType);
            }
            LOGGER.warn("Unexpected SQL type: {}, use OBJECT instead", sqlTypeName);
            return ColumnDataType.OBJECT;
    }
}
// Verifies the RelDataType -> ColumnDataType mapping for each supported ObjectSqlType.
@Test
public void testConvertToColumnDataTypeForObjectTypes() {
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.BOOLEAN, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.BOOLEAN);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.TINYINT, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.INT);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.SMALLINT, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.INT);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.INTEGER, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.INT);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.BIGINT, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.LONG);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.FLOAT, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.FLOAT);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.DOUBLE, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.DOUBLE);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.TIMESTAMP, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.TIMESTAMP);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.CHAR, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.STRING);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.VARCHAR, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.STRING);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.VARBINARY, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.BYTES);
    Assert.assertEquals(RelToPlanNodeConverter.convertToColumnDataType(
        new ObjectSqlType(SqlTypeName.OTHER, SqlIdentifier.STAR, true, null, null)), DataSchema.ColumnDataType.OBJECT);
}
// Resolves the mapping fields for an Avro key or value side. Rejects top-level paths
// (Avro requires named record fields) and the combination of an inline schema with a
// schema registry. With no user columns, fields are derived from the inline schema
// (which is then mandatory); with user columns, they are validated against the schema
// if one is provided.
@Override
public Stream<MappingField> resolveAndValidateFields(
        boolean isKey,
        List<MappingField> userFields,
        Map<String, String> options,
        InternalSerializationService serializationService
) {
    Map<QueryPath, MappingField> fieldsByPath = extractFields(userFields, isKey);
    for (QueryPath path : fieldsByPath.keySet()) {
        if (path.isTopLevel()) {
            throw QueryException.error("Cannot use the '" + path + "' field with Avro serialization");
        }
    }
    Schema schema = getSchema(fieldsByPath, options, isKey);
    if (schema != null && options.containsKey("schema.registry.url")) {
        throw new IllegalArgumentException("Inline schema cannot be used with schema registry");
    }
    if (userFields.isEmpty()) {
        if (schema == null) {
            throw QueryException.error(
                    "Either a column list or an inline schema is required to create Avro-based mapping");
        }
        return resolveFields(schema,
                (name, type) -> new MappingField(name, type, new QueryPath(name, isKey).toString()));
    } else {
        if (schema != null) {
            validate(schema, getFields(fieldsByPath).collect(toList()));
        }
        return fieldsByPath.values().stream();
    }
}
// Verifies that a non-record inline Avro schema (here: a bare INT) is rejected.
@Test
public void when_schemaIsNotRecord_then_throws() {
    assertThatThrownBy(() -> INSTANCE.resolveAndValidateFields(
            isKey,
            List.of(field("field", QueryDataType.INT)),
            Map.of(isKey ? OPTION_KEY_AVRO_SCHEMA : OPTION_VALUE_AVRO_SCHEMA,
                    Schema.create(Schema.Type.INT).toString()),
            null
    )).hasMessage("Schema must be an Avro record");
}
// Dispatches record validation and offset assignment by compression: uncompressed
// records are either converted (magic mismatch) or validated in place, while any
// compression on source or target goes through the compressed path.
public ValidationResult validateMessagesAndAssignOffsets(PrimitiveRef.LongRef offsetCounter,
                                                         MetricsRecorder metricsRecorder,
                                                         BufferSupplier bufferSupplier) {
    if (sourceCompressionType == CompressionType.NONE && targetCompression.type() == CompressionType.NONE) {
        // check the magic value
        if (!records.hasMatchingMagic(toMagic))
            return convertAndAssignOffsetsNonCompressed(offsetCounter, metricsRecorder);
        else
            // Do in-place validation, offset assignment and maybe set timestamp
            return assignOffsetsNonCompressed(offsetCounter, metricsRecorder);
    } else
        return validateMessagesAndAssignOffsetsCompressed(offsetCounter, metricsRecorder, bufferSupplier);
}
// Verifies that offsets are assigned correctly when gzip-compressed V1 records are
// down-converted to V0 during validation.
@Test
public void testOffsetAssignmentAfterDownConversionV1ToV0Compressed() {
    long offset = 1234567;
    long now = System.currentTimeMillis();
    Compression compression = Compression.gzip().build();
    MemoryRecords records = createRecords(RecordBatch.MAGIC_VALUE_V1, now, compression);
    checkOffsets(records, 0);
    checkOffsets(new LogValidator(
        records,
        new TopicPartition("topic", 0),
        time,
        CompressionType.GZIP,
        compression,
        false,
        RecordBatch.MAGIC_VALUE_V0,
        TimestampType.CREATE_TIME,
        5000L,
        5000L,
        RecordBatch.NO_PARTITION_LEADER_EPOCH,
        AppendOrigin.CLIENT,
        MetadataVersion.latestTesting()
    ).validateMessagesAndAssignOffsets(
        PrimitiveRef.ofLong(offset), metricsRecorder, RequestLocal.withThreadConfinedCaching().bufferSupplier()
    ).validatedRecords, offset);
}
// Flattens server settings into a key/value map, preserving insertion order.
// Inherited settings are skipped except the file-suffix/pattern keys needed for
// language detection. Multi-values are CSV-joined with escaping; property sets are
// expanded; an unknown value case is only tolerated for secured keys.
static Map<String, String> toMap(List<Settings.Setting> settingsList) {
    Map<String, String> result = new LinkedHashMap<>();
    for (Settings.Setting s : settingsList) {
        // we need the "*.file.suffixes" and "*.file.patterns" properties for language detection
        // see DefaultLanguagesRepository.populateFileSuffixesAndPatterns()
        if (!s.getInherited() || s.getKey().endsWith(".file.suffixes") || s.getKey().endsWith(".file.patterns")) {
            switch (s.getValueOneOfCase()) {
                case VALUE:
                    result.put(s.getKey(), s.getValue());
                    break;
                case VALUES:
                    result.put(s.getKey(),
                        s.getValues().getValuesList().stream().map(StringEscapeUtils::escapeCsv).collect(Collectors.joining(",")));
                    break;
                case FIELDVALUES:
                    convertPropertySetToProps(result, s);
                    break;
                default:
                    // Secured settings legitimately arrive without a value.
                    if (!s.getKey().endsWith(".secured")) {
                        throw new IllegalStateException("Unknown property value for " + s.getKey());
                    }
            }
        }
    }
    return result;
}
// Verifies that an inherited setting (not matching the file-suffix/pattern exception)
// is excluded from the resulting map.
@Test
public void should_not_load_inherited_properties() {
    assertThat(AbstractSettingsLoader.toMap(List.of(
        Setting.newBuilder()
            .setInherited(true)
            .setKey("sonar.inherited.property")
            .setValues(Values.newBuilder().addValues("foo")).build()
    ))).isEmpty();
}
// Returns a typed MongoDB collection handle for the given name; pure delegation.
public <T extends MongoEntity> MongoCollection<T> collection(String collectionName, Class<T> valueType) {
    return getCollection(collectionName, valueType);
}
// Verifies that inserting an entity without an id generates an ObjectId, and that the
// stored document can be read back with the generated id in hex form.
@Test
void testIdGeneration() {
    final MongoCollection<IdGenerationTest> collection =
        collections.collection("id-generation-test", IdGenerationTest.class);
    final var testObject = new IdGenerationTest(null);
    final InsertOneResult result = collection.insertOne(testObject);
    final BsonValue insertedId = result.getInsertedId();
    assertThat(insertedId).isNotNull().satisfies(id -> assertThat(id.isObjectId()).isTrue());
    assertThat(collection.find(Filters.eq("_id", insertedId)).first())
        .isEqualTo(new IdGenerationTest(insertedId.asObjectId().getValue().toHexString()));
}
/**
 * Scrapes raw metrics for the given node from its Prometheus exporter endpoint,
 * building a web client configured with the cluster's auth and TLS settings.
 */
@Override
public Flux<RawMetric> retrieve(KafkaCluster c, Node node) {
  // Fetch the config once; the original called c.getMetricsConfig() a second time
  // just for the log statement.
  MetricsConfig metricsConfig = c.getMetricsConfig();
  log.debug("Retrieving metrics from prometheus exporter: {}:{}", node.host(), metricsConfig.getPort());
  var webClient = new WebClientConfigurator()
      .configureBufferSize(DataSize.ofMegabytes(20))
      .configureBasicAuth(metricsConfig.getUsername(), metricsConfig.getPassword())
      .configureSsl(
          c.getOriginalProperties().getSsl(),
          new ClustersProperties.KeystoreConfig(
              metricsConfig.getKeystoreLocation(),
              metricsConfig.getKeystorePassword()))
      .build();
  return retrieve(webClient, node.host(), metricsConfig);
}
// Verifies that the retriever calls the /metrics endpoint and converts the response
// body into the expected RawMetric sequence (NaN-valued metrics are dropped).
@Test
void callsMetricsEndpointAndConvertsResponceToRawMetric() {
    var url = mockWebServer.url("/metrics");
    mockWebServer.enqueue(prepareResponse());
    MetricsConfig metricsConfig = prepareMetricsConfig(url.port(), null, null);
    StepVerifier.create(retriever.retrieve(WebClient.create(), url.host(), metricsConfig))
        .expectNextSequence(expectedRawMetrics())
        // third metric should not be present, since it has "NaN" value
        .verifyComplete();
}
// Pre-processes a segment before load: adds default columns from the schema, rebuilds
// single-column indexes (forward index first, since it may rewrite the dictionary and
// metadata the other handlers depend on), generates min/max metadata, and finally
// creates/updates star-trees. Segment metadata is deliberately re-read after each step
// that may mutate it — the statement order here is load-bearing.
public void process() throws Exception {
    if (_segmentMetadata.getTotalDocs() == 0) {
        LOGGER.info("Skip preprocessing empty segment: {}", _segmentMetadata.getName());
        return;
    }
    // Segment processing has to be done with a local directory.
    File indexDir = new File(_indexDirURI);
    // This fixes the issue of temporary files not getting deleted after creating new inverted indexes.
    removeInvertedIndexTempFiles(indexDir);
    try (SegmentDirectory.Writer segmentWriter = _segmentDirectory.createWriter()) {
        // Update default columns according to the schema.
        if (_schema != null) {
            DefaultColumnHandler defaultColumnHandler = DefaultColumnHandlerFactory
                .getDefaultColumnHandler(indexDir, _segmentMetadata, _indexLoadingConfig, _schema, segmentWriter);
            defaultColumnHandler.updateDefaultColumns();
            _segmentMetadata = new SegmentMetadataImpl(indexDir);
            _segmentDirectory.reloadMetadata();
        } else {
            LOGGER.warn("Skip creating default columns for segment: {} without schema", _segmentMetadata.getName());
        }
        // Update single-column indices, like inverted index, json index etc.
        List<IndexHandler> indexHandlers = new ArrayList<>();
        // We cannot just create all the index handlers in a random order.
        // Specifically, ForwardIndexHandler needs to be executed first. This is because it modifies the segment metadata
        // while rewriting forward index to create a dictionary. Some other handlers (like the range one) assume that
        // metadata was already been modified by ForwardIndexHandler.
        IndexHandler forwardHandler = createHandler(StandardIndexes.forward());
        indexHandlers.add(forwardHandler);
        forwardHandler.updateIndices(segmentWriter);
        // Now that ForwardIndexHandler.updateIndices has been updated, we can run all other indexes in any order
        _segmentMetadata = new SegmentMetadataImpl(indexDir);
        _segmentDirectory.reloadMetadata();
        for (IndexType<?, ?, ?> type : IndexService.getInstance().getAllIndexes()) {
            if (type != StandardIndexes.forward()) {
                IndexHandler handler = createHandler(type);
                indexHandlers.add(handler);
                handler.updateIndices(segmentWriter);
                // Other IndexHandler classes may modify the segment metadata while creating a temporary forward
                // index to generate their respective indexes from if the forward index was disabled. This new metadata is
                // needed to construct other indexes like RangeIndex.
                _segmentMetadata = _segmentDirectory.getSegmentMetadata();
            }
        }
        // Perform post-cleanup operations on the index handlers.
        for (IndexHandler handler : indexHandlers) {
            handler.postUpdateIndicesCleanup(segmentWriter);
        }
        // Add min/max value to column metadata according to the prune mode.
        ColumnMinMaxValueGeneratorMode columnMinMaxValueGeneratorMode =
            _indexLoadingConfig.getColumnMinMaxValueGeneratorMode();
        if (columnMinMaxValueGeneratorMode != ColumnMinMaxValueGeneratorMode.NONE) {
            ColumnMinMaxValueGenerator columnMinMaxValueGenerator =
                new ColumnMinMaxValueGenerator(_segmentMetadata, segmentWriter, columnMinMaxValueGeneratorMode);
            columnMinMaxValueGenerator.addColumnMinMaxValue();
            // NOTE: This step may modify the segment metadata. When adding new steps after this, un-comment the next line.
            // _segmentMetadata = new SegmentMetadataImpl(indexDir);
        }
        segmentWriter.save();
    }
    // Startree creation will load the segment again, so we need to close and re-open the segment writer to make sure
    // that the other required indices (e.g. forward index) are up-to-date.
    try (SegmentDirectory.Writer segmentWriter = _segmentDirectory.createWriter()) {
        // Create/modify/remove star-trees if required.
        processStarTrees(indexDir);
        _segmentDirectory.reloadMetadata();
        segmentWriter.save();
    }
}
// Verifies that on a V1 segment the preprocessor can add derived H3/Json columns,
// create their index files on demand, and remove them again when the config drops them.
@Test
public void testV1CleanupH3AndTextIndices() throws Exception {
    constructV1Segment(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(),
        Collections.emptyList());
    // Remove all indices and add the two derived columns for H3 and Json index.
    try (SegmentDirectory segmentDirectory = SegmentDirectoryLoaderRegistry.getDefaultSegmentDirectoryLoader()
        .load(_indexDir.toURI(),
            new SegmentDirectoryLoaderContext.Builder().setSegmentDirectoryConfigs(_configuration).build());
        SegmentPreProcessor processor = new SegmentPreProcessor(segmentDirectory, getDefaultIndexLoadingConfig(),
            _newColumnsSchemaWithH3Json)) {
        processor.process();
    }
    SegmentMetadataImpl segmentMetadata = new SegmentMetadataImpl(_indexDir);
    assertNotNull(segmentMetadata.getColumnMetadataFor("newH3Col"));
    assertNotNull(segmentMetadata.getColumnMetadataFor("newJsonCol"));
    _indexLoadingConfig = getDefaultIndexLoadingConfig();
    _indexLoadingConfig.setH3IndexConfigs(
        ImmutableMap.of("newH3Col", new H3IndexConfig(ImmutableMap.of("resolutions", "5"))));
    _indexLoadingConfig.setJsonIndexColumns(new HashSet<>(Collections.singletonList("newJsonCol")));
    // V1 use separate file for each column index.
    File h3File = new File(_indexDir, "newH3Col" + V1Constants.Indexes.H3_INDEX_FILE_EXTENSION);
    File jsFile = new File(_indexDir, "newJsonCol" + V1Constants.Indexes.JSON_INDEX_FILE_EXTENSION);
    assertFalse(h3File.exists());
    assertFalse(jsFile.exists());
    // Create H3 and Json indices.
    try (SegmentDirectory segmentDirectory = SegmentDirectoryLoaderRegistry.getDefaultSegmentDirectoryLoader()
        .load(_indexDir.toURI(),
            new SegmentDirectoryLoaderContext.Builder().setSegmentDirectoryConfigs(_configuration).build());
        SegmentPreProcessor processor = new SegmentPreProcessor(segmentDirectory, _indexLoadingConfig, null)) {
        processor.process();
    }
    assertTrue(h3File.exists());
    assertTrue(jsFile.exists());
    // Remove H3 and Json indices.
    try (SegmentDirectory segmentDirectory = SegmentDirectoryLoaderRegistry.getDefaultSegmentDirectoryLoader()
        .load(_indexDir.toURI(),
            new SegmentDirectoryLoaderContext.Builder().setSegmentDirectoryConfigs(_configuration).build());
        SegmentPreProcessor processor = new SegmentPreProcessor(segmentDirectory, getDefaultIndexLoadingConfig(),
            null)) {
        processor.process();
    }
    assertFalse(h3File.exists());
    assertFalse(jsFile.exists());
}
/**
 * Marks a member healthy after a successful probe: registers its address, resets the
 * failure counter, and notifies listeners only if the node state actually changed.
 */
public static void onSuccess(final ServerMemberManager manager, final Member member) {
    // Capture the state before mutating it so we can detect a transition afterwards.
    final NodeState previous = member.getState();
    manager.getMemberAddressInfos().add(member.getAddress());
    member.setState(NodeState.UP);
    member.setFailAccessCnt(0);
    final boolean stateChanged = !Objects.equals(previous, member.getState());
    if (stateChanged) {
        manager.notifyMemberChange(member);
    }
}
// Verifies that a DOWN member transitions to UP with its failure counter reset, and
// that the state change is broadcast to the member manager.
@Test
void testMemberOnSuccessFromDown() {
    final Member remote = buildMember();
    remote.setState(NodeState.DOWN);
    remote.setFailAccessCnt(4);
    MemberUtil.onSuccess(memberManager, remote);
    assertEquals(NodeState.UP, remote.getState());
    assertEquals(0, remote.getFailAccessCnt());
    verify(memberManager).notifyMemberChange(remote);
}
List<Token> tokenize() throws ScanException { List<Token> tokenList = new ArrayList<Token>(); StringBuilder buf = new StringBuilder(); while (pointer < patternLength) { char c = pattern.charAt(pointer); pointer++; switch (state) { case LITERAL_STATE: handleLiteralState(c, tokenList, buf); break; case START_STATE: handleStartState(c, tokenList, buf); break; case DEFAULT_VAL_STATE: handleDefaultValueState(c, tokenList, buf); default: } } // EOS switch (state) { case LITERAL_STATE: addLiteralToken(tokenList, buf); break; case DEFAULT_VAL_STATE: // trailing colon. see also LOGBACK-1140 buf.append(CoreConstants.COLON_CHAR); addLiteralToken(tokenList, buf); break; case START_STATE: // trailing $. see also LOGBACK-1149 buf.append(CoreConstants.DOLLAR); addLiteralToken(tokenList, buf); break; } return tokenList; }
// Verifies tokenization of "${a:-b}" into start, literal 'a', default separator,
// literal 'b', and closing brace tokens.
@Test
public void basicDefaultSeparator() throws ScanException {
    String input = "${a:-b}";
    Tokenizer tokenizer = new Tokenizer(input);
    List<Token> tokenList = tokenizer.tokenize();
    witnessList.add(Token.START_TOKEN);
    witnessList.add(new Token(Token.Type.LITERAL, "a"));
    witnessList.add(Token.DEFAULT_SEP_TOKEN);
    witnessList.add(new Token(Token.Type.LITERAL, "b"));
    witnessList.add(Token.CURLY_RIGHT_TOKEN);
    assertEquals(witnessList, tokenList);
}
/**
 * Reports whether the wrapped internal exception is a TimeoutException
 * (false when it is any other type or null).
 */
public boolean isTimeoutException() {
    return TimeoutException.class.isInstance(internalException);
}
// Verifies that an info built from a TimeoutException reports itself as a timeout.
@Test
public void testIsTimeoutException() {
    EventHandlerExceptionInfo timeoutExceptionInfo = EventHandlerExceptionInfo.fromInternal(
        new TimeoutException(), () -> OptionalInt.of(1));
    assertTrue(timeoutExceptionInfo.isTimeoutException());
}
// Void-returning convenience overload: adapts the Function to the Callable-based
// overload by returning null, preserving the retry behaviour.
public static void executeWithRetries(
    final Function function,
    final RetryBehaviour retryBehaviour
) throws Exception {
    executeWithRetries(() -> {
        function.call();
        return null;
    }, retryBehaviour);
}
// Verifies that a non-retriable RuntimeException is not retried: the propagated
// message is the first call's, proving no second attempt happened.
@Test
public void shouldNotRetryOnNonRetriableException() throws Exception {
    // Given:
    final AtomicBoolean firstCall = new AtomicBoolean(true);
    final Callable<Object> throwsException = () -> {
        if (firstCall.get()) {
            firstCall.set(false);
            throw new RuntimeException("First non-retry exception");
        } else {
            throw new RuntimeException("Test should not retry");
        }
    };
    // When:
    final RuntimeException e = assertThrows(
        RuntimeException.class,
        () -> ExecutorUtil.executeWithRetries(throwsException, ON_RETRYABLE)
    );
    // Then:
    assertThat(e.getMessage(), containsString("First non-retry exception"));
}
// Indexes the record in the shard's local store by its own key, replacing any
// previous entry with the same key.
public void storeData(Data data) {
    dataStore.put(data.getKey(), data);
}
// Verifies (via reflection on the private dataStore map) that storeData keys the
// record under its own key.
@Test
void testStoreData() {
    try {
        shard.storeData(data);
        var field = Shard.class.getDeclaredField("dataStore");
        field.setAccessible(true);
        var dataMap = (Map<Integer, Data>) field.get(shard);
        assertEquals(1, dataMap.size());
        assertEquals(data, dataMap.get(1));
    } catch (NoSuchFieldException | IllegalAccessException e) {
        fail("Fail to modify field access.");
    }
}
/**
 * Returns the logical complement of this comparison: both the equality flag and the
 * direction (less/greater) are flipped, on the same attribute and value.
 */
@Override
public Predicate negate() {
    final boolean flippedEqual = !equal;
    final boolean flippedLess = !less;
    return new GreaterLessPredicate(attributeName, value, flippedEqual, flippedLess);
}
// Verifies that negate() returns a fresh predicate with both flags flipped while
// keeping the attribute name.
@Test
public void negate_whenEqualsTrueAndLessFalse_thenReturnNewInstanceWithEqualsFalseAndLessTrue() {
    String attribute = "attribute";
    Comparable value = 1;
    GreaterLessPredicate original = new GreaterLessPredicate(attribute, value, true, false);
    GreaterLessPredicate negate = (GreaterLessPredicate) original.negate();
    assertThat(negate).isNotSameAs(original);
    assertThat(negate.attributeName).isEqualTo(attribute);
    assertThat(negate.equal).isFalse();
    assertThat(negate.less).isTrue();
}
// UDAF factory: the no-argument variant captures a single earliest value
// (delegates to earliest(true)).
@UdafFactory(description = "return the earliest value of a column")
public static <T> Udaf<T, Struct, T> earliest() {
    return earliest(true);
}
// Verifies that earliest(2) accumulates values in arrival order up to the cap.
@Test
public void shouldCaptureValuesUpToN() {
    // Given:
    final Udaf<Integer, List<Struct>, List<Integer>> udaf = EarliestByOffset.earliest(2);
    udaf.initializeTypeArguments(Collections.singletonList(SqlArgument.of(SqlTypes.INTEGER)));
    // When:
    final List<Struct> res0 = udaf.aggregate(321, new ArrayList<>());
    final List<Struct> res1 = udaf.aggregate(123, res0);
    // Then:
    assertThat(res1, hasSize(2));
    assertThat(res1.get(0).get(VAL_FIELD), is(321));
    assertThat(res1.get(1).get(VAL_FIELD), is(123));
}
/**
 * Human-readable description of this browser action for tool discovery.
 */
@Override
public String getDescription() {
    final String description = "Scrape the content at a given url in markdown format.";
    return description;
}
// Verifies the action's advertised description string.
@Test
void testGetDescription() {
    assertEquals("Scrape the content at a given url in markdown format.",
        markdownBrowserAction.getDescription());
}
// Varargs convenience factory; delegates to the list-based overload.
public static BadRequestException create(String... errorMessages) {
    return create(asList(errorMessages));
}
// Verifies that getMessage() surfaces only the first of multiple error messages.
@Test
public void getMessage_return_first_error() {
    BadRequestException underTest = BadRequestException.create(asList("error1", "error2"));
    assertThat(underTest.getMessage()).isEqualTo("error1");
}
/**
 * Parses the job IDs out of the subclass-specific status/log source.
 *
 * @return the job IDs found, in the order encountered
 * @throws IOException if the underlying source cannot be read
 */
abstract List<String> parseJobID() throws IOException;
// Verifies that the Pig status fixture yields exactly one job ID.
@Test
public void testParsePig() throws IOException {
    String errFileName = "src/test/data/status/pig";
    PigJobIDParser pigJobIDParser = new PigJobIDParser(errFileName, new Configuration());
    List<String> jobs = pigJobIDParser.parseJobID();
    Assert.assertEquals(jobs.size(), 1);
}
/**
 * Convenience overload of {@code parseResourceConfigValue(String, long)} with no
 * explicit cap: values up to {@link Long#MAX_VALUE} are accepted.
 */
public static ConfigurableResource parseResourceConfigValue(String value)
        throws AllocationConfigurationException {
    final long unlimited = Long.MAX_VALUE;
    return parseResourceConfigValue(value, unlimited);
}
// Verifies that a negative percentage in old-style notation is rejected.
@Test
public void testMemoryPercentageNegativeValue() throws Exception {
    expectNegativePercentageOldStyle();
    parseResourceConfigValue("-10% memory, 50% cpu");
}
// Composes this function with the given one: the returned function applies this
// function first, then feeds its result into {@code function}. The anonymous subclass
// captures the enclosing instance via ChainableFunction.this.
public <U> ChainableFunction<F, U> then(Function<? super T, ? extends U> function) {
    return new ChainableFunction<F, U>() {
        @Override
        public U apply(F input) {
            return function.apply(ChainableFunction.this.apply(input));
        }
    };
}
// Verifies composition: parseInteger followed by plus(7) on "11" yields 18.
@Test
public void then() {
    Integer result = parseInteger().then(plus(7)).apply("11");
    assertThat(result).as("parseInt('11') then adding 7").isEqualTo(18);
}
/**
 * Updates a service's protection threshold by name and group, using the default
 * (none) selector.
 */
@Override
public void updateService(String serviceName, String groupName, float protectThreshold) throws NacosException {
    // Assemble the service descriptor, then delegate to the selector-aware overload.
    final Service service = new Service();
    service.setGroupName(groupName);
    service.setName(serviceName);
    service.setProtectThreshold(protectThreshold);
    updateService(service, new NoneSelector());
}
// Verifies that the three-argument overload builds a Service with the given name,
// group and threshold and delegates to the server proxy with a NoneSelector.
@Test
void testUpdateService1() throws NacosException {
    //given
    String serviceName = "service1";
    String groupName = "groupName";
    float protectThreshold = 0.1f;
    //when
    nacosNamingMaintainService.updateService(serviceName, groupName, protectThreshold);
    //then
    verify(serverProxy, times(1)).updateService(argThat(new ArgumentMatcher<Service>() {
        @Override
        public boolean matches(Service service) {
            return service.getName().equals(serviceName) && service.getGroupName().equals(groupName)
                && Math.abs(service.getProtectThreshold() - protectThreshold) < 0.1f;
        }
    }), argThat(o -> o instanceof NoneSelector));
}
// Returns the fixed capacity this buffer was created with.
public int capacity() {
    return capacity;
}
// Verifies the ring buffer reports the capacity it was constructed with.
@Test
void shouldCalculateCapacityForBuffer() {
    assertThat(ringBuffer.capacity(), is(CAPACITY));
}
// Parses a single migration statement: validates the statement type, substitutes
// variables into the parsed form, then re-parses the substituted text. Commands that
// are not plain statements additionally get an AST built for later execution.
public static ParsedCommand parse(
    // CHECKSTYLE_RULES.ON: CyclomaticComplexity
    final String sql, final Map<String, String> variables) {
    validateSupportedStatementType(sql);
    final String substituted;
    try {
        substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables);
    } catch (ParseFailedException e) {
        throw new MigrationException(String.format(
            "Failed to parse the statement. Statement: %s. Reason: %s",
            sql, e.getMessage()));
    }
    final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted)
        .get(0).getStatement();
    final boolean isStatement = StatementType.get(statementContext.statement().getClass())
        == StatementType.STATEMENT;
    return new ParsedCommand(substituted,
        isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY)
            .buildStatement(statementContext)));
}
// Verifies that a DEFINE statement has variables substituted into its value before
// being returned as a DefineVariable command.
@Test
public void shouldDefineStatementWithVariable() {
    // Given:
    final String defineVar = "DEFiNe word = 'walk${suffix}';";
    // When:
    List<CommandParser.ParsedCommand> commands = parse(defineVar, ImmutableMap.of("suffix", "ing"));
    // Then:
    assertThat(commands.size(), is(1));
    assertThat(commands.get(0).getStatement().isPresent(), is (true));
    assertThat(commands.get(0).getStatement().get(), instanceOf(DefineVariable.class));
    assertThat(((DefineVariable) commands.get(0).getStatement().get()).getVariableName(), is("word"));
    assertThat(((DefineVariable) commands.get(0).getStatement().get()).getVariableValue(), is("walking"));
}
/**
 * Resolves the owner of the given key on the consistent hash ring,
 * or null when the ring is empty.
 */
public T get(int key) {
    // An empty ring has no owner for any key.
    if (_points.isEmpty()) {
        debug(_log, "get called on a hash ring with nothing in it");
        return null;
    }
    return _points.get(getIndex(key)).getT();
}
// Verifies that a ring with no points — whether from an empty map or a member with
// zero weight — resolves every key to null.
@Test(groups = { "small", "back-end" })
public void testZeroItems() {
    Map<String, Integer> zero = new HashMap<>();
    ConsistentHashRing<String> test = new ConsistentHashRing<>(zero);
    assertNull(test.get(0));
    zero.put("test", 0);
    test = new ConsistentHashRing<>(zero);
    assertNull(test.get(100));
}
/**
 * Persists a new product review with a freshly generated random identifier,
 * returning the repository's save result.
 */
@Override
public Mono<ProductReview> createProductReview(int productId, int rating, String review, String userId) {
    final ProductReview entity = new ProductReview(UUID.randomUUID(), productId, rating, review, userId);
    return this.productReviewRepository.save(entity);
}
// Verifies that the created review carries the supplied fields plus a generated id,
// and that the same entity is handed to the repository.
@Test
void createProductReview_ReturnsCreatedProductReview() {
    // given
    doAnswer(invocation -> Mono.justOrEmpty(invocation.getArguments()[0])).when(this.productReviewRepository)
        .save(any());
    // when
    StepVerifier.create(this.service.createProductReview(1, 3, "Ну, на троечку",
            "5f1d5cf8-cbd6-11ee-9579-cf24d050b47c"))
        // then
        .expectNextMatches(productReview -> productReview.getProductId() == 1 && productReview.getRating() == 3
            && productReview.getUserId().equals("5f1d5cf8-cbd6-11ee-9579-cf24d050b47c")
            && productReview.getReview().equals("Ну, на троечку")
            && productReview.getId() != null)
        .verifyComplete();
    verify(this.productReviewRepository)
        .save(argThat(productReview -> productReview.getProductId() == 1 && productReview.getRating() == 3
            && productReview.getUserId().equals("5f1d5cf8-cbd6-11ee-9579-cf24d050b47c")
            && productReview.getReview().equals("Ну, на троечку")
            && productReview.getId() != null));
}
// Decides whether a task may be processed now. Without named topologies, only the
// global (unnamed) pause flag applies. With named topologies, the task is blocked if
// its topology is paused or its error-backoff metadata says it cannot run yet.
public boolean canProcessTask(final Task task, final long now) {
    final String topologyName = task.id().topologyName();
    if (!hasNamedTopologies) {
        // TODO implement error handling/backoff for non-named topologies (needs KIP)
        return !pausedTopologies.contains(UNNAMED_TOPOLOGY);
    } else {
        if (pausedTopologies.contains(topologyName)) {
            return false;
        } else {
            // No error metadata means the topology has never failed: allow processing.
            final NamedTopologyMetadata metadata = topologyNameToErrorMetadata.get(topologyName);
            return metadata == null || (metadata.canProcess() && metadata.canProcessTask(task, now));
        }
    }
}
// Verifies that a topology paused at startup blocks only its own tasks, and that
// unpausing re-enables processing.
@Test
public void testNamedTopologiesCanBeStartedPaused() {
    final Set<String> pausedTopologies = new HashSet<>();
    pausedTopologies.add(TOPOLOGY1);
    final TaskExecutionMetadata metadata = new TaskExecutionMetadata(NAMED_TOPOLOGIES,
        pausedTopologies, ProcessingMode.AT_LEAST_ONCE);
    final Task mockTask1 = createMockTask(TOPOLOGY1);
    final Task mockTask2 = createMockTask(TOPOLOGY2);
    assertFalse(metadata.canProcessTask(mockTask1, TIME_ZERO));
    assertTrue(metadata.canProcessTask(mockTask2, TIME_ZERO));
    pausedTopologies.remove(TOPOLOGY1);
    assertTrue(metadata.canProcessTask(mockTask1, TIME_ZERO));
    assertTrue(metadata.canProcessTask(mockTask2, TIME_ZERO));
}
/**
 * Acquires an exclusive lock on the given file and returns the lock token.
 *
 * @param file the remote file to lock
 * @return token identifying the acquired lock
 * @throws BackgroundException when the backend rejects the lock request
 */
@Override
public String lock(final Path file) throws BackgroundException {
    // The API expects the path without its leading delimiter.
    final String resource = StringUtils.removeStart(file.getAbsolute(), String.valueOf(Path.DELIMITER));
    final LocksPathBody body = new LocksPathBody().exclusive(true).allowAccessByAnyUser(true);
    try {
        return new LocksApi(new BrickApiClient(session)).postLocksPath(resource, body).getToken();
    }
    catch(ApiException e) {
        throw new BrickExceptionMappingService().map("Failure to write attributes of {0}", e, file);
    }
}
// End-to-end lock lifecycle: upload a file, lock it, verify that a second
// exclusive lock attempt fails while the first is held, then unlock and delete
// the file using the lock token.
@Test
public void testLock() throws Exception {
    final TransferStatus status = new TransferStatus();
    final Local local = new Local(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
    final byte[] content = RandomUtils.nextBytes(128);
    final OutputStream out = local.getOutputStream(false);
    IOUtils.write(content, out);
    out.close();
    status.setLength(content.length);
    final Path test = new Path(new DefaultHomeFinderService(session).find(),
        new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final BrickUploadFeature upload = new BrickUploadFeature(session, new BrickWriteFeature(session));
    upload.upload(test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED),
        new DisabledStreamListener(), status, new DisabledConnectionCallback());
    assertTrue(new BrickFindFeature(session).find(test));
    final BrickLockFeature feature = new BrickLockFeature(session);
    final String lockid = feature.lock(test);
    assertNotNull(lockid);
    try {
        // Lock is exclusive, so a second attempt must be rejected
        feature.lock(test);
        fail();
    }
    catch(LockedException e) {
        // Expected
    }
    feature.unlock(test, lockid);
    new BrickDeleteFeature(session).delete(Collections.singletonMap(test, new TransferStatus().withLockId(lockid)),
        new DisabledPasswordCallback(), new Delete.DisabledCallback());
}
/**
 * Maps each record to a new key-value pair; delegates to the named variant
 * with an empty name so the runtime generates the processor name.
 */
@Override
public <KR, VR> KStream<KR, VR> map(final KeyValueMapper<? super K, ? super V, ? extends KeyValue<? extends KR, ? extends VR>> mapper) {
    return map(mapper, NamedInternal.empty());
}
// map(mapper, named) must reject a null mapper with a descriptive NullPointerException.
@Test
public void shouldNotAllowNullMapperOnMapWithNamed() {
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.map(null, Named.as("map")));

    assertThat(exception.getMessage(), equalTo("mapper can't be null"));
}
public ClusterStateBundle.FeedBlock inferContentClusterFeedBlockOrNull(ContentCluster cluster) { if (!feedBlockEnabled) { return null; } var nodeInfos = cluster.getNodeInfos(); var exhaustions = enumerateNodeResourceExhaustionsAcrossAllNodes(nodeInfos); if (exhaustions.isEmpty()) { return null; } int maxDescriptions = 3; String description = exhaustions.stream() .limit(maxDescriptions) .map(NodeResourceExhaustion::toExhaustionAddedDescription) .collect(Collectors.joining(", ")); if (exhaustions.size() > maxDescriptions) { description += String.format(" (... and %d more)", exhaustions.size() - maxDescriptions); } description = decoratedMessage(cluster, description); // FIXME we currently will trigger a cluster state recomputation even if the number of // exhaustions is greater than what is returned as part of the description. Though at // that point, cluster state recomputations will be the least of your worries...! return ClusterStateBundle.FeedBlock.blockedWith(description, exhaustions); }
// A single node exceeding one resource limit (disk 51% vs limit 50%) must
// produce a feed block whose decorated description names the cluster, node,
// resource, usage and limit.
@Test
void feed_block_returned_when_single_resource_beyond_limit() {
    var calc = new ResourceExhaustionCalculator(true, mapOf(usage("disk", 0.5), usage("memory", 0.8)));
    var cf = createFixtureWithReportedUsages(
            forNode(1, usage("disk", 0.51), usage("memory", 0.79)),
            forNode(2, usage("disk", 0.4), usage("memory", 0.6)));
    var feedBlock = calc.inferContentClusterFeedBlockOrNull(cf.cluster());
    assertNotNull(feedBlock);
    assertTrue(feedBlock.blockFeedInCluster());
    // Manually verify message decoration in this test
    assertEquals("in content cluster 'foo': disk on node 1 [storage.1.local] is 51.0% full " +
                 "(the configured limit is 50.0%). See https://docs.vespa.ai/en/operations/feed-block.html",
                 feedBlock.getDescription());
}
/**
 * Static factory returning a {@link KV} pairing the given key with the given value.
 */
public static <K, V> KV<K, V> of(K key, V value) {
    return new KV<>(key, value);
}
// Exhaustively checks that OrderByKey compares pairs by key only: for every
// combination of keys and values, the comparator result must equal the plain
// integer comparison of the two keys, regardless of the values.
@Test
public void testOrderByKey() {
    Comparator<KV<Integer, Integer>> orderByKey = new KV.OrderByKey<>();
    for (Integer key1 : TEST_VALUES) {
        for (Integer val1 : TEST_VALUES) {
            for (Integer key2 : TEST_VALUES) {
                for (Integer val2 : TEST_VALUES) {
                    assertEquals(
                        compareInt(key1, key2), orderByKey.compare(KV.of(key1, val1), KV.of(key2, val2)));
                }
            }
        }
    }
}
/**
 * Attempts to acquire a ZooKeeper inter-process mutex for the given seckill id,
 * waiting up to one second.
 *
 * @param seckillId id used to derive the lock node path under /goodskill
 * @return the acquired mutex, or {@code null} when acquisition failed or timed out
 */
public InterProcessMutex lock(long seckillId) {
    try {
        String rootLockPath = "/goodskill";
        InterProcessMutex interProcessMutex = new InterProcessMutex(client, rootLockPath + "/" + seckillId);
        boolean acquire = interProcessMutex.acquire(1000L, TimeUnit.MILLISECONDS);
        if (acquire) {
            log.info("成功获取到zk锁,秒杀id{}", seckillId);
            return interProcessMutex;
        }
        log.info("未获取到zk锁,秒杀id{}", seckillId);
        // Fix: previously the mutex was returned even when acquire() timed out.
        // Callers release() any non-null result, and releasing a mutex that was
        // never acquired throws — so signal failure with null instead.
        return null;
    } catch (Exception e) {
        // Fix: pass the throwable to the logger so the stack trace is preserved
        // instead of only the message text.
        log.warn("获取zk锁异常:{}", e.getMessage(), e);
        return null;
    }
}
// Ten threads contend on two distinct seckill ids; every thread that obtains
// the mutex must be able to release it and count down exactly once.
// NOTE(review): await() only completes if all 200 acquisitions succeed; a
// single failed acquisition (lock == null) would hang this test — confirm
// that is the intended strictness.
@Test
void lock() throws InterruptedException {
    ExecutorService executorService = Executors.newFixedThreadPool(10);
    int count = 200;
    CountDownLatch countDownLatch = new CountDownLatch(count);
    for (int i = 0; i < count/2; i++) {
        executorService.execute(() -> {
            InterProcessMutex lock = zookeeperLockUtil.lock(10000L);
            if(lock != null) {
                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                try {
                    lock.release();
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
                log.debug("计数器当前值{}", countDownLatch.getCount());
                countDownLatch.countDown();
            }
        });
        executorService.execute(() -> {
            InterProcessMutex lock = zookeeperLockUtil.lock(10001L);
            if(lock != null) {
                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
                try {
                    lock.release();
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
                log.debug("计数器当前值{}", countDownLatch.getCount());
                countDownLatch.countDown();
            }
        });
    }
    countDownLatch.await();
    executorService.shutdown();
    assertTrue(countDownLatch.getCount() == 0);
}
/**
 * Decrypts a Base64-encoded AES ciphertext using the secret key loaded from file.
 *
 * @param encryptedText Base64 ciphertext; surrounding whitespace is ignored
 * @return the decrypted text as UTF-8
 * @throws IllegalStateException wrapping any checked crypto failure
 */
@Override
public String decrypt(String encryptedText) {
    try {
        final javax.crypto.Cipher cipher = javax.crypto.Cipher.getInstance(CRYPTO_ALGO);
        cipher.init(javax.crypto.Cipher.DECRYPT_MODE, loadSecretFile());
        final byte[] encrypted = Base64.decodeBase64(StringUtils.trim(encryptedText));
        final byte[] clearData = cipher.doFinal(encrypted);
        return new String(clearData, StandardCharsets.UTF_8);
    } catch (RuntimeException e) {
        // Runtime failures propagate unchanged.
        throw e;
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}
// Decrypting with a key other than the one used for encryption must fail with
// a RuntimeException caused by BadPaddingException.
@Test
public void decrypt_other_key() throws Exception {
    URL resource = getClass().getResource("/org/sonar/api/config/internal/AesCipherTest/other_secret_key.txt");
    AesECBCipher cipher = new AesECBCipher(new File(resource.toURI()).getCanonicalPath());

    assertThatThrownBy(() -> cipher.decrypt("9mx5Zq4JVyjeChTcVjEide4kWCwusFl7P2dSVXtg9IY="))
        .isInstanceOf(RuntimeException.class)
        .hasCauseInstanceOf(BadPaddingException.class);
}
/**
 * Runs {@code onStartup} for every registered {@link JvmInitializer}, in the
 * order returned by {@code ReflectHelpers.loadServicesOrdered}.
 */
public static void runOnStartup() {
    for (JvmInitializer initializer : ReflectHelpers.loadServicesOrdered(JvmInitializer.class)) {
        // We write to standard out since logging has yet to be initialized.
        System.out.format("Running JvmInitializer#onStartup for %s%n", initializer);
        initializer.onStartup();
        System.out.format("Completed JvmInitializer#onStartup for %s%n", initializer);
    }
}
// runOnStartup must invoke the registered initializer (observed via the
// onStartupRan flag) and report progress on standard out.
@Test
public void runOnStartup_runsInitializers() {
    JvmInitializers.runOnStartup();

    assertTrue(onStartupRan);
    MatcherAssert.assertThat(
        systemOutRule.getLog(), containsString("Running JvmInitializer#onStartup"));
}
/**
 * Encodes an int as a byte array using the default byte order
 * ({@code DEFAULT_ORDER}); delegates to the two-argument overload.
 */
public static byte[] intToBytes(int intValue) {
    return intToBytes(intValue, DEFAULT_ORDER);
}
// Round-trips a random int through intToBytes: the default-order encoding is
// read back little-endian, and the explicit BIG_ENDIAN encoding is read back
// big-endian; both must reproduce the original value.
@Test
public void bytesToIntTest(){
    int a = RandomUtil.randomInt(0, Integer.MAX_VALUE);
    ByteBuffer wrap = ByteBuffer.wrap(ByteUtil.intToBytes(a));
    wrap.order(ByteOrder.LITTLE_ENDIAN);
    int aInt = wrap.getInt();
    assertEquals(a, aInt);

    wrap = ByteBuffer.wrap(ByteUtil.intToBytes(a, ByteOrder.BIG_ENDIAN));
    wrap.order(ByteOrder.BIG_ENDIAN);
    aInt = wrap.getInt();
    assertEquals(a, aInt);
}
/**
 * Parses forwarded/non-forwarded/read-fields annotation strings into
 * {@code result}; delegates to the overload with the trailing boolean flag
 * fixed to {@code false} (presumably "skip incompatible types" — confirm in
 * the overload's documentation).
 */
public static void getSemanticPropsSingleFromString(
        SingleInputSemanticProperties result,
        String[] forwarded,
        String[] nonForwarded,
        String[] readSet,
        TypeInformation<?> inType,
        TypeInformation<?> outType) {
    getSemanticPropsSingleFromString(
            result, forwarded, nonForwarded, readSet, inType, outType, false);
}
// An unparsable forwarded-fields expression must raise InvalidSemanticAnnotationException.
@Test
void testForwardedInvalidString() {
    String[] forwardedFields = {"notValid"};
    SingleInputSemanticProperties sp = new SingleInputSemanticProperties();
    assertThatThrownBy(
            () ->
                SemanticPropUtil.getSemanticPropsSingleFromString(
                    sp,
                    forwardedFields,
                    null,
                    null,
                    threeIntTupleType,
                    threeIntTupleType))
        .isInstanceOf(InvalidSemanticAnnotationException.class);
}
/**
 * Moves a file by copying it to the target and then deleting the source.
 *
 * @return the path of the copied (target) file
 */
@Override
public Path move(final Path file, final Path renamed, final TransferStatus status,
                 final Delete.Callback callback, final ConnectionCallback connectionCallback) throws BackgroundException {
    // Copy first with the source length, then remove the original.
    final TransferStatus copyStatus = new TransferStatus().withLength(file.attributes().getSize());
    final Path copy = proxy.copy(file, renamed, copyStatus, connectionCallback, new DisabledStreamListener());
    delete.delete(Collections.singletonList(file), connectionCallback, callback);
    return copy;
}
// Moving a file must leave the source absent and the target present.
@Test
public void testMove() throws Exception {
    final Path container = new Path("cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path test = new AzureTouchFeature(session, null).touch(
        new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    assertTrue(new AzureFindFeature(session, null).find(test));
    final Path target = new AzureMoveFeature(session, null).move(test,
        new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)),
        new TransferStatus(), new Delete.DisabledCallback(), new DisabledConnectionCallback());
    assertFalse(new AzureFindFeature(session, null).find(test));
    assertTrue(new AzureFindFeature(session, null).find(target));
    final PathAttributes targetAttr = new AzureAttributesFinderFeature(session, null).find(target);
    new AzureDeleteFeature(session, null).delete(Collections.<Path>singletonList(target),
        new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Fetches task details for the given cluster/task ARNs from the AWS ECS
 * DescribeTasks API and parses the response.
 */
List<Task> describeTasks(String clusterArn, List<String> taskArns, AwsCredentials credentials) {
    // Build the signed request, call the service and parse in one pass.
    String requestBody = createBodyDescribeTasks(clusterArn, taskArns);
    Map<String, String> requestHeaders = createHeadersDescribeTasks(requestBody, credentials);
    return parseDescribeTasks(callAwsService(requestBody, requestHeaders));
}
// Stubs the ECS DescribeTasks endpoint and verifies that both tasks are parsed
// with their private addresses and availability zones.
@Test
public void describeTasks() {
    // given
    String cluster = "arn:aws:ecs:eu-central-1:665466731577:cluster/rafal-test-cluster";
    Map<String, String> tasks = Map.of(
        "arn:aws:ecs:eu-central-1-east-1:012345678910:task/0b69d5c0-d655-4695-98cd-5d2d526d9d5a", "10.0.1.16",
        "arn:aws:ecs:eu-central-1:012345678910:task/51a01bdf-d00e-487e-ab14-7645330b6207", "10.0.1.219");
    stubDescribeTasks(tasks, cluster);

    // when
    List<Task> result = awsEcsApi.describeTasks(cluster, new ArrayList<>(tasks.keySet()), CREDENTIALS);

    // then
    assertEquals(2, result.size());
    assertThat(result.stream().map(Task::getPrivateAddress)).containsExactlyInAnyOrder("10.0.1.16", "10.0.1.219");
    assertThat(result.stream().map(Task::getAvailabilityZone)).containsExactlyInAnyOrder("eu-central-1a", "eu-central-1a");
}
/**
 * Substitutes ${...} variable references in {@code val} using properties from
 * {@code pc1}; delegates to the two-container overload with no secondary
 * property container.
 */
public static String substVars(String val, PropertyContainer pc1) {
    return substVars(val, pc1, null);
}
// The default value inside ${undef:-...} may itself be a variable reference,
// which must be resolved against the context.
@Test
public void defaultValueReferencingAVariable() {
    context.putProperty("v1", "k1");
    String result = OptionHelper.substVars("${undef:-${v1}}", context);
    assertEquals("k1", result);
}
public void updateByModeBits(Mode.Bits bits) { // Each index equals to corresponding AclAction's ordinal. // E.g. Mode.Bits.READ corresponds to AclAction.READ, the former has index 0 in indexedBits, // the later has ordinal 0 in AclAction. Mode.Bits[] indexedBits = new Mode.Bits[]{ Mode.Bits.READ, Mode.Bits.WRITE, Mode.Bits.EXECUTE }; for (int i = 0; i < 3; i++) { if (bits.imply(indexedBits[i])) { mActions.set(i); } else { mActions.clear(i); } } }
// Every Mode.Bits value must round-trip through updateByModeBits/toModeBits
// unchanged, including composite bit sets and NONE/ALL.
@Test
public void updateByModeBits() {
    AclActions actions = new AclActions();
    actions.updateByModeBits(Mode.Bits.NONE);
    assertEquals(Mode.Bits.NONE, actions.toModeBits());

    actions = new AclActions();
    actions.updateByModeBits(Mode.Bits.READ);
    assertEquals(Mode.Bits.READ, actions.toModeBits());

    actions = new AclActions();
    actions.updateByModeBits(Mode.Bits.WRITE);
    assertEquals(Mode.Bits.WRITE, actions.toModeBits());

    actions = new AclActions();
    actions.updateByModeBits(Mode.Bits.EXECUTE);
    assertEquals(Mode.Bits.EXECUTE, actions.toModeBits());

    actions = new AclActions();
    actions.updateByModeBits(Mode.Bits.READ_WRITE);
    assertEquals(Mode.Bits.READ_WRITE, actions.toModeBits());

    actions = new AclActions();
    actions.updateByModeBits(Mode.Bits.READ_EXECUTE);
    assertEquals(Mode.Bits.READ_EXECUTE, actions.toModeBits());

    actions = new AclActions();
    actions.updateByModeBits(Mode.Bits.WRITE_EXECUTE);
    assertEquals(Mode.Bits.WRITE_EXECUTE, actions.toModeBits());

    actions = new AclActions();
    actions.updateByModeBits(Mode.Bits.ALL);
    assertEquals(Mode.Bits.ALL, actions.toModeBits());
}
/**
 * Lexicographically compares two lists, padding the shorter one with
 * {@code fillValue} so both are treated as having the same length.
 *
 * @return negative, zero or positive as {@code left} is less than, equal to,
 *     or greater than {@code right} under element-wise comparison
 */
static <T extends Comparable<? super T>> int compareListWithFillValue(
    List<T> left, List<T> right, T fillValue) {
  final int limit = Math.max(left.size(), right.size());
  for (int index = 0; index < limit; index++) {
    // Out-of-range positions read as the fill value.
    final T a = index < left.size() ? left.get(index) : fillValue;
    final T b = index < right.size() ? right.get(index) : fillValue;
    final int cmp = a.compareTo(b);
    if (cmp != 0) {
      return cmp;
    }
  }
  return 0;
}
// [1, 3] vs [1, 2, 3] with fill 0: position 1 compares 3 > 2, so the shorter
// list is considered greater despite its missing trailing element.
@Test
public void compareWithFillValue_nonEmptyListVariedSizeWithZeroFillValue_returnsPositive() {
    assertThat(
        ComparisonUtility.compareListWithFillValue(
            Lists.newArrayList(1, 3), Lists.newArrayList(1, 2, 3), 0))
        .isGreaterThan(0);
}
/**
 * Deserializes an Avro-encoded message into a {@link Row}; delegates to the
 * two-argument overload with the boolean flag fixed to {@code true}
 * (see the overload for the flag's semantics).
 */
@Override
public Row deserialize(byte[] message) throws IOException {
    return deserialize(message, true);
}
// A row containing timestamp fields must survive an Avro serialize/deserialize
// round trip when both schemas are built from the same generic schema string.
@Test
void testTimestampGenericGenericSerializeDeserializeNewMapping() throws Exception {
    final Tuple4<Class<? extends SpecificRecord>, SpecificRecord, GenericRecord, Row> testData =
        AvroTestUtils.getTimestampTestData();
    final String schemaString = testData.f2.getSchema().toString();

    final AvroRowSerializationSchema serializationSchema = new AvroRowSerializationSchema(schemaString);
    final AvroRowDeserializationSchema deserializationSchema = new AvroRowDeserializationSchema(schemaString);

    final byte[] bytes = serializationSchema.serialize(testData.f3);
    final Row actual = deserializationSchema.deserialize(bytes);

    assertThat(actual).isEqualTo(testData.f3);
}
/**
 * Sends the request over gRPC and wraps the returned {@link ListenableFuture}
 * in a {@link RequestFuture}. Error responses from the server are surfaced as
 * {@link NacosException} when the future's result is retrieved.
 */
@Override
public RequestFuture requestFuture(Request request) throws NacosException {
    Payload grpcRequest = GrpcUtils.convert(request);
    final ListenableFuture<Payload> requestFuture = grpcFutureServiceStub.request(grpcRequest);
    return new RequestFuture() {

        @Override
        public boolean isDone() {
            return requestFuture.isDone();
        }

        @Override
        public Response get() throws Exception {
            return parseResponse(requestFuture.get());
        }

        @Override
        public Response get(long timeout) throws Exception {
            return parseResponse(requestFuture.get(timeout, TimeUnit.MILLISECONDS));
        }

        // Extracted to remove the duplicated parse-and-check logic the two
        // get() overloads previously carried independently.
        private Response parseResponse(Payload grpcResponse) throws NacosException {
            Response response = (Response) GrpcUtils.parse(grpcResponse);
            if (response instanceof ErrorResponse) {
                throw new NacosException(response.getErrorCode(), response.getMessage());
            }
            return response;
        }
    };
}
// A health check sent via requestFuture must complete and yield a HealthCheckResponse.
@Test
void testRequestFuture() throws Exception {
    RequestFuture requestFuture = connection.requestFuture(new HealthCheckRequest());
    assertTrue(requestFuture.isDone());
    Response response = requestFuture.get();
    assertTrue(response instanceof HealthCheckResponse);
}
@Udf(description = "Converts a TIMESTAMP value from one timezone to another")
public Timestamp convertTz(
    @UdfParameter(
        description = "The TIMESTAMP value.") final Timestamp timestamp,
    @UdfParameter(
        description = "The fromTimeZone in java.util.TimeZone ID format. For example: \"UTC\","
            + " \"America/Los_Angeles\", \"PST\", \"Europe/London\"") final String fromTimeZone,
    @UdfParameter(
        description = "The toTimeZone in java.util.TimeZone ID format. For example: \"UTC\","
            + " \"America/Los_Angeles\", \"PST\", \"Europe/London\"") final String toTimeZone
) {
  // Null in, null out for any missing argument.
  if (timestamp == null || fromTimeZone == null || toTimeZone == null) {
    return null;
  }

  try {
    final long epochMillis = timestamp.getTime();
    // Shift by the difference of the two zones' UTC offsets at that instant.
    final long toOffset = TimeZone.getTimeZone(ZoneId.of(toTimeZone)).getOffset(epochMillis);
    final long fromOffset = TimeZone.getTimeZone(ZoneId.of(fromTimeZone)).getOffset(epochMillis);
    return new Timestamp(epochMillis + (toOffset - fromOffset));
  } catch (DateTimeException e) {
    throw new KsqlFunctionException("Invalid time zone: " + e.getMessage());
  }
}
// A null timestamp input must yield null even when both zones are valid.
@Test
public void shouldReturnNullForNullTimestamp() {
    // When:
    final Object result = udf.convertTz(null, "America/Los_Angeles", "America/New_York");

    // Then:
    assertNull(result);
}
public static boolean shouldEnablePushdownForTable(ConnectorSession session, Table table, String path, Optional<Partition> optionalPartition) { if (!isS3SelectPushdownEnabled(session)) { return false; } if (path == null) { return false; } // Hive table partitions could be on different storages, // as a result, we have to check each individual optionalPartition Properties schema = optionalPartition .map(partition -> getHiveSchema(partition, table)) .orElseGet(() -> getHiveSchema(table)); return shouldEnablePushdownForTable(table, path, schema); }
// Pushdown must be enabled for an S3 path both with and without an explicit partition.
@Test
public void testShouldEnableSelectPushdown() {
    assertTrue(shouldEnablePushdownForTable(session, table, "s3://fakeBucket/fakeObject", Optional.empty()));
    assertTrue(shouldEnablePushdownForTable(session, table, "s3://fakeBucket/fakeObject", Optional.of(partition)));
}
/**
 * Builds the fully merged parameter map for a step about to run. Sources are
 * merged in increasing precedence (subject to each param's mode): system
 * defaults, step-runtime injected (template schema) params, step-type
 * defaults, injected workflow/step info, restart/run params not defined on
 * the step, the step definition itself, step run params, user restart
 * params, and finally system-injected restart params.
 *
 * @param workflowSummary summary of the enclosing workflow run
 * @param stepDefinition the step's definition, including its declared params
 * @param stepRuntime runtime used to inject template-schema params
 * @param runtimeSummary runtime summary carrying restart configuration
 * @return the cleaned, merged parameters keyed by param name
 */
public Map<String, Parameter> generateMergedStepParams(
    WorkflowSummary workflowSummary,
    Step stepDefinition,
    StepRuntime stepRuntime,
    StepRuntimeSummary runtimeSummary) {
  Map<String, ParamDefinition> allParamDefs = new LinkedHashMap<>();

  // Start with default step level params if present
  Map<String, ParamDefinition> globalDefault = defaultParamManager.getDefaultStepParams();
  if (globalDefault != null) {
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        globalDefault,
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_DEFAULT));
  }

  // Merge in injected params returned by step if present (template schema)
  Map<String, ParamDefinition> injectedParams =
      stepRuntime.injectRuntimeParams(workflowSummary, stepDefinition);
  maybeOverrideParamType(allParamDefs);
  if (injectedParams != null) {
    maybeOverrideParamType(injectedParams);
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        injectedParams,
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.TEMPLATE_SCHEMA));
  }

  // Merge in params applicable to step type
  Optional<Map<String, ParamDefinition>> defaultStepTypeParams =
      defaultParamManager.getDefaultParamsForType(stepDefinition.getType());
  if (defaultStepTypeParams.isPresent()) {
    LOG.debug("Merging step level default for {}", stepDefinition.getType());
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        defaultStepTypeParams.get(),
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_DEFAULT));
  }

  // Merge in workflow and step info
  ParamsMergeHelper.mergeParams(
      allParamDefs,
      injectWorkflowAndStepInfoParams(workflowSummary, runtimeSummary),
      ParamsMergeHelper.MergeContext.stepCreate(ParamSource.SYSTEM_INJECTED));

  // merge step run param and user provided restart step run params
  // first to get undefined params from both run param and restart params
  Map<String, ParamDefinition> undefinedRestartParams = new LinkedHashMap<>();
  Optional<Map<String, ParamDefinition>> stepRestartParams =
      getUserStepRestartParam(workflowSummary, runtimeSummary);
  stepRestartParams.ifPresent(undefinedRestartParams::putAll);

  Optional<Map<String, ParamDefinition>> stepRunParams =
      getStepRunParams(workflowSummary, runtimeSummary);
  // Reserved, system-injected constant params present during a restart are set
  // aside here and re-applied last so nothing else can override them.
  Map<String, ParamDefinition> systemInjectedRestartRunParams = new LinkedHashMap<>();
  stepRunParams.ifPresent(
      params -> {
        params.forEach(
            (key, val) -> {
              if (runtimeSummary.getRestartConfig() != null
                  && Constants.RESERVED_PARAM_NAMES.contains(key)
                  && val.getMode() == ParamMode.CONSTANT
                  && val.getSource() == ParamSource.SYSTEM_INJECTED) {
                ((AbstractParamDefinition) val)
                    .getMeta()
                    .put(Constants.METADATA_SOURCE_KEY, ParamSource.RESTART.name());
                systemInjectedRestartRunParams.put(key, val);
              }
            });
        systemInjectedRestartRunParams.keySet().forEach(params::remove);
      });
  stepRunParams.ifPresent(undefinedRestartParams::putAll);
  // Params already declared on the step definition are not "undefined".
  Optional.ofNullable(stepDefinition.getParams())
      .ifPresent(
          stepDefParams ->
              stepDefParams.keySet().stream()
                  .filter(undefinedRestartParams::containsKey)
                  .forEach(undefinedRestartParams::remove));

  // Then merge undefined restart params
  if (!undefinedRestartParams.isEmpty()) {
    mergeUserProvidedStepParams(allParamDefs, undefinedRestartParams, workflowSummary);
  }

  // Final merge from step definition
  if (stepDefinition.getParams() != null) {
    maybeOverrideParamType(stepDefinition.getParams());
    ParamsMergeHelper.mergeParams(
        allParamDefs,
        stepDefinition.getParams(),
        ParamsMergeHelper.MergeContext.stepCreate(ParamSource.DEFINITION));
  }

  // merge step run params
  stepRunParams.ifPresent(
      stepParams -> mergeUserProvidedStepParams(allParamDefs, stepParams, workflowSummary));

  // merge all user provided restart step run params
  stepRestartParams.ifPresent(
      stepParams -> mergeUserProvidedStepParams(allParamDefs, stepParams, workflowSummary));

  // merge all system injected restart step run params with mode and source already set.
  allParamDefs.putAll(systemInjectedRestartRunParams);

  // Cleanup any params that are missing and convert to params
  return ParamsMergeHelper.convertToParameters(ParamsMergeHelper.cleanupParams(allParamDefs));
}
// Step run params carried on the workflow summary must appear in the merged
// step params with their defined value.
@Test
public void testMergeStepRunParamFromWorkflowSummary() {
    Map<String, Map<String, ParamDefinition>> stepRunParams =
        singletonMap(
            "stepid", singletonMap("p1", ParamDefinition.buildParamDefinition("p1", "d1")));
    workflowSummary.setStepRunParams(stepRunParams);
    Map<String, Parameter> stepParams =
        paramsManager.generateMergedStepParams(workflowSummary, step, stepRuntime, runtimeSummary);
    Assert.assertFalse(stepParams.isEmpty());
    Assert.assertEquals("d1", stepParams.get("p1").asStringParam().getValue());
}
/**
 * Extracts per-index block settings from a settings response, recording for
 * each index only those {@code index.blocks.*} settings whose value is true.
 */
public static IndicesBlockStatus parseBlockSettings(final GetSettingsResponse settingsResponse) {
    final IndicesBlockStatus result = new IndicesBlockStatus();
    final ImmutableOpenMap<String, Settings> indexToSettingsMap = settingsResponse.getIndexToSettings();
    for (final String index : indexToSettingsMap.keys().toArray(String.class)) {
        final Settings blockSettings = indexToSettingsMap.get(index).getByPrefix(BLOCK_SETTINGS_PREFIX);
        if (blockSettings.isEmpty()) {
            continue;
        }
        // Keep only settings explicitly enabled, restoring the stripped prefix.
        final Set<String> enabledBlocks = blockSettings.names().stream()
                .filter(name -> blockSettings.getAsBoolean(name, false))
                .map(name -> BLOCK_SETTINGS_PREFIX + name)
                .collect(Collectors.toSet());
        if (!enabledBlocks.isEmpty()) {
            result.addIndexBlocks(index, enabledBlocks);
        }
    }
    return result;
}
// An index whose settings are present but empty must not be reported as blocked.
@Test
public void noBlockedIndicesIdentifiedIfEmptySettingsPresent() {
    ImmutableOpenMap.Builder<String, Settings> settingsBuilder = new ImmutableOpenMap.Builder<>();
    settingsBuilder.put("index_0", Settings.builder().build());
    GetSettingsResponse emptySettingsResponse = new GetSettingsResponse(settingsBuilder.build(), ImmutableOpenMap.of());
    final IndicesBlockStatus indicesBlockStatus = BlockSettingsParser.parseBlockSettings(emptySettingsResponse);
    assertNotNull(indicesBlockStatus);
    assertEquals(0, indicesBlockStatus.countBlockedIndices());
}
/**
 * Executes a Liquibase rollback selected by exactly one of count, tag or date.
 * With --dry-run the rollback SQL is written to the output stream instead of
 * being applied.
 *
 * @throws IllegalArgumentException when zero or more than one selector is given
 */
@Override
public void run(Namespace namespace, Liquibase liquibase) throws Exception {
    final String tag = namespace.getString("tag");
    final Integer count = namespace.getInt("count");
    final Date date = namespace.get("date");
    final boolean dryRun = namespace.getBoolean("dry-run") != null && namespace.getBoolean("dry-run");
    final String context = getContext(namespace);
    // Exactly one rollback selector must be present.
    if (Stream.of(tag, count, date).filter(Objects::nonNull).count() != 1) {
        throw new IllegalArgumentException("Must specify either a count, a tag, or a date.");
    }
    if (dryRun) {
        final OutputStreamWriter writer = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8);
        if (count != null) {
            liquibase.rollback(count, context, writer);
        } else if (tag != null) {
            liquibase.rollback(tag, context, writer);
        } else {
            liquibase.rollback(date, context, writer);
        }
    } else if (count != null) {
        liquibase.rollback(count, context);
    } else if (tag != null) {
        liquibase.rollback(tag, context);
    } else {
        liquibase.rollback(date, context);
    }
}
// After migrating, rolling back the single most recent change must remove the
// EMAIL column it introduced.
@Test
void testRollbackNChanges() throws Exception {
    // Migrate some DDL changes to the database
    migrateCommand.run(null, new Namespace(Map.of()), conf);
    try (Handle handle = dbi.open()) {
        assertThat(MigrationTestSupport.columnExists(handle, "PERSONS", "EMAIL"))
            .isTrue();
    }

    // Rollback the last one (the email field)
    rollbackCommand.run(null, new Namespace(Map.of("count", 1)), conf);
    try (Handle handle = dbi.open()) {
        assertThat(MigrationTestSupport.columnExists(handle, "PERSONS", "EMAIL"))
            .isFalse();
    }
}
/**
 * Builds a JAAS configuration entry of the form
 * {@code <moduleName> required key1="value1" key2="value2" ...;}.
 *
 * @param moduleName login module name; must be non-empty and free of '=' and ';'
 * @param options option map; keys must be free of '=' and ';', neither keys nor
 *     values may be null
 * @throws IllegalArgumentException on an invalid module name or key
 * @throws NullPointerException on a null option key or value
 */
public static String jaasConfig(String moduleName, Map<String, String> options) {
    // Fix: validate the module name up front. Previously this check lived
    // inside the options loop, so an empty or malformed module name was
    // silently accepted whenever the options map was empty.
    if (moduleName.isEmpty() || moduleName.contains(";") || moduleName.contains("=")) {
        throw new IllegalArgumentException("module name must be not empty and must not contain '=' or ';'");
    }
    StringJoiner joiner = new StringJoiner(" ");
    for (Entry<String, String> entry : options.entrySet()) {
        String key = Objects.requireNonNull(entry.getKey());
        String value = Objects.requireNonNull(entry.getValue());
        if (key.contains("=") || key.contains(";")) {
            throw new IllegalArgumentException("Keys must not contain '=' or ';'");
        }
        joiner.add(key + "=\"" + value + "\"");
    }
    return moduleName + " required " + joiner + ";";
}
// A null option key must be rejected with a NullPointerException.
@Test
public void testConfigWithNullOptionKey() {
    String moduleName = "ExampleModule";
    Map<String, String> options = new HashMap<>();
    options.put(null, "value1");
    assertThrows(NullPointerException.class, () -> AuthenticationUtils.jaasConfig(moduleName, options));
}
/**
 * Collector building an {@code ImmutableListMultimap} keyed by
 * {@code keyFunction}, with each element used as its own value; delegates to
 * the two-function overload with the identity value function.
 */
public static <K, E> Collector<E, ImmutableListMultimap.Builder<K, E>, ImmutableListMultimap<K, E>> index(Function<? super E, K> keyFunction) {
    return index(keyFunction, Function.identity());
}
// A key function returning null must make the collector fail with a
// NullPointerException carrying a descriptive message.
@Test
public void index_with_valueFunction_fails_if_key_function_returns_null() {
    assertThatThrownBy(() -> SINGLE_ELEMENT_LIST.stream().collect(index(s -> null, MyObj::getText)))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("Key function can't return null");
}