focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Consumes the character that follows an escape marker in the pattern and
// delegates the actual escape handling to the shared escape utility.
void escape(String escapeChars, StringBuffer buf) {
    if (pointer < patternLength) {
        final char escaped = pattern.charAt(pointer++);
        escapeUtil.escape(escapeChars, buf, escaped, pointer);
    }
}
// Verifies that backslash escapes in a pattern are tokenized into literal
// text, both standalone and adjacent to percent/keyword tokens.
@Test
public void testEscape() throws ScanException {
    {
        // A lone escaped percent becomes the literal "%".
        List<Token> actual = new TokenStream("\\%").tokenize();
        List<Token> expected = new ArrayList<Token>();
        expected.add(new Token(Token.LITERAL, "%"));
        assertEquals(expected, actual);
    }
    {
        // A run of escapes collapses into a single literal token.
        List<Token> actual = new TokenStream("\\%\\(\\t\\)\\r\\n").tokenize();
        List<Token> expected = new ArrayList<Token>();
        expected.add(new Token(Token.LITERAL, "%(\t)\r\n"));
        assertEquals(expected, actual);
    }
    {
        // An escaped backslash is a literal; the following "%x" still parses
        // as a percent token plus keyword.
        List<Token> actual = new TokenStream("\\\\%x").tokenize();
        List<Token> expected = new ArrayList<Token>();
        expected.add(new Token(Token.LITERAL, "\\"));
        expected.add(Token.PERCENT_TOKEN);
        expected.add(new Token(Token.SIMPLE_KEYWORD, "x"));
        assertEquals(expected, actual);
    }
    {
        // Escaped right parenthesis after a keyword is a literal ")".
        List<Token> actual = new TokenStream("%x\\)").tokenize();
        List<Token> expected = new ArrayList<Token>();
        expected.add(Token.PERCENT_TOKEN);
        expected.add(new Token(Token.SIMPLE_KEYWORD, "x"));
        expected.add(new Token(Token.LITERAL, ")"));
        assertEquals(expected, actual);
    }
    {
        // "\_" acts as a separator: the underscore itself is dropped.
        List<Token> actual = new TokenStream("%x\\_a").tokenize();
        List<Token> expected = new ArrayList<Token>();
        expected.add(Token.PERCENT_TOKEN);
        expected.add(new Token(Token.SIMPLE_KEYWORD, "x"));
        expected.add(new Token(Token.LITERAL, "a"));
        assertEquals(expected, actual);
    }
    {
        // "\_" between two conversions separates them cleanly.
        List<Token> actual = new TokenStream("%x\\_%b").tokenize();
        List<Token> expected = new ArrayList<Token>();
        expected.add(Token.PERCENT_TOKEN);
        expected.add(new Token(Token.SIMPLE_KEYWORD, "x"));
        expected.add(Token.PERCENT_TOKEN);
        expected.add(new Token(Token.SIMPLE_KEYWORD, "b"));
        assertEquals(expected, actual);
    }
}
// Convenience overload of importUrls(..): imports the classes reachable from a single URL.
@PublicAPI(usage = ACCESS) public JavaClasses importUrl(URL url) { return importUrls(singletonList(url)); }
// Importing a package must pick up all nested types of a class: inner and static
// nested classes, nested interfaces, and a private nested class (which is not
// referenceable at compile time and is therefore resolved reflectively by name).
@Test public void imports_nested_classes() throws Exception { JavaClasses classes = new ClassFileImporter().importUrl(getClass().getResource("testexamples/nestedimport")); assertThatTypes(classes).matchInAnyOrder( ClassWithNestedClass.class, ClassWithNestedClass.NestedClass.class, ClassWithNestedClass.StaticNestedClass.class, ClassWithNestedClass.NestedInterface.class, ClassWithNestedClass.StaticNestedInterface.class, Class.forName(ClassWithNestedClass.class.getName() + "$PrivateNestedClass")); }
// Convenience overload: translates a DataSchema to Avro schema JSON using
// default DataToAvroSchemaTranslationOptions.
public static String dataToAvroSchemaJson(DataSchema dataSchema) { return dataToAvroSchemaJson(dataSchema, new DataToAvroSchemaTranslationOptions()); }
// Verifies that typeref properties listed in the exclude set ("validate", "java")
// are not propagated into the Avro schema produced for unions with aliases;
// the translated schema is compared to the expectation as parsed DataMaps.
@Test(dataProvider = "toAvroSchemaDataTestTypeRefAnnotationPropagationUnionWithAlias") public void testToAvroSchemaTestTypeRefAnnotationPropagationUnionWithAlias(String schemaBeforeTranslation, String expectedAvroSchemaAsString) throws Exception { DataSchema schema = TestUtil.dataSchemaFromPdlString(schemaBeforeTranslation); DataToAvroSchemaTranslationOptions transOptions = new DataToAvroSchemaTranslationOptions(OptionalDefaultMode.TRANSLATE_DEFAULT, JsonBuilder.Pretty.SPACES, EmbedSchemaMode.NONE); transOptions.setTyperefPropertiesExcludeSet(new HashSet<>(Arrays.asList("validate", "java"))); String avroSchemaText = SchemaTranslator.dataToAvroSchemaJson(schema, transOptions); DataMap avroSchemaAsDataMap = TestUtil.dataMapFromString(avroSchemaText); DataMap fieldsPropertiesMap = TestUtil.dataMapFromString(expectedAvroSchemaAsString); assertEquals(avroSchemaAsDataMap, fieldsPropertiesMap); }
// Convenience overload: parses events from JSON using the default BasicEventFactory.
public static Event[] fromJson(final String json) throws IOException { return fromJson(json, BasicEventFactory.INSTANCE); }
// A null JSON payload must yield an empty event array rather than throwing.
@Test
public void testFromJsonWithNull() throws Exception {
    final Event[] parsed = Event.fromJson(null);
    assertEquals(0, parsed.length);
}
// Deletes the given files from B2. Non-container entries are processed first:
// multipart uploads have their parts removed; directories have their ".bzEmpty"
// placeholder version deleted (missing placeholders are logged and skipped);
// plain files are either hidden with a hide marker (when versioning is off or
// no version id is present — an "already_hidden" failure is tolerated) or have
// the specific version deleted. The per-file cached version id is invalidated
// afterwards. Containers (buckets) are deleted in a second pass so they are
// emptied before removal.
@Override public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException { for(Path file : files.keySet()) { if(containerService.isContainer(file)) { continue; } callback.delete(file); if(file.getType().contains(Path.Type.upload)) { new B2LargeUploadPartService(session, fileid).delete(file.attributes().getVersionId()); } else { if(file.isDirectory()) { // Delete /.bzEmpty if any final String placeholder; try { placeholder = fileid.getVersionId(file); } catch(NotfoundException e) { log.warn(String.format("Ignore failure %s deleting placeholder file for %s", e, file)); continue; } if(null == placeholder) { continue; } try { session.getClient().deleteFileVersion(containerService.getKey(file), placeholder); } catch(B2ApiException e) { log.warn(String.format("Ignore failure %s deleting placeholder file for %s", e.getMessage(), file)); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map("Cannot delete {0}", e, file); } } else if(file.isFile()) { try { if(!versioning.isEnabled() || null == file.attributes().getVersionId()) { // Add hide marker if(log.isDebugEnabled()) { log.debug(String.format("Add hide marker %s of %s", file.attributes().getVersionId(), file)); } try { session.getClient().hideFile(fileid.getVersionId(containerService.getContainer(file)), containerService.getKey(file)); } catch(B2ApiException e) { if("already_hidden".equalsIgnoreCase(e.getCode())) { log.warn(String.format("Ignore failure %s hiding file %s already hidden", e.getMessage(), file)); } else { throw e; } } } else { // Delete specific version if(log.isDebugEnabled()) { log.debug(String.format("Delete version %s of %s", file.attributes().getVersionId(), file)); } session.getClient().deleteFileVersion(containerService.getKey(file), file.attributes().getVersionId()); } } catch(B2ApiException e) { throw new B2ExceptionMappingService(fileid).map("Cannot delete {0}", e, file); } catch(IOException e) { 
throw new DefaultIOExceptionMappingService().map("Cannot delete {0}", e, file); } } fileid.cache(file, null); } } for(Path file : files.keySet()) { try { if(containerService.isContainer(file)) { callback.delete(file); // Finally delete bucket itself session.getClient().deleteBucket(fileid.getVersionId(file)); } } catch(B2ApiException e) { throw new B2ExceptionMappingService(fileid).map("Cannot delete {0}", e, file); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map("Cannot delete {0}", e, file); } } }
// Exercises delete-as-hide semantics: deleting a touched file with empty
// attributes (no version id) adds a hide marker; a second delete must tolerate
// the "already_hidden" failure; finally all listed versions and the bucket
// itself are deleted.
@Test public void testDeleteFileHide() throws Exception { final B2VersionIdProvider fileid = new B2VersionIdProvider(session); final Path bucket = new B2DirectoryFeature(session, fileid).mkdir(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); final Path test = new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); final TransferStatus status = new TransferStatus(); new B2TouchFeature(session, fileid).touch(test, status); final String versionId = status.getResponse().getVersionId(); assertNotNull(versionId); // Hide new B2DeleteFeature(session, new B2VersionIdProvider(session)).delete(Collections.singletonList(test.withAttributes(PathAttributes.EMPTY)), new DisabledLoginCallback(), new Delete.DisabledCallback()); // Double hide. Ignore failure already_hidden new B2DeleteFeature(session, new B2VersionIdProvider(session)).delete(Collections.singletonList(test.withAttributes(PathAttributes.EMPTY)), new DisabledLoginCallback(), new Delete.DisabledCallback()); new B2DeleteFeature(session, fileid).delete(new B2ObjectListService(session, fileid).list(bucket, new DisabledListProgressListener()).toList(), new DisabledLoginCallback(), new Delete.DisabledCallback()); new B2DeleteFeature(session, new B2VersionIdProvider(session)).delete(Collections.singletonList(bucket), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
// Validates a Yandex OTP secret. Secrets of exactly SECRET_LENGTH bytes
// (QR-code origin) carry no checksum and are accepted as-is; full-length
// secrets end in a 13-bit checksum stored in the low bits of the last two
// bytes. The checksum is recomputed here as a bitwise polynomial-division
// remainder (polynomial 0b1_1000_1111_0011) over all input bits except the
// final 13, consuming up to 13 bits per round from the byte stream.
// NOTE(review): getNumberOfLeadingZeros is assumed to count leading zeros of
// the 16-bit accumulator value -- confirm against its definition.
public static void validateSecret(byte[] secret) throws OtpInfoException { if (secret.length != SECRET_LENGTH && secret.length != SECRET_FULL_LENGTH) { throw new OtpInfoException(String.format("Invalid Yandex secret length: %d bytes", secret.length)); } // Secrets originating from a QR code do not have a checksum, so we assume those are valid if (secret.length == SECRET_LENGTH) { return; } char originalChecksum = (char) ((secret[secret.length - 2] & 0x0F) << 8 | secret[secret.length - 1] & 0xff); char accum = 0; int accumBits = 0; int inputTotalBitsAvailable = secret.length * 8 - 12; int inputIndex = 0; int inputBitsAvailable = 8; while (inputTotalBitsAvailable > 0) { int requiredBits = 13 - accumBits; if (inputTotalBitsAvailable < requiredBits) { requiredBits = inputTotalBitsAvailable; } while (requiredBits > 0) { int curInput = (secret[inputIndex] & (1 << inputBitsAvailable) - 1) & 0xff; int bitsToRead = Math.min(requiredBits, inputBitsAvailable); curInput >>= inputBitsAvailable - bitsToRead; accum = (char) (accum << bitsToRead | curInput); inputTotalBitsAvailable -= bitsToRead; requiredBits -= bitsToRead; inputBitsAvailable -= bitsToRead; accumBits += bitsToRead; if (inputBitsAvailable == 0) { inputIndex += 1; inputBitsAvailable = 8; } } if (accumBits == 13) { accum ^= 0b1_1000_1111_0011; } accumBits = 16 - getNumberOfLeadingZeros(accum); } if (accum != originalChecksum) { throw new OtpInfoException("Yandex secret checksum invalid"); } }
// Both known-good base32 secret vectors must pass validation without throwing.
@Test(expected = Test.None.class)
public void testYandexSecretValidationOk() throws EncodingException, OtpInfoException {
    YandexInfo.validateSecret(getBase32Vector(0));
    YandexInfo.validateSecret(getBase32Vector(1));
}
// Returns the numeric event type code held by this event.
@Override public int getEventType() { return eventType; }
@Test
public void testGetEventType() {
    // Both fixture events were constructed with event type 23.
    assertEquals(23, dataEvent.getEventType());
    assertEquals(23, objectEvent.getEventType());
}
// Lazily computes and caches the unmodifiable path from this node up to its
// ancestor (delegating to pathToAncestor(0) on first use).
public List<CaseInsensitiveString> pathToAncestor() {
    List<CaseInsensitiveString> path = cachedPathToAncestor;
    if (path == null) {
        path = Collections.unmodifiableList(pathToAncestor(0));
        cachedPathToAncestor = path;
    }
    return path;
}
// The returned path excludes the ancestor node itself ("grand-parent") and is
// ordered from the leaf upwards: child, then parent.
@Test public void shouldReturnOnlyPath_excludingActualAncestorNode() { PathFromAncestor path = new PathFromAncestor(new CaseInsensitiveString("grand-parent/parent/child")); assertThat(path.pathToAncestor(), is(List.of(new CaseInsensitiveString("child"), new CaseInsensitiveString("parent")))); }
@VisibleForTesting static Object convertAvroField(Object avroValue, Schema schema) { if (avroValue == null) { return null; } switch (schema.getType()) { case NULL: case INT: case LONG: case DOUBLE: case FLOAT: case BOOLEAN: return avroValue; case ENUM: case STRING: return avroValue.toString(); // can be a String or org.apache.avro.util.Utf8 case UNION: for (Schema s : schema.getTypes()) { if (s.getType() == Schema.Type.NULL) { continue; } return convertAvroField(avroValue, s); } throw new IllegalArgumentException("Found UNION schema but it doesn't contain any type"); case ARRAY: case BYTES: case FIXED: case RECORD: case MAP: default: throw new UnsupportedOperationException("Unsupported avro schema type=" + schema.getType() + " for value field schema " + schema.getName()); } }
@Test
public void testConvertAvroFloat() {
    // Float values must pass through the conversion unchanged.
    Object result = BaseJdbcAutoSchemaSink.convertAvroField(
            Float.MIN_VALUE,
            createFieldAndGetSchema((builder) -> builder.name("field").type().floatType().noDefault()));
    Assert.assertEquals(result, Float.MIN_VALUE);
}
// Decodes a MySQL TIME2 binlog value: a 3-byte big-endian unsigned integer
// packing seconds in bits 0-5, minutes in bits 6-11, and hours in bits 12+
// (masked to 10 bits here), followed by optional fractional seconds whose
// width is taken from the column metadata. The value 0x800000 is the sentinel
// for the "zero" time and is returned as MySQLTimeValueUtils.ZERO_OF_TIME.
@Override public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) { int time = payload.getByteBuf().readUnsignedMedium(); if (0x800000 == time) { return MySQLTimeValueUtils.ZERO_OF_TIME; } MySQLFractionalSeconds fractionalSeconds = new MySQLFractionalSeconds(columnDef.getColumnMeta(), payload); int hour = (time >> 12) % (1 << 10); int minute = (time >> 6) % (1 << 6); int second = time % (1 << 6); return LocalTime.of(hour, minute, second).withNano(fractionalSeconds.getNanos()); }
@Test
void assertReadNullTime() {
    // 0x800000 is the binlog sentinel for the "zero" TIME value.
    when(payload.getByteBuf()).thenReturn(byteBuf);
    when(byteBuf.readUnsignedMedium()).thenReturn(0x800000);
    assertThat(new MySQLTime2BinlogProtocolValue().read(columnDef, payload),
            is(MySQLTimeValueUtils.ZERO_OF_TIME));
}
/**
 * Returns the cell value at the given 1-based column index.
 *
 * @param columnIndex 1-based column index
 * @return the value stored at that column
 * @throws IllegalArgumentException if the index is outside [1, data.size()]
 */
public Object getCell(final int columnIndex) {
    // Original precondition had no message; equivalent bound written as <= size.
    Preconditions.checkArgument(columnIndex > 0 && columnIndex <= data.size(),
            "Column index %s out of range [1, %s].", columnIndex, data.size());
    return data.get(columnIndex - 1);
}
// Enum cells are expected to be rendered by name as strings.
@Test void assertGetCellWithEnum() { LocalDataQueryResultRow actual = new LocalDataQueryResultRow(FixtureEnum.FOO, FixtureEnum.BAR); assertThat(actual.getCell(1), is("FOO")); assertThat(actual.getCell(2), is("BAR")); }
// Flushes buffered bytes by delegating to the underlying local output stream.
@Override public void flush() throws IOException { mLocalOutputStream.flush(); }
// flush() must delegate to the wrapped local stream; the content hash is
// only expected to be available after close().
@Test public void flush() throws Exception { mStream.flush(); mStream.close(); assertEquals(mContentHash, mStream.getContentHash().get()); Mockito.verify(mLocalOutputStream).flush(); }
// Exposes the collected FUSE info lines for tests.
@VisibleForTesting List<String> getFuseInfo() { return mFuseInfo; }
// Both the "kernel_cache" and "auto_cache" mount options must be reported as
// enabling the local kernel data cache.
@Test public void localKernelDataCacheEnabled() { try (FuseUpdateChecker checker = getUpdateCheckerWithMountOptions("kernel_cache")) { Assert.assertTrue(containsTargetInfo(checker.getFuseInfo(), FuseUpdateChecker.LOCAL_KERNEL_DATA_CACHE)); } try (FuseUpdateChecker checker = getUpdateCheckerWithMountOptions("auto_cache")) { Assert.assertTrue(containsTargetInfo(checker.getFuseInfo(), FuseUpdateChecker.LOCAL_KERNEL_DATA_CACHE)); } }
// Builds a ProjectMeasuresQuery from the given filter criteria, optionally
// restricted to the given set of project UUIDs.
static ProjectMeasuresQuery newProjectMeasuresQuery(List<Criterion> criteria, @Nullable Set<String> projectUuids) {
    ProjectMeasuresQuery result = new ProjectMeasuresQuery();
    if (projectUuids != null) {
        result.setProjectUuids(projectUuids);
    }
    for (Criterion criterion : criteria) {
        processCriterion(criterion, result);
    }
    return result;
}
// The "languages" criterion with the IN operator must use a parenthesized
// value list; a bare single value is rejected with a descriptive error.
@Test public void fail_to_create_query_on_language_using_in_operator_and_value() { assertThatThrownBy(() -> { newProjectMeasuresQuery(singletonList(Criterion.builder().setKey("languages").setOperator(IN).setValue("java").build()), emptySet()); }) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Languages should be set either by using 'languages = java' or 'languages IN (java, js)'"); }
// Builds a DataflowRunner from the given options after extensive validation:
// required options (appName, region for service endpoints), worker settings,
// gcpTempLocation/stagingLocation/saveProfilesToGcs path prefixes, existence of
// explicitly listed filesToStage (or classpath auto-detection when unset),
// jobName format (normalized to lowercase), project id shape, non-negative
// worker harness thread count, JFR/java-version compatibility, a streaming GCS
// upload buffer default, and finally a user-agent string embedding runner
// name/version and the JRE version.
public static DataflowRunner fromOptions(PipelineOptions options) { DataflowPipelineOptions dataflowOptions = PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options); ArrayList<String> missing = new ArrayList<>(); if (dataflowOptions.getAppName() == null) { missing.add("appName"); } if (Strings.isNullOrEmpty(dataflowOptions.getRegion()) && isServiceEndpoint(dataflowOptions.getDataflowEndpoint())) { missing.add("region"); } if (missing.size() > 0) { throw new IllegalArgumentException( "Missing required pipeline options: " + Joiner.on(',').join(missing)); } validateWorkerSettings( PipelineOptionsValidator.validate(DataflowPipelineWorkerPoolOptions.class, options)); PathValidator validator = dataflowOptions.getPathValidator(); String gcpTempLocation; try { gcpTempLocation = dataflowOptions.getGcpTempLocation(); } catch (Exception e) { throw new IllegalArgumentException( "DataflowRunner requires gcpTempLocation, " + "but failed to retrieve a value from PipelineOptions", e); } validator.validateOutputFilePrefixSupported(gcpTempLocation); String stagingLocation; try { stagingLocation = dataflowOptions.getStagingLocation(); } catch (Exception e) { throw new IllegalArgumentException( "DataflowRunner requires stagingLocation, " + "but failed to retrieve a value from PipelineOptions", e); } validator.validateOutputFilePrefixSupported(stagingLocation); if (!isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) { validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs()); } if (dataflowOptions.getFilesToStage() != null) { // The user specifically requested these files, so fail now if they do not exist. 
// (automatically detected classpath elements are permitted to not exist, so later // staging will not fail on nonexistent files) dataflowOptions.getFilesToStage().stream() .forEach( stagedFileSpec -> { File localFile; if (stagedFileSpec.contains("=")) { String[] components = stagedFileSpec.split("=", 2); localFile = new File(components[1]); } else { localFile = new File(stagedFileSpec); } if (!localFile.exists()) { // should be FileNotFoundException, but for build-time backwards compatibility // cannot add checked exception throw new RuntimeException( String.format("Non-existent files specified in filesToStage: %s", localFile)); } }); } else { dataflowOptions.setFilesToStage( detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader(), options)); if (dataflowOptions.getFilesToStage().isEmpty()) { throw new IllegalArgumentException("No files to stage has been found."); } else { LOG.info( "PipelineOptions.filesToStage was not specified. " + "Defaulting to files from the classpath: will stage {} files. " + "Enable logging at DEBUG level to see which files will be staged.", dataflowOptions.getFilesToStage().size()); LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage()); } } // Verify jobName according to service requirements, truncating converting to lowercase if // necessary. String jobName = dataflowOptions.getJobName().toLowerCase(); checkArgument( jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"), "JobName invalid; the name must consist of only the characters " + "[-a-z0-9], starting with a letter and ending with a letter " + "or number"); if (!jobName.equals(dataflowOptions.getJobName())) { LOG.info( "PipelineOptions.jobName did not match the service requirements. 
" + "Using {} instead of {}.", jobName, dataflowOptions.getJobName()); } dataflowOptions.setJobName(jobName); // Verify project String project = dataflowOptions.getProject(); if (project.matches("[0-9]*")) { throw new IllegalArgumentException( "Project ID '" + project + "' invalid. Please make sure you specified the Project ID, not project number."); } else if (!project.matches(PROJECT_ID_REGEXP)) { throw new IllegalArgumentException( "Project ID '" + project + "' invalid. Please make sure you specified the Project ID, not project" + " description."); } DataflowPipelineDebugOptions debugOptions = dataflowOptions.as(DataflowPipelineDebugOptions.class); // Verify the number of worker threads is a valid value if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) { throw new IllegalArgumentException( "Number of worker harness threads '" + debugOptions.getNumberOfWorkerHarnessThreads() + "' invalid. Please make sure the value is non-negative."); } // Verify that if recordJfrOnGcThrashing is set, the pipeline is at least on java 11 if (dataflowOptions.getRecordJfrOnGcThrashing() && Environments.getJavaVersion() == Environments.JavaVersion.java8) { throw new IllegalArgumentException( "recordJfrOnGcThrashing is only supported on java 9 and up."); } if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) { dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT); } // Adding the Java version to the SDK name for user's and support convenience. 
String agentJavaVer = "(JRE 8 environment)"; if (Environments.getJavaVersion() != Environments.JavaVersion.java8) { agentJavaVer = String.format("(JRE %s environment)", Environments.getJavaVersion().specification()); } DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo(); String userAgentName = dataflowRunnerInfo.getName(); Preconditions.checkArgument( !userAgentName.equals(""), "Dataflow runner's `name` property cannot be empty."); String userAgentVersion = dataflowRunnerInfo.getVersion(); Preconditions.checkArgument( !userAgentVersion.equals(""), "Dataflow runner's `version` property cannot be empty."); String userAgent = String.format("%s/%s%s", userAgentName, userAgentVersion, agentJavaVer).replace(" ", "_"); dataflowOptions.setUserAgent(userAgent); return new DataflowRunner(dataflowOptions); }
// Translating a pipeline that contains a transform with no registered
// translator must fail with an IllegalStateException whose message mentions
// "no translator registered".
@Test public void testTransformTranslatorMissing() throws IOException { DataflowPipelineOptions options = buildPipelineOptions(); Pipeline p = Pipeline.create(options); p.apply(Create.of(Arrays.asList(1, 2, 3))).apply(new TestTransform()); thrown.expect(IllegalStateException.class); thrown.expectMessage(containsString("no translator registered")); SdkComponents sdkComponents = SdkComponents.create(options); RunnerApi.Pipeline pipelineProto = PipelineTranslation.toProto(p, sdkComponents, true); DataflowPipelineTranslator.fromOptions(options) .translate( p, pipelineProto, sdkComponents, DataflowRunner.fromOptions(options), Collections.emptyList()); ArgumentCaptor<Job> jobCaptor = ArgumentCaptor.forClass(Job.class); Mockito.verify(mockJobs).create(eq(PROJECT_ID), eq(REGION_ID), jobCaptor.capture()); assertValidJob(jobCaptor.getValue()); }
@VisibleForTesting public static boolean updateMapInternal(BiMap<Integer, String> map, String mapName, String command, String regex, Map<Integer, Integer> staticMapping) throws IOException { boolean updated = false; BufferedReader br = null; try { Process process = Runtime.getRuntime().exec( new String[] { "bash", "-c", command }); br = new BufferedReader( new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8)); String line = null; while ((line = br.readLine()) != null) { String[] nameId = line.split(regex); if ((nameId == null) || (nameId.length != 2)) { throw new IOException("Can't parse " + mapName + " list entry:" + line); } LOG.debug("add to " + mapName + "map:" + nameId[0] + " id:" + nameId[1]); // HDFS can't differentiate duplicate names with simple authentication final Integer key = staticMapping.get(parseId(nameId[1])); final String value = nameId[0]; if (map.containsKey(key)) { final String prevValue = map.get(key); if (value.equals(prevValue)) { // silently ignore equivalent entries continue; } reportDuplicateEntry( "Got multiple names associated with the same id: ", key, value, key, prevValue); continue; } if (map.containsValue(value)) { final Integer prevKey = map.inverse().get(value); reportDuplicateEntry( "Got multiple ids associated with the same name: ", key, value, prevKey, value); continue; } map.put(key, value); updated = true; } LOG.debug("Updated " + mapName + " map size: " + map.size()); } catch (IOException e) { LOG.error("Can't update " + mapName + " map"); throw e; } finally { if (br != null) { try { br.close(); } catch (IOException e1) { LOG.error("Can't close BufferedReader of command result", e1); } } } return updated; }
// Verifies that uids/gids beyond the signed 32-bit int range wrap to their
// two's-complement representation in the maps (4294967294 -> -2,
// 4294967295 -> -1, 2147483648 -> Integer.MIN_VALUE) while in-range ids are
// stored unchanged, for both the user and group listing commands.
@Test public void testIdOutOfIntegerRange() throws IOException { assumeNotWindows(); String GET_ALL_USERS_CMD = "echo \"" + "nfsnobody:x:4294967294:4294967294:Anonymous NFS User:/var/lib/nfs:/sbin/nologin\n" + "nfsnobody1:x:4294967295:4294967295:Anonymous NFS User:/var/lib/nfs1:/sbin/nologin\n" + "maxint:x:2147483647:2147483647:Grid Distributed File System:/home/maxint:/bin/bash\n" + "minint:x:2147483648:2147483648:Grid Distributed File System:/home/minint:/bin/bash\n" + "archivebackup:*:1031:4294967294:Archive Backup:/home/users/archivebackup:/bin/sh\n" + "hdfs:x:11501:10787:Grid Distributed File System:/home/hdfs:/bin/bash\n" + "daemon:x:2:2:daemon:/sbin:/sbin/nologin\"" + " | cut -d: -f1,3"; String GET_ALL_GROUPS_CMD = "echo \"" + "hdfs:*:11501:hrt_hdfs\n" + "rpcuser:*:29:\n" + "nfsnobody:*:4294967294:\n" + "nfsnobody1:*:4294967295:\n" + "maxint:*:2147483647:\n" + "minint:*:2147483648:\n" + "mapred3:x:498\"" + " | cut -d: -f1,3"; // Maps for id to name map BiMap<Integer, String> uMap = HashBiMap.create(); BiMap<Integer, String> gMap = HashBiMap.create(); ShellBasedIdMapping.updateMapInternal(uMap, "user", GET_ALL_USERS_CMD, ":", EMPTY_PASS_THROUGH_MAP); assertTrue(uMap.size() == 7); assertEquals("nfsnobody", uMap.get(-2)); assertEquals("nfsnobody1", uMap.get(-1)); assertEquals("maxint", uMap.get(2147483647)); assertEquals("minint", uMap.get(-2147483648)); assertEquals("archivebackup", uMap.get(1031)); assertEquals("hdfs",uMap.get(11501)); assertEquals("daemon", uMap.get(2)); ShellBasedIdMapping.updateMapInternal(gMap, "group", GET_ALL_GROUPS_CMD, ":", EMPTY_PASS_THROUGH_MAP); assertTrue(gMap.size() == 7); assertEquals("hdfs",gMap.get(11501)); assertEquals("rpcuser", gMap.get(29)); assertEquals("nfsnobody", gMap.get(-2)); assertEquals("nfsnobody1", gMap.get(-1)); assertEquals("maxint", gMap.get(2147483647)); assertEquals("minint", gMap.get(-2147483648)); assertEquals("mapred3", gMap.get(498)); }
// Parses a currency string into a Coin. When the shared number format carries
// a currency sign ("¤" in its pattern), the denomination prefix found in the
// source selects the parse scale and the format's symbols/code are temporarily
// swapped (saved in anteSigns and restored before returning); otherwise the
// formatter's own scale is used. The parsed BigDecimal is shifted to satoshis
// for the chosen scale and rounded HALF_UP. Returns null if the number cannot
// be parsed; an out-of-range value resets pos to 0 and also yields null.
public Coin parse(String source, ParsePosition pos) { DecimalFormatSymbols anteSigns = null; int parseScale = COIN_SCALE; // default Coin coin = null; synchronized (numberFormat) { if (numberFormat.toPattern().contains("¤")) { for(ScaleMatcher d : denomMatchers()) { Matcher matcher = d.pattern.matcher(source); if (matcher.find()) { anteSigns = setSymbolAndCode(numberFormat, matcher.group()); parseScale = d.scale; break; } } if (parseScale == COIN_SCALE) { Matcher matcher = coinPattern.matcher(source); matcher.find(); anteSigns = setSymbolAndCode(numberFormat, matcher.group()); } } else parseScale = scale(); Number number = numberFormat.parse(source, pos); if (number != null) try { coin = Coin.valueOf( ((BigDecimal)number).movePointRight(offSatoshis(parseScale)).setScale(0, HALF_UP).longValue() ); } catch (IllegalArgumentException e) { pos.setIndex(0); } if (anteSigns != null) numberFormat.setDecimalFormatSymbols(anteSigns); } return coin; }
@Ignore("non-determinism between OpenJDK versions") @Test public void parseTest() throws java.text.ParseException { BtcFormat us = BtcFormat.getSymbolInstance(Locale.US); BtcFormat usCoded = BtcFormat.getCodeInstance(Locale.US); // Coins assertEquals(valueOf(200000000), us.parseObject("BTC2")); assertEquals(valueOf(200000000), us.parseObject("XBT2")); assertEquals(valueOf(200000000), us.parseObject("฿2")); assertEquals(valueOf(200000000), us.parseObject("Ƀ2")); assertEquals(valueOf(200000000), us.parseObject("2")); assertEquals(valueOf(200000000), usCoded.parseObject("BTC 2")); assertEquals(valueOf(200000000), usCoded.parseObject("XBT 2")); assertEquals(valueOf(200000000), us.parseObject("฿2.0")); assertEquals(valueOf(200000000), us.parseObject("Ƀ2.0")); assertEquals(valueOf(200000000), us.parseObject("2.0")); assertEquals(valueOf(200000000), us.parseObject("BTC2.0")); assertEquals(valueOf(200000000), us.parseObject("XBT2.0")); assertEquals(valueOf(200000000), usCoded.parseObject("฿ 2")); assertEquals(valueOf(200000000), usCoded.parseObject("Ƀ 2")); assertEquals(valueOf(200000000), usCoded.parseObject(" 2")); assertEquals(valueOf(200000000), usCoded.parseObject("BTC 2")); assertEquals(valueOf(200000000), usCoded.parseObject("XBT 2")); assertEquals(valueOf(202222420000000L), us.parseObject("2,022,224.20")); assertEquals(valueOf(202222420000000L), us.parseObject("฿2,022,224.20")); assertEquals(valueOf(202222420000000L), us.parseObject("Ƀ2,022,224.20")); assertEquals(valueOf(202222420000000L), us.parseObject("BTC2,022,224.20")); assertEquals(valueOf(202222420000000L), us.parseObject("XBT2,022,224.20")); assertEquals(valueOf(220200000000L), us.parseObject("2,202.0")); assertEquals(valueOf(2100000000000000L), us.parseObject("21000000.00000000")); // MilliCoins assertEquals(valueOf(200000), usCoded.parseObject("mBTC 2")); assertEquals(valueOf(200000), usCoded.parseObject("mXBT 2")); assertEquals(valueOf(200000), usCoded.parseObject("m฿ 2")); assertEquals(valueOf(200000), 
usCoded.parseObject("mɃ 2")); assertEquals(valueOf(200000), us.parseObject("mBTC2")); assertEquals(valueOf(200000), us.parseObject("mXBT2")); assertEquals(valueOf(200000), us.parseObject("₥฿2")); assertEquals(valueOf(200000), us.parseObject("₥Ƀ2")); assertEquals(valueOf(200000), us.parseObject("₥2")); assertEquals(valueOf(200000), usCoded.parseObject("₥BTC 2.00")); assertEquals(valueOf(200000), usCoded.parseObject("₥XBT 2.00")); assertEquals(valueOf(200000), usCoded.parseObject("₥BTC 2")); assertEquals(valueOf(200000), usCoded.parseObject("₥XBT 2")); assertEquals(valueOf(200000), usCoded.parseObject("₥฿ 2")); assertEquals(valueOf(200000), usCoded.parseObject("₥Ƀ 2")); assertEquals(valueOf(200000), usCoded.parseObject("₥ 2")); assertEquals(valueOf(202222400000L), us.parseObject("₥฿2,022,224")); assertEquals(valueOf(202222420000L), us.parseObject("₥Ƀ2,022,224.20")); assertEquals(valueOf(202222400000L), us.parseObject("m฿2,022,224")); assertEquals(valueOf(202222420000L), us.parseObject("mɃ2,022,224.20")); assertEquals(valueOf(202222400000L), us.parseObject("₥BTC2,022,224")); assertEquals(valueOf(202222400000L), us.parseObject("₥XBT2,022,224")); assertEquals(valueOf(202222400000L), us.parseObject("mBTC2,022,224")); assertEquals(valueOf(202222400000L), us.parseObject("mXBT2,022,224")); assertEquals(valueOf(202222420000L), us.parseObject("₥2,022,224.20")); assertEquals(valueOf(202222400000L), usCoded.parseObject("₥฿ 2,022,224")); assertEquals(valueOf(202222420000L), usCoded.parseObject("₥Ƀ 2,022,224.20")); assertEquals(valueOf(202222400000L), usCoded.parseObject("m฿ 2,022,224")); assertEquals(valueOf(202222420000L), usCoded.parseObject("mɃ 2,022,224.20")); assertEquals(valueOf(202222400000L), usCoded.parseObject("₥BTC 2,022,224")); assertEquals(valueOf(202222400000L), usCoded.parseObject("₥XBT 2,022,224")); assertEquals(valueOf(202222400000L), usCoded.parseObject("mBTC 2,022,224")); assertEquals(valueOf(202222400000L), usCoded.parseObject("mXBT 2,022,224")); 
assertEquals(valueOf(202222420000L), usCoded.parseObject("₥ 2,022,224.20")); // Microcoins assertEquals(valueOf(435), us.parseObject("µ฿4.35")); assertEquals(valueOf(435), us.parseObject("uɃ4.35")); assertEquals(valueOf(435), us.parseObject("u฿4.35")); assertEquals(valueOf(435), us.parseObject("µɃ4.35")); assertEquals(valueOf(435), us.parseObject("uBTC4.35")); assertEquals(valueOf(435), us.parseObject("uXBT4.35")); assertEquals(valueOf(435), us.parseObject("µBTC4.35")); assertEquals(valueOf(435), us.parseObject("µXBT4.35")); assertEquals(valueOf(435), usCoded.parseObject("uBTC 4.35")); assertEquals(valueOf(435), usCoded.parseObject("uXBT 4.35")); assertEquals(valueOf(435), usCoded.parseObject("µBTC 4.35")); assertEquals(valueOf(435), usCoded.parseObject("µXBT 4.35")); // fractional satoshi; round up assertEquals(valueOf(435), us.parseObject("uBTC4.345")); assertEquals(valueOf(435), us.parseObject("uXBT4.345")); // negative with mu symbol assertEquals(valueOf(-1), usCoded.parseObject("(µ฿ 0.01)")); assertEquals(valueOf(-10), us.parseObject("(µBTC0.100)")); assertEquals(valueOf(-10), us.parseObject("(µXBT0.100)")); // Same thing with addition of custom code, symbol us = BtcFormat.builder().locale(US).style(SYMBOL).symbol("£").code("XYZ").build(); usCoded = BtcFormat.builder().locale(US).scale(0).symbol("£").code("XYZ"). 
pattern("¤ #,##0.00").build(); // Coins assertEquals(valueOf(200000000), us.parseObject("XYZ2")); assertEquals(valueOf(200000000), us.parseObject("BTC2")); assertEquals(valueOf(200000000), us.parseObject("XBT2")); assertEquals(valueOf(200000000), us.parseObject("£2")); assertEquals(valueOf(200000000), us.parseObject("฿2")); assertEquals(valueOf(200000000), us.parseObject("Ƀ2")); assertEquals(valueOf(200000000), us.parseObject("2")); assertEquals(valueOf(200000000), usCoded.parseObject("XYZ 2")); assertEquals(valueOf(200000000), usCoded.parseObject("BTC 2")); assertEquals(valueOf(200000000), usCoded.parseObject("XBT 2")); assertEquals(valueOf(200000000), us.parseObject("£2.0")); assertEquals(valueOf(200000000), us.parseObject("฿2.0")); assertEquals(valueOf(200000000), us.parseObject("Ƀ2.0")); assertEquals(valueOf(200000000), us.parseObject("2.0")); assertEquals(valueOf(200000000), us.parseObject("XYZ2.0")); assertEquals(valueOf(200000000), us.parseObject("BTC2.0")); assertEquals(valueOf(200000000), us.parseObject("XBT2.0")); assertEquals(valueOf(200000000), usCoded.parseObject("£ 2")); assertEquals(valueOf(200000000), usCoded.parseObject("฿ 2")); assertEquals(valueOf(200000000), usCoded.parseObject("Ƀ 2")); assertEquals(valueOf(200000000), usCoded.parseObject(" 2")); assertEquals(valueOf(200000000), usCoded.parseObject("XYZ 2")); assertEquals(valueOf(200000000), usCoded.parseObject("BTC 2")); assertEquals(valueOf(200000000), usCoded.parseObject("XBT 2")); assertEquals(valueOf(202222420000000L), us.parseObject("2,022,224.20")); assertEquals(valueOf(202222420000000L), us.parseObject("£2,022,224.20")); assertEquals(valueOf(202222420000000L), us.parseObject("฿2,022,224.20")); assertEquals(valueOf(202222420000000L), us.parseObject("Ƀ2,022,224.20")); assertEquals(valueOf(202222420000000L), us.parseObject("XYZ2,022,224.20")); assertEquals(valueOf(202222420000000L), us.parseObject("BTC2,022,224.20")); assertEquals(valueOf(202222420000000L), 
us.parseObject("XBT2,022,224.20")); assertEquals(valueOf(220200000000L), us.parseObject("2,202.0")); assertEquals(valueOf(2100000000000000L), us.parseObject("21000000.00000000")); // MilliCoins assertEquals(valueOf(200000), usCoded.parseObject("mXYZ 2")); assertEquals(valueOf(200000), usCoded.parseObject("mBTC 2")); assertEquals(valueOf(200000), usCoded.parseObject("mXBT 2")); assertEquals(valueOf(200000), usCoded.parseObject("m£ 2")); assertEquals(valueOf(200000), usCoded.parseObject("m฿ 2")); assertEquals(valueOf(200000), usCoded.parseObject("mɃ 2")); assertEquals(valueOf(200000), us.parseObject("mXYZ2")); assertEquals(valueOf(200000), us.parseObject("mBTC2")); assertEquals(valueOf(200000), us.parseObject("mXBT2")); assertEquals(valueOf(200000), us.parseObject("₥£2")); assertEquals(valueOf(200000), us.parseObject("₥฿2")); assertEquals(valueOf(200000), us.parseObject("₥Ƀ2")); assertEquals(valueOf(200000), us.parseObject("₥2")); assertEquals(valueOf(200000), usCoded.parseObject("₥XYZ 2.00")); assertEquals(valueOf(200000), usCoded.parseObject("₥BTC 2.00")); assertEquals(valueOf(200000), usCoded.parseObject("₥XBT 2.00")); assertEquals(valueOf(200000), usCoded.parseObject("₥XYZ 2")); assertEquals(valueOf(200000), usCoded.parseObject("₥BTC 2")); assertEquals(valueOf(200000), usCoded.parseObject("₥XBT 2")); assertEquals(valueOf(200000), usCoded.parseObject("₥£ 2")); assertEquals(valueOf(200000), usCoded.parseObject("₥฿ 2")); assertEquals(valueOf(200000), usCoded.parseObject("₥Ƀ 2")); assertEquals(valueOf(200000), usCoded.parseObject("₥ 2")); assertEquals(valueOf(202222400000L), us.parseObject("₥£2,022,224")); assertEquals(valueOf(202222400000L), us.parseObject("₥฿2,022,224")); assertEquals(valueOf(202222420000L), us.parseObject("₥Ƀ2,022,224.20")); assertEquals(valueOf(202222400000L), us.parseObject("m£2,022,224")); assertEquals(valueOf(202222400000L), us.parseObject("m฿2,022,224")); assertEquals(valueOf(202222420000L), us.parseObject("mɃ2,022,224.20")); 
assertEquals(valueOf(202222400000L), us.parseObject("₥XYZ2,022,224")); assertEquals(valueOf(202222400000L), us.parseObject("₥BTC2,022,224")); assertEquals(valueOf(202222400000L), us.parseObject("₥XBT2,022,224")); assertEquals(valueOf(202222400000L), us.parseObject("mXYZ2,022,224")); assertEquals(valueOf(202222400000L), us.parseObject("mBTC2,022,224")); assertEquals(valueOf(202222400000L), us.parseObject("mXBT2,022,224")); assertEquals(valueOf(202222420000L), us.parseObject("₥2,022,224.20")); assertEquals(valueOf(202222400000L), usCoded.parseObject("₥£ 2,022,224")); assertEquals(valueOf(202222400000L), usCoded.parseObject("₥฿ 2,022,224")); assertEquals(valueOf(202222420000L), usCoded.parseObject("₥Ƀ 2,022,224.20")); assertEquals(valueOf(202222400000L), usCoded.parseObject("m£ 2,022,224")); assertEquals(valueOf(202222400000L), usCoded.parseObject("m฿ 2,022,224")); assertEquals(valueOf(202222420000L), usCoded.parseObject("mɃ 2,022,224.20")); assertEquals(valueOf(202222400000L), usCoded.parseObject("₥XYZ 2,022,224")); assertEquals(valueOf(202222400000L), usCoded.parseObject("₥BTC 2,022,224")); assertEquals(valueOf(202222400000L), usCoded.parseObject("₥XBT 2,022,224")); assertEquals(valueOf(202222400000L), usCoded.parseObject("mXYZ 2,022,224")); assertEquals(valueOf(202222400000L), usCoded.parseObject("mBTC 2,022,224")); assertEquals(valueOf(202222400000L), usCoded.parseObject("mXBT 2,022,224")); assertEquals(valueOf(202222420000L), usCoded.parseObject("₥ 2,022,224.20")); // Microcoins assertEquals(valueOf(435), us.parseObject("µ£4.35")); assertEquals(valueOf(435), us.parseObject("µ฿4.35")); assertEquals(valueOf(435), us.parseObject("uɃ4.35")); assertEquals(valueOf(435), us.parseObject("u£4.35")); assertEquals(valueOf(435), us.parseObject("u฿4.35")); assertEquals(valueOf(435), us.parseObject("µɃ4.35")); assertEquals(valueOf(435), us.parseObject("uXYZ4.35")); assertEquals(valueOf(435), us.parseObject("uBTC4.35")); assertEquals(valueOf(435), us.parseObject("uXBT4.35")); 
assertEquals(valueOf(435), us.parseObject("µXYZ4.35")); assertEquals(valueOf(435), us.parseObject("µBTC4.35")); assertEquals(valueOf(435), us.parseObject("µXBT4.35")); assertEquals(valueOf(435), usCoded.parseObject("uXYZ 4.35")); assertEquals(valueOf(435), usCoded.parseObject("uBTC 4.35")); assertEquals(valueOf(435), usCoded.parseObject("uXBT 4.35")); assertEquals(valueOf(435), usCoded.parseObject("µXYZ 4.35")); assertEquals(valueOf(435), usCoded.parseObject("µBTC 4.35")); assertEquals(valueOf(435), usCoded.parseObject("µXBT 4.35")); // fractional satoshi; round up assertEquals(valueOf(435), us.parseObject("uXYZ4.345")); assertEquals(valueOf(435), us.parseObject("uBTC4.345")); assertEquals(valueOf(435), us.parseObject("uXBT4.345")); // negative with mu symbol assertEquals(valueOf(-1), usCoded.parseObject("µ£ -0.01")); assertEquals(valueOf(-1), usCoded.parseObject("µ฿ -0.01")); assertEquals(valueOf(-10), us.parseObject("(µXYZ0.100)")); assertEquals(valueOf(-10), us.parseObject("(µBTC0.100)")); assertEquals(valueOf(-10), us.parseObject("(µXBT0.100)")); // parse() method as opposed to parseObject try { BtcFormat.getInstance().parse("abc"); fail("bad parse must raise exception"); } catch (ParseException e) {} }
/**
 * Registers a SAGA branch transaction for the given resource under the global
 * transaction identified by {@code xid}.
 *
 * <p>Pure delegation: forwards to the shared {@code DefaultResourceManager}
 * singleton with the branch type fixed to {@code BranchType.SAGA}.
 *
 * @return the id of the newly registered branch
 * @throws TransactionException if the registration fails
 */
@Override
public long branchRegister(String resourceId, String clientId, String xid, String applicationData,
                           String lockKeys) throws TransactionException {
    return DefaultResourceManager.get().branchRegister(BranchType.SAGA, resourceId, clientId, xid,
            applicationData, lockKeys);
}
/** Branch registration must not throw once a SAGA resource manager is mocked in. */
@Test
public void testBranchRegister() {
    ResourceManager mockRm = Mockito.mock(ResourceManager.class);
    Mockito.doNothing().when(mockRm).registerResource(any(Resource.class));
    DefaultResourceManager.get();
    DefaultResourceManager.mockResourceManager(BranchType.SAGA, mockRm);
    Assertions.assertDoesNotThrow(() -> sagaTransactionalTemplate.branchRegister("", "", "", "", ""));
}
/**
 * Static factory that creates a {@link Credential} from a username/password pair.
 *
 * @param username the account name (empty strings are allowed; nullability is not checked here)
 * @param password the secret associated with the username
 * @return a new {@code Credential} holding the given values
 */
public static Credential from(String username, String password) {
    return new Credential(username, password);
}
/** equals/hashCode contract: value-equal credentials collapse to one entry in a HashSet. */
@Test
public void testCredentialsHash() {
    Credential userCred1 = Credential.from("username", "password");
    Credential userCred2 = Credential.from("username", "password");
    Credential emptyCred1 = Credential.from("", "");
    Credential emptyCred2 = Credential.from("", "");

    // pairwise equality of equal / unequal value pairs
    Assert.assertEquals(userCred1, userCred2);
    Assert.assertEquals(emptyCred1, emptyCred2);
    Assert.assertNotEquals(userCred1, emptyCred1);
    Assert.assertNotEquals(userCred1, emptyCred2);

    // the four instances deduplicate to exactly two distinct values
    Set<Credential> deduped = new HashSet<>(Arrays.asList(userCred1, userCred2, emptyCred1, emptyCred2));
    Assert.assertEquals(new HashSet<>(Arrays.asList(userCred2, emptyCred1)), deduped);
}
/**
 * Begins connecting to {@code address} and registers the new channel with this
 * selector under {@code id}.
 *
 * <p>Connection completion is normally observed later via OP_CONNECT (the channel is
 * presumably put in non-blocking mode by {@code configureSocketChannel} — confirm there).
 * If the connect finishes immediately (e.g. loopback), OP_CONNECT never fires, so the
 * key is recorded in {@code immediatelyConnectedKeys} and its interest set is cleared.
 *
 * @param id unique channel id; must not already be registered
 * @throws IOException if socket configuration, connect, or registration fails
 */
@Override
public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException {
    ensureNotRegistered(id);
    SocketChannel socketChannel = SocketChannel.open();
    SelectionKey key = null;
    try {
        configureSocketChannel(socketChannel, sendBufferSize, receiveBufferSize);
        boolean connected = doConnect(socketChannel, address);
        key = registerChannel(id, socketChannel, SelectionKey.OP_CONNECT);

        if (connected) {
            // OP_CONNECT won't trigger for immediately connected channels
            log.debug("Immediately connected to node {}", id);
            immediatelyConnectedKeys.add(key);
            key.interestOps(0);
        }
    } catch (IOException | RuntimeException e) {
        // Roll back partial registration before closing the socket and rethrowing,
        // so no stale key/id mapping survives a failed connect.
        if (key != null)
            immediatelyConnectedKeys.remove(key);
        channels.remove(id);
        socketChannel.close();
        throw e;
    }
}
/** Sends a sequence of large (512 KiB) requests over one connection and checks the echoes. */
@Test
public void testLargeMessageSequence() throws Exception {
    int payloadBytes = 512 * 1024;
    String node = "0";
    int requestCount = 50;
    connect(node, new InetSocketAddress("localhost", server.port));
    String payload = TestUtils.randomString(payloadBytes);
    sendAndReceive(node, payload, 0, requestCount);
}
/**
 * Post-interception hook: captures the intercepted instance (the registry watcher)
 * into the global {@code RegisterContext} and returns the context unchanged.
 */
@Override
public ExecuteContext doAfter(ExecuteContext context) {
    RegisterContext.INSTANCE.setRegisterWatch(context.getObject());
    return context;
}
/** doAfter must stash the intercepted object into RegisterContext and return the same context. */
@Test
public void doAfter() throws NoSuchMethodException {
    ExecuteContext input = ExecuteContext.forMemberMethod(
            this, String.class.getDeclaredMethod("trim"), null, null, null);
    ExecuteContext returned = interceptor.doAfter(input);
    Assert.assertEquals(returned.getObject(), RegisterContext.INSTANCE.getRegisterWatch());
    // reset shared global state so later tests are unaffected
    RegisterContext.INSTANCE.setRegisterWatch(null);
}
/**
 * Translates a list of custom-model statements (if / else_if / else, possibly with
 * nested blocks) into Java source text appended to {@code expressions}.
 *
 * <p>Conditions are validated and converted via {@code ConditionalExpressionVisitor};
 * every variable guessed from a condition is added to {@code createObjects} so the
 * caller can declare it in the generated class.
 *
 * @param exceptionInfo prefix used in error messages to locate the failing statement
 * @param indentation   indentation for the generated source; grows by one space per nesting level
 * @throws IllegalArgumentException on an invalid condition, a non-empty 'else' condition,
 *                                  or an unknown keyword
 */
static void parseExpressions(StringBuilder expressions, NameValidator nameInConditionValidator,
                             String exceptionInfo, Set<String> createObjects, List<Statement> list,
                             ClassHelper classHelper, String indentation) {
    for (Statement statement : list) {
        // avoid parsing the RHS value expression again as we just did it to get the maximum values in createClazz
        if (statement.keyword() == Statement.Keyword.ELSE) {
            // a plain 'else' must not carry a condition
            if (!Helper.isEmpty(statement.condition()))
                throw new IllegalArgumentException("condition must be empty but was " + statement.condition());

            expressions.append(indentation);
            if (statement.isBlock()) {
                // recurse into the nested block with one extra level of indentation
                expressions.append("else {");
                parseExpressions(expressions, nameInConditionValidator, exceptionInfo, createObjects,
                        statement.doBlock(), classHelper, indentation + " ");
                expressions.append(indentation).append("}\n");
            } else {
                expressions.append("else {").append(statement.operation().build(statement.value())).append("; }\n");
            }
        } else if (statement.keyword() == Statement.Keyword.ELSEIF || statement.keyword() == Statement.Keyword.IF) {
            // validate + convert the condition into compilable Java
            ParseResult parseResult = ConditionalExpressionVisitor.parse(statement.condition(), nameInConditionValidator, classHelper);
            if (!parseResult.ok)
                throw new IllegalArgumentException(exceptionInfo + " invalid condition \"" + statement.condition() + "\""
                        + (parseResult.invalidMessage == null ? "" : ": " + parseResult.invalidMessage));
            createObjects.addAll(parseResult.guessedVariables);
            // 'else_if' emits an 'else ' prefix before the regular 'if'
            if (statement.keyword() == Statement.Keyword.ELSEIF)
                expressions.append(indentation).append("else ");

            expressions.append(indentation);
            if (statement.isBlock()) {
                expressions.append("if (").append(parseResult.converted).append(") {\n");
                parseExpressions(expressions, nameInConditionValidator, exceptionInfo, createObjects,
                        statement.doBlock(), classHelper, indentation + " ");
                expressions.append(indentation).append("}\n");
            } else {
                expressions.append("if (").append(parseResult.converted).append(") {").
                        append(statement.operation().build(statement.value())).append(";}\n");
            }
        } else {
            throw new IllegalArgumentException("The statement must be either 'if', 'else_if' or 'else'");
        }
    }
}
@Test public void parseConditionWithError() { NameValidator validVariable = s -> encodingManager.hasEncodedValue(s); // existing encoded value but not added IllegalArgumentException ret = assertThrows(IllegalArgumentException.class, () -> parseExpressions(new StringBuilder(), validVariable, "[HERE]", new HashSet<>(), Arrays.asList(If("max_weight > 10", MULTIPLY, "0")), s -> "", "") ); assertTrue(ret.getMessage().startsWith("[HERE] invalid condition \"max_weight > 10\": 'max_weight' not available"), ret.getMessage()); // invalid variable or constant (NameValidator returns false) ret = assertThrows(IllegalArgumentException.class, () -> parseExpressions(new StringBuilder(), validVariable, "[HERE]", new HashSet<>(), Arrays.asList(If("country == GERMANY", MULTIPLY, "0")), s -> "", "")); assertTrue(ret.getMessage().startsWith("[HERE] invalid condition \"country == GERMANY\": 'GERMANY' not available"), ret.getMessage()); // not whitelisted method ret = assertThrows(IllegalArgumentException.class, () -> parseExpressions(new StringBuilder(), validVariable, "[HERE]", new HashSet<>(), Arrays.asList(If("edge.fetchWayGeometry().size() > 2", MULTIPLY, "0")), s -> "", "")); assertTrue(ret.getMessage().startsWith("[HERE] invalid condition \"edge.fetchWayGeometry().size() > 2\": size is an illegal method"), ret.getMessage()); }
/**
 * Delay between monitoring runs, in milliseconds.
 * Falls back to 10 seconds when the property is not configured.
 */
@Override
public long getDelay() {
    final long defaultDelayMs = 10_000L;
    return config.getLong(DELAY_IN_MILISECONDS_PROPERTY).orElse(defaultDelayMs);
}
/** A configured delay property must override the built-in default. */
@Test
public void getDelay_returnNumberFromConfig() {
    config.put("sonar.server.monitoring.other.initial.delay", "100000");
    assertThat(underTest.getDelay()).isEqualTo(100_000L);
}
/**
 * Current wall-clock time in milliseconds as reported by the pluggable CLOCK
 * (may differ from System.currentTimeMillis() when an offset clock is installed).
 */
public static long currentTimeMillis() {
    final long nowMs = CLOCK.currentTimeMillis();
    return nowMs;
}
/** A clock with a large negative offset must report times behind the system clock. */
@Test
public void testSystemOffsetClock() {
    Clock.SystemOffsetClock laggingClock = new Clock.SystemOffsetClock(-999999999);
    long systemMillis = System.currentTimeMillis();
    sleepSeconds(1);
    long offsetMillis = laggingClock.currentTimeMillis();
    assertTrue(format("SystemOffsetClock should be far behind the normal clock! %d < %d", offsetMillis, systemMillis),
            offsetMillis < systemMillis);
}
/**
 * Uploads the file at {@code localPath} under the name {@code artifactName}.
 * Convenience overload: converts the string path and delegates to the Path-based variant.
 *
 * @throws IOException if the local file cannot be read or the upload fails
 */
@Override
public Artifact uploadArtifact(String artifactName, String localPath) throws IOException {
    return uploadArtifact(artifactName, Paths.get(localPath));
}
/** Uploading from a nonexistent local path must surface as an IOException. */
@Test
public void testUploadArtifactInvalidLocalPath() {
    when(client.create(any(BlobInfo.class), any())).thenReturn(blob);
    String missingPath = "/" + UUID.randomUUID();
    assertThrows(IOException.class, () -> gcsClient.uploadArtifact(ARTIFACT_NAME, missingPath));
}
/**
 * Reports whether data can currently be polled and how many buffers are backlogged.
 * Without credit the view is still "available" when the head of the current queue is
 * an event or an error, since those must be delivered regardless of credit.
 */
@Override
public AvailabilityWithBacklog getAvailabilityAndBacklog(boolean isCreditAvailable) {
    if (!findCurrentNettyPayloadQueue()) {
        // no queue currently holds the segment: nothing available, no backlog
        return new AvailabilityWithBacklog(false, 0);
    }
    NettyPayloadManager currentQueue = nettyPayloadManagers.get(managerIndexContainsCurrentSegment);
    boolean available = isCreditAvailable || isEventOrError(currentQueue);
    return new AvailabilityWithBacklog(available, getBacklog());
}
/** Backlog is reported either way; availability tracks the credit flag in this fixture. */
@Test
void testGetAvailabilityAndBacklog() {
    ResultSubpartitionView.AvailabilityWithBacklog withoutCredit =
            tieredStorageResultSubpartitionView.getAvailabilityAndBacklog(false);
    assertThat(withoutCredit.getBacklog()).isEqualTo(1);
    assertThat(withoutCredit.isAvailable()).isEqualTo(false);

    ResultSubpartitionView.AvailabilityWithBacklog withCredit =
            tieredStorageResultSubpartitionView.getAvailabilityAndBacklog(true);
    assertThat(withCredit.getBacklog()).isEqualTo(1);
    assertThat(withCredit.isAvailable()).isEqualTo(true);
}
/**
 * Fluent clear(): returns {@code this} (typed via {@code thisT()}) for chaining.
 * The body performs no other work — this implementation appears to have no state
 * to clear; confirm against the interface contract.
 */
@Override
public T clear() {
    return thisT();
}
/** clear() on the shared headers instance must return that same instance (fluent no-op). */
@Test
public void testClear() {
    assertSame(HEADERS, HEADERS.clear());
}
/**
 * Validates the GitHub app configuration: fails fast with INVALID_APP_ID_STATUS when
 * the app id is missing/invalid, otherwise builds the app configuration from the
 * settings and runs the full check.
 */
public ConfigCheckResult checkConfig() {
    return getAppId()
            .map(appId -> new GithubAppConfiguration(appId, gitHubSettings.privateKey(), gitHubSettings.apiURLOrDefault()))
            .map(this::checkConfig)
            .orElseGet(() -> failedApplicationStatus(INVALID_APP_ID_STATUS));
}
/** App-level config checks succeed; per-org auto-provisioning status is reported individually. */
@Test
public void checkConfig_whenAllPermissionsAreCorrect_shouldReturnSuccessfulCheck() {
    mockGithubConfiguration();
    ArgumentCaptor<GithubAppConfiguration> configCaptor = ArgumentCaptor.forClass(GithubAppConfiguration.class);
    mockGithubAppWithValidConfig(configCaptor);
    mockOrganizations(configCaptor, "org1", "org2");

    ConfigCheckResult result = configValidator.checkConfig();

    assertSuccessfulAppConfig(result);
    assertThat(result.installations())
            .extracting(InstallationStatus::organization, InstallationStatus::autoProvisioning)
            .containsExactlyInAnyOrder(
                    tuple("org1", SUCCESS_CHECK),
                    tuple("org2", ConfigStatus.failed(MISSING_ALL_AUTOPROVISIONNING_PERMISSIONS)));
    verifyAppConfiguration(configCaptor.getValue());
}
/**
 * Recursively collects ASM signatures for every class in packages that belong to the
 * public ABI (as determined by {@code isPublicAbiPackage} with {@code publicApiAnnotation}).
 *
 * <p>Class files and sub-packages are visited in sorted order so the resulting
 * LinkedHashMap has a deterministic iteration order.
 *
 * @return map of class name to signature for all public-ABI classes under {@code pkg}
 * @throws IOException if a class file cannot be read
 */
static Map<String, JavaClassSignature> collectPublicAbiSignatures(Package pkg,
        String publicApiAnnotation) throws IOException {
    Map<String, JavaClassSignature> signatures = new LinkedHashMap<>();
    if (isPublicAbiPackage(pkg, publicApiAnnotation)) {
        PublicSignatureCollector collector = new PublicSignatureCollector();
        // deterministic order: sort class files by name before feeding them to ASM
        List<ClassFileTree.ClassFile> sortedClassFiles = pkg.getClassFiles().stream()
                .sorted(Comparator.comparing(ClassFile::getName)).toList();
        for (ClassFile klazz : sortedClassFiles) {
            try (InputStream is = klazz.getInputStream()) {
                new ClassReader(is).accept(collector, 0);
            }
        }
        signatures.putAll(collector.getClassSignatures());
    }
    // always recurse: a non-ABI package may still contain ABI sub-packages
    List<ClassFileTree.Package> sortedSubPackages = pkg.getSubPackages().stream()
            .sorted(Comparator.comparing(Package::getFullyQualifiedName))
            .toList();
    for (ClassFileTree.Package subPkg : sortedSubPackages) {
        signatures.putAll(collectPublicAbiSignatures(subPkg, publicApiAnnotation));
    }
    return signatures;
}
@Test public void testCollectPublicAbiSignatures() throws IOException { ClassFileTree.Package rootPkg = buildClassFileTree(); Map<String, JavaClassSignature> signatures = AbiCheck .collectPublicAbiSignatures(rootPkg, Public.class.getCanonicalName()); assertThat(signatures.size(), equalTo(1)); JavaClassSignature rootSignature = signatures.get("root.Root"); // PublicSignatureCollectorTest verifies actual signatures, no need to duplicate here }
/** Renders an AST back to SQL text, trimming trailing newlines the formatter leaves behind. */
public static String formatSql(final AstNode root) {
    final StringBuilder sql = new StringBuilder();
    new Formatter(sql).process(root, 0);
    return StringUtils.stripEnd(sql.toString(), "\n");
}
@Test public void shouldFormatResumeAllQueries() { // Given: final ResumeQuery query = ResumeQuery.all(Optional.empty()); // When: final String formatted = SqlFormatter.formatSql(query); // Then: assertThat(formatted, is("RESUME ALL")); }
/**
 * Derives the RoadAccess value for an edge from barrier-node tags, way-level
 * restriction tags and (optionally) a per-country rule, then stores it in the
 * encoded value. Values are folded via {@code getRoadAccess} (combination
 * semantics are defined there), with the country rule applied last as an override.
 */
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay readerWay, IntsRef relationFlags) {
    RoadAccess accessValue = YES;
    List<Map<String, Object>> nodeTags = readerWay.getTag("node_tags", Collections.emptyList());
    // a barrier edge has the restriction in both nodes and the tags are the same
    // NOTE(review): nodeTags.get(0) assumes node_tags is non-empty whenever
    // gh:barrier_edge is set — confirm the importer guarantees this
    if (readerWay.hasTag("gh:barrier_edge"))
        for (String restriction : restrictions) {
            Object value = nodeTags.get(0).get(restriction);
            if (value != null)
                accessValue = getRoadAccess((String) value, accessValue);
        }

    // way-level restriction tags (e.g. motor_vehicle, access, ...)
    for (String restriction : restrictions) {
        accessValue = getRoadAccess(readerWay.getTag(restriction), accessValue);
    }

    // country-specific defaults may override the tag-derived value
    CountryRule countryRule = readerWay.getTag("country_rule", null);
    if (countryRule != null)
        accessValue = countryRule.getAccess(readerWay, TransportationMode.CAR, accessValue);

    roadAccessEnc.setEnum(false, edgeId, edgeIntAccess, accessValue);
}
/** motor_vehicle=permit must map to PRIVATE road access. */
@Test
public void testPermit() {
    ArrayEdgeIntAccess intAccess = new ArrayEdgeIntAccess(1);
    int edgeId = 0;
    ReaderWay permitWay = new ReaderWay(27L);
    permitWay.setTag("motor_vehicle", "permit");
    parser.handleWayTags(edgeId, intAccess, permitWay, new IntsRef(1));
    assertEquals(RoadAccess.PRIVATE, roadAccessEnc.getEnum(false, edgeId, intAccess));
}
/**
 * GET /{id}: looks up a device key by id and returns it as JSON.
 * Responds 404 (via nullIsNotFound) when no key with that id exists.
 */
@GET
@Path("{id}")
@Produces(MediaType.APPLICATION_JSON)
public Response getDeviceKey(@PathParam("id") String id) {
    DeviceKey key = nullIsNotFound(
            get(DeviceKeyService.class).getDeviceKey(DeviceKeyId.deviceKeyId(id)),
            DEVICE_KEY_NOT_FOUND);
    return ok(codec(DeviceKey.class).encode(key, this)).build();
}
/** DELETE /keys/{id} must remove the key via the admin service and return 200. */
@Test
public void testDelete() {
    expect(mockDeviceKeyService.getDeviceKey(DeviceKeyId.deviceKeyId(deviceKeyId2)))
            .andReturn(deviceKey2)
            .anyTimes();
    mockDeviceKeyAdminService.removeKey(anyObject());
    expectLastCall();
    replay(mockDeviceKeyService);
    replay(mockDeviceKeyAdminService);

    WebTarget webTarget = target();
    Response response = webTarget.path("keys/" + deviceKeyId2)
            .request(MediaType.APPLICATION_JSON_TYPE)
            .delete();
    assertThat(response.getStatus(), is(HttpURLConnection.HTTP_OK));

    verify(mockDeviceKeyService);
    verify(mockDeviceKeyAdminService);
}
/**
 * Wraps a byte array in a {@code java.sql.Blob} (SerialBlob). Returns null for null input.
 * Any checked exception from SerialBlob is unexpected for a non-null array and is
 * rethrown unchecked.
 */
public static Blob bytes2Blob(byte[] bytes) {
    if (bytes == null) {
        return null;
    }
    try {
        return new SerialBlob(bytes);
    } catch (Exception cause) {
        throw new ShouldNeverHappenException(cause);
    }
}
/** null maps to null; non-null bytes round-trip into an equal SerialBlob. */
@Test
public void testBytes2Blob() throws UnsupportedEncodingException, SQLException {
    assertNull(BlobUtils.bytes2Blob(null));
    byte[] payload = "xxaaadd".getBytes(Constants.DEFAULT_CHARSET_NAME);
    assertThat(BlobUtils.bytes2Blob(payload)).isEqualTo(new SerialBlob(payload));
}
/**
 * Decodes raw entity bytes into a DataMap using the codec selected from the
 * request's Content-Type header (via {@code getContentType}).
 *
 * @throws MimeTypeParseException if the Content-Type header cannot be parsed
 * @throws IOException            if the bytes cannot be decoded by the codec
 */
public static DataMap bytesToDataMap(Map<String, String> headers, ByteString bytes)
        throws MimeTypeParseException, IOException {
    return getContentType(headers).getCodec().readMap(bytes);
}
@Test public void testJSONByteStringToDataMapWithUnsupportedContentType() throws MimeTypeParseException, IOException { // unsupport content type should fallback to JSON DataMap expectedDataMap = createTestDataMap(); ByteString byteString = ByteString.copy(JACKSON_DATA_CODEC.mapToBytes(expectedDataMap)); Map<String, String> headers = Collections.singletonMap(RestConstants.HEADER_CONTENT_TYPE, "mysuperkool/xson"); DataMap dataMap = DataMapConverter.bytesToDataMap(headers, byteString); Assert.assertEquals(dataMap, expectedDataMap); }
/**
 * Authenticates a request by extracting its token, validating it, and mapping it to a role.
 *
 * <p>Metric semantics: failure to even extract a token is counted as INVALID_AUTH_DATA
 * before rethrowing; the success metric is recorded only after the token validates and
 * a principal is resolved.
 *
 * @return the principal (role) encoded in the validated token
 * @throws AuthenticationException if the token is missing or fails validation
 */
@Override
public String authenticate(AuthenticationDataSource authData) throws AuthenticationException {
    String token;
    try {
        // Get Token
        token = getToken(authData);
    } catch (AuthenticationException exception) {
        incrementFailureMetric(ErrorCode.INVALID_AUTH_DATA);
        throw exception;
    }
    // Parse Token by validating
    String role = getPrincipal(authenticateToken(token));
    AuthenticationMetrics.authenticateSuccess(getClass().getSimpleName(), getAuthMethodName());
    return role;
}
/** A header carrying only the "Bearer " prefix with a blank token must be rejected. */
@Test(expectedExceptions = AuthenticationException.class)
public void testAuthenticateWhenJwtIsBlank() throws AuthenticationException {
    AuthenticationProviderToken tokenProvider = new AuthenticationProviderToken();
    tokenProvider.authenticate(new AuthenticationDataSource() {
        @Override
        public String getHttpHeader(String name) {
            // prefix followed by a single blank — no actual JWT
            return AuthenticationProviderToken.HTTP_HEADER_VALUE_PREFIX + " ";
        }

        @Override
        public boolean hasDataFromHttp() {
            return true;
        }
    });
}
private Deadline(long timeoutMs) { this.timeoutNanos = timeoutMs * 1000000L; }
/** Runs the deadline scenario on a daemon thread and fails with its recorded message, if any. */
@Test
public void testDeadline() throws Exception {
    isFailed = false;
    errorMsg = "";
    Thread worker = new Thread(createRunnable());
    worker.setDaemon(true);
    worker.start();
    // bounded join so a hung scenario cannot block the suite forever
    worker.join(60000);
    if (isFailed) {
        Assert.fail(errorMsg);
    }
}
/**
 * Sets the encoding options used when writing datasets.
 *
 * @param encOpts the options to use; must not be null
 * @throws NullPointerException if {@code encOpts} is null
 */
public final void setEncodingOptions(DicomEncodingOptions encOpts) {
    if (encOpts == null)
        // fail fast with a message naming the offending argument (was a bare NPE)
        throw new NullPointerException("encOpts must not be null");
    this.encOpts = encOpts;
}
/** Writing with undefined/empty-length encoding options must work for Explicit VR LE. */
@Test
public void testWriteDatasetWithUndefEmptyLength() throws IOException {
    DicomOutputStream out = new DicomOutputStream(file);
    DicomEncodingOptions opts = new DicomEncodingOptions(false, true, true, true, true);
    out.setEncodingOptions(opts);
    testWriteDataset(out, UID.ExplicitVRLittleEndian);
}
/**
 * Returns a FileSystem for the given key, creating and caching one on first use.
 *
 * <p>On a cache hit the shared instance's reference count is incremented; on a miss
 * a new FileSystem (count 1) is inserted. Each caller receives a fresh
 * {@code InstanceCachingFileSystem} wrapper around the shared instance — presumably
 * so per-caller close() decrements the count rather than closing the shared instance;
 * confirm against InstanceCachingFileSystem.
 */
public FileSystem get(Key key) {
    synchronized (mLock) {
        Value value = mCacheMap.get(key);
        FileSystem fs;
        if (value == null) {
            // On cache miss, create and insert a new FileSystem instance,
            fs = FileSystem.Factory.create(FileSystemContext.create(key.mSubject, key.mConf));
            mCacheMap.put(key, new Value(fs, 1));
        } else {
            fs = value.mFileSystem;
            value.mRefCount.getAndIncrement();
        }
        return new InstanceCachingFileSystem(fs, key);
    }
}
/** Closing one cached handle must not close a later handle for the same key. */
@Test
public void getThenClose() throws IOException {
    Key userKey = createTestFSKey("user1");
    FileSystem first = mFileSystemCache.get(userKey);
    first.close();
    FileSystem second = mFileSystemCache.get(userKey);
    // closing the first handle evicted/released the old instance, so a new one is delegated
    assertNotSame(getDelegatedFileSystem(first), getDelegatedFileSystem(second));
    assertTrue(first.isClosed());
    assertFalse(second.isClosed());
}
/**
 * Queries a single instance by (namespace, group/service, cluster, ip, port) and
 * maps it to a detail view object.
 */
@GetMapping
@TpsControl(pointName = "NamingInstanceQuery", name = "HttpNamingInstanceQuery")
@Secured(action = ActionTypes.READ)
public Result<InstanceDetailInfoVo> detail(
        @RequestParam(value = "namespaceId", defaultValue = Constants.DEFAULT_NAMESPACE_ID) String namespaceId,
        @RequestParam(value = "groupName", defaultValue = Constants.DEFAULT_GROUP) String groupName,
        @RequestParam("serviceName") String serviceName,
        @RequestParam(value = "clusterName", defaultValue = UtilsAndCommons.DEFAULT_CLUSTER_NAME) String clusterName,
        @RequestParam("ip") String ip,
        @RequestParam("port") Integer port) throws NacosException {
    String groupedServiceName = NamingUtils.getGroupedName(serviceName, groupName);
    Instance instance = instanceServiceV2.getInstance(namespaceId, groupedServiceName, clusterName, ip, port);

    InstanceDetailInfoVo detailVo = new InstanceDetailInfoVo();
    detailVo.setServiceName(groupedServiceName);
    detailVo.setIp(ip);
    detailVo.setPort(port);
    detailVo.setClusterName(clusterName);
    detailVo.setWeight(instance.getWeight());
    detailVo.setHealthy(instance.isHealthy());
    detailVo.setInstanceId(instance.getInstanceId());
    detailVo.setMetadata(instance.getMetadata());
    return Result.success(detailVo);
}
/** detail() must query the v2 service with the grouped name and propagate the instance id. */
@Test
void detail() throws Exception {
    Instance stubInstance = new Instance();
    stubInstance.setInstanceId("test-id");
    when(instanceServiceV2.getInstance(TEST_NAMESPACE, TEST_SERVICE_NAME, TEST_CLUSTER_NAME, TEST_IP, 9999))
            .thenReturn(stubInstance);

    Result<InstanceDetailInfoVo> response = instanceControllerV2.detail(
            TEST_NAMESPACE, "DEFAULT_GROUP", "test-service", TEST_CLUSTER_NAME, TEST_IP, 9999);

    verify(instanceServiceV2).getInstance(TEST_NAMESPACE, TEST_SERVICE_NAME, TEST_CLUSTER_NAME, TEST_IP, 9999);
    assertEquals(ErrorCode.SUCCESS.getCode(), response.getCode());
    assertEquals(stubInstance.getInstanceId(), response.getData().getInstanceId());
}
/**
 * Compares two boxed numbers of <em>different</em> runtime classes for numeric equality,
 * routing double-representable and long-representable types through dedicated
 * comparisons to avoid precision loss; falls back to {@code Object.equals} for
 * other Number subclasses.
 *
 * <p>Fix: the class locals were raw {@code Class} types; use the wildcard form
 * {@code Class<?>} as required for getClass() results (raw types defeat generics checks).
 */
public static boolean equal(Number lhs, Number rhs) {
    Class<?> lhsClass = lhs.getClass();
    Class<?> rhsClass = rhs.getClass();
    // callers are expected to have handled the same-class case already
    assert lhsClass != rhsClass;

    if (isDoubleRepresentable(lhsClass)) {
        if (isDoubleRepresentable(rhsClass)) {
            return equalDoubles(lhs.doubleValue(), rhs.doubleValue());
        } else if (isLongRepresentable(rhsClass)) {
            return equalLongAndDouble(rhs.longValue(), lhs.doubleValue());
        }
    } else if (isLongRepresentable(lhsClass)) {
        if (isDoubleRepresentable(rhsClass)) {
            return equalLongAndDouble(lhs.longValue(), rhs.doubleValue());
        } else if (isLongRepresentable(rhsClass)) {
            return lhs.longValue() == rhs.longValue();
        }
    }
    return lhs.equals(rhs);
}
/** A null lhs must fail fast (NPE from lhs.getClass()) rather than return a result. */
@SuppressWarnings("ConstantConditions")
@Test(expected = Throwable.class)
public void testNullLhsInEqualThrows() {
    equal(null, 1);
}
/**
 * Validates a group.instance.id using the same character/length rules as topic names,
 * surfacing violations as InvalidConfigurationException.
 *
 * @throws InvalidConfigurationException if the id violates the naming rules
 */
public static void validateGroupInstanceId(String id) {
    Topic.validate(id, "Group instance id", message -> {
        throw new InvalidConfigurationException(message);
    });
}
/** Ids made of alphanumerics, '.', '_' and '-' up to the 249-char limit must validate. */
@Test
public void shouldAcceptValidGroupInstanceIds() {
    String maxLengthString = TestUtils.randomString(249);
    String[] validIds = {"valid", "INSTANCE", "gRoUp", "ar6", "VaL1d", "_0-9_.", "...", maxLengthString};
    for (String id : validIds) {
        // must not throw for any of these
        JoinGroupRequest.validateGroupInstanceId(id);
    }
}
/**
 * Copies all entries from the given map into the backing map.
 * Note: the parameter shadows the {@code map} field; {@code this.map} is the backing store.
 */
@Override
public void putAll(Map<K, V> map) {
    this.map.putAll(map);
}
/** putAll must copy every entry into the backing map. */
@Test
public void testPutAll() {
    Map<Integer, String> source = new HashMap<>();
    source.put(23, "value-23");
    source.put(42, "value-42");

    adapter.putAll(source);

    assertEquals(source.size(), map.size());
    for (Integer key : source.keySet()) {
        assertTrue(map.containsKey(key));
    }
}
/** Maps the key to its Redis Cluster slot and resolves the node serving that slot. */
@Override
public RedisClusterNode clusterGetNodeForKey(byte[] key) {
    return clusterGetNodeForSlot(executorService.getConnectionManager().calcSlot(key));
}
/** Any key must resolve to some cluster node. */
@Test
public void testClusterGetNodeForKey() {
    testInCluster(connection -> {
        RedisClusterNode owner = connection.clusterGetNodeForKey("123".getBytes());
        assertThat(owner).isNotNull();
    });
}
/**
 * Resolves table metadata for a handle, first asserting the handle belongs to this
 * connector instance.
 */
@Override
public ConnectorTableMetadata getTableMetadata(ConnectorSession session, ConnectorTableHandle table) {
    ExampleTableHandle handle = (ExampleTableHandle) table;
    checkArgument(handle.getConnectorId().equals(connectorId), "tableHandle is not for this connector");
    return getTableMetadata(new SchemaTableName(handle.getSchemaName(), handle.getTableName()));
}
/** Known tables yield full metadata; unknown schema/table combinations yield null. */
@Test
public void getTableMetadata() {
    // known table
    ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(SESSION, NUMBERS_TABLE_HANDLE);
    assertEquals(tableMetadata.getTable(), new SchemaTableName("example", "numbers"));
    assertEquals(tableMetadata.getColumns(), ImmutableList.of(
            new ColumnMetadata("text", createUnboundedVarcharType()),
            new ColumnMetadata("value", BIGINT)));

    // unknown tables should produce null
    assertNull(metadata.getTableMetadata(SESSION, new ExampleTableHandle(CONNECTOR_ID, "unknown", "unknown")));
    assertNull(metadata.getTableMetadata(SESSION, new ExampleTableHandle(CONNECTOR_ID, "example", "unknown")));
    assertNull(metadata.getTableMetadata(SESSION, new ExampleTableHandle(CONNECTOR_ID, "unknown", "numbers")));
}
/**
 * Returns true only when every given logic table belongs to a single binding table
 * rule; the comparison is case-insensitive. Empty input is never "all binding".
 */
@Override
public boolean isAllBindingTables(final Collection<String> logicTableNames) {
    if (logicTableNames.isEmpty()) {
        return false;
    }
    Optional<BindingTableRule> bindingTableRule = findBindingTableRule(logicTableNames);
    if (!bindingTableRule.isPresent()) {
        return false;
    }
    // case-insensitive membership check against every table of the matched rule
    Collection<String> boundTables = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
    boundTables.addAll(bindingTableRule.get().getAllLogicTables());
    return !boundTables.isEmpty() && boundTables.containsAll(logicTableNames);
}
/** Binding-table membership is case-insensitive; unbound tables or empty input yield false. */
@Test
void assertIsAllBindingTable() {
    // bound tables match regardless of case, individually or together
    assertTrue(createMaximumShardingRule().isAllBindingTables(Collections.singleton("logic_Table")));
    assertTrue(createMaximumShardingRule().isAllBindingTables(Collections.singleton("logic_table")));
    assertTrue(createMaximumShardingRule().isAllBindingTables(Collections.singleton("sub_Logic_Table")));
    assertTrue(createMaximumShardingRule().isAllBindingTables(Collections.singleton("sub_logic_table")));
    assertTrue(createMaximumShardingRule().isAllBindingTables(Arrays.asList("logic_Table", "sub_Logic_Table")));
    assertTrue(createMaximumShardingRule().isAllBindingTables(Arrays.asList("logic_table", "sub_logic_Table")));
    // one unbound table poisons the whole set
    assertFalse(createMaximumShardingRule().isAllBindingTables(Arrays.asList("logic_table", "sub_logic_Table", "new_table")));
    // empty input and unknown tables are never "all binding"
    assertFalse(createMaximumShardingRule().isAllBindingTables(Collections.emptyList()));
    assertFalse(createMaximumShardingRule().isAllBindingTables(Collections.singleton("new_Table")));
}
/**
 * Rewrites {@code path} by locating the file system's entropy-injection key in the
 * URI path and either substituting generated entropy (injectEntropy == true) or
 * removing the key entirely (false). Paths that do not contain the key, or file
 * systems without a key, are returned unchanged.
 *
 * @throws IOException if the rewritten string is not a valid URI — only possible if
 *                     the generated entropy contains characters illegal in a URI path
 */
@VisibleForTesting
static Path resolveEntropy(Path path, EntropyInjectingFileSystem efs, boolean injectEntropy)
        throws IOException {
    final String entropyInjectionKey = efs.getEntropyInjectionKey();

    if (entropyInjectionKey == null) {
        return path;
    } else {
        final URI originalUri = path.toUri();
        final String checkpointPath = originalUri.getPath();

        final int indexOfKey = checkpointPath.indexOf(entropyInjectionKey);
        if (indexOfKey == -1) {
            // key not present anywhere in the path: nothing to rewrite
            return path;
        } else {
            // splice: prefix + (entropy | nothing) + suffix around the key occurrence
            final StringBuilder buffer = new StringBuilder(checkpointPath.length());
            buffer.append(checkpointPath, 0, indexOfKey);

            if (injectEntropy) {
                buffer.append(efs.generateEntropy());
            }

            buffer.append(
                    checkpointPath,
                    indexOfKey + entropyInjectionKey.length(),
                    checkpointPath.length());

            final String rewrittenPath = buffer.toString();
            try {
                // rebuild the URI keeping scheme/authority/query/fragment intact
                return new Path(
                        new URI(
                                        originalUri.getScheme(),
                                        originalUri.getAuthority(),
                                        rewrittenPath,
                                        originalUri.getQuery(),
                                        originalUri.getFragment())
                                .normalize());
            } catch (URISyntaxException e) {
                // this could only happen if the injected entropy string contains invalid
                // characters
                throw new IOException(
                        "URI format error while processing path for entropy injection", e);
            }
        }
    }
}
/** An entropy key embedded inside a path segment is replaced (or stripped) in place. */
@Test
void testEntropyNotFullSegment() throws Exception {
    EntropyInjectingFileSystem entropyFs = new TestEntropyInjectingFs("_entropy_key_", "pqr");
    Path original = new Path("s3://myhost:122/entropy-_entropy_key_-suffix/file");

    // with injection: the key is replaced by the generated entropy
    assertThat(EntropyInjector.resolveEntropy(original, entropyFs, true))
            .isEqualTo(new Path("s3://myhost:122/entropy-pqr-suffix/file"));
    // without injection: the key is removed entirely
    assertThat(EntropyInjector.resolveEntropy(original, entropyFs, false))
            .isEqualTo(new Path("s3://myhost:122/entropy--suffix/file"));
}
/**
 * Logs one line per metric when the target logger/marker is enabled; otherwise the
 * whole report is skipped so no formatting work is done.
 */
@Override
@SuppressWarnings("rawtypes")
public void report(SortedMap<String, Gauge> gauges,
                   SortedMap<String, Counter> counters,
                   SortedMap<String, Histogram> histograms,
                   SortedMap<String, Meter> meters,
                   SortedMap<String, Timer> timers) {
    if (!loggerProxy.isEnabled(marker)) {
        return;
    }
    StringBuilder line = new StringBuilder();
    for (Entry<String, Gauge> e : gauges.entrySet()) {
        logGauge(line, e.getKey(), e.getValue());
    }
    for (Entry<String, Counter> e : counters.entrySet()) {
        logCounter(line, e.getKey(), e.getValue());
    }
    for (Entry<String, Histogram> e : histograms.entrySet()) {
        logHistogram(line, e.getKey(), e.getValue());
    }
    for (Entry<String, Meter> e : meters.entrySet()) {
        logMeter(line, e.getKey(), e.getValue());
    }
    for (Entry<String, Timer> e : timers.entrySet()) {
        logTimer(line, e.getKey(), e.getValue());
    }
}
/** With INFO enabled, a histogram report logs one line containing all snapshot fields. */
@Test
public void reportsHistogramValuesDefault() {
    final Histogram histogram = histogram();
    when(logger.isInfoEnabled(marker)).thenReturn(true);
    infoReporter().report(map(), map(), map("test.histogram", histogram), map(), map());

    // expected line mirrors the mocked snapshot values (min/max etc. come from the stub,
    // not from real statistics — hence min=4 > max=2)
    verify(logger).info(marker, "type=HISTOGRAM, name=prefix.test.histogram, count=1, min=4, max=2, mean=3.0, " +
            "stddev=5.0, p50=6.0, p75=7.0, p95=8.0, p98=9.0, p99=10.0, p999=11.0");
}
/**
 * Creates a {@code UserGroupInformation} for the given remote user name, defaulting the
 * authentication method to SIMPLE; delegates to the two-argument overload.
 *
 * @param user the remote user's name
 * @return a UGI with SIMPLE authentication for {@code user}
 */
@InterfaceAudience.Public @InterfaceStability.Evolving public static UserGroupInformation createRemoteUser(String user) { return createRemoteUser(user, AuthMethod.SIMPLE); }
// Verifies that the one-argument createRemoteUser defaults to SIMPLE auth while the
// two-argument overload honours an explicit method (KERBEROS), and that the auth method
// is reflected in the UGI's toString().
@Test (timeout = 30000) public void testCreateRemoteUser() { UserGroupInformation ugi = UserGroupInformation.createRemoteUser("user1"); assertEquals(AuthenticationMethod.SIMPLE, ugi.getAuthenticationMethod()); assertTrue (ugi.toString().contains("(auth:SIMPLE)")); ugi = UserGroupInformation.createRemoteUser("user1", AuthMethod.KERBEROS); assertEquals(AuthenticationMethod.KERBEROS, ugi.getAuthenticationMethod()); assertTrue (ugi.toString().contains("(auth:KERBEROS)")); }
/**
 * Creates a new, unconfigured {@code ReadChangeStream} transform; delegates to its
 * static factory. Configuration (project, instance, table, ...) is applied via the
 * transform's {@code with*} methods.
 *
 * @return a fresh {@code ReadChangeStream} instance
 */
public static ReadChangeStream readChangeStream() { return ReadChangeStream.create(); }
// Verifies that withoutValidation() suppresses apply-time validation: the only failure
// expected is that the pipeline is never run (PipelineRunMissingException), not a
// validation error during apply.
@Test public void testReadChangeStreamPassWithoutValidationDuringApply() { BigtableIO.ReadChangeStream readChangeStream = BigtableIO.readChangeStream() .withProjectId("project") .withInstanceId("instance") .withTableId("table") .withoutValidation(); // No RunTime exception as seen in previous test with validation. Only error that the pipeline // is not ran. thrown.expect(PipelineRunMissingException.class); p.apply(readChangeStream); }
/**
 * Consumes one line of an fsimage XML dump, advancing the parser's state machine
 * (DEFAULT -> INODE_SECTION -> INODE -> FILE -> FILE_WITH_REPLICATION) based on the tags
 * seen, and returns any &lt;block&gt; entries on the line tagged with the current file's
 * replication factor. Lines outside the INodeSection yield an empty list.
 *
 * @param line one raw line of the XML dump (may contain several tags/blocks)
 * @return the blocks found on this line, possibly empty
 * @throws IOException if multiple replication values appear on one line, or a block is
 *     encountered while not inside a replicated-file context
 */
List<BlockInfo> parseLine(String line) throws IOException { if (currentState == State.DEFAULT) { if (line.contains("<INodeSection>")) { transitionTo(State.INODE_SECTION); } else { return Collections.emptyList(); } } if (line.contains("<inode>")) { transitionTo(State.INODE); } if (line.contains("<type>FILE</type>")) { transitionTo(State.FILE); } List<String> replicationStrings = valuesFromXMLString(line, "replication"); if (!replicationStrings.isEmpty()) { if (replicationStrings.size() > 1) { throw new IOException(String.format("Found %s replication strings", replicationStrings.size())); } transitionTo(State.FILE_WITH_REPLICATION); currentReplication = Short.parseShort(replicationStrings.get(0)); } Matcher blockMatcher = BLOCK_PATTERN.matcher(line); List<BlockInfo> blockInfos = new ArrayList<>(); while (blockMatcher.find()) { if (currentState != State.FILE_WITH_REPLICATION) { throw new IOException( "Found a block string when in state: " + currentState); } long id = Long.parseLong(blockMatcher.group(1)); long gs = Long.parseLong(blockMatcher.group(2)); long size = Long.parseLong(blockMatcher.group(3)); blockInfos.add(new BlockInfo(id, gs, size, currentReplication)); } if (line.contains("</inode>")) { transitionTo(State.INODE_SECTION); } if (line.contains("</INodeSection>")) { transitionTo(State.DEFAULT); } return blockInfos; }
// Feeds a multi-line fsimage XML fragment through the parser and verifies that blocks are
// associated with the replication factor of their enclosing FILE inode (3 for the first
// inode, 12 for the second) and that non-FILE inodes contribute nothing.
@Test public void testBlocksFromLine() throws Exception { String[] lines = { "<INodeSection><lastInodeId>1" + "</lastInodeId><inode><id>2</id><type>FILE</type>" + "<name>fake-file</name>" + "<replication>3</replication><mtime>3</mtime>" + "<atime>4</atime>" + "<perferredBlockSize>5</perferredBlockSize>" + "<permission>hdfs:hdfs:rw-------</permission>" + "<blocks><block><id>6</id><genstamp>7</genstamp>" + "<numBytes>8</numBytes></block>" + "<block><id>9</id><genstamp>10</genstamp>" + "<numBytes>11</numBytes></block></inode>", "<inode><type>DIRECTORY</type></inode>", "<inode><type>FILE</type>", "<replication>12</replication>", "<blocks><block><id>13</id><genstamp>14</genstamp>" + "<numBytes>15</numBytes></block>", "</inode>", "</INodeSection>" }; short replCount = 0; // This is ignored Map<BlockInfo, Short> expectedBlockCount = new HashMap<>(); expectedBlockCount.put(new BlockInfo(6, 7, 8, replCount), (short) 3); expectedBlockCount.put(new BlockInfo(9, 10, 11, replCount), (short) 3); expectedBlockCount.put(new BlockInfo(13, 14, 15, replCount), (short) 12); final Map<BlockInfo, Short> actualBlockCount = new HashMap<>(); XMLParser parser = new XMLParser(); for (String line : lines) { for (BlockInfo info : parser.parseLine(line)) { actualBlockCount.put(info, info.getReplication()); } } for (Map.Entry<BlockInfo, Short> expect : expectedBlockCount.entrySet()) { assertEquals(expect.getValue(), actualBlockCount.get(expect.getKey())); } }
/**
 * Null-safe trim: removes leading and trailing whitespace from the given string.
 *
 * @param str the string to trim; may be {@code null}
 * @return the trimmed string, or {@code null} when {@code str} is {@code null}
 */
public static String trim(String str) {
    if (str == null) {
        return null;
    }
    return str.trim();
}
// Verifies that trim removes leading-only, trailing-only, and both-sided whitespace.
@Test void testTrim() { assertEquals("left blank", StringUtils.trim(" left blank")); assertEquals("right blank", StringUtils.trim("right blank ")); assertEquals("bi-side blank", StringUtils.trim(" bi-side blank ")); }
/**
 * Downloads the agent binaries (agent jar, plugins zip, TFS impl) if stale, launches the
 * agent JVM with the computed command line, pumps its stdout/stderr into log appenders
 * (to keep the child from blocking on Windows), installs a shutdown hook that kills the
 * child, and blocks until the agent exits.
 *
 * @return the agent process's exit code; EXCEPTION_OCCURRED if download/launch failed,
 *     or whatever waitFor observed if the wait was interrupted (the agent is destroyed)
 */
@Override public int run(String launcherVersion, String launcherMd5, ServerUrlGenerator urlGenerator, Map<String, String> env, Map<String, String> context) { int exitValue = 0; LOG.info("Agent launcher is version: {}", CurrentGoCDVersion.getInstance().fullVersion()); String[] command = new String[]{}; try { AgentBootstrapperArgs bootstrapperArgs = AgentBootstrapperArgs.fromProperties(context); ServerBinaryDownloader agentDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs); agentDownloader.downloadIfNecessary(DownloadableFile.AGENT); ServerBinaryDownloader pluginZipDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs); pluginZipDownloader.downloadIfNecessary(DownloadableFile.AGENT_PLUGINS); ServerBinaryDownloader tfsImplDownloader = new ServerBinaryDownloader(urlGenerator, bootstrapperArgs); tfsImplDownloader.downloadIfNecessary(DownloadableFile.TFS_IMPL); command = agentInvocationCommand(agentDownloader.getMd5(), launcherMd5, pluginZipDownloader.getMd5(), tfsImplDownloader.getMd5(), env, context, agentDownloader.getExtraProperties()); LOG.info("Launching Agent with command: {}", join(command, " ")); Process agent = invoke(command); // The next lines prevent the child process from blocking on Windows AgentOutputAppender agentOutputAppenderForStdErr = new AgentOutputAppender(GO_AGENT_STDERR_LOG); AgentOutputAppender agentOutputAppenderForStdOut = new AgentOutputAppender(GO_AGENT_STDOUT_LOG); if (new SystemEnvironment().consoleOutToStdout()) { agentOutputAppenderForStdErr.writeTo(AgentOutputAppender.Outstream.STDERR); agentOutputAppenderForStdOut.writeTo(AgentOutputAppender.Outstream.STDOUT); } agent.getOutputStream().close(); AgentConsoleLogThread stdErrThd = new AgentConsoleLogThread(agent.getErrorStream(), agentOutputAppenderForStdErr); stdErrThd.start(); AgentConsoleLogThread stdOutThd = new AgentConsoleLogThread(agent.getInputStream(), agentOutputAppenderForStdOut); stdOutThd.start(); Shutdown shutdownHook = new
Shutdown(agent); Runtime.getRuntime().addShutdownHook(shutdownHook); try { exitValue = agent.waitFor(); } catch (InterruptedException ie) { LOG.error("Agent was interrupted. Terminating agent and respawning. {}", ie.toString()); agent.destroy(); } finally { removeShutdownHook(shutdownHook); stdErrThd.stopAndJoin(); stdOutThd.stopAndJoin(); } } catch (Exception e) { LOG.error("Exception while executing command: {} - {}", join(command, " "), e.toString()); exitValue = EXCEPTION_OCCURRED; } return exitValue; }
// Verifies the exact java command line the bootstrapper builds for the agent subprocess:
// java binary path, md5 system properties for agent/plugins/launcher/tfs, the agent.jar
// target, and the server connection arguments. The stubbed process exits with 42.
@Test public void shouldStartSubprocessWithCommandLine() throws InterruptedException { final List<String> cmd = new ArrayList<>(); String expectedAgentMd5 = TEST_AGENT.getMd5(); String expectedAgentPluginsMd5 = TEST_AGENT_PLUGINS.getMd5(); String expectedTfsMd5 = TEST_TFS_IMPL.getMd5(); AgentProcessParentImpl bootstrapper = createBootstrapper(cmd); int returnCode = bootstrapper.run("launcher_version", "bar", getURLGenerator(), new HashMap<>(), context()); assertThat(returnCode, is(42)); assertThat(cmd.toArray(new String[]{}), equalTo(new String[]{ (getProperty("java.home") + FileSystems.getDefault().getSeparator() + "bin" + FileSystems.getDefault().getSeparator() + "java"), "-Dagent.plugins.md5=" + expectedAgentPluginsMd5, "-Dagent.binary.md5=" + expectedAgentMd5, "-Dagent.launcher.md5=bar", "-Dagent.tfs.md5=" + expectedTfsMd5, "-Dagent.bootstrapper.version=UNKNOWN", "-jar", "agent.jar", "-serverUrl", "http://localhost:" + server.getPort() + "/go/", "-sslVerificationMode", "NONE", "-rootCertFile", new File("/path/to/cert.pem").getAbsolutePath() })); }
/**
 * Removes the cached redo data for one subscriber, unless that subscriber is still
 * expected to be registered (in which case the entry is kept for re-subscription).
 *
 * @param serviceName the service name
 * @param groupName the group the service belongs to
 * @param cluster the cluster filter of the subscription
 */
public void removeSubscriberForRedo(String serviceName, String groupName, String cluster) {
    final String groupedName = NamingUtils.getGroupedName(serviceName, groupName);
    final String key = ServiceInfo.getKey(groupedName, cluster);
    // Guarded by the map itself so check-then-remove is atomic w.r.t. other redo updates.
    synchronized (subscribes) {
        final SubscriberRedoData redoData = subscribes.get(key);
        if (redoData != null && !redoData.isExpectedRegistered()) {
            subscribes.remove(key);
        }
    }
}
// Verifies that after a deregister (clearing the expected-registered flag), the cached
// subscriber redo entry can be removed from the redo map.
@Test void testRemoveSubscriberForRedo() { ConcurrentMap<String, SubscriberRedoData> subscribes = getSubscriberRedoDataMap(); assertTrue(subscribes.isEmpty()); redoService.cacheSubscriberForRedo(SERVICE, GROUP, CLUSTER); assertFalse(subscribes.isEmpty()); redoService.subscriberDeregister(SERVICE, GROUP, CLUSTER); redoService.removeSubscriberForRedo(SERVICE, GROUP, CLUSTER); assertTrue(subscribes.isEmpty()); }
/**
 * Returns a coder for Avro {@code GenericRecord}s of the given schema; delegates to
 * {@code AvroGenericCoder}'s own factory.
 *
 * @param schema the Avro schema the coded records conform to
 * @return an {@code AvroGenericCoder} for {@code schema}
 */
public static AvroGenericCoder of(Schema schema) { return AvroGenericCoder.of(schema); }
// Verifies that AvroCoder reports non-determinism for classes with direct or indirect
// recursive field references, naming the offending field in the reason.
@Test public void testDeterminismCyclicClass() { assertNonDeterministic( AvroCoder.of(Cyclic.class), reasonField(Cyclic.class, "cyclicField", "appears recursively")); assertNonDeterministic( AvroCoder.of(CyclicField.class), reasonField(Cyclic.class, "cyclicField", Cyclic.class.getName() + " appears recursively")); assertNonDeterministic( AvroCoder.of(IndirectCycle1.class), reasonField( IndirectCycle2.class, "field2", IndirectCycle1.class.getName() + " appears recursively")); }
/**
 * Removes every cached plugin entry from the shared plugin map.
 */
public void cleanPluginData() { PLUGIN_MAP.clear(); }
// Verifies that cleanPluginData clears all previously cached PluginData entries
// (inspected via reflection on the cache's internal map).
@Test public void testCleanPluginData() throws NoSuchFieldException, IllegalAccessException { PluginData firstCachedPluginData = PluginData.builder().name(mockName1).build(); PluginData secondCachedPluginData = PluginData.builder().name(mockName2).build(); ConcurrentHashMap<String, PluginData> pluginMap = getFieldByName(pluginMapStr); pluginMap.put(mockName1, firstCachedPluginData); pluginMap.put(mockName2, secondCachedPluginData); assertNotNull(pluginMap.get(mockName1)); assertNotNull(pluginMap.get(mockName2)); BaseDataCache.getInstance().cleanPluginData(); assertNull(pluginMap.get(mockName1)); assertNull(pluginMap.get(mockName2)); }
/**
 * Tracks charge counts of degradable jewellery (dodgy necklace, amulets of chemistry and
 * bounty, binding necklace, ring of forging, chronicle, slayer bracelets, blood essence,
 * bracelet of clay) by pattern-matching GAMEMESSAGE/SPAM chat lines. Matching messages
 * update the persisted charge counters, and breakage messages additionally fire the
 * configured notifications. Note the mutually-exclusive else-if chain: each message is
 * consumed by at most one handler, and break messages that precede the "used" message
 * deliberately set max+1 charges so the following decrement re-syncs the counter.
 */
@Subscribe public void onChatMessage(ChatMessage event) { if (event.getType() == ChatMessageType.GAMEMESSAGE || event.getType() == ChatMessageType.SPAM) { String message = Text.removeTags(event.getMessage()); Matcher dodgyCheckMatcher = DODGY_CHECK_PATTERN.matcher(message); Matcher dodgyProtectMatcher = DODGY_PROTECT_PATTERN.matcher(message); Matcher dodgyBreakMatcher = DODGY_BREAK_PATTERN.matcher(message); Matcher bindingNecklaceCheckMatcher = BINDING_CHECK_PATTERN.matcher(message); Matcher bindingNecklaceUsedMatcher = BINDING_USED_PATTERN.matcher(message); Matcher ringOfForgingCheckMatcher = RING_OF_FORGING_CHECK_PATTERN.matcher(message); Matcher amuletOfChemistryCheckMatcher = AMULET_OF_CHEMISTRY_CHECK_PATTERN.matcher(message); Matcher amuletOfChemistryUsedMatcher = AMULET_OF_CHEMISTRY_USED_PATTERN.matcher(message); Matcher amuletOfChemistryBreakMatcher = AMULET_OF_CHEMISTRY_BREAK_PATTERN.matcher(message); Matcher amuletOfBountyCheckMatcher = AMULET_OF_BOUNTY_CHECK_PATTERN.matcher(message); Matcher amuletOfBountyUsedMatcher = AMULET_OF_BOUNTY_USED_PATTERN.matcher(message); Matcher chronicleAddMatcher = CHRONICLE_ADD_PATTERN.matcher(message); Matcher chronicleUseAndCheckMatcher = CHRONICLE_USE_AND_CHECK_PATTERN.matcher(message); Matcher slaughterActivateMatcher = BRACELET_OF_SLAUGHTER_ACTIVATE_PATTERN.matcher(message); Matcher slaughterCheckMatcher = BRACELET_OF_SLAUGHTER_CHECK_PATTERN.matcher(message); Matcher expeditiousActivateMatcher = EXPEDITIOUS_BRACELET_ACTIVATE_PATTERN.matcher(message); Matcher expeditiousCheckMatcher = EXPEDITIOUS_BRACELET_CHECK_PATTERN.matcher(message); Matcher bloodEssenceCheckMatcher = BLOOD_ESSENCE_CHECK_PATTERN.matcher(message); Matcher bloodEssenceExtractMatcher = BLOOD_ESSENCE_EXTRACT_PATTERN.matcher(message); Matcher braceletOfClayCheckMatcher = BRACELET_OF_CLAY_CHECK_PATTERN.matcher(message); if (message.contains(RING_OF_RECOIL_BREAK_MESSAGE)) { notifier.notify(config.recoilNotification(), "Your Ring of Recoil has shattered"); }
else if (dodgyBreakMatcher.find()) { notifier.notify(config.dodgyNotification(), "Your dodgy necklace has crumbled to dust."); updateDodgyNecklaceCharges(MAX_DODGY_CHARGES); } else if (dodgyCheckMatcher.find()) { updateDodgyNecklaceCharges(Integer.parseInt(dodgyCheckMatcher.group(1))); } else if (dodgyProtectMatcher.find()) { updateDodgyNecklaceCharges(Integer.parseInt(dodgyProtectMatcher.group(1))); } else if (amuletOfChemistryCheckMatcher.find()) { updateAmuletOfChemistryCharges(Integer.parseInt(amuletOfChemistryCheckMatcher.group(1))); } else if (amuletOfChemistryUsedMatcher.find()) { final String match = amuletOfChemistryUsedMatcher.group(1); int charges = 1; if (!match.equals("one")) { charges = Integer.parseInt(match); } updateAmuletOfChemistryCharges(charges); } else if (amuletOfChemistryBreakMatcher.find()) { notifier.notify(config.amuletOfChemistryNotification(), "Your amulet of chemistry has crumbled to dust."); updateAmuletOfChemistryCharges(MAX_AMULET_OF_CHEMISTRY_CHARGES); } else if (amuletOfBountyCheckMatcher.find()) { updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyCheckMatcher.group(1))); } else if (amuletOfBountyUsedMatcher.find()) { updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyUsedMatcher.group(1))); } else if (message.equals(AMULET_OF_BOUNTY_BREAK_TEXT)) { updateAmuletOfBountyCharges(MAX_AMULET_OF_BOUNTY_CHARGES); } else if (message.contains(BINDING_BREAK_TEXT)) { notifier.notify(config.bindingNotification(), BINDING_BREAK_TEXT); // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync updateBindingNecklaceCharges(MAX_BINDING_CHARGES + 1); } else if (bindingNecklaceUsedMatcher.find()) { final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT); if (equipment.contains(ItemID.BINDING_NECKLACE)) { updateBindingNecklaceCharges(getItemCharges(ItemChargeConfig.KEY_BINDING_NECKLACE) - 1); } } else if (bindingNecklaceCheckMatcher.find()) { final String match =
bindingNecklaceCheckMatcher.group(1); int charges = 1; if (!match.equals("one")) { charges = Integer.parseInt(match); } updateBindingNecklaceCharges(charges); } else if (ringOfForgingCheckMatcher.find()) { final String match = ringOfForgingCheckMatcher.group(1); int charges = 1; if (!match.equals("one")) { charges = Integer.parseInt(match); } updateRingOfForgingCharges(charges); } else if (message.equals(RING_OF_FORGING_USED_TEXT) || message.equals(RING_OF_FORGING_VARROCK_PLATEBODY)) { final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY); final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT); // Determine if the player smelted with a Ring of Forging equipped. if (equipment == null) { return; } if (equipment.contains(ItemID.RING_OF_FORGING) && (message.equals(RING_OF_FORGING_USED_TEXT) || inventory.count(ItemID.IRON_ORE) > 1)) { int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_RING_OF_FORGING) - 1, 0, MAX_RING_OF_FORGING_CHARGES); updateRingOfForgingCharges(charges); } } else if (message.equals(RING_OF_FORGING_BREAK_TEXT)) { notifier.notify(config.ringOfForgingNotification(), "Your ring of forging has melted."); // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync updateRingOfForgingCharges(MAX_RING_OF_FORGING_CHARGES + 1); } else if (chronicleAddMatcher.find()) { final String match = chronicleAddMatcher.group(1); if (match.equals("one")) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1); } else { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(match)); } } else if (chronicleUseAndCheckMatcher.find()) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(chronicleUseAndCheckMatcher.group(1))); } else if (message.equals(CHRONICLE_ONE_CHARGE_TEXT)) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1); } else if (message.equals(CHRONICLE_EMPTY_TEXT) || message.equals(CHRONICLE_NO_CHARGES_TEXT)) {
setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 0); } else if (message.equals(CHRONICLE_FULL_TEXT)) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1000); } else if (slaughterActivateMatcher.find()) { final String found = slaughterActivateMatcher.group(1); if (found == null) { updateBraceletOfSlaughterCharges(MAX_SLAYER_BRACELET_CHARGES); notifier.notify(config.slaughterNotification(), BRACELET_OF_SLAUGHTER_BREAK_TEXT); } else { updateBraceletOfSlaughterCharges(Integer.parseInt(found)); } } else if (slaughterCheckMatcher.find()) { updateBraceletOfSlaughterCharges(Integer.parseInt(slaughterCheckMatcher.group(1))); } else if (expeditiousActivateMatcher.find()) { final String found = expeditiousActivateMatcher.group(1); if (found == null) { updateExpeditiousBraceletCharges(MAX_SLAYER_BRACELET_CHARGES); notifier.notify(config.expeditiousNotification(), EXPEDITIOUS_BRACELET_BREAK_TEXT); } else { updateExpeditiousBraceletCharges(Integer.parseInt(found)); } } else if (expeditiousCheckMatcher.find()) { updateExpeditiousBraceletCharges(Integer.parseInt(expeditiousCheckMatcher.group(1))); } else if (bloodEssenceCheckMatcher.find()) { updateBloodEssenceCharges(Integer.parseInt(bloodEssenceCheckMatcher.group(1))); } else if (bloodEssenceExtractMatcher.find()) { updateBloodEssenceCharges(getItemCharges(ItemChargeConfig.KEY_BLOOD_ESSENCE) - Integer.parseInt(bloodEssenceExtractMatcher.group(1))); } else if (message.contains(BLOOD_ESSENCE_ACTIVATE_TEXT)) { updateBloodEssenceCharges(MAX_BLOOD_ESSENCE_CHARGES); } else if (braceletOfClayCheckMatcher.find()) { updateBraceletOfClayCharges(Integer.parseInt(braceletOfClayCheckMatcher.group(1))); } else if (message.equals(BRACELET_OF_CLAY_USE_TEXT) || message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN)) { final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT); // Determine if the player mined with a Bracelet of Clay equipped.
if (equipment != null && equipment.contains(ItemID.BRACELET_OF_CLAY)) { final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY); // Charge is not used if only 1 inventory slot is available when mining in Prifddinas boolean ignore = inventory != null && inventory.count() == 27 && message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN); if (!ignore) { int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_BRACELET_OF_CLAY) - 1, 0, MAX_BRACELET_OF_CLAY_CHARGES); updateBraceletOfClayCharges(charges); } } } else if (message.equals(BRACELET_OF_CLAY_BREAK_TEXT)) { notifier.notify(config.braceletOfClayNotification(), "Your bracelet of clay has crumbled to dust"); updateBraceletOfClayCharges(MAX_BRACELET_OF_CLAY_CHARGES); } } }
// Simulates the client's message ordering on a ring-of-forging break: the break message
// resets the counter to max+1 (141), then the "used" message decrements it back to 140,
// verifying the break/used re-sync logic.
@Test public void testRofBreak() { // Create equipment inventory with ring of forging ItemContainer equipmentItemContainer = mock(ItemContainer.class); when(client.getItemContainer(InventoryID.EQUIPMENT)).thenReturn(equipmentItemContainer); when(equipmentItemContainer.contains(ItemID.RING_OF_FORGING)).thenReturn(true); when(equipmentItemContainer.getItems()).thenReturn(new Item[0]); // Run message to break ring and then use ring, to simulate actual client behavior ChatMessage breakMessage = new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", BREAK_RING_OF_FORGING, "", 0); itemChargePlugin.onChatMessage(breakMessage); verify(configManager).setRSProfileConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_RING_OF_FORGING, 141); when(configManager.getRSProfileConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_RING_OF_FORGING, Integer.class)).thenReturn(141); ChatMessage useMessage = new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", USED_RING_OF_FORGING, "", 0); itemChargePlugin.onChatMessage(useMessage); verify(configManager).setRSProfileConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_RING_OF_FORGING, 140); }
/**
 * Returns a read-only view of the generated-sources map; mutating the returned map
 * throws {@code UnsupportedOperationException}.
 *
 * @return an unmodifiable view of {@code sourcesMap}
 */
@Override public Map<String, String> getSourcesMap() { return Collections.unmodifiableMap(sourcesMap); }
// Verifies that the returned sources map is unmodifiable: put() must throw
// UnsupportedOperationException.
@Test void getSourcesMap() { assertThatExceptionOfType(UnsupportedOperationException.class).isThrownBy(() -> { Map<String, String> retrieved = kiePMMLFactoryModel.getSourcesMap(); retrieved.put("KEY", "VALUE"); }); }
/**
 * Validates a mnemonic word list by attempting to convert it back to entropy; the
 * result is discarded, only the validation side effect matters.
 *
 * @param words the mnemonic words to validate
 * @throws MnemonicException if the word list is invalid (bad word, length, or checksum)
 */
public void check(List<String> words) throws MnemonicException { toEntropy(words); }
// Verifies that a 12-word phrase with valid words but an invalid checksum is rejected
// with MnemonicChecksumException.
@Test(expected = MnemonicException.MnemonicChecksumException.class) public void testBadChecksum() throws Exception { List<String> words = WHITESPACE_SPLITTER.splitToList("bless cloud wheel regular tiny venue bird web grief security dignity zoo"); mc.check(words); }
/**
 * Serializes one row into an Elasticsearch bulk-API action string.
 *
 * <p>When a document id can be derived from the row, an {@code update} action with
 * {@code doc_as_upsert: true} is emitted; otherwise a plain {@code index} action is used.
 *
 * @param row the row to serialize
 * @return one bulk action: a metadata line, a newline, and the document line
 */
private String serializeUpsert(SeaTunnelRow row) {
    String key = keyExtractor.apply(row);
    Map<String, Object> document = toDocumentMap(row, seaTunnelRowType);
    String documentStr = writeAsJsonString(document, "document:" + document);
    if (key != null) {
        /*
         * format example: { "update" : {"_index" : "${your_index}", "_id" :
         * "${your_document_id}"} }\n { "doc" : ${your_document_json}, "doc_as_upsert" : true }
         */
        Map<String, String> upsertMetadata = createMetadata(row, key);
        String upsertMetadataStr =
                writeAsJsonString(upsertMetadata, "upsertMetadata:" + upsertMetadata);
        return "{ \"update\" :" + upsertMetadataStr + " }" + "\n"
                + "{ \"doc\" :" + documentStr + ", \"doc_as_upsert\" : true }";
    }
    /*
     * format example: { "index" : {"_index" : "${your_index}", "_id" : "${your_document_id}"}
     * }\n ${your_document_json}
     */
    Map<String, String> indexMetadata = createMetadata(row);
    String indexMetadataStr = writeAsJsonString(indexMetadata, "indexMetadata:" + indexMetadata);
    return "{ \"index\" :" + indexMetadataStr + " }" + "\n" + documentStr;
}

/**
 * Serializes {@code value} to a JSON string, wrapping Jackson failures into the common
 * SeaTunnel JSON-operation error. Extracted to remove the try/catch block that was
 * duplicated three times in {@link #serializeUpsert}.
 *
 * @param value the object to serialize
 * @param errorContext context text included in the error on failure
 * @return the JSON representation of {@code value}
 */
private String writeAsJsonString(Object value, String errorContext) {
    try {
        return objectMapper.writeValueAsString(value);
    } catch (JsonProcessingException e) {
        throw CommonError.jsonOperationError("Elasticsearch", errorContext, e);
    }
}
// Verifies the exact bulk "update ... doc_as_upsert" string produced for a row with a
// configured primary key: metadata line with _index/_id, newline, then the document line.
@Test public void testSerializeUpsert() { String index = "st_index"; String primaryKey = "id"; Map<String, Object> confMap = new HashMap<>(); confMap.put(SinkConfig.INDEX.key(), index); confMap.put(SinkConfig.PRIMARY_KEYS.key(), Arrays.asList(primaryKey)); ReadonlyConfig pluginConf = ReadonlyConfig.fromMap(confMap); ElasticsearchClusterInfo clusterInfo = ElasticsearchClusterInfo.builder().clusterVersion("8.0.0").build(); IndexInfo indexInfo = new IndexInfo(index, pluginConf); SeaTunnelRowType schema = new SeaTunnelRowType( new String[] {primaryKey, "name"}, new SeaTunnelDataType[] {STRING_TYPE, STRING_TYPE}); final ElasticsearchRowSerializer serializer = new ElasticsearchRowSerializer(clusterInfo, indexInfo, schema); String id = "0001"; String name = "jack"; SeaTunnelRow row = new SeaTunnelRow(new Object[] {id, name}); row.setRowKind(RowKind.UPDATE_AFTER); String expected = "{ \"update\" :{\"_index\":\"" + index + "\",\"_id\":\"" + id + "\"} }\n" + "{ \"doc\" :{\"name\":\"" + name + "\",\"id\":\"" + id + "\"}, \"doc_as_upsert\" : true }"; String upsertStr = serializer.serializeRow(row); Assertions.assertEquals(expected, upsertStr); }
/**
 * Returns the class the failed (de)serialization targeted; may be {@code null} when the
 * exception was constructed without one.
 *
 * @return the target class, or {@code null}
 */
public Class<?> getTargetClass() { return targetClass; }
// Verifies the no-arg constructor: error code is the deserialize error constant while
// message and target class stay null.
@Test void testEmptyConstructor() { NacosDeserializationException exception = new NacosDeserializationException(); assertEquals(Constants.Exception.DESERIALIZE_ERROR_CODE, exception.getErrCode()); assertNull(exception.getMessage()); assertNull(exception.getTargetClass()); }
/**
 * Generates a QR code for {@code content} into {@code targetFile}, choosing the output
 * format from the file extension: "svg" writes an SVG document, "txt" writes ASCII art,
 * anything else writes a raster image.
 *
 * @param content the text to encode
 * @param width desired width in pixels (or characters for text output)
 * @param height desired height in pixels (or characters for text output)
 * @param targetFile the destination file; its extension selects the format
 * @return {@code targetFile}, for chaining
 */
public static File generate(String content, int width, int height, File targetFile) {
    final String extName = FileUtil.extName(targetFile);
    if (extName.equals(QR_TYPE_SVG)) {
        // Vector output: render as an SVG document.
        final String svg = generateAsSvg(content, new QrConfig(width, height));
        FileUtil.writeString(svg, targetFile, StandardCharsets.UTF_8);
    } else if (extName.equals(QR_TYPE_TXT)) {
        // Plain-text output: render as ASCII art.
        final String txt = generateAsAsciiArt(content, new QrConfig(width, height));
        FileUtil.writeString(txt, targetFile, StandardCharsets.UTF_8);
    } else {
        // Default: write a raster image in the format implied by the extension.
        final BufferedImage image = generate(content, width, height);
        ImgUtil.write(image, targetFile);
    }
    return targetFile;
}
// Manual (disabled) check that a QR code can be written as ASCII art ("txt") directly to
// an output stream with custom fore/back colors; inspect the resulting file by hand.
@Test @Disabled public void generateToStreamTest() { final QrConfig qrConfig = QrConfig.create() .setForeColor(Color.BLUE) .setBackColor(new Color(0,200,255)) .setWidth(0) .setHeight(0).setMargin(1); final String filepath = "d:/test/qr_stream_to_txt.txt"; try (final BufferedOutputStream outputStream = FileUtil.getOutputStream(filepath)) { QrCodeUtil.generate("https://hutool.cn/", qrConfig,"txt", outputStream); }catch (final IOException e){ e.printStackTrace(); } //final BufferedReader reader = FileUtil.getReader(filepath, StandardCharsets.UTF_8); //reader.lines().forEach(Console::log); }
/**
 * Generates a new unique identifier; delegates to the FriendlyId generator.
 *
 * @return a freshly generated friendly id string
 */
public static String create() { return FriendlyId.createFriendlyId(); }
// Verifies that id creation always yields a non-null value.
@Test void create() { String id = IdUtils.create(); assertThat(id, notNullValue()); }
/**
 * Builds the column statistics for this map column: a {@code MapColumnStatistics}
 * carrying per-key statistics when key-stat collection is enabled and at least one entry
 * was recorded, otherwise a plain {@code ColumnStatistics}.
 *
 * @return the built statistics object
 */
@Override
public ColumnStatistics buildColumnStatistics() {
    if (collectKeyStats && hasEntries) {
        // Per-key statistics are only materialized when both conditions hold.
        return new MapColumnStatistics(
                nonNullValueCount, null, rawSize, storageSize, new MapStatistics(entries.build()));
    }
    return new ColumnStatistics(nonNullValueCount, null, rawSize, storageSize);
}
// Verifies that with no entries recorded the builder produces a plain ColumnStatistics
// (not a MapColumnStatistics): zero values and null map statistics.
@Test public void testAddEmptyMapStatistics() { MapColumnStatisticsBuilder builder = new MapColumnStatisticsBuilder(true); ColumnStatistics columnStatistics = builder.buildColumnStatistics(); assertEquals(columnStatistics.getClass(), ColumnStatistics.class); assertEquals(columnStatistics.getNumberOfValues(), 0); assertNull(columnStatistics.getMapStatistics()); }
/**
 * Builds a default, alive {@code DivideUpstream} for the given URL: localhost host,
 * http protocol, default weight, the standard warm-up time, and the current time as the
 * registration timestamp.
 *
 * @param upstreamUrl the upstream url to register
 * @return a fully populated alive upstream
 */
public static DivideUpstream buildDefaultAliveDivideUpstream(final String upstreamUrl) {
    final long registeredAt = System.currentTimeMillis();
    return DivideUpstream.builder()
            .upstreamHost(LOCALHOST)
            .protocol("http://")
            .upstreamUrl(upstreamUrl)
            .weight(DEFAULT_WEIGHT)
            .warmup(Constants.WARMUP_TIME)
            .timestamp(registeredAt)
            .build();
}
// Verifies that the built upstream carries the requested url and the default
// "localhost" host.
@Test public void buildDefaultDivideUpstreamWithPort() { DivideUpstream divideUpstream = CommonUpstreamUtils.buildDefaultAliveDivideUpstream(HOST); Assert.assertNotNull(divideUpstream); Assert.assertEquals(HOST, divideUpstream.getUpstreamUrl()); Assert.assertEquals(divideUpstream.getUpstreamHost(), "localhost"); }
/**
 * Deletes every comment row that belongs to the given board, using a named-parameter
 * bulk DELETE.
 *
 * @param boardId id of the board whose comments are removed
 */
public void deleteAllCommentsByBoardId(final Long boardId) {
    final MapSqlParameterSource params = new MapSqlParameterSource("boardId", boardId);
    namedParameterJdbcTemplate.update("DELETE FROM comment WHERE board_id = :boardId", params);
}
// Verifies that the bulk delete removes every comment attached to the given board id.
// (Method name is Korean: "deletes everything matching the board id".)
@Test void 게시글_id에_해당되는_것을_모두_지운다() { // given Comment savedComment = commentJpaRepository.save(댓글_생성()); // when commentJdbcRepository.deleteAllCommentsByBoardId(savedComment.getBoardId()); // then Optional<Comment> result = commentJpaRepository.findById(1L); assertThat(result).isEmpty(); }
/**
 * Parses an ACL definition file into an {@code AuthorizationsCollector}.
 *
 * <p>Falls back to an empty, immutable collector when the file reference is {@code null},
 * the file does not exist, or it cannot be read.
 *
 * @param file the ACL file to parse; may be {@code null}
 * @return the parsed authorizations, or an empty collector on any read problem
 * @throws ParseException if the file content is syntactically invalid
 */
public static AuthorizationsCollector parse(File file) throws ParseException {
    if (file == null) {
        LOG.warn("parsing NULL file, so fallback on default configuration!");
        return AuthorizationsCollector.emptyImmutableCollector();
    }
    if (!file.exists()) {
        LOG.warn(
            String.format(
                "parsing not existing file %s, so fallback on default configuration!",
                file.getAbsolutePath()));
        return AuthorizationsCollector.emptyImmutableCollector();
    }
    // try-with-resources: previously the reader was opened and never closed (leak).
    try (Reader reader = Files.newBufferedReader(file.toPath(), UTF_8)) {
        return parse(reader);
    } catch (IOException fex) {
        // The file exists but could not be read; the old message wrongly claimed
        // "not existing file" on this path.
        LOG.warn(
            String.format(
                "unable to read file %s, so fallback on default configuration!",
                file.getAbsolutePath()),
            fex);
        return AuthorizationsCollector.emptyImmutableCollector();
    }
}
// Verifies that a single "topic <name>" ACL line grants both read and write on that topic.
@Test public void testParseSingleLineACL() throws ParseException { Reader conf = new StringReader("topic /weather/italy/anemometer"); AuthorizationsCollector authorizations = ACLFileParser.parse(conf); // Verify assertTrue(authorizations.canRead(new Topic("/weather/italy/anemometer"), "", "")); assertTrue(authorizations.canWrite(new Topic("/weather/italy/anemometer"), "", "")); }
/**
 * Multimap equality assertion with descriptive failures. Fast-path succeeds on
 * {@code Multimap.equals}; otherwise it specializes the failure: a List- vs SetMultimap
 * type mismatch gets an explanatory message, a ListMultimap is re-checked entry-by-entry
 * in order, a SetMultimap entry-by-entry ignoring order, and anything else falls back to
 * plain Subject equality.
 */
@Override public final void isEqualTo(@Nullable Object other) { @SuppressWarnings("UndefinedEquals") // the contract of this method is to follow Multimap.equals boolean isEqual = Objects.equal(actual, other); if (isEqual) { return; } // Fail but with a more descriptive message: if ((actual instanceof ListMultimap && other instanceof SetMultimap) || (actual instanceof SetMultimap && other instanceof ListMultimap)) { String actualType = (actual instanceof ListMultimap) ? "ListMultimap" : "SetMultimap"; String otherType = (other instanceof ListMultimap) ? "ListMultimap" : "SetMultimap"; failWithoutActual( fact("expected", other), fact("an instance of", otherType), fact("but was", actualCustomStringRepresentationForPackageMembersToCall()), fact("an instance of", actualType), simpleFact( lenientFormat( "a %s cannot equal a %s if either is non-empty", actualType, otherType))); } else if (actual instanceof ListMultimap) { containsExactlyEntriesIn((Multimap<?, ?>) checkNotNull(other)).inOrder(); } else if (actual instanceof SetMultimap) { containsExactlyEntriesIn((Multimap<?, ?>) checkNotNull(other)); } else { super.isEqualTo(other); } }
// Verifies that two identically-built ListMultimaps compare equal both via equals() and
// via the Truth isEqualTo assertion.
@Test public void listMultimapIsEqualTo_passes() { ImmutableListMultimap<String, String> multimapA = ImmutableListMultimap.<String, String>builder() .putAll("kurt", "kluever", "russell", "cobain") .build(); ImmutableListMultimap<String, String> multimapB = ImmutableListMultimap.<String, String>builder() .putAll("kurt", "kluever", "russell", "cobain") .build(); assertThat(multimapA.equals(multimapB)).isTrue(); assertThat(multimapA).isEqualTo(multimapB); }
/**
 * Runs all registered migration steps in order, notifying the listener after each step.
 * Always stops the global profiler (success or failure wording) and publishes the total
 * duration, completed-step count and success flag to the telemetry providers, even when
 * a step throws.
 */
@Override
public void execute(List<RegisteredMigrationStep> steps, MigrationStatusListener listener) {
    final Profiler profiler = Profiler.create(LOGGER);
    profiler.startInfo(GLOBAL_START_MESSAGE, databaseMigrationState.getTotalMigrations());
    boolean success = false;
    try {
        for (RegisteredMigrationStep step : steps) {
            execute(step);
            listener.onMigrationStepCompleted();
        }
        success = true;
    } finally {
        final long totalTimeMs;
        if (success) {
            totalTimeMs = profiler.stopInfo(GLOBAL_END_MESSAGE,
                databaseMigrationState.getCompletedMigrations(), databaseMigrationState.getTotalMigrations(), "success");
        } else {
            totalTimeMs = profiler.stopError(GLOBAL_END_MESSAGE,
                databaseMigrationState.getCompletedMigrations(), databaseMigrationState.getTotalMigrations(), "failure");
        }
        telemetryDbMigrationTotalTimeProvider.setDbMigrationTotalTime(totalTimeMs);
        telemetryDbMigrationStepsProvider.setDbMigrationCompletedSteps(databaseMigrationState.getCompletedMigrations());
        telemetryDbMigrationSuccessProvider.setDbMigrationSuccess(success);
    }
}
// Verifies that a step throwing SQLException aborts execution with a
// MigrationStepExecutionException naming the failing step, that the following step is
// never run (only one completion callback), and that logs record the partial progress.
@Test void execute_throws_MigrationStepExecutionException_on_first_failing_step_execution_throws_SQLException() { migrationContainer.add(MigrationStep2.class, SqlExceptionFailingMigrationStep.class, MigrationStep3.class); List<RegisteredMigrationStep> steps = asList( registeredStepOf(1, MigrationStep2.class), registeredStepOf(2, SqlExceptionFailingMigrationStep.class), registeredStepOf(3, MigrationStep3.class)); ((SpringComponentContainer) migrationContainer).startComponents(); try { underTest.execute(steps, migrationStatusListener); fail("a MigrationStepExecutionException should have been thrown"); } catch (MigrationStepExecutionException e) { assertThat(e).hasMessage("Execution of migration step #2 '2-SqlExceptionFailingMigrationStep' failed"); assertThat(e).hasCause(SqlExceptionFailingMigrationStep.THROWN_EXCEPTION); } finally { assertThat(logTester.logs()).hasSize(6); assertLogLevel(Level.INFO, "Executing 5 DB migrations...", "3/5 #1 '1-MigrationStep2'...", "3/5 #1 '1-MigrationStep2': success | time=", "3/5 #2 '2-SqlExceptionFailingMigrationStep'..."); assertLogLevel(Level.ERROR, "3/5 #2 '2-SqlExceptionFailingMigrationStep': failure | time=", "Executed 2/5 DB migrations: failure | time="); } verify(migrationStatusListener, times(1)).onMigrationStepCompleted(); }
/**
 * Returns the component's description, or {@code null} when none was set.
 *
 * @return the description, possibly {@code null}
 */
@Override @CheckForNull public String getDescription() { return this.description; }
// Verifies that an over-long description is truncated to 2000 characters total,
// with the last three replaced by an "..." ellipsis.
@Test public void keep_2000_first_characters_of_description() { String veryLongString = repeat("a", 3_000); ComponentImpl underTest = buildSimpleComponent(FILE, "file") .setDescription(veryLongString) .build(); String expectedDescription = repeat("a", 2_000 - 3) + "..."; assertThat(underTest.getDescription()).isEqualTo(expectedDescription); }
/**
 * Looks up session-window rows for the given key on the given partition,
 * filtered by the window start/end bounds.
 *
 * @param key         the key to look up
 * @param partition   the state-store partition to query
 * @param windowStart bounds on the session window start time
 * @param windowEnd   bounds on the session window end time
 * @param position    NOTE(review): not used in this implementation — confirm intent
 * @throws MaterializationException wrapping any failure from the underlying store
 */
@Override
public KsMaterializedQueryResult<WindowedRow> get(
    final GenericKey key,
    final int partition,
    final Range<Instant> windowStart,
    final Range<Instant> windowEnd,
    final Optional<Position> position
) {
  try {
    final ReadOnlySessionStore<GenericKey, GenericRow> store = stateStore
        .store(QueryableStoreTypes.sessionStore(), partition);
    return KsMaterializedQueryResult.rowIterator(
        findSession(store, key, windowStart, windowEnd).iterator());
  } catch (final Exception e) {
    throw new MaterializationException("Failed to get value from materialized table", e);
  }
}
/**
 * Verifies that a session whose start time falls inside the window-start bounds
 * is returned, with the row carrying the session's end time as its timestamp.
 */
@Test
public void shouldReturnValueIfSessionStartsBetweenBounds() {
  // Given: a single session starting just inside the lower bound.
  final Instant wend = UPPER_INSTANT.plusMillis(5);
  givenSingleSession(LOWER_INSTANT.plusMillis(1), wend);
  // When:
  final Iterator<WindowedRow> rowIterator =
      table.get(A_KEY, PARTITION, WINDOW_START_BOUNDS, Range.all()).rowIterator;
  // Then:
  assertThat(rowIterator.next(), is(
      WindowedRow.of(
          SCHEMA,
          sessionKey(LOWER_INSTANT.plusMillis(1), wend),
          A_VALUE,
          wend.toEpochMilli()
      )
  ));
}
/**
 * Returns (creating and caching on first use) the proxy implementing the given
 * PipelineOptions sub-interface, backed by this invocation handler.
 *
 * Uses double-checked locking: the cache is consulted once without the lock,
 * then re-checked under {@code synchronized (this)} before building the proxy.
 *
 * @param iface the options interface to materialize; must be an interface
 * @return the cached or newly created proxy for {@code iface}
 */
<T extends PipelineOptions> T as(Class<T> iface) {
  checkNotNull(iface);
  checkArgument(iface.isInterface(), "Not an interface: %s", iface);
  T existingOption = computedProperties.interfaceToProxyCache.getInstance(iface);
  if (existingOption == null) {
    synchronized (this) {
      // double check
      existingOption = computedProperties.interfaceToProxyCache.getInstance(iface);
      if (existingOption == null) {
        // Validate the interface against the already-known interfaces before proxying.
        Registration<T> registration =
            PipelineOptionsFactory.CACHE
                .get()
                .validateWellFormed(iface, computedProperties.knownInterfaces);
        List<PropertyDescriptor> propertyDescriptors = registration.getPropertyDescriptors();
        Class<T> proxyClass = registration.getProxyClass();
        existingOption =
            InstanceBuilder.ofType(proxyClass)
                .fromClass(proxyClass)
                .withArg(InvocationHandler.class, this)
                .build();
        // Publish the new proxy into the immutable computed-properties snapshot.
        computedProperties =
            computedProperties.updated(iface, existingOption, propertyDescriptors);
      }
    }
  }
  return existingOption;
}
/**
 * Verifies that List, Map and Set option values survive a JSON
 * serialize/deserialize round trip unchanged.
 */
@Test
public void testJsonConversionForContainerTypes() throws Exception {
  List<String> list = ImmutableList.of("a", "b", "c");
  Map<String, String> map = ImmutableMap.of("d", "x", "e", "y", "f", "z");
  Set<String> set = ImmutableSet.of("g", "h", "i");
  ContainerTypes options = PipelineOptionsFactory.as(ContainerTypes.class);
  options.setList(list);
  options.setMap(map);
  options.setSet(set);
  // Round-trip through JSON and compare each container.
  ContainerTypes options2 = serializeDeserialize(ContainerTypes.class, options);
  assertEquals(list, options2.getList());
  assertEquals(map, options2.getMap());
  assertEquals(set, options2.getSet());
}
/**
 * Creates a fresh single-value SFM sketch state.
 */
@Override
public SfmSketchState createSingleState() {
  final SingleSfmSketchState freshState = new SingleSfmSketchState();
  return freshState;
}
/**
 * Verifies that a single state stores and returns the sketch set on it, and that
 * its estimated size grows by exactly the sketch's retained size.
 */
@Test
public void testCreateSingleStatePresent() {
  SfmSketchState state = factory.createSingleState();
  // Capture the size before any sketch is attached.
  long emptySize = state.getEstimatedSize();
  SfmSketch sketch = SfmSketch.create(16, 16);
  state.setSketch(sketch);
  assertEquals(sketch, state.getSketch());
  assertEquals(state.getEstimatedSize() - emptySize, sketch.getRetainedSizeInBytes());
}
/**
 * Builds runtime options from the {@code @CucumberOptions} annotations found on
 * the given class and its superclasses, walking up the hierarchy so that
 * subclass annotations are processed before (and can take precedence over)
 * superclass annotations. Defaults for feature path and glue are applied when
 * none were specified anywhere in the hierarchy.
 *
 * @param clazz the (possibly annotated) test class
 * @return the accumulated runtime options builder
 */
public RuntimeOptionsBuilder parse(Class<?> clazz) {
  RuntimeOptionsBuilder args = new RuntimeOptionsBuilder();
  // Walk the class hierarchy from the class itself up to (but excluding) the root.
  for (Class<?> classWithOptions = clazz; hasSuperClass(
      classWithOptions); classWithOptions = classWithOptions.getSuperclass()) {
    CucumberOptions options = requireNonNull(optionsProvider).getOptions(classWithOptions);
    if (options != null) {
      addDryRun(options, args);
      addMonochrome(options, args);
      addTags(classWithOptions, options, args);
      addPlugins(options, args);
      addPublish(options, args);
      addName(options, args);
      addSnippets(options, args);
      addGlue(options, args);
      addFeatures(options, args);
      addObjectFactory(options, args);
      addUuidGenerator(options, args);
    }
  }
  // Fallbacks derived from the class itself when nothing was configured.
  addDefaultFeaturePathIfNoFeaturePathIsSpecified(args, clazz);
  addDefaultGlueIfNoOverridingGlueIsSpecified(args, clazz);
  return args;
}
/**
 * Verifies that a subclass's monochrome setting is honored when parsing
 * annotations over a class hierarchy.
 */
@Test
void override_monochrome_flag_from_baseclass() {
  RuntimeOptions runtimeOptions = parser().parse(SubClassWithMonoChromeTrue.class).build();
  assertTrue(runtimeOptions.isMonochrome());
}
/**
 * Renders the given AST node as SQL text, stripping any trailing newlines.
 *
 * @param root the AST node to format
 * @return the formatted SQL without trailing line breaks
 */
public static String formatSql(final AstNode root) {
  final StringBuilder sql = new StringBuilder();
  new Formatter(sql).process(root, 0);
  return StringUtils.stripEnd(sql.toString(), "\n");
}
/**
 * Verifies that an explicit EMIT CHANGES clause on a persistent query survives
 * formatting and is printed on its own line.
 */
@Test
public void shouldSupportExplicitEmitChangesOnPersistentQuery() {
  // Given:
  final Statement statement = parseSingle("CREATE STREAM X AS SELECT ITEMID FROM ORDERS EMIT CHANGES;");
  // When:
  final String result = SqlFormatter.formatSql(statement);
  // Then:
  assertThat(result, is("CREATE STREAM X AS SELECT ITEMID\n"
      + "FROM ORDERS ORDERS\n"
      + "EMIT CHANGES"));
}
/**
 * Factory for a fresh {@link ServerInfoConfig}; a new instance is created on
 * every call.
 */
public static ServerInfoConfig load() {
  final ServerInfoConfig config = new ServerInfoConfig();
  return config;
}
/**
 * Verifies that load() returns a non-null configuration.
 * NOTE(review): the bare {@code assert} keyword is a no-op unless the JVM runs
 * with -ea; prefer the test framework's assertNotNull — confirm which JUnit
 * version this file imports before changing it.
 */
@Test
public void testLoad() {
  ServerInfoConfig serverInfoConfig = ServerInfoConfig.load();
  assert(serverInfoConfig != null);
}
/**
 * Decides whether the given statement can run as a scalable push query (push v2).
 *
 * Requires: push v2 enabled, a Query statement over a single (non-join) source,
 * no pull semantics, no GROUP BY / WINDOW / HAVING / PARTITION BY, an explicit
 * EMIT CHANGES refinement, "latest" offset semantics, exactly one upstream query
 * writing to the source, and no disallowed pseudo-columns.
 *
 * @param statement  the parsed statement to classify
 * @param ksqlEngine used to find upstream queries writing to the source
 * @param ksqlConfig server configuration (may enable push v2 / latest)
 * @param overrides  per-request overrides (may enable push v2 / latest)
 * @return true only when every condition above holds
 */
@SuppressWarnings({"BooleanExpressionComplexity", "CyclomaticComplexity"})
public static boolean isScalablePushQuery(
    final Statement statement,
    final KsqlExecutionContext ksqlEngine,
    final KsqlConfig ksqlConfig,
    final Map<String, Object> overrides
) {
  if (!isPushV2Enabled(ksqlConfig, overrides)) {
    return false;
  }
  if (! (statement instanceof Query)) {
    return false;
  }
  final Query query = (Query) statement;
  final SourceFinder sourceFinder = new SourceFinder();
  sourceFinder.process(query.getFrom(), null);
  // It will be present if it's not a join, which we don't handle
  if (!sourceFinder.getSourceName().isPresent()) {
    return false;
  }
  // Find all of the writers to this particular source.
  final SourceName sourceName = sourceFinder.getSourceName().get();
  final Set<QueryId> upstreamQueries = ksqlEngine.getQueriesWithSink(sourceName);
  // See if the config or override have set the stream to be "latest"
  final boolean isLatest = isLatest(ksqlConfig, overrides);
  // Cannot be a pull query, i.e. must be a push
  return !query.isPullQuery()
      // Group by is not supported
      && !query.getGroupBy().isPresent()
      // Windowing is not supported
      && !query.getWindow().isPresent()
      // Having clause is not supported
      && !query.getHaving().isPresent()
      // Partition by is not supported
      && !query.getPartitionBy().isPresent()
      // There must be an EMIT CHANGES clause
      && (query.getRefinement().isPresent()
          && query.getRefinement().get().getOutputRefinement() == OutputRefinement.CHANGES)
      // Must be reading from "latest"
      && isLatest
      // We only handle a single sink source at the moment from a CTAS/CSAS
      && upstreamQueries.size() == 1
      // ROWPARTITION and ROWOFFSET are not currently supported in SPQs
      && !containsDisallowedColumns(query);
}
/**
 * Verifies the happy path: a query satisfying all scalable-push conditions is
 * classified as a scalable push query.
 */
@Test
public void isScalablePushQuery_true_noLatest() {
  try(MockedStatic<ColumnExtractor> columnExtractor = mockStatic(ColumnExtractor.class)) {
    // When:
    expectIsSPQ(ColumnName.of("foo"), columnExtractor);
    // Then:
    assertThat(ScalablePushUtil.isScalablePushQuery(query, ksqlEngine, ksqlConfig, ImmutableMap.of()),
        equalTo(true));
  }
}
/**
 * Returns the cached return value at the given bar index.
 *
 * @param index the bar index
 * @return the pre-computed value for that index
 */
@Override
public Num getValue(int index) {
  final Num cached = values.get(index);
  return cached;
}
/**
 * Verifies arithmetic per-bar returns over a trading record that mixes buy and
 * sell trades: NaN before the first trade, zero outside positions, and the
 * price-ratio return while a position is open.
 */
@Test
public void returnsWithSellAndBuyTrades() {
  BarSeries sampleBarSeries = new MockBarSeries(numFunction, 2, 1, 3, 5, 6, 3, 20);
  TradingRecord tradingRecord = new BaseTradingRecord(
      Trade.buyAt(0, sampleBarSeries), Trade.sellAt(1, sampleBarSeries),
      Trade.buyAt(3, sampleBarSeries), Trade.sellAt(4, sampleBarSeries),
      Trade.sellAt(5, sampleBarSeries), Trade.buyAt(6, sampleBarSeries));
  Returns strategyReturns = new Returns(sampleBarSeries, tradingRecord, Returns.ReturnType.ARITHMETIC);
  // Index 0 has no prior bar to compute a return from.
  assertNumEquals(NaN.NaN, strategyReturns.getValue(0));
  assertNumEquals(-0.5, strategyReturns.getValue(1));
  assertNumEquals(0, strategyReturns.getValue(2));
  assertNumEquals(0, strategyReturns.getValue(3));
  assertNumEquals(1d / 5, strategyReturns.getValue(4));
  assertNumEquals(0, strategyReturns.getValue(5));
  assertNumEquals(1 - (20d / 3), strategyReturns.getValue(6));
}
/**
 * Adds a warning when the configured ZooKeeper replica count is risky:
 * exactly two replicas (no failure tolerance) or any other even count
 * (an odd ensemble size is recommended).
 *
 * @param warnings collector the warning conditions are appended to
 */
private void checkZooKeeperReplicas(List<Condition> warnings) {
  final int replicas = zk.getReplicas();
  if (replicas == 2) {
    warnings.add(StatusUtils.buildWarningCondition("ZooKeeperReplicas", "Running ZooKeeper with two nodes is not advisable as both replicas will be needed to avoid downtime. It is recommended that a minimum of three replicas are used."));
    return;
  }
  if (replicas % 2 == 0) {
    warnings.add(StatusUtils.buildWarningCondition("ZooKeeperReplicas", "Running ZooKeeper with an odd number of replicas is recommended."));
  }
}
/**
 * Verifies that a two-node ZooKeeper ensemble produces exactly one
 * "ZooKeeperReplicas" warning with the expected message.
 */
@Test
public void checkZookeeperReplicas() {
  Map<String, Object> kafkaOptions = new HashMap<>();
  kafkaOptions.put(KafkaConfiguration.DEFAULT_REPLICATION_FACTOR, 2);
  kafkaOptions.put(KafkaConfiguration.MIN_INSYNC_REPLICAS, 1);
  // Kafka cluster with 2 replicas triggers the two-node ZooKeeper warning.
  Kafka kafka = ResourceUtils.createKafka(NAMESPACE, NAME, 2, IMAGE, HEALTH_DELAY, HEALTH_TIMEOUT,
      null, kafkaOptions, emptyMap(), new EphemeralStorage(), new EphemeralStorage(), null, null, null, null);
  ZooKeeperSpecChecker checker = generateChecker(kafka);
  List<Condition> warnings = checker.run();
  assertThat(warnings, hasSize(1));
  Condition warning = warnings.get(0);
  assertThat(warning.getReason(), is("ZooKeeperReplicas"));
  assertThat(warning.getStatus(), is("True"));
  assertThat(warning.getMessage(), is("Running ZooKeeper with two nodes is not advisable as both replicas will be needed to avoid downtime. It is recommended that a minimum of three replicas are used."));
}
public static String generatePartitionMetadataTableName(String databaseId) { // There are 11 characters in the name format. // Maximum Spanner database ID length is 30 characters. // UUID always generates a String with 36 characters. // Since the Postgres table name length is 63, we may need to truncate the table name depending // on the database length. String fullString = String.format(PARTITION_METADATA_TABLE_NAME_FORMAT, databaseId, UUID.randomUUID()) .replaceAll("-", "_"); if (fullString.length() < MAX_TABLE_NAME_LENGTH) { return fullString; } return fullString.substring(0, MAX_TABLE_NAME_LENGTH); }
/**
 * Verifies that hyphens from the database id (and the UUID) never appear in the
 * generated table name.
 */
@Test
public void testGenerateMetadataTableNameRemovesHyphens() {
  final String tableName = NameGenerator.generatePartitionMetadataTableName("my-database-id-12345");
  assertFalse(tableName.contains("-"));
}
/**
 * Reads a 4-byte binary float value from the payload.
 *
 * @param payload              the packet payload to read from
 * @param parameterValueLength unused; float values are a fixed 4 bytes
 * @return the decoded float (boxed)
 */
@Override
public Object read(final PostgreSQLPacketPayload payload, final int parameterValueLength) {
  final float decoded = payload.getByteBuf().readFloat();
  return decoded;
}
/**
 * Verifies that a binary float protocol value is read as a boxed 1.0F from the
 * underlying buffer.
 */
@Test
void assertRead() {
  when(byteBuf.readFloat()).thenReturn(1F);
  assertThat(new PostgreSQLFloatBinaryProtocolValue().read(new PostgreSQLPacketPayload(byteBuf, StandardCharsets.UTF_8), 4), is(1.0F));
}
/**
 * Resolves the bind address for a channel, reserving explicitly requested ports
 * and, when the OS wildcard port (0) is requested and this manager is not in
 * OS-wildcard mode, substituting a port allocated from the managed range for
 * subscriptions or publications with an explicit control address.
 *
 * @param udpChannel  the channel being bound
 * @param bindAddress the requested address (port 0 means "pick one")
 * @return the address to actually bind, possibly with an allocated port
 * @throws BindException if a managed port cannot be allocated
 */
public InetSocketAddress getManagedPort(
    final UdpChannel udpChannel,
    final InetSocketAddress bindAddress) throws BindException {
  InetSocketAddress address = bindAddress;
  if (bindAddress.getPort() != 0) {
    // Explicit port: record it so the managed range will not hand it out.
    portSet.add(bindAddress.getPort());
  } else if (!isOsWildcard) {
    // do not map if not a subscription and does not have a control address. We want to use an ephemeral port
    // for the control channel on publications.
    if (!isSender || udpChannel.hasExplicitControl()) {
      address = new InetSocketAddress(bindAddress.getAddress(), allocateOpenPort());
    }
  }
  return address;
}
/**
 * Verifies that with an empty port range the manager leaves a wildcard
 * (port 0) bind address untouched.
 */
@Test
void shouldPassThrough0WithNullRanges() throws BindException {
  final InetSocketAddress bindAddress = new InetSocketAddress("localhost", 0);
  final WildcardPortManager manager = new WildcardPortManager(WildcardPortManager.EMPTY_PORT_RANGE, false);
  assertThat(manager.getManagedPort(
      udpChannelPort0, bindAddress), is(new InetSocketAddress("localhost", 0)));
}
/**
 * Resolves the data type for a config id, consulting the configured data-type
 * setting first and falling back to the default type.
 *
 * @param dataId the config data id (typically a file name)
 * @return the resolved data type
 */
public static String resolverConfigDataType(String dataId) {
  final String configuredDataType = FILE_CONFIG.getConfig(getDataTypeKey());
  return resolverConfigDataType(configuredDataType, dataId, DEFAULT_DATA_TYPE);
}
/**
 * Verifies data-type resolution: an explicit type wins; otherwise the type is
 * derived from the file extension; unknown or missing extensions fall back to
 * the default ("properties").
 */
@Test
void resolverConfigDataType() {
  String dataType;
  // Explicit type overrides the file extension.
  dataType = ConfigProcessor.resolverConfigDataType("yaml", "a.yaml", "properties");
  Assertions.assertEquals(dataType, "yaml");
  // No explicit type: derive from the extension.
  dataType = ConfigProcessor.resolverConfigDataType("", "a.yaml", "properties");
  Assertions.assertEquals(dataType, "yaml");
  // Unrecognized extension falls back to the supplied default.
  dataType = ConfigProcessor.resolverConfigDataType("", "a.txt", "properties");
  Assertions.assertEquals(dataType, "properties");
  // No extension at all also falls back to the default.
  dataType = ConfigProcessor.resolverConfigDataType("", "a", "properties");
  Assertions.assertEquals(dataType, "properties");
  // Single-argument overload: extension-driven with built-in default.
  dataType = ConfigProcessor.resolverConfigDataType("a.yaml");
  Assertions.assertEquals(dataType, "yaml");
  dataType = ConfigProcessor.resolverConfigDataType("a.properties");
  Assertions.assertEquals(dataType, "properties");
  dataType = ConfigProcessor.resolverConfigDataType("a.txt");
  Assertions.assertEquals(dataType, "properties");
  dataType = ConfigProcessor.resolverConfigDataType("a");
  Assertions.assertEquals(dataType, "properties");
}
/**
 * Executes a batch of load sub-tasks: metadata sub-tasks are loaded inline,
 * data sub-tasks are submitted asynchronously. Data sub-tasks are skipped when
 * empty or (if {@code skipIfExists}) when all pages are already cached; a
 * rejected submission is recorded as a retryable RESOURCE_EXHAUSTED failure.
 * The returned future completes when all submitted loads finish, carrying the
 * collected failures and skip counters.
 *
 * @param subTasks     the metadata/data sub-tasks to run
 * @param skipIfExists skip data already fully present in the page cache
 * @param options      UFS read options for the data loads
 * @return future resolving to the aggregate response (status is a placeholder)
 */
@Override
public ListenableFuture<LoadFileResponse> load(List<LoadSubTask> subTasks, boolean skipIfExists,
    UfsReadOptions options) throws AccessControlException, IOException {
  List<ListenableFuture<Void>> futures = new ArrayList<>();
  // Shared, synchronized collectors mutated by the async load callbacks.
  List<LoadFailure> errors = Collections.synchronizedList(new ArrayList<>());
  AtomicInteger numSkipped = new AtomicInteger();
  AtomicLong skippedLength = new AtomicLong();
  for (LoadSubTask task : subTasks) {
    if (task.hasLoadMetadataSubtask()) {
      UfsStatus status = UfsStatus.fromProto(task.getLoadMetadataSubtask().getUfsStatus());
      loadMetadata(status, errors);
    }
    if (task.hasLoadDataSubtask()) {
      LoadDataSubTask subtask = task.getLoadDataSubtask();
      if (subtask.getLength() <= 0) {
        continue;
      }
      // Only count as skipped when the caller asked to skip and every page is cached.
      boolean countAsSkipped = skipIfExists && isAllPageCached(subtask.getUfsStatus(),
          subtask.getOffsetInFile(), subtask.getLength());
      if (countAsSkipped) {
        numSkipped.incrementAndGet();
        skippedLength.addAndGet(subtask.getLength());
        continue;
      }
      try {
        ListenableFuture<Void> loadFuture = submitLoadDataSubTask(subtask, options, errors);
        futures.add(loadFuture);
      } catch (RejectedExecutionException ex) {
        // Executor saturated: report a retryable failure instead of propagating.
        LOG.warn("Load task overloaded.");
        errors.add(LoadFailure.newBuilder().setSubtask(
                LoadSubTask.newBuilder().setLoadDataSubtask(subtask).build())
            .setCode(Status.RESOURCE_EXHAUSTED.getCode().value())
            .setRetryable(true).setMessage(ex.getMessage()).build());
      }
    }
  }
  return Futures.whenAllComplete(futures).call(
      () -> LoadFileResponse.newBuilder().addAllFailures(errors)
          .setBytesSkipped(skippedLength.get()).setNumSkipped(numSkipped.get())
          // Status is a required field, put it as a placeholder
          .setStatus(TaskStatus.SUCCESS).build(),
      GrpcExecutors.READER_EXECUTOR);
}
/**
 * Verifies that loading a UFS file caches every page with the expected content
 * and registers the file in the worker's metadata store.
 */
@Test
public void testLoad() throws Exception {
  int numPages = 10;
  long length = mPageSize * numPages;
  String ufsPath = mTestFolder.newFile("test").getAbsolutePath();
  // Write a deterministic increasing-byte file to load from.
  byte[] buffer = BufferUtils.getIncreasingByteArray((int) length);
  BufferUtils.writeBufferToFile(ufsPath, buffer);
  loadFileData(ufsPath);
  List<PageId> cachedPages =
      mCacheManager.getCachedPageIdsByFileId(new AlluxioURI(ufsPath).hash(), length);
  assertEquals(numPages, cachedPages.size());
  // Each cached page must hold the matching slice of the source data.
  int start = 0;
  for (PageId pageId : cachedPages) {
    byte[] buff = new byte[(int) mPageSize];
    mCacheManager.get(pageId, (int) mPageSize, buff, 0);
    assertTrue(BufferUtils.equalIncreasingByteArray(start, (int) mPageSize, buff));
    start += mPageSize;
  }
  assertTrue(mWorker.getMetaManager().getFromMetaStore(ufsPath).isPresent());
}
/**
 * Casts a boxed integer to a decimal with the requested precision and scale,
 * delegating to the long overload; a null input yields null.
 *
 * @param value     the value to cast, possibly null
 * @param precision the target decimal precision
 * @param scale     the target decimal scale
 * @return the decimal representation, or null when {@code value} is null
 */
public static BigDecimal cast(final Integer value, final int precision, final int scale) {
  return value == null ? null : cast(value.longValue(), precision, scale);
}
/**
 * Verifies that casting a string to a narrower scale rounds half-up
 * ("1.19" at scale 1 becomes "1.2").
 */
@Test
public void shouldCastStringRoundUp() {
  // When:
  final BigDecimal decimal = DecimalUtil.cast("1.19", 2, 1);
  // Then:
  assertThat(decimal, is(new BigDecimal("1.2")));
}
/**
 * Empties the query string of dashboard searches that still have one, then
 * records the migration as completed with the number of modified documents.
 * Idempotent: exits early when a completion marker already exists.
 */
@Override
public void upgrade() {
  if (clusterConfigService.get(MigrationCompleted.class) != null) {
    LOG.debug("Migration already completed.");
    return;
  }
  final UpdateResult updateResult = searchesCollection
      .updateMany(
          and(
              isDashboard(),
              atLeastOneQueryHasNonEmptyQueryString()
          ),
          makeQueryStringEmpty(),
          forNonEmptyQueryStrings()
      );
  // Persist the marker so subsequent runs are no-ops.
  writeMigrationCompleted(updateResult.getModifiedCount());
}
/**
 * Verifies that the migration records a completion marker after a successful
 * run, with a zero modified-views count when nothing matched.
 */
@Test
public void writesMigrationCompletedAfterSuccess() {
  this.migration.upgrade();
  final MigrationCompleted migrationCompleted = captureMigrationCompleted();
  assertThat(migrationCompleted.modifiedViewsCount()).isZero();
}
/**
 * Checks whether the given activity is the one launched by the package's
 * launch intent (i.e. the app's main/launcher activity).
 *
 * Fix: {@code getLaunchIntentForPackage()} returns null when the package has
 * no launcher activity; the previous code dereferenced it unconditionally and
 * could throw a NullPointerException. In that case (or when the intent has no
 * component) this activity cannot be the launch activity, so return false.
 *
 * @param activity the activity to check
 * @return true when the activity's class matches the launch intent's component
 */
public boolean isLaunchIntentsActivity(Activity activity) {
  final Intent launchIntent = activity.getPackageManager().getLaunchIntentForPackage(activity.getPackageName());
  if (launchIntent == null || launchIntent.getComponent() == null) {
    return false;
  }
  final String activityName = activity.getComponentName().getClassName();
  return activityName.equals(launchIntent.getComponent().getClassName());
}
/**
 * Verifies that an activity other than the launcher activity is not reported
 * as the launch-intents activity.
 */
@Test
public void isLaunchIntentsActivity_activityIsNotMainActivity_returnFalse() throws Exception {
  Activity activity = getActivityMock("other.activity");
  final AppLaunchHelper uut = getUUT();
  boolean result = uut.isLaunchIntentsActivity(activity);
  assertFalse(result);
}
/**
 * Closes the underlying channel, doing nothing if it is already closed.
 */
@Override
public void close() {
  if (!ch.isOpen()) {
    return; // nothing to do — channel already closed
  }
  ch.close();
}
/**
 * Verifies AAAA-record resolution with an IPv6-only resolver; the resolver is
 * always closed to release its channel.
 */
@Test
public void testResolveAAAA() throws Exception {
  DnsNameResolver resolver = newResolver(ResolvedAddressTypes.IPV6_ONLY).build();
  try {
    testResolve0(resolver, EXCLUSIONS_RESOLVE_AAAA, null);
  } finally {
    resolver.close();
  }
}
/**
 * Returns the completed value after passing it through {@code resolve}
 * (presumably deserialization — confirm against the enclosing class).
 *
 * @throws InterruptedException if the wait is interrupted
 * @throws ExecutionException   if the computation failed
 */
@Override
public V get() throws InterruptedException, ExecutionException {
  return resolve(super.get());
}
/**
 * Verifies that a timed get() on an already-completed future returns the
 * original value immediately.
 */
@Test
public void test_get_Object_withTimeout() throws Exception {
  Object value = "value";
  DeserializingCompletableFuture<Object> future = new DeserializingCompletableFuture<>(serializationService, deserialize);
  future.complete(value);
  assertEquals(value, future.get(1, TimeUnit.MILLISECONDS));
}
/**
 * Reports session activity: builds a subscription-info snapshot from the live
 * session (falling back to unsubscribed flags and the supplied session info
 * when the session is gone) and forwards it, relaying the outcome to the
 * callback.
 *
 * @param sessionId          the session being reported
 * @param currentSessionInfo fallback session info when the session no longer exists
 * @param timeToReport       the last-activity timestamp to report
 * @param callback           notified of success (with the reported time) or failure
 */
@Override
protected void reportActivity(UUID sessionId, TransportProtos.SessionInfoProto currentSessionInfo,
    long timeToReport, ActivityReportCallback<UUID> callback) {
  log.debug("Reporting activity state for session with id: [{}]. Time to report: [{}].", sessionId, timeToReport);
  SessionMetaData session = sessions.get(sessionId);
  // A missing session reports both subscription flags as false.
  TransportProtos.SubscriptionInfoProto subscriptionInfo = TransportProtos.SubscriptionInfoProto.newBuilder()
      .setAttributeSubscription(session != null && session.isSubscribedToAttributes())
      .setRpcSubscription(session != null && session.isSubscribedToRPC())
      .setLastActivityTime(timeToReport)
      .build();
  // Prefer the session's own info; fall back to the caller-supplied snapshot.
  TransportProtos.SessionInfoProto sessionInfo = session != null ? session.getSessionInfo() : currentSessionInfo;
  process(sessionInfo, subscriptionInfo, new TransportServiceCallback<>() {
    @Override
    public void onSuccess(Void msgAcknowledged) {
      callback.onSuccess(sessionId, timeToReport);
    }

    @Override
    public void onError(Throwable e) {
      callback.onFailure(sessionId, e);
    }
  });
}
/**
 * Verifies that when the session exists, reportActivity uses the session's own
 * SessionInfoProto and subscription flags, and that the internal callback
 * relays both success and failure to the caller's callback.
 */
@Test
void givenKeyAndTimeToReportAndSessionExists_whenReportingActivity_thenShouldReportActivityWithSubscriptionsAndSessionInfoFromSession() {
  // GIVEN
  long expectedTime = 123L;
  boolean expectedAttributesSubscription = true;
  boolean expectedRPCSubscription = true;
  TransportProtos.SessionInfoProto expectedSessionInfo = TransportProtos.SessionInfoProto.getDefaultInstance();
  SessionMsgListener listenerMock = mock(SessionMsgListener.class);
  SessionMetaData session = new SessionMetaData(expectedSessionInfo, TransportProtos.SessionType.ASYNC, listenerMock);
  session.setSubscribedToAttributes(expectedAttributesSubscription);
  session.setSubscribedToRPC(expectedRPCSubscription);
  sessions.put(SESSION_ID, session);
  ActivityReportCallback<UUID> callbackMock = mock(ActivityReportCallback.class);
  // The caller-supplied session info differs from the stored session's info.
  TransportProtos.SessionInfoProto sessionInfo = TransportProtos.SessionInfoProto.newBuilder()
      .setSessionIdMSB(SESSION_ID.getMostSignificantBits())
      .setSessionIdLSB(SESSION_ID.getLeastSignificantBits())
      .build();
  doCallRealMethod().when(transportServiceMock).reportActivity(SESSION_ID, sessionInfo, expectedTime, callbackMock);
  // WHEN
  transportServiceMock.reportActivity(SESSION_ID, sessionInfo, expectedTime, callbackMock);
  // THEN
  ArgumentCaptor<TransportProtos.SessionInfoProto> sessionInfoCaptor = ArgumentCaptor.forClass(TransportProtos.SessionInfoProto.class);
  ArgumentCaptor<TransportProtos.SubscriptionInfoProto> subscriptionInfoCaptor = ArgumentCaptor.forClass(TransportProtos.SubscriptionInfoProto.class);
  ArgumentCaptor<TransportServiceCallback<Void>> callbackCaptor = ArgumentCaptor.forClass(TransportServiceCallback.class);
  verify(transportServiceMock).process(sessionInfoCaptor.capture(), subscriptionInfoCaptor.capture(), callbackCaptor.capture());
  // The session's stored info (not the caller's) must be forwarded.
  assertThat(sessionInfoCaptor.getValue()).isEqualTo(expectedSessionInfo);
  TransportProtos.SubscriptionInfoProto expectedSubscriptionInfo = TransportProtos.SubscriptionInfoProto.newBuilder()
      .setAttributeSubscription(expectedAttributesSubscription)
      .setRpcSubscription(expectedRPCSubscription)
      .setLastActivityTime(expectedTime)
      .build();
  assertThat(subscriptionInfoCaptor.getValue()).isEqualTo(expectedSubscriptionInfo);
  // The internal callback must relay both outcomes to the caller's callback.
  TransportServiceCallback<Void> queueCallback = callbackCaptor.getValue();
  queueCallback.onSuccess(null);
  verify(callbackMock).onSuccess(SESSION_ID, expectedTime);
  var throwable = new Throwable();
  queueCallback.onError(throwable);
  verify(callbackMock).onFailure(SESSION_ID, throwable);
}
/**
 * Asserts the subject contains exactly the given elements.
 *
 * A null varargs array (i.e. {@code containsExactly((Object[]) null)}) is
 * treated as a single null element. The boolean passed on signals the
 * single-Iterable-argument case so the implementation can warn about the
 * likely misuse of passing one Iterable instead of its elements.
 */
@CanIgnoreReturnValue
public final Ordered containsExactly(@Nullable Object @Nullable ... varargs) {
  List<@Nullable Object> expected =
      (varargs == null) ? newArrayList((@Nullable Object) null) : asList(varargs);
  return containsExactlyElementsIn(
      expected, varargs != null && varargs.length == 1 && varargs[0] instanceof Iterable);
}
/**
 * Verifies that containsExactly handles a null element and that inOrder()
 * passes when the order matches.
 */
@Test
public void iterableContainsExactlyInOrderWithNull() {
  assertThat(asList(3, null, 5)).containsExactly(3, null, 5).inOrder();
}
/**
 * Returns the registered OmemoService singleton.
 *
 * @return the registered service instance
 * @throws IllegalStateException if no service has been registered yet
 */
public static OmemoService<?, ?, ?, ?, ?, ?, ?, ?, ?> getInstance() {
  final OmemoService<?, ?, ?, ?, ?, ?, ?, ?, ?> registered = INSTANCE;
  if (registered == null) {
    throw new IllegalStateException("No OmemoService registered");
  }
  return registered;
}
/**
 * Verifies that getInstance() throws IllegalStateException when no service
 * has been registered.
 */
@Test(expected = IllegalStateException.class)
public void getInstanceFailsWhenNullTest() {
  OmemoService.getInstance();
}
/**
 * Reads up to {@code b.length} bytes into the given buffer by delegating to
 * the offset/length overload.
 *
 * @param b destination buffer
 * @return the number of bytes read, or -1 at end of stream
 * @throws IOException if the underlying read fails
 */
@Override
public int read(final byte[] b) throws IOException {
  final int requested = b.length;
  return this.read(b, 0, requested);
}
/**
 * Verifies the Triple-Crypt encrypt/decrypt round trip for the edge case of
 * zero-length content: encrypting an empty stream and decrypting it back
 * yields an empty byte array.
 */
@Test
public void testEncryptDecryptZeroBytes() throws Exception {
  // Zero-byte plaintext — the edge case under test.
  final byte[] content = RandomUtils.nextBytes(0);
  final ByteArrayInputStream plain = new ByteArrayInputStream(content);
  final PlainFileKey key = Crypto.generateFileKey(PlainFileKey.Version.AES256GCM);
  final SDSSession session = new SDSSession(new Host(new TestProtocol()), new DisabledX509TrustManager(), new DefaultX509KeyManager()) {
    @Override
    public SDSApiClient getClient() {
      return new SDSApiClient(new MockHttpClient());
    }
  };
  final TransferStatus status = new TransferStatus();
  // Serialize the file key into the transfer status, as the upload path would.
  final ObjectWriter writer = session.getClient().getJSON().getContext(null).writerFor(FileKey.class);
  final ByteArrayOutputStream out = new ByteArrayOutputStream();
  writer.writeValue(out, TripleCryptConverter.toSwaggerFileKey(key));
  status.setFilekey(ByteBuffer.wrap(out.toByteArray()));
  // Encrypt the (empty) plaintext.
  final TripleCryptEncryptingInputStream encryptInputStream = new TripleCryptEncryptingInputStream(session,
      plain, Crypto.createFileEncryptionCipher(key), status);
  final ByteArrayOutputStream os = new ByteArrayOutputStream();
  IOUtils.copy(encryptInputStream, os, 42);
  encryptInputStream.close();
  out.close();
  // Decrypt with the key (and tag) recovered from the transfer status.
  final ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray());
  final ObjectReader reader = session.getClient().getJSON().getContext(null).readerFor(FileKey.class);
  final FileKey fileKey = reader.readValue(status.getFilekey().array());
  final TripleCryptDecryptingInputStream cryptInputStream = new TripleCryptDecryptingInputStream(is,
      Crypto.createFileDecryptionCipher(TripleCryptConverter.toCryptoPlainFileKey(fileKey)),
      TripleCryptConverter.base64StringToByteArray(fileKey.getTag()));
  final byte[] compare = new byte[content.length];
  IOUtils.read(cryptInputStream, compare);
  assertArrayEquals(content, compare);
}
/**
 * Rewrites the given AST node by delegating to the configured rewriter.
 *
 * @param node    the node to rewrite
 * @param context rewrite context passed through to the rewriter
 * @return the rewritten node
 */
public AstNode rewrite(final AstNode node, final C context) {
  final AstNode rewritten = rewriter.process(node, context);
  return rewritten;
}
/**
 * Verifies that rewriting a CREATE STREAM AS SELECT produces a new CSAS whose
 * query has been passed through the query rewriter while location, name and
 * properties are preserved.
 */
@Test
public void shouldRewriteCSASWithPartitionBy() {
  final CreateStreamAsSelect csas = new CreateStreamAsSelect(
      location,
      sourceName,
      query,
      false,
      false,
      csasProperties
  );
  when(mockRewriter.apply(query, context)).thenReturn(rewrittenQuery);
  when(expressionRewriter.apply(expression, context)).thenReturn(rewrittenExpression);
  final AstNode rewritten = rewriter.rewrite(csas, context);
  assertThat(
      rewritten,
      equalTo(
          new CreateStreamAsSelect(
              location,
              sourceName,
              rewrittenQuery,
              false,
              false,
              csasProperties
          )
      )
  );
}
@SuppressWarnings("ResultOfMethodCallIgnored") public static boolean tryShutdownExecutorElegantly(ExecutorService executor, Duration timeout) { try { executor.shutdown(); executor.awaitTermination(timeout.toMillis(), TimeUnit.MILLISECONDS); } catch (InterruptedException ie) { // Let it go. } if (!executor.isTerminated()) { shutdownExecutorForcefully(executor, Duration.ZERO, false); } return executor.isTerminated(); }
/**
 * Verifies that when the graceful wait is interrupted, the elegant-shutdown
 * helper falls back to exactly one forceful shutdown attempt and reports
 * non-termination.
 */
@Test
void testTryShutdownExecutorElegantlyInterruptedWithForcefulShutdown() {
  MockExecutorService executor = new MockExecutorService(5);
  // Interrupt immediately, before any forceful shutdown has happened.
  executor.interruptAfterNumForcefulShutdown(0);
  assertThat(ComponentClosingUtils.tryShutdownExecutorElegantly(executor, Duration.ofDays(1)))
      .isFalse();
  assertThat(executor.forcefullyShutdownCount).isOne();
}
/**
 * Returns the current write/read position within the buffer.
 */
@Override
public final int position() {
  final int currentPosition = pos;
  return currentPosition;
}
/**
 * Verifies that setting the position past the end of the buffer is rejected
 * with IllegalArgumentException.
 */
@Test(expected = IllegalArgumentException.class)
public void testPositionNewPos_highPos() {
  out.position(out.buffer.length + 1);
}
/**
 * Returns how many refreshNodesResources calls have failed so far.
 */
public int getRefreshNodesResourcesFailedRetrieved() {
  final int failedCount = numRefreshNodesResourcesFailedRetrieved.value();
  return failedCount;
}
/**
 * Verifies that a failing refreshNodesResources call increments the failure
 * counter by exactly one.
 */
@Test
public void testRefreshNodesResourcesRetrievedFailed() {
  long totalBadBefore = metrics.getRefreshNodesResourcesFailedRetrieved();
  badSubCluster.getRefreshNodesResourcesFailed();
  Assert.assertEquals(totalBadBefore + 1, metrics.getRefreshNodesResourcesFailedRetrieved());
}
/**
 * Writes one delimited protobuf closed-issue message per UUID to the stream.
 *
 * @param closedIssuesUuids the closed-issue UUIDs to serialize
 * @param outputStream      destination for the delimited messages
 * @throws IOException if writing to the stream fails
 */
public void appendClosedIssuesUuidsToResponse(List<String> closedIssuesUuids, OutputStream outputStream) throws IOException {
  for (String closedIssueUuid : closedIssuesUuids) {
    protobufObjectGenerator.generateClosedIssueMessage(closedIssueUuid).writeDelimitedTo(outputStream);
  }
}
/**
 * Verifies that serializing closed-issue UUIDs actually writes bytes to the
 * supplied output stream.
 */
@Test
public void appendClosedIssuesToResponse_outputStreamIsCalledAtLeastOnce() throws IOException {
  OutputStream outputStream = mock(OutputStream.class);
  underTest.appendClosedIssuesUuidsToResponse(List.of("uuid", "uuid2"), outputStream);
  verify(outputStream, atLeastOnce()).write(any(byte[].class), anyInt(), anyInt());
}
/**
 * Counts the elements satisfying the predicate using the default fork size and
 * the shared fork-join pool.
 *
 * @param iterable  the elements to examine
 * @param predicate the condition to count matches of
 * @return the number of matching elements
 */
public static <T> int count(Iterable<T> iterable, Predicate<? super T> predicate) {
  final int defaultForkSize = FJIterate.DEFAULT_MIN_FORK_SIZE;
  return count(iterable, predicate, defaultForkSize, FJIterate.FORK_JOIN_POOL);
}
/**
 * Runs the shared count assertion against every iterable fixture.
 */
@Test
public void count() {
  this.iterables.each(this::basicCount);
}
/**
 * Reads a Confluent-framed Avro schema reference from the stream: a zero
 * magic byte followed by a 4-byte schema id, which is resolved against the
 * schema registry.
 *
 * @param in the stream positioned at the Confluent wire-format header
 * @return the schema registered under the id read from the stream
 * @throws IOException if the magic byte is wrong or the id is unknown
 */
@Override
public Schema readSchema(InputStream in) throws IOException {
  final DataInputStream input = new DataInputStream(in);
  final byte magicByte = input.readByte();
  if (magicByte != 0) {
    throw new IOException("Unknown data format. Magic number does not match");
  }
  final int id = input.readInt();
  try {
    return schemaRegistryClient.getById(id);
  } catch (RestClientException e) {
    throw new IOException(format("Could not find schema with id %s in registry", id), e);
  }
}
/**
 * Verifies that a payload with a wrong magic byte (here 5 instead of 0) is
 * rejected with an IOException even when the schema id itself is valid.
 */
@Test
void testMagicByteVerification() throws Exception {
  MockSchemaRegistryClient client = new MockSchemaRegistryClient();
  int schemaId = client.register("testTopic", Schema.create(Schema.Type.BOOLEAN));
  ConfluentSchemaRegistryCoder coder = new ConfluentSchemaRegistryCoder(client);
  ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
  DataOutputStream dataOutputStream = new DataOutputStream(byteOutStream);
  // Deliberately wrong magic byte (must be 0).
  dataOutputStream.writeByte(5);
  dataOutputStream.writeInt(schemaId);
  dataOutputStream.flush();
  try (ByteArrayInputStream byteInStream = new ByteArrayInputStream(byteOutStream.toByteArray())) {
    assertThatThrownBy(() -> coder.readSchema(byteInStream))
        .isInstanceOf(IOException.class);
  }
}
/**
 * Returns the live map of producer futures keyed by producer id.
 * Note: this exposes the internal map directly; mutations by callers affect
 * this instance's state.
 */
public ConcurrentLongHashMap<CompletableFuture<Producer>> getProducers() {
  final ConcurrentLongHashMap<CompletableFuture<Producer>> currentProducers = this.producers;
  return currentProducers;
}
/**
 * Verifies that an encrypted producer can connect to a topic whose namespace
 * policy requires encryption, and that the broker registers exactly one
 * producer on the topic.
 */
@Test(timeOut = 30000)
public void testProducerSuccessOnEncryptionRequiredTopic() throws Exception {
  resetChannel();
  setChannelConnected();
  // Set encryption_required to true
  Policies policies = mock(Policies.class);
  policies.encryption_required = true;
  policies.topicDispatchRate = new HashMap<>();
  policies.clusterSubscribeRate = new HashMap<>();
  // add `clusterDispatchRate` otherwise there will be a NPE
  // `org.apache.pulsar.broker.service.persistent.DispatchRateLimiter.getPoliciesDispatchRate`
  policies.clusterDispatchRate = new HashMap<>();
  // add `clusterDispatchRate` otherwise there will be a NPE
  // `org.apache.pulsar.broker.service.AbstractTopic.updateNamespaceSubscriptionDispatchRate`
  policies.subscriptionDispatchRate = new HashMap<>();
  // add `clusterDispatchRate` otherwise there will be a NPE
  // `org.apache.pulsar.broker.service.AbstractTopic.updateNamespaceReplicatorDispatchRate`
  policies.replicatorDispatchRate = new HashMap<>();
  pulsarTestContext.getPulsarResources().getNamespaceResources()
      .createPolicies(TopicName.get(encryptionRequiredTopicName).getNamespaceObject(), policies);
  // test success case: encrypted producer can connect
  ByteBuf clientCommand = Commands.newProducer(encryptionRequiredTopicName, 1 /* producer id */, 1 /* request id */,
      "encrypted-producer", true, Collections.emptyMap(), false);
  channel.writeInbound(clientCommand);
  Object response = getResponse();
  assertEquals(response.getClass(), CommandProducerSuccess.class);
  PersistentTopic topicRef = (PersistentTopic) brokerService.getTopicReference(encryptionRequiredTopicName).get();
  assertNotNull(topicRef);
  assertEquals(topicRef.getProducers().size(), 1);
  channel.finish();
}