focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Rewrites the given predicate via the singleton visitor; rejects null input early.
public static FilterPredicate rewrite(FilterPredicate pred) { Objects.requireNonNull(pred, "pred cannot be null"); return pred.accept(INSTANCE); }
// Verifies that rewriting a complex predicate yields its pre-computed collapsed form.
@Test public void testComplex() { assertEquals(complexCollapsed, rewrite(complex)); }
// Collects RMNodes from the active and/or inactive node maps whose current state
// is contained in acceptedStates. The up-front scan over acceptedStates decides
// which of the two maps need to be consulted at all.
public static List<RMNode> queryRMNodes(RMContext context, EnumSet<NodeState> acceptedStates) {
    // nodes contains nodes that are NEW, RUNNING, UNHEALTHY or DECOMMISSIONING.
    ArrayList<RMNode> results = new ArrayList<RMNode>();
    boolean hasActive = false;
    boolean hasInactive = false;
    for (NodeState nodeState : acceptedStates) {
        if (!hasInactive && nodeState.isInactiveState()) {
            hasInactive = true;
        }
        if (!hasActive && nodeState.isActiveState()) {
            hasActive = true;
        }
        // Both maps already needed; no point scanning further states.
        if (hasActive && hasInactive) {
            break;
        }
    }
    if (hasActive) {
        for (RMNode rmNode : context.getRMNodes().values()) {
            if (acceptedStates.contains(rmNode.getState())) {
                results.add(rmNode);
            }
        }
    }
    // inactiveNodes contains nodes that are DECOMMISSIONED, LOST, OR REBOOTED
    if (hasInactive) {
        for (RMNode rmNode : context.getInactiveRMNodes().values()) {
            if ((rmNode != null) && acceptedStates.contains(rmNode.getState())) {
                results.add(rmNode);
            }
        }
    }
    return results;
}
// Exercises queryRMNodes against a mocked inactive-node map for each inactive state
// (SHUTDOWN, DECOMMISSIONED, LOST, REBOOTED), expecting the mocked node back each time.
@Test public void testQueryRMNodes() throws Exception { RMContext rmContext = mock(RMContext.class); NodeId node1 = NodeId.newInstance("node1", 1234); RMNode rmNode1 = mock(RMNode.class); ConcurrentMap<NodeId, RMNode> inactiveList = new ConcurrentHashMap<NodeId, RMNode>(); when(rmNode1.getState()).thenReturn(NodeState.SHUTDOWN); inactiveList.put(node1, rmNode1); when(rmContext.getInactiveRMNodes()).thenReturn(inactiveList); List<RMNode> result = RMServerUtils.queryRMNodes(rmContext, EnumSet.of(NodeState.SHUTDOWN)); Assert.assertTrue(result.size() != 0); assertThat(result.get(0)).isEqualTo(rmNode1); when(rmNode1.getState()).thenReturn(NodeState.DECOMMISSIONED); result = RMServerUtils.queryRMNodes(rmContext, EnumSet.of(NodeState.DECOMMISSIONED)); Assert.assertTrue(result.size() != 0); assertThat(result.get(0)).isEqualTo(rmNode1); when(rmNode1.getState()).thenReturn(NodeState.LOST); result = RMServerUtils.queryRMNodes(rmContext, EnumSet.of(NodeState.LOST)); Assert.assertTrue(result.size() != 0); assertThat(result.get(0)).isEqualTo(rmNode1); when(rmNode1.getState()).thenReturn(NodeState.REBOOTED); result = RMServerUtils.queryRMNodes(rmContext, EnumSet.of(NodeState.REBOOTED)); Assert.assertTrue(result.size() != 0); assertThat(result.get(0)).isEqualTo(rmNode1); }
// Factory building a StateSerdes for built-in key/value classes by resolving
// each class to its standard serde via Serdes.serdeFrom.
public static <K, V> StateSerdes<K, V> withBuiltinTypes( final String topic, final Class<K> keyClass, final Class<V> valueClass) { return new StateSerdes<>(topic, Serdes.serdeFrom(keyClass), Serdes.serdeFrom(valueClass)); }
// Cross-product check: every supported built-in key/value class pair must yield a non-null StateSerdes.
@Test public void shouldReturnSerdesForBuiltInKeyAndValueTypesForBuiltinTypes() { final Class[] supportedBuildInTypes = new Class[] { String.class, Short.class, Integer.class, Long.class, Float.class, Double.class, byte[].class, ByteBuffer.class, Bytes.class }; for (final Class keyClass : supportedBuildInTypes) { for (final Class valueClass : supportedBuildInTypes) { assertNotNull(StateSerdes.withBuiltinTypes("anyName", keyClass, valueClass)); } } }
public void mirrorKeys() { /* how to mirror? width = 55 [0..15] [20..35] [40..55] phase 1: multiple by -1 [0] [-20] [-40] phase 2: add keyboard width [55] [35] [15] phase 3: subtracting the key's width [40] [20] [0] cool? */ final int keyboardWidth = getMinWidth(); for (Key k : getKeys()) { var newX = -1 * k.x; // phase 1 newX += keyboardWidth; // phase 2 newX -= k.width; // phase 3 k.x = newX; } }
// Verifies popup-keyboard mirroring with two unbalanced rows: key count and row (Y)
// positions are preserved while each key's X position is mirrored across the width.
@Test
public void testKeyboardPopupSupportsMirrorMultipleRowsNotFullBalanced() throws Exception {
    String popupCharacters = "qwertasd";
    // asd
    // qwert
    AnyPopupKeyboard keyboard = new AnyPopupKeyboard( new DefaultAddOn(getApplicationContext(), getApplicationContext()), getApplicationContext(), popupCharacters, SIMPLE_KeyboardDimens, "POP_KEYBOARD");
    int vGap = (int) SIMPLE_KeyboardDimens.getRowVerticalGap();
    int keyHeight = (int) SIMPLE_KeyboardDimens.getNormalKeyHeight();
    int hGap = (int) SIMPLE_KeyboardDimens.getKeyHorizontalGap();
    final int keyWidth = (int) (SIMPLE_KeyboardDimens.getKeyboardMaxWidth() - SIMPLE_KeyboardDimens.getKeyHorizontalGap() * popupCharacters.length()) / 10;
    Assert.assertEquals(8, keyboard.getKeys().size());
    assertKeyValues(keyboard, 'q', vGap + keyHeight + vGap, 0);
    assertKeyValues(keyboard, 'w', vGap + keyHeight + vGap, keyWidth);
    assertKeyValues(keyboard, 'e', vGap + keyHeight + vGap, hGap + 2 * keyWidth);
    assertKeyValues(keyboard, 'r', vGap + keyHeight + vGap, 2 * hGap + 3 * keyWidth);
    assertKeyValues(keyboard, 't', vGap + keyHeight + vGap, 3 * hGap + 4 * keyWidth);
    assertKeyValues(keyboard, 'a', vGap, 0);
    assertKeyValues(keyboard, 's', vGap, keyWidth);
    assertKeyValues(keyboard, 'd', vGap, hGap + 2 * keyWidth);
    keyboard.mirrorKeys();
    // same order, mirrored X position
    Assert.assertEquals(8, keyboard.getKeys().size());
    assertKeyValues(keyboard, 'q', vGap + keyHeight + vGap, 5 * hGap + 4 * keyWidth);
    assertKeyValues(keyboard, 'w', vGap + keyHeight + vGap, 4 * hGap + 3 * keyWidth);
    assertKeyValues(keyboard, 'e', vGap + keyHeight + vGap, 3 * hGap + 2 * keyWidth);
    assertKeyValues(keyboard, 'r', vGap + keyHeight + vGap, 2 * hGap + keyWidth);
    assertKeyValues(keyboard, 't', vGap + keyHeight + vGap, hGap);
    assertKeyValues(keyboard, 'a', vGap, 5 * hGap + 4 * keyWidth);
    assertKeyValues(keyboard, 's', vGap, 4 * hGap + 3 * keyWidth);
    assertKeyValues(keyboard, 'd', vGap, 3 * hGap + 2 * keyWidth);
}
// Reads the value at the given path, compiling the path once and reusing it via the path cache.
@Override public <T> T read(String path, Predicate... filters) { notEmpty(path, "path can not be null or empty"); return read(pathFromCache(path, filters)); }
// Ensures that the same cached path string used with different filter predicates
// still yields predicate-specific results (cache key must include the filters).
@Test public void cached_path_with_predicates() { Filter feq = Filter.filter(Criteria.where("category").eq("reference")); Filter fne = Filter.filter(Criteria.where("category").ne("reference")); DocumentContext JsonDoc = JsonPath.parse(JSON_DOCUMENT); List<String> eq = JsonDoc.read("$.store.book[?].category", feq); List<String> ne = JsonDoc.read("$.store.book[?].category", fne); Assertions.assertThat(eq).contains("reference"); Assertions.assertThat(ne).doesNotContain("reference"); }
// JDBC metadata: this driver does not claim ANSI92 entry-level SQL support.
@Override public boolean supportsANSI92EntryLevelSQL() { return false; }
// Pins the metadata flag to false.
@Test void assertSupportsANSI92EntryLevelSQL() { assertFalse(metaData.supportsANSI92EntryLevelSQL()); }
// Filters the raw command-line args to those recognized by the modifiable-configuration
// parser, then delegates loading/modification to ConfigurationParserUtils.
public static List<String> loadAndModifyConfiguration(String[] args) throws FlinkException { return ConfigurationParserUtils.loadAndModifyConfiguration( filterCmdArgs(args, ModifiableClusterConfigurationParserFactory.options()), BashJavaUtils.class.getSimpleName()); }
// A -D key/value that exactly matches a --removeKeyValue pair must be stripped,
// leaving only the base test config entry (format depends on YAML flavor).
@TestTemplate void testloadAndModifyConfigurationRemoveKeyValuesMatched() throws Exception { String removeKey = "removeKey"; String removeValue = "removeValue"; String[] args = { "--configDir", confDir.toFile().getAbsolutePath(), String.format("-D%s=%s", removeKey, removeValue), "--removeKeyValue", String.format("%s=%s", removeKey, removeValue) }; List<String> list = FlinkConfigLoader.loadAndModifyConfiguration(args); if (standardYaml) { assertThat(list).containsExactly("test:", " key: " + TEST_CONFIG_VALUE); } else { assertThat(list).containsExactlyInAnyOrder(TEST_CONFIG_KEY + ": " + TEST_CONFIG_VALUE); } }
// Aggregates error counts for the response. A non-NONE top-level error is counted
// once for itself plus once per partition; otherwise per-partition errors are
// tallied and the top-level NONE is added on top.
@Override
public Map<Errors, Integer> errorCounts() {
    if (data.errorCode() != Errors.NONE.code())
        // Minor optimization since the top-level error applies to all partitions
        return Collections.singletonMap(error(), data.partitionErrors().size() + 1);
    Map<Errors, Integer> errors = errorCounts(data.partitionErrors().stream().map(p -> Errors.forCode(p.errorCode())));
    updateErrorCounts(errors, Errors.forCode(data.errorCode())); // top level error
    return errors;
}
// With a NONE top-level error, expects per-partition tallies plus one extra NONE for the top level.
@Test public void testErrorCountsNoTopLevelError() { List<StopReplicaPartitionError> errors = new ArrayList<>(); errors.add(new StopReplicaPartitionError().setTopicName("foo").setPartitionIndex(0)); errors.add(new StopReplicaPartitionError().setTopicName("foo").setPartitionIndex(1) .setErrorCode(Errors.CLUSTER_AUTHORIZATION_FAILED.code())); StopReplicaResponse response = new StopReplicaResponse(new StopReplicaResponseData() .setErrorCode(Errors.NONE.code()) .setPartitionErrors(errors)); Map<Errors, Integer> errorCounts = response.errorCounts(); assertEquals(2, errorCounts.size()); assertEquals(2, errorCounts.get(Errors.NONE).intValue()); assertEquals(1, errorCounts.get(Errors.CLUSTER_AUTHORIZATION_FAILED).intValue()); }
// Ensures the upload directory exists, logging at info on first creation and at
// warn when a previously existing directory vanished (uploads are then lost).
static void createUploadDir( final Path uploadDir, final Logger log, final boolean initialCreation) throws IOException { if (!Files.exists(uploadDir)) { if (initialCreation) { log.info("Upload directory {} does not exist. ", uploadDir); } else { log.warn( "Upload directory {} has been deleted externally. " + "Previously uploaded files are no longer available.", uploadDir); } checkAndCreateUploadDir(uploadDir, log); } }
// Creating a missing upload dir on initial startup must succeed and leave the directory in place.
@Test void testCreateUploadDir(@TempDir File file) throws Exception { final Path testUploadDir = file.toPath().resolve("testUploadDir"); assertThat(Files.exists(testUploadDir)).isFalse(); RestServerEndpoint.createUploadDir(testUploadDir, NOPLogger.NOP_LOGGER, true); assertThat(Files.exists(testUploadDir)).isTrue(); }
// Estimates retained heap size: this object, the BitSet shallow size, and the
// BitSet's backing long[] (size() bits, i.e. size()/64 longs).
public long getRetainedSizeInBytes() {
    // Under the hood, BitSet stores a long[] array of BitSet.size() bits
    return INSTANCE_SIZE + BITSET_INSTANCE_SIZE + SizeOf.sizeOfLongArray(bitSet.size() / Long.SIZE);
}
// Recomputes the expected retained size from ClassLayout and the backing long[] length.
@Test
public static void testRetainedSize() {
    int instanceSizes = ClassLayout.parseClass(Bitmap.class).instanceSize() + ClassLayout.parseClass(BitSet.class).instanceSize();
    // The underlying BitSet stores a long[] array of size length / 64,
    // even though toBytes() returns a truncated array of bytes.
    Bitmap bitmap = new Bitmap(1024);
    assertEquals(bitmap.getRetainedSizeInBytes(), instanceSizes + SizeOf.sizeOfLongArray(1024 / 64));
}
// Convenience overload: navigates with lazy instantiation enabled (see 3-arg variant).
public static ScenarioBeanWrapper<?> navigateToObject(Object rootObject, List<String> steps) { return navigateToObject(rootObject, steps, true); }
// Navigating to a non-existent field must fail with a ScenarioException naming the field and class.
@Test public void navigateToObjectFakeFieldTest() { Dispute dispute = new Dispute(); List<String> pathToProperty = List.of("fakeField"); String message = "Impossible to find field with name 'fakeField' in class " + Dispute.class.getCanonicalName(); assertThatThrownBy(() -> ScenarioBeanUtil.navigateToObject(dispute, pathToProperty, true)) .isInstanceOf(ScenarioException.class) .hasMessage(message); }
// Jackson-bound setter toggling log-file archiving.
@JsonProperty public void setArchive(boolean archive) { this.archive = archive; }
// The built async appender must reflect the factory's includeCallerData flag (default false, then true).
@Test void includesCallerData() { FileAppenderFactory<ILoggingEvent> fileAppenderFactory = new FileAppenderFactory<>(); fileAppenderFactory.setArchive(false); assertThat(fileAppenderFactory.build(new LoggerContext(), "test", new DropwizardLayoutFactory(), new NullLevelFilterFactory<>(), new AsyncLoggingEventAppenderFactory())) .isInstanceOfSatisfying(AsyncAppender.class, asyncAppender -> assertThat(asyncAppender.isIncludeCallerData()).isFalse()); fileAppenderFactory.setIncludeCallerData(true); assertThat(fileAppenderFactory.build(new LoggerContext(), "test", new DropwizardLayoutFactory(), new NullLevelFilterFactory<>(), new AsyncLoggingEventAppenderFactory())) .isInstanceOfSatisfying(AsyncAppender.class, asyncAppender -> assertThat(asyncAppender.isIncludeCallerData()).isTrue()); }
// Parses a JSON object property into a String->String map whose values may be null;
// rejects a missing property or a non-object value with IllegalArgumentException.
public static Map<String, String> getStringMapNullableValues(String property, JsonNode node) { Preconditions.checkArgument(node.has(property), "Cannot parse missing map: %s", property); JsonNode pNode = node.get(property); Preconditions.checkArgument( pNode != null && !pNode.isNull() && pNode.isObject(), "Cannot parse string map from non-object value: %s: %s", property, pNode); Map<String, String> map = Maps.newHashMap(); Iterator<String> fields = pNode.fieldNames(); while (fields.hasNext()) { String field = fields.next(); map.put(field, getStringOrNull(field, pNode)); } return map; }
// Covers error paths (missing, null, non-string value) and a round-trip of a map with null values.
@Test public void getStringMapNullableValues() throws JsonProcessingException { assertThatThrownBy( () -> JsonUtil.getStringMapNullableValues("items", JsonUtil.mapper().readTree("{}"))) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Cannot parse missing map: items"); assertThatThrownBy( () -> JsonUtil.getStringMapNullableValues( "items", JsonUtil.mapper().readTree("{\"items\": null}"))) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Cannot parse string map from non-object value: items: null"); assertThatThrownBy( () -> JsonUtil.getStringMapNullableValues( "items", JsonUtil.mapper().readTree("{\"items\": {\"a\":\"23\", \"b\":45}}"))) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Cannot parse to a string value: b: 45"); Map<String, String> itemsWithNullableValues = Maps.newHashMap(); itemsWithNullableValues.put("a", null); itemsWithNullableValues.put("b", null); itemsWithNullableValues.put("c", "23"); assertThat( JsonUtil.getStringMapNullableValues( "items", JsonUtil.mapper() .readTree("{\"items\": {\"a\": null, \"b\": null, \"c\": \"23\"}}"))) .isEqualTo(itemsWithNullableValues); String json = JsonUtil.generate( gen -> { gen.writeStartObject(); JsonUtil.writeStringMap("items", itemsWithNullableValues, gen); gen.writeEndObject(); }, false); assertThat(JsonUtil.getStringMapNullableValues("items", JsonUtil.mapper().readTree(json))) .isEqualTo(itemsWithNullableValues); }
/**
 * Appends the given action to this thing's action list.
 *
 * @param a the action to add; must be non-null
 * @throws IllegalArgumentException if {@code a} is null (runtime guard kept
 *     despite the {@code @NonNull} annotation, hence the suppression)
 */
@SuppressWarnings("ConstantConditions")
public void addAction(@NonNull Action a) {
    if (a != null) {
        getActions().add(a);
    } else {
        throw new IllegalArgumentException("Action must be non-null");
    }
}
// A null action must be rejected with IllegalArgumentException.
@Test public void addAction_null() { assertThrows(IllegalArgumentException.class, () -> thing.addAction(null)); }
// Looks up the multimap stored under the given key definition in the backing tree.
public MultiMap<Value, T, List<T>> get(final KeyDefinition keyDefinition) { return tree.get(keyDefinition); }
// The UUID index must contain exactly the three known entries' UUID values.
@Test void testFindByUUID() throws Exception { assertThat(map.get(UUIDKey.UNIQUE_UUID).keySet()).containsExactlyInAnyOrder(toni.uuidKey.getSingleValue(), eder.uuidKey.getSingleValue(), michael.uuidKey.getSingleValue()); }
/**
 * Upper-cases the given string.
 *
 * @param input the string to upper-case; null yields null
 * @return the upper-cased string, or null for null input
 */
@Udf
public String ucase(
    @UdfParameter(description = "The string to upper-case") final String input) {
  // Null propagates; otherwise delegate to String#toUpperCase (default-locale casing,
  // matching the original behavior).
  return input == null ? null : input.toUpperCase();
}
// Empty input must map to empty output (not null).
@Test public void shouldReturnEmptyForEmptyInput() { final String result = udf.ucase(""); assertThat(result, is("")); }
/**
 * Matches the "job finished concurrently" state change: the storage-provider copy
 * is exactly one version ahead, the local job is still PROCESSING while the stored
 * one no longer is, and no local thread is still working on the job.
 */
@Override
public boolean matches(Job localJob, Job storageProviderJob) {
    // Guard clauses preserve the original short-circuit evaluation order.
    if (storageProviderJob.getVersion() != localJob.getVersion() + 1) {
        return false;
    }
    if (!localJob.hasState(PROCESSING) || storageProviderJob.hasState(PROCESSING)) {
        return false;
    }
    return jobSteward.getThreadProcessingJob(localJob) == null;
}
// A version jump greater than +1 (2 -> 5) must not be treated as an allowed concurrent state change.
@Test void ifJobIsHavingConcurrentStateChangeAndStorageProviderJobHasNotAVersionOfPlus1ItWillNotMatch() { final Job localJob = aJobInProgress().withVersion(2).build(); final Job storageProviderJob = aCopyOf(localJob).withVersion(5).withState(new SucceededState(ofMillis(10), ofMillis(6))).build(); boolean matchesAllowedStateChange = allowedStateChange.matches(localJob, storageProviderJob); assertThat(matchesAllowedStateChange).isFalse(); }
/**
 * Builds a short human-readable description of a throwable: the simple class
 * name, followed by ": 'message'" when a message is present.
 *
 * @param t the throwable to describe; null yields null
 */
static String describe(Throwable t) {
    if (t == null) {
        return null;
    }
    final StringBuilder description = new StringBuilder(t.getClass().getSimpleName());
    final String message = t.getMessage();
    if (message != null) {
        description.append(": '").append(message).append("'");
    }
    return description.toString();
}
// Without a message, the description is just the exception's simple class name.
@Test void exceptionWithNoMessageDescribedAsItsClassSimpleName() { assertThat( ExceptionUtils.describe(new IllegalArgumentException()), is("IllegalArgumentException")); }
// Splits text into the substrings matched by regexExpression plus the unmatched
// segments between them, preserving order. The post-match index adjustment steps
// back one character when the match end does not land on an underscore.
public List<String> tokenize(String text) {
    List<String> tokens = new ArrayList<>();
    Matcher regexMatcher = regexExpression.matcher(text);
    int lastIndexOfPrevMatch = 0;
    while (regexMatcher.find(lastIndexOfPrevMatch))
    // this is where the magic happens:
    // the regexp is used to find a matching pattern for substitution
    {
        int beginIndexOfNextMatch = regexMatcher.start();
        String prevToken = text.substring(lastIndexOfPrevMatch, beginIndexOfNextMatch);
        if (!prevToken.isEmpty()) {
            tokens.add(prevToken);
        }
        String currentMatch = regexMatcher.group();
        tokens.add(currentMatch);
        lastIndexOfPrevMatch = regexMatcher.end();
        if (lastIndexOfPrevMatch < text.length() && text.charAt(lastIndexOfPrevMatch) != '_') {
            // because it is sometimes positioned after the "_", but it should be positioned
            // before the "_"
            --lastIndexOfPrevMatch;
        }
    }
    // Trailing unmatched segment, if any.
    String tail = text.substring(lastIndexOfPrevMatch);
    if (!tail.isEmpty()) {
        tokens.add(tail);
    }
    return tokens;
}
// A known compound token in the middle of the text is isolated; the flanking segments are kept whole.
@Test void testTokenize_happyPath_4() { // given CompoundCharacterTokenizer tokenizer = new CompoundCharacterTokenizer( new HashSet<>(Arrays.asList(new String[] { "_67_112_96_", "_74_112_76_" }))); String text = "_94_67_112_96_112_91_103_"; // when List<String> tokens = tokenizer.tokenize(text); // then assertEquals(Arrays.asList("_94", "_67_112_96_", "_112_91_103_"), tokens); }
// Returns whether the file might contain matching rows, judged from its column metrics.
public boolean eval(ContentFile<?> file) {
    // TODO: detect the case where a column is missing from the file using file's max field id.
    return new MetricsEvalVisitor().eval(file);
}
// lessThanOrEqual bounds checks: below-range values must prune the file; in-range values must not.
@Test public void testIntegerLtEq() { boolean shouldRead = new InclusiveMetricsEvaluator(SCHEMA, lessThanOrEqual("id", INT_MIN_VALUE - 25)).eval(FILE); assertThat(shouldRead).as("Should not read: id range below lower bound (5 < 30)").isFalse(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, lessThanOrEqual("id", INT_MIN_VALUE - 1)).eval(FILE); assertThat(shouldRead).as("Should not read: id range below lower bound (29 < 30)").isFalse(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, lessThanOrEqual("id", INT_MIN_VALUE)).eval(FILE); assertThat(shouldRead).as("Should read: one possible id").isTrue(); shouldRead = new InclusiveMetricsEvaluator(SCHEMA, lessThanOrEqual("id", INT_MAX_VALUE)).eval(FILE); assertThat(shouldRead).as("Should read: many possible ids").isTrue(); }
// JDBC metadata: an autocommit failure does not close all open result sets in this driver.
@Override public boolean autoCommitFailureClosesAllResultSets() { return false; }
// Pins the metadata flag to false.
@Test void assertAutoCommitFailureClosesAllResultSets() { assertFalse(metaData.autoCommitFailureClosesAllResultSets()); }
// Scans all timer-wheel slots starting at the slot for timeStartMs, and returns the
// smallest "first" physical offset among slots whose last offset exceeds maxOffset;
// Long.MAX_VALUE when no such slot exists. The timestamp comparison skips slots that
// do not belong to the expected time for their position. Byte-identical to original;
// left unrestyled because correctness depends on the exact buffer read order
// (position, timestamp long, first long, last long).
public long checkPhyPos(long timeStartMs, long maxOffset) { long minFirst = Long.MAX_VALUE; int firstSlotIndex = getSlotIndex(timeStartMs); for (int i = 0; i < slotsTotal * 2; i++) { int slotIndex = (firstSlotIndex + i) % (slotsTotal * 2); localBuffer.get().position(slotIndex * Slot.SIZE); if ((timeStartMs + i * precisionMs) / precisionMs != localBuffer.get().getLong()) { continue; } long first = localBuffer.get().getLong(); long last = localBuffer.get().getLong(); if (last > maxOffset) { if (first < minFirst) { minFirst = first; } } } return minFirst; }
// Populates three slots and checks the minimal qualifying physical offset for varying maxOffset cut-offs.
@Test public void testCheckPhyPos() { long delayedTime = defaultDelay + precisionMs; timerWheel.putSlot(delayedTime, 1, 100, 1, 0); timerWheel.putSlot(delayedTime + 5 * precisionMs, 2, 200, 2, 0); timerWheel.putSlot(delayedTime + 10 * precisionMs, 3, 300, 3, 0); assertEquals(1, timerWheel.checkPhyPos(delayedTime, 50)); assertEquals(2, timerWheel.checkPhyPos(delayedTime, 100)); assertEquals(3, timerWheel.checkPhyPos(delayedTime, 200)); assertEquals(Long.MAX_VALUE, timerWheel.checkPhyPos(delayedTime, 300)); assertEquals(Long.MAX_VALUE, timerWheel.checkPhyPos(delayedTime, 400)); assertEquals(2, timerWheel.checkPhyPos(delayedTime + 5 * precisionMs, 50)); assertEquals(2, timerWheel.checkPhyPos(delayedTime + 5 * precisionMs, 100)); assertEquals(3, timerWheel.checkPhyPos(delayedTime + 5 * precisionMs, 200)); assertEquals(Long.MAX_VALUE, timerWheel.checkPhyPos(delayedTime + 5 * precisionMs, 300)); assertEquals(Long.MAX_VALUE, timerWheel.checkPhyPos(delayedTime + 5 * precisionMs, 400)); }
// RLP-encodes the private transaction, signs the encoding with the credentials'
// EC key pair, and returns the re-encoded transaction including the signature.
public static byte[] signMessage( final RawPrivateTransaction privateTransaction, final Credentials credentials) { final byte[] encodedTransaction = encode(privateTransaction); final Sign.SignatureData signatureData = Sign.signMessage(encodedTransaction, credentials.getEcKeyPair()); return encode(privateTransaction, signatureData); }
// Signing a deterministic legacy private transaction with a fixed key must produce a known hex encoding.
@Test public void testSignLegacyTransaction() { final String expected = "0xf8d4808203e8832dc6c094627306090abab3a6e1400e9345bc60c78a8bef578080820fe8a0e0b547d71d7a23d52382288b3a2a5a1610e0b504c404cc5009d7ada97d9015b2a076e997a83856d876fa2397b74510890eea3b73ffeda33daa4188120dac42d62fa0035695b4cc4b0941e60551d7a19cf30603db5bfc23e5ac43a56f57f25f75486af842a0035695b4cc4b0941e60551d7a19cf30603db5bfc23e5ac43a56f57f25f75486aa02a8d9b56a0fe9cd94d60be4413bcb721d3a7be27ed8e28b3a6346df874ee141b8a72657374726963746564"; final RawPrivateTransaction privateTransactionCreation = RawPrivateTransaction.createTransaction( BigInteger.ZERO, BigInteger.valueOf(1000), BigInteger.valueOf(3000000), "0x627306090abab3a6e1400e9345bc60c78a8bef57", "0x", MOCK_ENCLAVE_KEY, MOCK_PRIVATE_FOR, RESTRICTED); final long chainId = 2018; final String privateKey = "8f2a55949038a9610f50fb23b5883af3b4ecb3c3bb792cbcefbd1542c692be63"; final Credentials credentials = Credentials.create(privateKey); final String privateRawTransaction = Numeric.toHexString( PrivateTransactionEncoder.signMessage( privateTransactionCreation, chainId, credentials)); assertEquals(expected, privateRawTransaction); }
// Scans the term buffer from termOffset for the first gap (zero frame length) before
// limitOffset, reports it to the handler with its length, and returns the gap's begin
// offset (or the scan end when no gap is found). Byte-identical to original; the offset
// stepping and ALIGNED_HEADER_LENGTH adjustments are too order-sensitive to restyle safely.
public static int scanForGap( final UnsafeBuffer termBuffer, final int termId, final int termOffset, final int limitOffset, final GapHandler handler) { int offset = termOffset; do { final int frameLength = frameLengthVolatile(termBuffer, offset); if (frameLength <= 0) { break; } offset += align(frameLength, FRAME_ALIGNMENT); } while (offset < limitOffset); final int gapBeginOffset = offset; if (offset < limitOffset) { final int limit = limitOffset - ALIGNED_HEADER_LENGTH; while (offset < limit) { offset += FRAME_ALIGNMENT; if (0 != termBuffer.getIntVolatile(offset)) { offset -= ALIGNED_HEADER_LENGTH; break; } } final int gapLength = (offset - gapBeginOffset) + ALIGNED_HEADER_LENGTH; handler.onGap(termId, gapBeginOffset, gapLength); } return gapBeginOffset; }
// A single zero-length frame between the tail and the high-water mark must be reported as one gap.
@Test void shouldReportSingleGapWhenBufferNotFull() { final int tail = align(HEADER_LENGTH, FRAME_ALIGNMENT); final int highWaterMark = FRAME_ALIGNMENT * 3; when(termBuffer.getIntVolatile(tail - align(HEADER_LENGTH, FRAME_ALIGNMENT))).thenReturn(HEADER_LENGTH); when(termBuffer.getIntVolatile(tail)).thenReturn(0); when(termBuffer.getIntVolatile(highWaterMark - align(HEADER_LENGTH, FRAME_ALIGNMENT))) .thenReturn(HEADER_LENGTH); assertEquals(tail, TermGapScanner.scanForGap(termBuffer, TERM_ID, tail, highWaterMark, gapHandler)); verify(gapHandler).onGap(TERM_ID, tail, align(HEADER_LENGTH, FRAME_ALIGNMENT)); }
/**
 * Parses an XML file into a {@link PartialConfig}.
 *
 * Wraps all parse failures in a RuntimeException carrying the file name and the
 * original cause: JDOM syntax errors, IO errors, and any other parse failure.
 *
 * @param file the XML file to parse
 * @throws RuntimeException on any parse or IO failure (cause preserved)
 */
public PartialConfig parseFile(File file) {
    // try-with-resources replaces the manual finally/close block: the stream is
    // always closed, and a failing close can no longer be silently logged away.
    try (FileInputStream inputStream = new FileInputStream(file)) {
        return loader.fromXmlPartial(inputStream, PartialConfig.class);
    } catch (JDOMParseException jdomex) {
        throw new RuntimeException("Syntax error in xml file: " + file.getName(), jdomex);
    } catch (IOException ioex) {
        throw new RuntimeException("IO error when trying to parse xml file: " + file.getName(), ioex);
    } catch (Exception ex) {
        throw new RuntimeException("Failed to parse xml file: " + file.getName(), ex);
    }
}
// Round-trip: a pipeline group written to a file must parse back equal in content and size.
@Test public void shouldParseFileWithOnePipelineGroup() throws Exception { GoConfigMother mother = new GoConfigMother(); PipelineConfigs group1 = mother.cruiseConfigWithOnePipelineGroup().getGroups().get(0); File file = helper.addFileWithPipelineGroup("group1.gocd.xml", group1); PartialConfig part = xmlPartialProvider.parseFile(file); PipelineConfigs groupRead = part.getGroups().get(0); assertThat(groupRead,is(group1)); assertThat(groupRead.size(),is(group1.size())); assertThat(groupRead.get(0),is(group1.get(0))); }
// Test-only accessor for the lazily supplied AmazonS3 client.
@VisibleForTesting AmazonS3 getAmazonS3Client() { return this.amazonS3.get(); }
// With a custom endpoint and path-style access enabled, generated URLs must use path-style form.
@Test public void testGetPathStyleAccessEnabledWithS3Options() throws URISyntaxException { S3FileSystem s3FileSystem = new S3FileSystem(s3OptionsWithCustomEndpointAndPathStyleAccessEnabled()); URL s3Url = s3FileSystem.getAmazonS3Client().getUrl("bucket", "file"); assertEquals("https://s3.custom.dns/bucket/file", s3Url.toURI().toString()); }
// Assembles a SecretsPluginInfo from the descriptor plus its security settings and icon.
@Override public SecretsPluginInfo pluginInfoFor(GoPluginDescriptor descriptor) { String pluginId = descriptor.id(); return new SecretsPluginInfo(descriptor, securityConfigSettings(pluginId), image(pluginId)); }
// The built plugin info must carry the icon returned by the extension.
@Test public void shouldBuildPluginInfoWithImage() { GoPluginDescriptor descriptor = GoPluginDescriptor.builder().id("plugin1").build(); Image icon = new Image("content_type", "data", "hash"); when(extension.getIcon(descriptor.id())).thenReturn(icon); SecretsPluginInfo pluginInfo = new SecretsPluginInfoBuilder(extension).pluginInfoFor(descriptor); assertThat(pluginInfo.getImage(), is(icon)); }
// Compares length bytes of two buffers starting at the given indexes, 8 bytes at a
// time (byte-swapping when the buffers' byte orders differ) with a byte-wise tail.
// Returns false (not an exception) when either range exceeds its writer index.
// Left unrestyled: the index arithmetic and read order are easy to break.
public static boolean equals(ByteBuf a, int aStartIndex, ByteBuf b, int bStartIndex, int length) {
    checkNotNull(a, "a");
    checkNotNull(b, "b");
    // All indexes and lengths must be non-negative
    checkPositiveOrZero(aStartIndex, "aStartIndex");
    checkPositiveOrZero(bStartIndex, "bStartIndex");
    checkPositiveOrZero(length, "length");
    if (a.writerIndex() - length < aStartIndex || b.writerIndex() - length < bStartIndex) {
        return false;
    }
    final int longCount = length >>> 3;
    final int byteCount = length & 7;
    if (a.order() == b.order()) {
        for (int i = longCount; i > 0; i --) {
            if (a.getLong(aStartIndex) != b.getLong(bStartIndex)) {
                return false;
            }
            aStartIndex += 8;
            bStartIndex += 8;
        }
    } else {
        for (int i = longCount; i > 0; i --) {
            if (a.getLong(aStartIndex) != swapLong(b.getLong(bStartIndex))) {
                return false;
            }
            aStartIndex += 8;
            bStartIndex += 8;
        }
    }
    for (int i = byteCount; i > 0; i --) {
        if (a.getByte(aStartIndex) != b.getByte(bStartIndex)) {
            return false;
        }
        aStartIndex ++;
        bStartIndex ++;
    }
    return true;
}
// Copies a subsection then perturbs one byte; equals over that subsection must report false.
@Test
public void notEqualsBufferSubsections() {
    byte[] b1 = new byte[50];
    byte[] b2 = new byte[256];
    Random rand = new Random();
    rand.nextBytes(b1);
    rand.nextBytes(b2);
    final int iB1 = b1.length / 2;
    final int iB2 = iB1 + b1.length;
    final int length = b1.length - iB1;
    System.arraycopy(b1, iB1, b2, iB2, length);
    // Randomly pick an index in the range that will be compared and make the value at that index differ between
    // the 2 arrays.
    int diffIndex = random(rand, iB1, iB1 + length - 1);
    ++b1[diffIndex];
    assertFalse(ByteBufUtil.equals(Unpooled.wrappedBuffer(b1), iB1, Unpooled.wrappedBuffer(b2), iB2, length));
}
// Decides whether column statistics must be distrusted because of the PARQUET-251
// binary-stats corruption: applies only to binary columns, to files written by
// parquet-mr versions before the fix (with a CDH backport window exempted), and
// conservatively to files whose created_by cannot be parsed.
public static boolean shouldIgnoreStatistics(String createdBy, PrimitiveTypeName columnType) {
    if (columnType != PrimitiveTypeName.BINARY && columnType != PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY) {
        // the bug only applies to binary columns
        return false;
    }
    if (Strings.isNullOrEmpty(createdBy)) {
        // created_by is not populated, which could have been caused by
        // parquet-mr during the same time as PARQUET-251, see PARQUET-297
        warnOnce("Ignoring statistics because created_by is null or empty! See PARQUET-251 and PARQUET-297");
        return true;
    }
    try {
        ParsedVersion version = VersionParser.parse(createdBy);
        if (!"parquet-mr".equals(version.application)) {
            // assume other applications don't have this bug
            return false;
        }
        if (Strings.isNullOrEmpty(version.version)) {
            warnOnce("Ignoring statistics because created_by did not contain a semver (see PARQUET-251): " + createdBy);
            return true;
        }
        SemanticVersion semver = SemanticVersion.parse(version.version);
        if (semver.compareTo(PARQUET_251_FIXED_VERSION) < 0 && !(semver.compareTo(CDH_5_PARQUET_251_FIXED_START) >= 0 && semver.compareTo(CDH_5_PARQUET_251_FIXED_END) < 0)) {
            warnOnce("Ignoring statistics because this file was created prior to " + PARQUET_251_FIXED_VERSION + ", see PARQUET-251");
            return true;
        }
        // this file was created after the fix
        return false;
    } catch (RuntimeException | SemanticVersionParseException | VersionParseException e) {
        // couldn't parse the created_by field, log what went wrong, don't trust the stats,
        // but don't make this fatal.
        warnParseErrorOnce(createdBy, e);
        return true;
    }
}
// Version matrix for the CDH backport window and upstream fix boundary of PARQUET-251.
@Test public void testDistributionCorruptStatistics() { assertTrue(CorruptStatistics.shouldIgnoreStatistics( "parquet-mr version 1.5.0-cdh5.4.999 (build abcd)", PrimitiveTypeName.BINARY)); assertFalse(CorruptStatistics.shouldIgnoreStatistics( "parquet-mr version 1.5.0-cdh5.5.0-SNAPSHOT (build 956ed6c14c611b4c4eaaa1d6e5b9a9c6d4dfa336)", PrimitiveTypeName.BINARY)); assertFalse(CorruptStatistics.shouldIgnoreStatistics( "parquet-mr version 1.5.0-cdh5.5.0 (build abcd)", PrimitiveTypeName.BINARY)); assertFalse(CorruptStatistics.shouldIgnoreStatistics( "parquet-mr version 1.5.0-cdh5.5.1 (build abcd)", PrimitiveTypeName.BINARY)); assertFalse(CorruptStatistics.shouldIgnoreStatistics( "parquet-mr version 1.5.0-cdh5.6.0 (build abcd)", PrimitiveTypeName.BINARY)); assertTrue(CorruptStatistics.shouldIgnoreStatistics( "parquet-mr version 1.4.10 (build abcd)", PrimitiveTypeName.BINARY)); assertTrue(CorruptStatistics.shouldIgnoreStatistics( "parquet-mr version 1.5.0 (build abcd)", PrimitiveTypeName.BINARY)); assertTrue(CorruptStatistics.shouldIgnoreStatistics( "parquet-mr version 1.5.1 (build abcd)", PrimitiveTypeName.BINARY)); assertTrue(CorruptStatistics.shouldIgnoreStatistics( "parquet-mr version 1.6.0 (build abcd)", PrimitiveTypeName.BINARY)); assertTrue(CorruptStatistics.shouldIgnoreStatistics( "parquet-mr version 1.7.0 (build abcd)", PrimitiveTypeName.BINARY)); }
/**
 * Deserializes config XML into an editable config, notifies the optional callback,
 * then preprocesses/validates it and returns both forms in a GoConfigHolder.
 *
 * @param content  raw config XML
 * @param callback invoked with the editable config before validation; may be null
 */
public GoConfigHolder loadConfigHolder(final String content, Callback callback) throws Exception {
    LOGGER.debug("[Config Save] Loading config holder");
    final CruiseConfig configForEdit = deserializeConfig(content);
    if (callback != null) {
        callback.call(configForEdit);
    }
    final CruiseConfig config = preprocessAndValidate(configForEdit);
    return new GoConfigHolder(config, configForEdit);
}
// Duplicate template names must be rejected by the XML identity constraint. (Text block
// left on one line untouched: its embedded whitespace is string content.)
@Test void shouldOnlySupportUniquePipelineTemplates() { String content = configWithTemplates( """ <templates> <pipeline name='erbshe'> <stage name='stage1'> <jobs> <job name='job1'><tasks><exec command='echo'><runif status='passed' /></exec></tasks></job> </jobs> </stage> </pipeline> <pipeline name='erbshe'> <stage name='stage1'> <jobs> <job name='job1'><tasks><exec command='echo'><runif status='passed' /></exec></tasks></job> </jobs> </stage> </pipeline> </templates>"""); assertThatThrownBy(() -> xmlLoader.loadConfigHolder(content)) .as("should not allow same template names") .hasMessageContaining("Duplicate unique value [erbshe] declared for identity constraint"); }
// Builds a Glue partition-filter expression by AND-ing per-column expressions,
// skipping columns whose names are reserved SQL keywords and dropping any column
// whose inclusion would exceed Glue's expression length limit.
public static String buildGlueExpression(Map<Column, Domain> partitionPredicates) {
    List<String> perColumnExpressions = new ArrayList<>();
    int expressionLength = 0;
    for (Map.Entry<Column, Domain> partitionPredicate : partitionPredicates.entrySet()) {
        String columnName = partitionPredicate.getKey().getName();
        if (JSQL_PARSER_RESERVED_KEYWORDS.contains(columnName.toUpperCase(ENGLISH))) {
            // The column name is a reserved keyword in the grammar of the SQL parser used internally by Glue API
            continue;
        }
        Domain domain = partitionPredicate.getValue();
        if (domain != null && !domain.isAll()) {
            Optional<String> columnExpression = buildGlueExpressionForSingleDomain(columnName, domain);
            if (columnExpression.isPresent()) {
                int newExpressionLength = expressionLength + columnExpression.get().length();
                if (expressionLength > 0) {
                    newExpressionLength += CONJUNCT_SEPARATOR.length();
                }
                // Skip (rather than fail) when this column would push past Glue's char limit.
                if (newExpressionLength > GLUE_EXPRESSION_CHAR_LIMIT) {
                    continue;
                }
                perColumnExpressions.add((columnExpression.get()));
                expressionLength = newExpressionLength;
            }
        }
    }
    return Joiner.on(CONJUNCT_SEPARATOR).join(perColumnExpressions);
}
// A domain combining discrete values and a range must render as an OR of both forms.
@Test public void testBuildGlueExpressionTupleDomainEqualAndRangeString() { Map<Column, Domain> predicates = new PartitionFilterBuilder(HIVE_TYPE_TRANSLATOR) .addStringValues("col1", "2020-01-01", "2020-01-31") .addRanges("col1", Range.range(VarcharType.VARCHAR, utf8Slice("2020-03-01"), true, utf8Slice("2020-03-31"), true)) .build(); String expression = buildGlueExpression(predicates); assertEquals(expression, "((col1 >= '2020-03-01' AND col1 <= '2020-03-31') OR (col1 in ('2020-01-01', '2020-01-31')))"); }
/**
 * Converts a decimal Unicode codepoint to its string form.
 *
 * @param decimalCode the codepoint; null or an invalid codepoint yields null
 * @return the one- or two-char (surrogate pair) string, or null
 */
@Udf
public String chr(@UdfParameter(
    description = "Decimal codepoint") final Integer decimalCode) {
  // Merged guards: missing and invalid codepoints both map to null.
  if (decimalCode == null || !Character.isValidCodePoint(decimalCode)) {
    return null;
  }
  return new String(Character.toChars(decimalCode));
}
// Exercises the String overload of chr(): the Java literal "\\u004b" reaches the
// UDF as the six characters \u004b, which it must decode to 'K'.
@Test
public void shouldConvertFromUTF16StringWithSlash() {
    final String result = udf.chr("\\u004b");
    assertThat(result, is("K"));
}
/**
 * Fails if the map does not contain the given key. Delegates to a keySet()
 * assertion so the failure message names "map.keySet()" as the value under test.
 */
public final void containsKey(@Nullable Object key) {
    check("keySet()").that(checkNotNull(actual).keySet()).contains(key);
}
// A missing key must produce a failure message phrased in terms of map.keySet().
@Test
public void containsKeyFailure() {
    ImmutableMap<String, String> actual = ImmutableMap.of("kurt", "kluever");
    expectFailureWhenTestingThat(actual).containsKey("greg");
    assertFailureKeys("value of", "expected to contain", "but was", "map was");
    assertFailureValue("value of", "map.keySet()");
    assertFailureValue("expected to contain", "greg");
    assertFailureValue("but was", "[kurt]");
}
@Override
public <V> MultiLabel generateOutput(V label) {
    // Scalar (non-collection) input: parse its string form directly.
    if (!(label instanceof Collection)) {
        return MultiLabel.parseString(label.toString());
    }
    // Collection input: parse each element individually and assemble the label.
    final List<Pair<String,Boolean>> dimensions = new ArrayList<>();
    for (Object element : (Collection<?>) label) {
        dimensions.add(MultiLabel.parseElement(element.toString()));
    }
    return MultiLabel.createFromPairList(dimensions);
}
// An empty collection yields an empty MultiLabel: no labels, empty string form.
@Test
public void testGenerateOutput_emptySet() {
    MultiLabelFactory factory = new MultiLabelFactory();
    Set<String> labels = new HashSet<>();
    MultiLabel output = factory.generateOutput(labels);
    assertEquals(0, output.getLabelSet().size());
    assertEquals("", output.getLabelString());
}
/**
 * Initializes the server: verifies directories, loads build information,
 * sets up logging and configuration, and initializes all configured services.
 * On service-initialization failure, already-initialized services are destroyed
 * before the exception is propagated.
 *
 * @throws ServerException if a service fails to initialize.
 * @throws IllegalStateException if the server has already been initialized.
 */
public void init() throws ServerException {
    if (status != Status.UNDEF) {
        throw new IllegalStateException("Server already initialized");
    }
    status = Status.BOOTING;
    verifyDir(homeDir);
    verifyDir(tempDir);
    // Build metadata ships as a <name>.properties classpath resource.
    Properties serverInfo = new Properties();
    try {
        InputStream is = getResource(name + ".properties");
        serverInfo.load(is);
        is.close();
    } catch (IOException ex) {
        throw new RuntimeException("Could not load server information file: " + name + ".properties");
    }
    initLog();
    log.info("++++++++++++++++++++++++++++++++++++++++++++++++++++++");
    log.info("Server [{}] starting", name);
    log.info("  Built information:");
    log.info("    Version           : {}", serverInfo.getProperty(name + ".version", "undef"));
    log.info("    Source Repository : {}", serverInfo.getProperty(name + ".source.repository", "undef"));
    log.info("    Source Revision   : {}", serverInfo.getProperty(name + ".source.revision", "undef"));
    log.info("    Built by          : {}", serverInfo.getProperty(name + ".build.username", "undef"));
    log.info("    Built timestamp   : {}", serverInfo.getProperty(name + ".build.timestamp", "undef"));
    log.info("  Runtime information:");
    log.info("    Home   dir: {}", homeDir);
    log.info("    Config dir: {}", (config == null) ? configDir : "-");
    log.info("    Log    dir: {}", logDir);
    log.info("    Temp   dir: {}", tempDir);
    initConfig();
    log.debug("Loading services");
    List<Service> list = loadServices();
    try {
        log.debug("Initializing services");
        initServices(list);
        log.info("Services initialized");
    } catch (ServerException ex) {
        // Roll back partially-initialized services before propagating.
        log.error("Services initialization failure, destroying initialized services");
        destroyServices();
        throw ex;
    }
    // Startup status may be overridden via configuration; defaults to NORMAL.
    Status status = Status.valueOf(getConfig().get(getPrefixedName(CONF_STARTUP_STATUS), Status.NORMAL.toString()));
    setStatus(status);
    log.info("Server [{}] started!, status [{}]", name, status);
}
// Configuring a nonexistent service class ("foo") must abort init() with a
// ServerException whose message starts with error code S08.
@Test
@TestException(exception = ServerException.class, msgRegExp = "S08.*")
@TestDir
public void invalidSservice() throws Exception {
    String dir = TestDirHelper.getTestDir().getAbsolutePath();
    Configuration conf = new Configuration(false);
    conf.set("server.services", "foo");
    Server server = new Server("server", dir, dir, dir, dir, conf);
    server.init();
}
@Override public ExecuteContext doThrow(ExecuteContext context) { // If the request to the registry fails, the registry has lost contact RegisterContext.INSTANCE.compareAndSet(true, false); return context; }
// After the interceptor observes a registry failure, the shared availability
// flag must flip from true to false.
@Test
public void doThrow() throws NoSuchMethodException {
    RegisterContext.INSTANCE.setAvailable(true);
    interceptor.doThrow(buildContext());
    Assert.assertFalse(RegisterContext.INSTANCE.isAvailable());
}
/**
 * Binds to the given local address with an effectively unbounded backlog
 * (Integer.MAX_VALUE); the operating system clamps this to its own limit.
 */
public void bind(SocketAddress localAddress) {
    bind(localAddress, Integer.MAX_VALUE);
}
// A negative backlog is invalid and must be rejected with IllegalArgumentException.
@Test
public void test_bind_whenBacklogNegative() {
    Reactor reactor = newReactor();
    try (AsyncServerSocket socket = newAsyncServerSocket(reactor)) {
        SocketAddress localAddress = createLoopBackAddressWithEphemeralPort();
        assertThrows(IllegalArgumentException.class, () -> socket.bind(localAddress, -1));
    }
}
public static boolean hasPrefix(String path, String prefix) throws InvalidPathException { // normalize path and prefix(e.g. "/a/b/../c" -> "/a/c", "/a/b/" --> "/a/b") path = cleanPath(path); prefix = cleanPath(prefix); if (prefix.equals("/")) { return true; } if (!path.startsWith(prefix)) { return false; } return path.length() == prefix.length() // path == prefix // Include cases like `prefix=/a/b/, path=/a/b/c/` || prefix.endsWith("/") // Exclude cases like `prefix=/a/b/c, path=/a/b/ccc` || path.charAt(prefix.length()) == '/'; }
// Covers root prefix, exact match, component-boundary matches, normalization
// ("." and ".."), and rejection of partial-component "prefixes" like /a vs /aa.
@Test
public void hasPrefix() throws InvalidPathException {
    assertTrue(PathUtils.hasPrefix("/", "/"));
    assertTrue(PathUtils.hasPrefix("/a/b/c", "/"));
    assertTrue(PathUtils.hasPrefix("/a", "/a"));
    assertTrue(PathUtils.hasPrefix("/a/b/c/", "/a/b/c"));
    assertTrue(PathUtils.hasPrefix("/a", "/a/"));
    assertTrue(PathUtils.hasPrefix("/a/b/c", "/a"));
    assertTrue(PathUtils.hasPrefix("/a/b/c", "/a/b"));
    assertTrue(PathUtils.hasPrefix("/a/b/c", "/a/b/c"));
    assertTrue(PathUtils.hasPrefix("/a/b/c/d/e", "/a/b/"));
    assertTrue(PathUtils.hasPrefix("/a/b/./c/../d", "/a/../a/b/d"));
    assertFalse(PathUtils.hasPrefix("/a/b/../c", "/a/b"));
    assertFalse(PathUtils.hasPrefix("/", "/a"));
    assertFalse(PathUtils.hasPrefix("/", "/a/b/c"));
    assertFalse(PathUtils.hasPrefix("/a", "/a/b/c"));
    assertFalse(PathUtils.hasPrefix("/a/b", "/a/b/c"));
    assertFalse(PathUtils.hasPrefix("/a/b/c", "/aa"));
    assertFalse(PathUtils.hasPrefix("/a/b/c", "/a/bb"));
    assertFalse(PathUtils.hasPrefix("/a/b/c", "/a/b/cc"));
    assertFalse(PathUtils.hasPrefix("/aa/b/c", "/a"));
    assertFalse(PathUtils.hasPrefix("/a/bb/c", "/a/b"));
    assertFalse(PathUtils.hasPrefix("/a/b/cc", "/a/b/c"));
}
/**
 * Fetches the latest conversion rates for the given base currency from fixer.io.
 * Non-2xx HTTP responses, I/O failures, interruption, and unsuccessful API
 * payloads are all surfaced as FixerException.
 *
 * NOTE(review): {@code base} is interpolated into the query string unencoded —
 * assumed to be a plain ISO currency code; confirm callers never pass user input.
 */
public Map<String, BigDecimal> getConversionsForBase(String base) throws FixerException {
    try {
        URI uri = URI.create("https://data.fixer.io/api/latest?access_key=" + apiKey + "&base=" + base);
        HttpResponse<String> response = client.send(HttpRequest.newBuilder()
                .GET()
                .uri(uri)
                .build(), HttpResponse.BodyHandlers.ofString());
        if (response.statusCode() < 200 || response.statusCode() >= 300) {
            throw new FixerException("Bad response: " + response.statusCode() + " " + response.toString());
        }
        FixerResponse parsedResponse = SystemMapper.jsonMapper().readValue(response.body(), FixerResponse.class);
        if (parsedResponse.success)
            return parsedResponse.rates;
        else
            throw new FixerException("Got failed response!");
    } catch (IOException | InterruptedException e) {
        throw new FixerException(e);
    }
}
// A mocked 200 response with the fixture payload must be parsed into the rate map.
@Test
public void testGetConversionsForBase() throws IOException, InterruptedException {
    HttpResponse<String> httpResponse = mock(HttpResponse.class);
    when(httpResponse.statusCode()).thenReturn(200);
    when(httpResponse.body()).thenReturn(jsonFixture("fixtures/fixer.res.json"));
    HttpClient httpClient = mock(HttpClient.class);
    when(httpClient.send(any(HttpRequest.class), any(BodyHandler.class))).thenReturn(httpResponse);
    FixerClient fixerClient = new FixerClient(httpClient, "foo");
    Map<String, BigDecimal> conversions = fixerClient.getConversionsForBase("EUR");
    assertThat(conversions.get("CAD")).isEqualTo(new BigDecimal("1.560132"));
}
/**
 * Returns a Java-safe parameter name derived from {@code name}.
 *
 * <p>A null or empty name yields the positional fallback {@code "param" + idx};
 * a name that is not a valid Java identifier (e.g. a reserved word like
 * {@code class}) is legalized with a leading underscore; anything else is
 * returned unchanged.
 */
static String createValidParamName(String name, int idx) {
    if (name == null || name.isEmpty()) {
        // No usable name supplied: synthesize a positional one.
        return "param" + idx;
    }
    if (!SourceVersion.isName(name)) {
        // Reserved word or otherwise invalid identifier: prefix to legalize it.
        return "_" + name;
    }
    return name;
}
// Valid names pass through, empty names fall back to "param<idx>", and reserved
// words are underscore-prefixed.
@Test
public void testCreateValidParamName() {
    assertEquals(createValidParamName("param", 1), ("param"));
    assertEquals(createValidParamName("", 1), ("param1"));
    assertEquals(createValidParamName("class", 1), ("_class"));
}
/**
 * Creates a HoodieTableSource from the catalog table's options and resolved schema.
 * Requires the 'path' option; table options (hive sync, etc.) are resolved from
 * the path and the catalog context before the source is built.
 */
@Override
public DynamicTableSource createDynamicTableSource(Context context) {
    Configuration conf = FlinkOptions.fromMap(context.getCatalogTable().getOptions());
    StoragePath path = new StoragePath(conf.getOptional(FlinkOptions.PATH).orElseThrow(() ->
            new ValidationException("Option [path] should not be empty.")));
    setupTableOptions(conf.getString(FlinkOptions.PATH), conf);
    ResolvedSchema schema = context.getCatalogTable().getResolvedSchema();
    setupConfOptions(conf, context.getObjectIdentifier(), context.getCatalogTable(), schema);
    return new HoodieTableSource(
            SerializableSchema.create(schema),
            path,
            context.getCatalogTable().getPartitionKeys(),
            conf.getString(FlinkOptions.PARTITION_DEFAULT_NAME),
            conf);
}
// Hive sync options (db, table, partition extractor) must be carried into the
// source configuration, both with defaults and with explicit overrides.
@Test
void testSetupHiveOptionsForSource() {
    // definition with simple primary key and partition path
    ResolvedSchema schema1 = SchemaBuilder.instance()
            .field("f0", DataTypes.INT().notNull())
            .field("f1", DataTypes.VARCHAR(20))
            .field("f2", DataTypes.TIMESTAMP(3))
            .field("ts", DataTypes.TIMESTAMP(3))
            .primaryKey("f0")
            .build();
    final MockContext sourceContext1 = MockContext.getInstance(this.conf, schema1, "f2");
    final HoodieTableSource tableSource1 = (HoodieTableSource) new HoodieTableFactory().createDynamicTableSource(sourceContext1);
    final Configuration conf1 = tableSource1.getConf();
    assertThat(conf1.getString(FlinkOptions.HIVE_SYNC_DB), is("db1"));
    assertThat(conf1.getString(FlinkOptions.HIVE_SYNC_TABLE), is("t1"));
    assertThat(conf1.getString(FlinkOptions.HIVE_SYNC_PARTITION_EXTRACTOR_CLASS_NAME), is(MultiPartKeysValueExtractor.class.getName()));
    // set up hive style partitioning is true.
    this.conf.setString(FlinkOptions.HIVE_SYNC_DB, "db2");
    this.conf.setString(FlinkOptions.HIVE_SYNC_TABLE, "t2");
    this.conf.setBoolean(FlinkOptions.HIVE_STYLE_PARTITIONING, true);
    final MockContext sourceContext2 = MockContext.getInstance(this.conf, schema1, "f2");
    final HoodieTableSource tableSource2 = (HoodieTableSource) new HoodieTableFactory().createDynamicTableSource(sourceContext2);
    final Configuration conf2 = tableSource2.getConf();
    assertThat(conf2.getString(FlinkOptions.HIVE_SYNC_DB), is("db2"));
    assertThat(conf2.getString(FlinkOptions.HIVE_SYNC_TABLE), is("t2"));
    assertThat(conf2.getString(FlinkOptions.HIVE_SYNC_PARTITION_EXTRACTOR_CLASS_NAME), is(MultiPartKeysValueExtractor.class.getName()));
}
/** Matches only the SAML OAuth2 callback endpoint (CALLBACK_PATH + "saml"). */
@Override
public UrlPattern doGetPattern() {
    return UrlPattern.create(CALLBACK_PATH + "saml");
}
// Only the exact "/oauth2/callback/saml" path must match; siblings and prefixes must not.
@Test
public void do_get_pattern() {
    assertThat(underTest.doGetPattern().matches("/oauth2/callback/saml")).isTrue();
    assertThat(underTest.doGetPattern().matches("/oauth2/callback/")).isFalse();
    assertThat(underTest.doGetPattern().matches("/oauth2/callback/test")).isFalse();
    assertThat(underTest.doGetPattern().matches("/oauth2/")).isFalse();
}
/**
 * Instantiates the class named {@code name} via its no-arg constructor.
 *
 * @throws IllegalStateException wrapping any reflective failure (missing or
 *         inaccessible constructor, abstract class, or an exception thrown by
 *         the constructor itself). Class-resolution errors are handled by
 *         {@code forName}, which is why ClassNotFoundException is not caught here.
 */
public static Object newInstance(String name) {
    try {
        return forName(name).getDeclaredConstructor().newInstance();
    } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
        throw new IllegalStateException(e.getMessage(), e);
    }
}
// Instantiating a class without an accessible no-arg constructor must be
// reported as IllegalStateException.
@Test
void testNewInstance0() {
    Assertions.assertThrows(
            IllegalStateException.class, () -> ClassUtils.newInstance(PrivateHelloServiceImpl.class.getName()));
}
/** Returns the shared converter that maps a CSV record to a header-to-value map. */
public static CsvRecordConverter<Map<String, String>> mapConverter() {
    return MapCsvRecordConverter.SINGLETON;
}
// Each header column must map to its corresponding value in the record.
@Test
void shouldConvertAsMap() {
    Map<String, String> map = CsvRecordConverters.mapConverter().convertRecord(record);
    assertNotNull(map);
    assertEquals(3, map.size());
    assertEquals("1", map.get("A"));
    assertEquals("2", map.get("B"));
    assertEquals("3", map.get("C"));
}
/**
 * Validates that a notice with the given id exists.
 * A null id means "nothing to validate" and passes silently.
 * Throws the NOTICE_NOT_FOUND service exception when no such notice exists.
 */
@VisibleForTesting
public void validateNoticeExists(Long id) {
    if (id == null) {
        return;
    }
    NoticeDO notice = noticeMapper.selectById(id);
    if (notice == null) {
        throw exception(NOTICE_NOT_FOUND);
    }
}
// A random id that matches no notice must raise the NOTICE_NOT_FOUND service exception.
@Test
public void testValidateNoticeExists_noExists() {
    assertServiceException(() -> noticeService.validateNoticeExists(randomLongId()), NOTICE_NOT_FOUND);
}
/** Returns the underlying gRPC client call. */
public ClientCall<?, ?> call() {
    return call;
}
// The accessor must return the exact same ClientCall instance, not a copy.
@Test
void call() {
    assertThat(request.call()).isSameAs(call);
}
@Udf(description = "Returns the sine of an INT value") public Double sin( @UdfParameter( value = "value", description = "The value in radians to get the sine of." ) final Integer value ) { return sin(value == null ? null : value.doubleValue()); }
// Negative inputs across the double/long overloads must match Math.sin within tolerance.
@Test
public void shouldHandleNegative() {
    assertThat(udf.sin(-0.43), closeTo(-0.41687080242921076, 0.000000000000001));
    assertThat(udf.sin(-Math.PI), closeTo(0, 0.000000000000001));
    assertThat(udf.sin(-2 * Math.PI), closeTo(0, 0.000000000000001));
    assertThat(udf.sin(-6), closeTo(0.27941549819892586, 0.000000000000001));
    assertThat(udf.sin(-6L), closeTo(0.27941549819892586, 0.000000000000001));
}
/**
 * Starts a Google Cloud Storage resumable upload for the given file and returns
 * a stream whose contents are PUT to the session URI returned by the initial
 * POST. Canned ACLs, requester-pays, metadata, content type, storage class and
 * custom time are encoded into the initiation request.
 *
 * NOTE(review): the JSON metadata body is assembled by string concatenation
 * without escaping keys/values — metadata containing quotes or backslashes
 * would produce invalid JSON; confirm inputs are sanitized upstream.
 */
@Override
public HttpResponseOutputStream<StorageObject> write(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    final DelayedHttpEntityCallable<StorageObject> command = new DelayedHttpEntityCallable<StorageObject>(file) {
        @Override
        public StorageObject call(final HttpEntity entity) throws BackgroundException {
            try {
                // POST /upload/storage/v1/b/myBucket/o
                final StringBuilder uri = new StringBuilder(String.format("%supload/storage/v1/b/%s/o?uploadType=resumable",
                        session.getClient().getRootUrl(), containerService.getContainer(file).getName()));
                if(containerService.getContainer(file).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) {
                    // Requester-pays buckets need the billing project on the request.
                    uri.append(String.format("&userProject=%s", session.getHost().getCredentials().getUsername()));
                }
                if(!Acl.EMPTY.equals(status.getAcl())) {
                    if(status.getAcl().isCanned()) {
                        // Translate the canned ACL into Glue's predefinedAcl query value.
                        uri.append("&predefinedAcl=");
                        if(Acl.CANNED_PRIVATE.equals(status.getAcl())) {
                            uri.append("private");
                        }
                        else if(Acl.CANNED_PUBLIC_READ.equals(status.getAcl())) {
                            uri.append("publicRead");
                        }
                        else if(Acl.CANNED_PUBLIC_READ_WRITE.equals(status.getAcl())) {
                            uri.append("publicReadWrite");
                        }
                        else if(Acl.CANNED_AUTHENTICATED_READ.equals(status.getAcl())) {
                            uri.append("authenticatedRead");
                        }
                        else if(Acl.CANNED_BUCKET_OWNER_FULLCONTROL.equals(status.getAcl())) {
                            uri.append("bucketOwnerFullControl");
                        }
                        else if(Acl.CANNED_BUCKET_OWNER_READ.equals(status.getAcl())) {
                            uri.append("bucketOwnerRead");
                        }
                        // Reset in status to skip setting ACL in upload filter already applied as canned ACL
                        status.setAcl(Acl.EMPTY);
                    }
                }
                final HttpEntityEnclosingRequestBase request = new HttpPost(uri.toString());
                // Assemble the JSON object metadata body for the initiation request.
                final StringBuilder metadata = new StringBuilder();
                metadata.append(String.format("{\"name\": \"%s\"", containerService.getKey(file)));
                metadata.append(",\"metadata\": {");
                for(Iterator<Map.Entry<String, String>> iter = status.getMetadata().entrySet().iterator(); iter.hasNext(); ) {
                    final Map.Entry<String, String> item = iter.next();
                    metadata.append(String.format("\"%s\": \"%s\"", item.getKey(), item.getValue()));
                    if(iter.hasNext()) {
                        metadata.append(",");
                    }
                }
                metadata.append("}");
                if(StringUtils.isNotBlank(status.getMime())) {
                    metadata.append(String.format(", \"contentType\": \"%s\"", status.getMime()));
                }
                if(StringUtils.isNotBlank(status.getStorageClass())) {
                    metadata.append(String.format(", \"storageClass\": \"%s\"", status.getStorageClass()));
                }
                if(null != status.getModified()) {
                    metadata.append(String.format(", \"customTime\": \"%s\"",
                            new ISO8601DateFormatter().format(status.getModified(), TimeZone.getTimeZone("UTC"))));
                }
                metadata.append("}");
                request.setEntity(new StringEntity(metadata.toString(),
                        ContentType.create("application/json", StandardCharsets.UTF_8.name())));
                if(StringUtils.isNotBlank(status.getMime())) {
                    // Set to the media MIME type of the upload data to be transferred in subsequent requests.
                    request.addHeader("X-Upload-Content-Type", status.getMime());
                }
                request.addHeader(HTTP.CONTENT_TYPE, MEDIA_TYPE);
                final HttpClient client = session.getHttpClient();
                final HttpResponse response = client.execute(request);
                try {
                    switch(response.getStatusLine().getStatusCode()) {
                        case HttpStatus.SC_OK:
                            break;
                        default:
                            throw new DefaultHttpResponseExceptionMappingService().map("Upload {0} failed",
                                    new HttpResponseException(response.getStatusLine().getStatusCode(),
                                            GoogleStorageExceptionMappingService.parse(response)), file);
                    }
                }
                finally {
                    EntityUtils.consume(response.getEntity());
                }
                if(response.containsHeader(HttpHeaders.LOCATION)) {
                    // The Location header carries the resumable session URI.
                    final String putTarget = response.getFirstHeader(HttpHeaders.LOCATION).getValue();
                    // Upload the file
                    final HttpPut put = new HttpPut(putTarget);
                    put.setEntity(entity);
                    final HttpResponse putResponse = client.execute(put);
                    try {
                        switch(putResponse.getStatusLine().getStatusCode()) {
                            case HttpStatus.SC_OK:
                            case HttpStatus.SC_CREATED:
                                return session.getClient().getObjectParser().parseAndClose(new InputStreamReader(
                                        putResponse.getEntity().getContent(), StandardCharsets.UTF_8), StorageObject.class);
                            default:
                                throw new DefaultHttpResponseExceptionMappingService().map("Upload {0} failed",
                                        new HttpResponseException(putResponse.getStatusLine().getStatusCode(),
                                                GoogleStorageExceptionMappingService.parse(putResponse)), file);
                        }
                    }
                    finally {
                        EntityUtils.consume(putResponse.getEntity());
                    }
                }
                else {
                    throw new DefaultHttpResponseExceptionMappingService().map("Upload {0} failed",
                            new HttpResponseException(response.getStatusLine().getStatusCode(),
                                    GoogleStorageExceptionMappingService.parse(response)), file);
                }
            }
            catch(IOException e) {
                throw new GoogleStorageExceptionMappingService().map("Upload {0} failed", e, file);
            }
        }

        @Override
        public long getContentLength() {
            return status.getLength();
        }
    };
    return this.write(file, status, command);
}
// An unknown storage class must be rejected by the service and surfaced as
// InteroperabilityException with the service-provided detail message.
@Test(expected = InteroperabilityException.class)
public void testWriteInvalidStorageClass() throws Exception {
    final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path test = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final TransferStatus status = new TransferStatus();
    status.setStorageClass("invalid");
    try {
        new GoogleStorageWriteFeature(session).write(test, status, new DisabledConnectionCallback()).close();
        fail();
    }
    catch(BackgroundException e) {
        assertEquals("Invalid argument. Please contact your web hosting service provider for assistance.", e.getDetail());
        throw e;
    }
}
/** Returns a new top-level schema type builder with a fresh (empty) name context. */
public static TypeBuilder<Schema> builder() {
    return new TypeBuilder<>(new SchemaCompletion(), new NameContext());
}
// An int built through the builder chain (with a property) must equal the
// expected primitive schema.
@Test
void testInt() {
    Schema.Type type = Schema.Type.INT;
    Schema simple = SchemaBuilder.builder().intType();
    Schema expected = primitive(type, simple);
    Schema built1 = SchemaBuilder.builder().intBuilder().prop("p", "v").endInt();
    assertEquals(expected, built1);
}
public static Map<String, Map<String, String>> convertRegister(Map<String, Map<String, String>> register) { Map<String, Map<String, String>> newRegister = new HashMap<>(); for (Map.Entry<String, Map<String, String>> entry : register.entrySet()) { String serviceName = entry.getKey(); Map<String, String> serviceUrls = entry.getValue(); if (StringUtils.isNotContains(serviceName, ':') && StringUtils.isNotContains(serviceName, '/')) { for (Map.Entry<String, String> entry2 : serviceUrls.entrySet()) { String serviceUrl = entry2.getKey(); String serviceQuery = entry2.getValue(); Map<String, String> params = StringUtils.parseQueryString(serviceQuery); String group = params.get(GROUP_KEY); String version = params.get(VERSION_KEY); // params.remove("group"); // params.remove("version"); String name = serviceName; if (StringUtils.isNotEmpty(group)) { name = group + "/" + name; } if (StringUtils.isNotEmpty(version)) { name = name + ":" + version; } Map<String, String> newUrls = newRegister.computeIfAbsent(name, k -> new HashMap<>()); newUrls.put(serviceUrl, StringUtils.toQueryString(params)); } } else { newRegister.put(serviceName, serviceUrls); } } return newRegister; }
// A bare service name plus group/version query parameters must be re-keyed as
// "group/name:version", with the query string re-serialized in sorted order.
@Test
void testConvertRegister2() {
    String key = "dubbo.test.api.HelloService";
    Map<String, Map<String, String>> register = new HashMap<String, Map<String, String>>();
    Map<String, String> service = new HashMap<String, String>();
    service.put("dubbo://127.0.0.1:20880/com.xxx.XxxService", "version=1.0.0&group=test&dubbo.version=2.0.0");
    register.put(key, service);
    Map<String, Map<String, String>> newRegister = UrlUtils.convertRegister(register);
    Map<String, String> newService = new HashMap<String, String>();
    newService.put("dubbo://127.0.0.1:20880/com.xxx.XxxService", "dubbo.version=2.0.0&group=test&version=1.0.0");
    assertEquals(newService, newRegister.get("test/dubbo.test.api.HelloService:1.0.0"));
}
/**
 * Constructs a DataflowRunner from the given pipeline options, validating every
 * service requirement first: appName/region presence, worker settings, temp and
 * staging locations, files to stage, job name shape, project id, debug options,
 * and JFR/Java-version compatibility. Also normalizes the job name to lowercase
 * and stamps the user agent with the runner name/version and JRE version.
 *
 * @throws IllegalArgumentException if any required option is missing or invalid.
 */
public static DataflowRunner fromOptions(PipelineOptions options) {
    DataflowPipelineOptions dataflowOptions = PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options);
    ArrayList<String> missing = new ArrayList<>();
    if (dataflowOptions.getAppName() == null) {
        missing.add("appName");
    }
    if (Strings.isNullOrEmpty(dataflowOptions.getRegion()) && isServiceEndpoint(dataflowOptions.getDataflowEndpoint())) {
        missing.add("region");
    }
    if (missing.size() > 0) {
        throw new IllegalArgumentException(
            "Missing required pipeline options: " + Joiner.on(',').join(missing));
    }
    validateWorkerSettings(
        PipelineOptionsValidator.validate(DataflowPipelineWorkerPoolOptions.class, options));
    PathValidator validator = dataflowOptions.getPathValidator();
    String gcpTempLocation;
    try {
        gcpTempLocation = dataflowOptions.getGcpTempLocation();
    } catch (Exception e) {
        throw new IllegalArgumentException(
            "DataflowRunner requires gcpTempLocation, "
                + "but failed to retrieve a value from PipelineOptions", e);
    }
    validator.validateOutputFilePrefixSupported(gcpTempLocation);
    String stagingLocation;
    try {
        stagingLocation = dataflowOptions.getStagingLocation();
    } catch (Exception e) {
        throw new IllegalArgumentException(
            "DataflowRunner requires stagingLocation, "
                + "but failed to retrieve a value from PipelineOptions", e);
    }
    validator.validateOutputFilePrefixSupported(stagingLocation);
    if (!isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) {
        validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs());
    }
    if (dataflowOptions.getFilesToStage() != null) {
        // The user specifically requested these files, so fail now if they do not exist.
        // (automatically detected classpath elements are permitted to not exist, so later
        // staging will not fail on nonexistent files)
        dataflowOptions.getFilesToStage().stream()
            .forEach(
                stagedFileSpec -> {
                    File localFile;
                    if (stagedFileSpec.contains("=")) {
                        // "target=path" form: stage the file at `path` under name `target`.
                        String[] components = stagedFileSpec.split("=", 2);
                        localFile = new File(components[1]);
                    } else {
                        localFile = new File(stagedFileSpec);
                    }
                    if (!localFile.exists()) {
                        // should be FileNotFoundException, but for build-time backwards compatibility
                        // cannot add checked exception
                        throw new RuntimeException(
                            String.format("Non-existent files specified in filesToStage: %s", localFile));
                    }
                });
    } else {
        dataflowOptions.setFilesToStage(
            detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader(), options));
        if (dataflowOptions.getFilesToStage().isEmpty()) {
            throw new IllegalArgumentException("No files to stage has been found.");
        } else {
            LOG.info(
                "PipelineOptions.filesToStage was not specified. "
                    + "Defaulting to files from the classpath: will stage {} files. "
                    + "Enable logging at DEBUG level to see which files will be staged.",
                dataflowOptions.getFilesToStage().size());
            LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage());
        }
    }
    // Verify jobName according to service requirements, truncating converting to lowercase if
    // necessary.
    String jobName = dataflowOptions.getJobName().toLowerCase();
    checkArgument(
        jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"),
        "JobName invalid; the name must consist of only the characters "
            + "[-a-z0-9], starting with a letter and ending with a letter "
            + "or number");
    if (!jobName.equals(dataflowOptions.getJobName())) {
        LOG.info(
            "PipelineOptions.jobName did not match the service requirements. "
                + "Using {} instead of {}.",
            jobName,
            dataflowOptions.getJobName());
    }
    dataflowOptions.setJobName(jobName);
    // Verify project
    String project = dataflowOptions.getProject();
    if (project.matches("[0-9]*")) {
        throw new IllegalArgumentException(
            "Project ID '"
                + project
                + "' invalid. Please make sure you specified the Project ID, not project number.");
    } else if (!project.matches(PROJECT_ID_REGEXP)) {
        throw new IllegalArgumentException(
            "Project ID '"
                + project
                + "' invalid. Please make sure you specified the Project ID, not project"
                + " description.");
    }
    DataflowPipelineDebugOptions debugOptions = dataflowOptions.as(DataflowPipelineDebugOptions.class);
    // Verify the number of worker threads is a valid value
    if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) {
        throw new IllegalArgumentException(
            "Number of worker harness threads '"
                + debugOptions.getNumberOfWorkerHarnessThreads()
                + "' invalid. Please make sure the value is non-negative.");
    }
    // Verify that if recordJfrOnGcThrashing is set, the pipeline is at least on java 11
    if (dataflowOptions.getRecordJfrOnGcThrashing()
        && Environments.getJavaVersion() == Environments.JavaVersion.java8) {
        throw new IllegalArgumentException(
            "recordJfrOnGcThrashing is only supported on java 9 and up.");
    }
    if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) {
        dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT);
    }
    // Adding the Java version to the SDK name for user's and support convenience.
    String agentJavaVer = "(JRE 8 environment)";
    if (Environments.getJavaVersion() != Environments.JavaVersion.java8) {
        agentJavaVer = String.format("(JRE %s environment)", Environments.getJavaVersion().specification());
    }
    DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo();
    String userAgentName = dataflowRunnerInfo.getName();
    Preconditions.checkArgument(
        !userAgentName.equals(""), "Dataflow runner's `name` property cannot be empty.");
    String userAgentVersion = dataflowRunnerInfo.getVersion();
    Preconditions.checkArgument(
        !userAgentVersion.equals(""), "Dataflow runner's `version` property cannot be empty.");
    // Spaces are replaced so the agent string is a single HTTP token.
    String userAgent =
        String.format("%s/%s%s", userAgentName, userAgentVersion, agentJavaVer).replace(" ", "_");
    dataflowOptions.setUserAgent(userAgent);
    return new DataflowRunner(dataflowOptions);
}
// Non-GCS staging locations (file:// or relative paths) must be rejected during
// option validation with a "valid 'gs://' path" message.
@Test
public void testInvalidStagingLocation() throws IOException {
    DataflowPipelineOptions options = buildPipelineOptions();
    options.setStagingLocation("file://my/staging/location");
    try {
        DataflowRunner.fromOptions(options);
        fail("fromOptions should have failed");
    } catch (IllegalArgumentException e) {
        assertThat(e.getMessage(), containsString("Expected a valid 'gs://' path but was given"));
    }
    options.setStagingLocation("my/staging/location");
    try {
        DataflowRunner.fromOptions(options);
        fail("fromOptions should have failed");
    } catch (IllegalArgumentException e) {
        assertThat(e.getMessage(), containsString("Expected a valid 'gs://' path but was given"));
    }
}
/**
 * Builds the "System" section of the system-info protobuf: server version,
 * official-distribution flag, home/data/temp directories and processor count.
 */
@Override
public ProtobufSystemInfo.Section toProtobuf() {
    ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder();
    protobuf.setName("System");
    setAttribute(protobuf, "Version", server.getVersion());
    setAttribute(protobuf, "Official Distribution", officialDistribution.check());
    setAttribute(protobuf, "Home Dir", config.get(PATH_HOME.getKey()).orElse(null));
    setAttribute(protobuf, "Data Dir", config.get(PATH_DATA.getKey()).orElse(null));
    setAttribute(protobuf, "Temp Dir", config.get(PATH_TEMP.getKey()).orElse(null));
    setAttribute(protobuf, "Processors", Runtime.getRuntime().availableProcessors());
    return protobuf.build();
}
// The server's reported version must be copied into the "Version" attribute.
@Test
public void return_server_version() {
    when(server.getVersion()).thenReturn("6.6");
    ProtobufSystemInfo.Section section = underTest.toProtobuf();
    assertThatAttributeIs(section, "Version", "6.6");
}
/** Always -1: this driver does not report a meaningful minor version. */
@Override
public int getMinorVersion() {
    return -1;
}
// The driver's contract here is a negative minor version; a non-negative
// value indicates a regression.
@Test
public void testGetMinorVersion() {
    if (driver.getMinorVersion() >= 0) {
        fail("getMinorVersion");
    }
}
/**
 * Registers an added component. Only FILE components are accepted, and never
 * on a first branch analysis — except for pull requests, where registration
 * is always allowed.
 */
@Override
public void register(Component component) {
    checkComponent(component);
    checkArgument(component.getType() == Component.Type.FILE, "component must be a file");
    checkState(analysisMetadataHolder.isPullRequest() || !analysisMetadataHolder.isFirstAnalysis(),
        "No file can be registered on first branch analysis");
    addedComponents.add(component);
}
// Registering a file on the very first (non-PR) branch analysis must fail with ISE.
@Test
public void register_fails_with_ISE_if_called_on_first_analysis() {
    when(analysisMetadataHolder.isFirstAnalysis()).thenReturn(true);
    Component component = newComponent(Component.Type.FILE);
    assertThatThrownBy(() -> underTest.register(component))
        .isInstanceOf(IllegalStateException.class)
        .hasMessage("No file can be registered on first branch analysis");
}
public static int bitWidthOf(long value) { // handle edge cases if (value == Long.MIN_VALUE) { return Long.SIZE - 1; } else if (value == Integer.MIN_VALUE) { return Integer.SIZE - 1; } else if (value == Short.MIN_VALUE) { return Short.SIZE - 1; } else if (value == Byte.MIN_VALUE) { return Byte.SIZE - 1; } value = Math.abs(value); return Long.SIZE - Long.numberOfLeadingZeros(value); }
// Magnitude widths for small values plus the MIN/MAX boundaries of every
// integer width (byte through long).
@Test
public void testBitWidthOfLong() {
    assertEquals(0, HazelcastIntegerType.bitWidthOf(0));
    assertEquals(1, HazelcastIntegerType.bitWidthOf(1));
    assertEquals(1, HazelcastIntegerType.bitWidthOf(-1));
    assertEquals(2, HazelcastIntegerType.bitWidthOf(2));
    assertEquals(2, HazelcastIntegerType.bitWidthOf(-2));
    assertEquals(10, HazelcastIntegerType.bitWidthOf(555));
    assertEquals(10, HazelcastIntegerType.bitWidthOf(-555));
    assertEquals(Long.SIZE - 1, HazelcastIntegerType.bitWidthOf(Long.MAX_VALUE));
    assertEquals(Long.SIZE - 1, HazelcastIntegerType.bitWidthOf(Long.MIN_VALUE));
    assertEquals(Integer.SIZE - 1, HazelcastIntegerType.bitWidthOf(Integer.MAX_VALUE));
    assertEquals(Integer.SIZE - 1, HazelcastIntegerType.bitWidthOf(Integer.MIN_VALUE));
    assertEquals(Short.SIZE - 1, HazelcastIntegerType.bitWidthOf(Short.MAX_VALUE));
    assertEquals(Short.SIZE - 1, HazelcastIntegerType.bitWidthOf(Short.MIN_VALUE));
    assertEquals(Byte.SIZE - 1, HazelcastIntegerType.bitWidthOf(Byte.MAX_VALUE));
    assertEquals(Byte.SIZE - 1, HazelcastIntegerType.bitWidthOf(Byte.MIN_VALUE));
}
/** Display name of this integration. */
@Override
public String getName() {
    return "Jenkins";
}
// The display name must be exactly "Jenkins".
@Test
public void getName() {
    assertThat(underTest.getName()).isEqualTo("Jenkins");
}
/** Returns the database name; synchronized for visibility against concurrent updates. */
@Override
public synchronized String getDatabaseName() {
    return databaseName;
}
// The accessor must return the configured database name.
@Test
public void testGetDatabaseNameShouldReturnCorrectValue() {
    assertThat(testManager.getDatabaseName()).matches(DATABASE_NAME);
}
@Override
public ContentHandler getNewContentHandler() {
    switch (type) {
        case BODY:
            // Plain-text body handler, always wrapped with the write limit.
            return new BodyContentHandler(
                    new WriteOutContentHandler(new ToTextContentHandler(), writeLimit,
                            throwOnWriteLimitReached, parseContext));
        case IGNORE:
            // Discards all content.
            return new DefaultHandler();
        default:
            ContentHandler formatHandler = getFormatHandler();
            // A negative write limit means "unlimited": no wrapper needed.
            if (writeLimit < 0) {
                return formatHandler;
            }
            return new WriteOutContentHandler(formatHandler, writeLimit,
                    throwOnWriteLimitReached, parseContext);
    }
}
// IGNORE handlers must be plain DefaultHandlers that swallow all content and
// never throw a write-limit exception, regardless of the configured limit.
@Test
public void testIgnore() throws Exception {
    Parser p = new MockParser(OVER_DEFAULT);
    ContentHandler handler =
            new BasicContentHandlerFactory(BasicContentHandlerFactory.HANDLER_TYPE.IGNORE, -1)
                    .getNewContentHandler();
    assertTrue(handler instanceof DefaultHandler);
    p.parse(null, handler, null, null);
    //unfortunatley, the DefaultHandler does not return "",
    assertContains("org.xml.sax.helpers.DefaultHandler", handler.toString());
    //tests that no write limit exception is thrown
    p = new MockParser(100);
    handler = new BasicContentHandlerFactory(BasicContentHandlerFactory.HANDLER_TYPE.IGNORE, 5)
            .getNewContentHandler();
    assertTrue(handler instanceof DefaultHandler);
    p.parse(null, handler, null, null);
    assertContains("org.xml.sax.helpers.DefaultHandler", handler.toString());
}
/**
 * Extracts Base64-decoded binary data from a JSON node.
 *
 * @throws IllegalArgumentException if the node does not hold binary/textual data.
 * @throws UncheckedIOException if decoding the node's content fails.
 */
public static byte[] jsonNodeToBinary(JsonNode node, String about) {
    final byte[] data;
    try {
        data = node.binaryValue();
    } catch (IOException e) {
        throw new UncheckedIOException(about + ": unable to retrieve Base64-encoded binary data", e);
    }
    // binaryValue() returns null (instead of throwing) for non-binary nodes.
    if (data == null) {
        throw new IllegalArgumentException(about + ": expected Base64-encoded binary data.");
    }
    return data;
}
// A BinaryNode round-tripped through JSON becomes a Base64 text node that must
// decode back to the original bytes.
@Test
public void testBinaryNode() throws IOException {
    byte[] expected = new byte[] {5, 2, 9, 4, 1, 8, 7, 0, 3, 6};
    StringWriter writer = new StringWriter();
    ObjectMapper mapper = new ObjectMapper();
    mapper.writeTree(mapper.createGenerator(writer), new BinaryNode(expected));
    JsonNode textNode = mapper.readTree(writer.toString());
    assertTrue(textNode.isTextual(), String.format("Expected a JSON string but was: %s", textNode));
    byte[] actual = MessageUtil.jsonNodeToBinary(textNode, "Test base64 JSON string");
    assertArrayEquals(expected, actual);
}
/** Builds the REST path for listing views under the given namespace. */
public String views(Namespace ns) {
    String encodedNamespace = RESTUtil.encodeNamespace(ns);
    return SLASH.join("v1", prefix, "namespaces", encodedNamespace, "views");
}
// Verifies the views path both with and without a configured URI prefix.
@Test
public void views() {
    Namespace ns = Namespace.of("ns");
    assertThat(withPrefix.views(ns)).isEqualTo("v1/ws/catalog/namespaces/ns/views");
    assertThat(withoutPrefix.views(ns)).isEqualTo("v1/namespaces/ns/views");
}
/**
 * Runs the configured Grok pattern against the value and converts every non-null
 * named capture into an extractor {@link Result}.
 */
@Override
protected Result[] run(String value) {
    final Grok grok = grokPatternRegistry.cachedGrokForPattern(this.pattern, this.namedCapturesOnly);
    // the extractor instance is rebuilt every second anyway
    final Match match = grok.match(value);
    final Map<String, Object> matches = match.captureFlattened();
    final List<Result> results = new ArrayList<>(matches.size());
    for (final Map.Entry<String, Object> entry : matches.entrySet()) {
        // never add null values to the results, those don't make sense for us
        if (entry.getValue() != null) {
            // -1/-1: Grok captures carry no begin/end character offsets.
            results.add(new Result(entry.getValue(), entry.getKey(), -1, -1));
        }
    }
    return results.toArray(new Result[0]);
}
// Verifies that a Grok date conversion pattern with a comma as the millisecond
// separator ("...HH:mm:ss,SSSX") parses into the expected UTC instant.
@Test
public void testDateWithComma() {
    final GrokExtractor extractor = makeExtractor("%{GREEDY:timestamp;date;yyyy-MM-dd'T'HH:mm:ss,SSSX}");
    final Extractor.Result[] results = extractor.run("2015-07-31T10:05:36,773Z");
    assertEquals("ISO date is parsed", 1, results.length);
    Object value = results[0].getValue();
    assertTrue(value instanceof Instant);
    DateTime date = new DateTime(((Instant) value).toEpochMilli(), DateTimeZone.UTC);
    assertEquals(2015, date.getYear());
    assertEquals(7, date.getMonthOfYear());
    assertEquals(31, date.getDayOfMonth());
    assertEquals(10, date.getHourOfDay());
    assertEquals(5, date.getMinuteOfHour());
    assertEquals(36, date.getSecondOfMinute());
    assertEquals(773, date.getMillisOfSecond());
}
/**
 * Rewrites the detail message of every throwable in the cause chain of {@code root}.
 * For each throwable, {@code throwableToMessage} is consulted; a non-null result
 * replaces that throwable's message, a null result leaves it untouched.
 *
 * @param root               head of the cause chain to process; may be null (no-op)
 * @param throwableToMessage message supplier per throwable; may be null (no-op)
 */
public static void updateDetailMessage(
        @Nullable Throwable root, @Nullable Function<Throwable, String> throwableToMessage) {
    if (throwableToMessage == null) {
        return;
    }
    for (Throwable current = root; current != null; current = current.getCause()) {
        String replacement = throwableToMessage.apply(current);
        if (replacement != null) {
            updateDetailMessageOfThrowable(current, replacement);
        }
    }
}
// Verifies that updateDetailMessage replaces the message of a plain throwable
// (here an Error subtype) with the value produced by the supplied function.
@Test
void testUpdateDetailMessageOfBasicThrowable() {
    Throwable rootThrowable = new OutOfMemoryError("old message");
    ExceptionUtils.updateDetailMessage(rootThrowable, t -> "new message");
    assertThat(rootThrowable.getMessage()).isEqualTo("new message");
}
/**
 * Tests whether {@code bs} begins with the byte sequence {@code head}.
 * An empty {@code head} always matches.
 *
 * @param bs   the array to inspect
 * @param head the expected prefix
 * @return true if bs starts with head
 */
public static boolean startsWith(byte[] bs, byte[] head) {
    if (head.length > bs.length) {
        return false;
    }
    int i = 0;
    while (i < head.length) {
        if (bs[i] != head[i]) {
            return false;
        }
        i++;
    }
    return true;
}
// Verifies startsWith for a matching prefix, a mismatching prefix, and a
// candidate array shorter than the prefix.
@Test
public void startsWith() {
    Assert.assertTrue(CodecUtils.startsWith(new byte[] { 1, 2, 3 }, new byte[] { 1, 2 }));
    Assert.assertFalse(CodecUtils.startsWith(new byte[] { 2, 3 }, new byte[] { 1, 2 }));
    Assert.assertFalse(CodecUtils.startsWith(new byte[] { 3 }, new byte[] { 1, 2 }));
}
/** Returns true when the effective final redundancy covers every node in the cluster. */
public boolean isEffectivelyGloballyDistributed() {
    return effectiveFinalRedundancy() == totalNodes;
}
// Verifies global-distribution detection across redundancy/node-count combinations:
// true only when the effective final redundancy equals the total node count.
@Test
void effectively_globally_distributed_is_correct() {
    assertFalse(createRedundancy(4, 2, 10).isEffectivelyGloballyDistributed());
    assertFalse(createRedundancy(5, 1, 10).isEffectivelyGloballyDistributed());
    assertFalse(createRedundancy(5, 2, 12).isEffectivelyGloballyDistributed());
    assertTrue(createRedundancy(5, 2, 10).isEffectivelyGloballyDistributed());
    assertTrue(createRedundancy(5, 3, 10).isEffectivelyGloballyDistributed());
    assertTrue(createRedundancy(1, 1, 1).isEffectivelyGloballyDistributed());
}
public static ConfusionMatrix of(int[] truth, int[] prediction) { if (truth.length != prediction.length) { throw new IllegalArgumentException(String.format("The vector sizes don't match: %d != %d.", truth.length, prediction.length)); } Set<Integer> y = new HashSet<>(); // Sometimes, small test data doesn't have all the classes. for (int i = 0; i < truth.length; i++) { y.add(truth[i]); y.add(prediction[i]); } int k = 0; for (int c : y) { if (k < c) k = c; } int[][] matrix = new int[k+1][k+1]; for (int i = 0; i < truth.length; i++) { matrix[truth[i]][prediction[i]] += 1; } return new ConfusionMatrix(matrix); }
// Verifies the confusion matrix diagonal (correct predictions per class) and that
// class 3, which is never predicted, has an all-zero prediction column.
@Test
public void test() {
    int[] truth = {0,1,2,3,4,5,0,1,2,3,4,5};
    int[] prediction = {0,1,2,4,5,2,1,2,4,5,4,1};
    ConfusionMatrix confusion = ConfusionMatrix.of(truth, prediction);
    System.out.println(confusion);
    int[][] matrix = confusion.matrix;
    int[] expected = {1,1,1,0,1,0};
    for(int i = 0; i < expected.length; i++){
        //main diagonal test
        assertEquals(matrix[i][i], expected[i]);
        //class 3 not predicted test
        assertEquals(matrix[i][3], 0);
    }
}
/**
 * Always returns {@code null}: no value is ever found or removed for the name.
 * NOTE(review): presumably this is an empty/stub headers implementation, so the
 * default-value overload is expected to supply the fallback -- confirm against
 * the enclosing class's contract.
 */
@Override
public Byte getByteAndRemove(K name) {
    return null;
}
// Verifies that the default-value overload returns the supplied default when the
// header is absent (the single-argument variant would return null).
@Test
public void testGetByteAndRemoveDefault() {
    assertEquals((byte) 0, HEADERS.getByteAndRemove("name1", (byte) 0));
}
/**
 * Classifies serialization failures caused by schema mismatches as USER errors.
 * A KsqlSerializationException -- either thrown directly or wrapped anywhere in a
 * StreamsException cause chain -- is classified as USER, unless the failing topic
 * has an internal-topic prefix, in which case the error stays UNKNOWN.
 */
@Override
public Type classify(final Throwable e) {
    Type type = Type.UNKNOWN;

    if (e instanceof KsqlSerializationException
            || (e instanceof StreamsException
            && (ExceptionUtils.indexOfThrowable(e, KsqlSerializationException.class) != -1))) {
        // Internal topics (e.g. changelogs/repartition topics) are not user schema errors.
        if (!hasInternalTopicPrefix(e)) {
            type = Type.USER;
            LOG.info(
                    "Classified error as USER error based on schema mismatch. Query ID: {} Exception: {}",
                    queryId,
                    e);
        }
    }

    return type;
}
// Verifies that a serialization error on an internal changelog topic is NOT blamed on
// the user: the internal-topic prefix keeps the classification at UNKNOWN.
@Test
public void shouldClassifyWrappedKsqlSerializationExceptionWithChangelogTopicAsUnknownError() {
    // Given:
    final String topic = "_confluent-ksql-default_query_CTAS_USERS_0-Aggregate-Aggregate-Materialize-changelog";
    final Exception e = new StreamsException(
            new KsqlSerializationException(
                    topic,
                    "Error serializing message to topic: " + topic,
                    new DataException("Struct schemas do not match.")));

    // When:
    final Type type = new KsqlSerializationClassifier("").classify(e);

    // Then:
    assertThat(type, is(Type.UNKNOWN));
}
/**
 * Reads a single byte by delegating to the wrapped stream.
 *
 * @return the next byte of data, or -1 at end of stream
 * @throws IOException if the underlying stream fails
 */
@Override
public int read() throws IOException {
    return in.read();
}
// Verifies that reading into a buffer with an offset from an empty stream does not
// throw EOFException (a short/empty read must be reported, not raised).
@Test
public void testReadArrOffset() throws Exception {
    try (WPInputStream wpInputStream = emptyWPStream()) {
        byte[] buffer = new byte[10];
        wpInputStream.read(buffer, 0, 2);
    } catch (EOFException e) {
        fail("should not have thrown EOF");
    }
}
/**
 * Looks up the public customer belonging to the given tenant.
 *
 * @param tenantId tenant to search under
 * @return the public customer, or {@link Optional#empty()} if none exists
 */
@Override
public Optional<Customer> findPublicCustomerByTenantId(UUID tenantId) {
    // DaoUtil.getData yields null for a missing entity; ofNullable maps that to empty().
    return Optional.ofNullable(DaoUtil.getData(customerRepository.findPublicCustomerByTenantId(tenantId)));
}
// Verifies both outcomes: empty Optional before a public customer exists, and a
// populated public customer (with matching title) after one is created.
@Test
public void testFindPublicCustomerByTenantId() {
    UUID tenantId = Uuids.timeBased();
    Optional<Customer> customerOpt = customerDao.findPublicCustomerByTenantId(tenantId);
    assertTrue(customerOpt.isEmpty());
    String publicCustomerTitle = StringUtils.randomAlphanumeric(10);
    createPublicCustomer(tenantId, publicCustomerTitle);
    customerOpt = customerDao.findPublicCustomerByTenantId(tenantId);
    assertTrue(customerOpt.isPresent());
    Customer customer = customerOpt.get();
    assertTrue(customer.isPublic());
    assertEquals(publicCustomerTitle, customer.getTitle());
}
/**
 * Merges a source table schema with schema parts derived in a CREATE TABLE ... LIKE
 * statement. Derived columns, watermarks, and the primary key are appended to the
 * source schema according to the per-feature merging strategies.
 *
 * @param mergingStrategies     how each feature (columns, watermarks, ...) is merged
 * @param sourceSchema          schema of the base (LIKE) table
 * @param derivedColumns        column clauses declared on the derived table
 * @param derivedWatermarkSpecs watermark clauses declared on the derived table
 * @param derivedPrimaryKey     primary-key constraint of the derived table, or null
 * @return the merged schema
 */
public Schema mergeTables(
        Map<FeatureOption, MergingStrategy> mergingStrategies,
        Schema sourceSchema,
        List<SqlNode> derivedColumns,
        List<SqlWatermark> derivedWatermarkSpecs,
        SqlTableConstraint derivedPrimaryKey) {
    SchemaBuilder schemaBuilder =
            new SchemaBuilder(
                    mergingStrategies,
                    sourceSchema,
                    (FlinkTypeFactory) validator.getTypeFactory(),
                    dataTypeFactory,
                    validator,
                    escapeExpression);
    // Order matters: watermarks/primary key may reference columns appended first.
    schemaBuilder.appendDerivedColumns(mergingStrategies, derivedColumns);
    schemaBuilder.appendDerivedWatermarks(mergingStrategies, derivedWatermarkSpecs);
    schemaBuilder.appendDerivedPrimaryKey(derivedPrimaryKey);
    return schemaBuilder.build();
}
// Verifies that with no derived parts, the base table's named primary-key constraint
// (and its columns) carries over unchanged into the merged schema.
@Test
void mergeConstraintsFromBaseTable() {
    Schema sourceSchema =
            Schema.newBuilder()
                    .column("one", DataTypes.INT().notNull())
                    .column("two", DataTypes.STRING().notNull())
                    .column("three", DataTypes.FLOAT())
                    .primaryKeyNamed("constraint-42", new String[] {"one", "two"})
                    .build();

    Schema mergedSchema =
            util.mergeTables(
                    getDefaultMergingStrategies(),
                    sourceSchema,
                    Collections.emptyList(),
                    Collections.emptyList(),
                    null);

    Schema expectedSchema =
            Schema.newBuilder()
                    .column("one", DataTypes.INT().notNull())
                    .column("two", DataTypes.STRING().notNull())
                    .column("three", DataTypes.FLOAT())
                    .primaryKeyNamed("constraint-42", new String[] {"one", "two"})
                    .build();

    assertThat(mergedSchema).isEqualTo(expectedSchema);
}
/**
 * Serializes the accumulated usage statistics as JSON to the output stream and
 * closes the stream. The stream is closed even if serialization fails, so the
 * underlying resource is never leaked.
 *
 * @throws RuntimeException wrapping any IOException from writing or closing
 */
void finishReport() {
    List<StepDefContainer> stepDefContainers = new ArrayList<>();
    for (Map.Entry<String, List<StepContainer>> usageEntry : usageMap.entrySet()) {
        StepDefContainer stepDefContainer = new StepDefContainer(
            usageEntry.getKey(),
            createStepContainers(usageEntry.getValue()));
        stepDefContainers.add(stepDefContainer);
    }

    try {
        Jackson.OBJECT_MAPPER.writeValue(out, stepDefContainers);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        // Previously the stream stayed open when writeValue threw; always close it.
        try {
            out.close();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
// Verifies (currently disabled) that finishReport emits the expected JSON structure:
// one StepDefContainer per pattern, with aggregated and per-location durations.
@Test
@Disabled("TODO")
void doneWithoutUsageStatisticStrategies() throws JSONException {
    OutputStream out = new ByteArrayOutputStream();
    UsageFormatter usageFormatter = new UsageFormatter(out);
    UsageFormatter.StepContainer stepContainer = new UsageFormatter.StepContainer("a step");
    UsageFormatter.StepDuration stepDuration = new UsageFormatter.StepDuration(Duration.ofNanos(1234567800L), "location.feature");
    stepContainer.getDurations().addAll(singletonList(stepDuration));
    usageFormatter.usageMap.put("a (.*)", singletonList(stepContainer));

    usageFormatter.finishReport();

    // Expected output; durations are rendered in seconds (1234567800ns -> 1.2345678s).
    String json = "" +
        "[\n" +
        " {\n" +
        " \"source\": \"a (.*)\",\n" +
        " \"steps\": [\n" +
        " {\n" +
        " \"name\": \"a step\",\n" +
        " \"aggregatedDurations\": {\n" +
        " \"median\": 1.2345678,\n" +
        " \"average\": 1.2345678\n" +
        " },\n" +
        " \"durations\": [\n" +
        " {\n" +
        " \"duration\": 1.2345678,\n" +
        " \"location\": \"location.feature\"\n" +
        " }\n" +
        " ]\n" +
        " }\n" +
        " ]\n" +
        " }\n" +
        "]";
    assertEquals(json, out.toString(), true);
}
@Override public Column convert(BasicTypeDefine typeDefine) { try { return super.convert(typeDefine); } catch (SeaTunnelRuntimeException e) { PhysicalColumn.PhysicalColumnBuilder builder = PhysicalColumn.builder() .name(typeDefine.getName()) .sourceType(typeDefine.getColumnType()) .nullable(typeDefine.isNullable()) .defaultValue(typeDefine.getDefaultValue()) .comment(typeDefine.getComment()); String kingbaseDataType = typeDefine.getDataType().toUpperCase(); switch (kingbaseDataType) { case KB_TINYINT: builder.dataType(BasicType.BYTE_TYPE); break; case KB_MONEY: builder.dataType(new DecimalType(38, 18)); builder.columnLength(38L); builder.scale(18); break; case KB_BLOB: builder.dataType(PrimitiveByteArrayType.INSTANCE); builder.columnLength((long) (1024 * 1024 * 1024)); break; case KB_CLOB: builder.dataType(BasicType.STRING_TYPE); builder.columnLength(typeDefine.getLength()); builder.columnLength((long) (1024 * 1024 * 1024)); break; case KB_BIT: builder.dataType(PrimitiveByteArrayType.INSTANCE); // BIT(M) -> BYTE(M/8) long byteLength = typeDefine.getLength() / 8; byteLength += typeDefine.getLength() % 8 > 0 ? 1 : 0; builder.columnLength(byteLength); break; default: throw CommonError.convertToSeaTunnelTypeError( DatabaseIdentifier.KINGBASE, typeDefine.getDataType(), typeDefine.getName()); } return builder.build(); } }
// Verifies VARCHAR conversion: with no declared length the column length stays null;
// with a declared length the converted length is scaled (varchar(10) -> 40).
@Test
public void testConvertVarchar() {
    BasicTypeDefine<Object> typeDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("varchar")
                    .dataType("varchar")
                    .build();
    Column column = KingbaseTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType());
    Assertions.assertEquals(null, column.getColumnLength());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());

    typeDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("varchar(10)")
                    .dataType("varchar")
                    .length(10L)
                    .build();
    column = KingbaseTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType());
    Assertions.assertEquals(40, column.getColumnLength());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
}
/** Returns e.g. {@code "SomeIndicator barCount: 14"} for debugging/log output. */
@Override
public String toString() {
    StringBuilder description = new StringBuilder(getClass().getSimpleName());
    description.append(" barCount: ").append(barCount);
    return description.toString();
}
// Verifies HighestValueIndicator with alternating NaN close prices: within a 2-bar
// window, a NaN current value falls back to the previous bar's close, while a real
// current value always beats the NaN before it.
@Test
public void naNValuesInIntervall() {
    BaseBarSeries series = new BaseBarSeries("NaN test");
    for (long i = 0; i <= 10; i++) { // (0, NaN, 2, NaN, 3, NaN, 4, NaN, 5, ...)
        Num closePrice = i % 2 == 0 ? series.numOf(i) : NaN;
        series.addBar(ZonedDateTime.now().plusDays(i), NaN, NaN, NaN, closePrice, NaN);
    }
    HighestValueIndicator highestValue = new HighestValueIndicator(new ClosePriceIndicator(series), 2);

    // index is the biggest of (index, index-1)
    for (int i = series.getBeginIndex(); i <= series.getEndIndex(); i++) {
        if (i % 2 != 0) // current is NaN take the previous as highest
            assertEquals(series.getBar(i - 1).getClosePrice().toString(), highestValue.getValue(i).toString());
        else // current is not NaN but previous, take the current
            assertEquals(series.getBar(i).getClosePrice().toString(), highestValue.getValue(i).toString());
    }
}
/**
 * Looks up a cached message buffer for (topic, queueId, offset). Returns null on a
 * cache miss. On a hit, returns a duplicate backed by a read-only view of the cached
 * ByteBuffer so callers cannot corrupt the shared cache entry; repeated hits on the
 * same offset are counted and logged every 1000 accesses as a fetch-loop warning.
 */
protected SelectBufferResult getMessageFromCache(FlatMessageFile flatFile, long offset) {
    MessageQueue mq = flatFile.getMessageQueue();
    SelectBufferResult buffer = this.fetcherCache.getIfPresent(
        String.format(CACHE_KEY_FORMAT, mq.getTopic(), mq.getQueueId(), offset));
    // return duplicate buffer here
    if (buffer == null) {
        return null;
    }
    long count = buffer.getAccessCount().incrementAndGet();
    if (count % 1000L == 0L) {
        log.warn("MessageFetcher fetch same offset message too many times, " +
            "topic={}, queueId={}, offset={}, count={}", mq.getTopic(), mq.getQueueId(), offset, count);
    }
    return new SelectBufferResult(
        buffer.getByteBuffer().asReadOnlyBuffer(), buffer.getStartOffset(), buffer.getSize(), buffer.getTagCode());
}
// Verifies batched reads through the read-ahead cache: starting at offset 100, the
// fetcher advances by batchSize each round until offset 200, i.e. 100/rounds == batchSize.
@Test
public void getMessageFromCacheTest() throws Exception {
    this.getMessageFromTieredStoreTest();

    mq = dispatcherTest.mq;
    messageStore = dispatcherTest.messageStore;
    storeConfig = dispatcherTest.storeConfig;
    storeConfig.setReadAheadCacheEnable(true);
    storeConfig.setReadAheadMessageCountThreshold(32);
    storeConfig.setReadAheadMessageSizeThreshold(Integer.MAX_VALUE);

    int batchSize = 4;
    AtomicLong times = new AtomicLong(0L);
    AtomicLong offset = new AtomicLong(100L);
    FlatMessageFile flatFile = dispatcherTest.fileStore.getFlatFile(mq);

    Awaitility.await().atMost(Duration.ofSeconds(10)).until(() -> {
        GetMessageResult getMessageResult = fetcher.getMessageFromCacheAsync(flatFile, groupName, offset.get(), batchSize, null).join();
        offset.set(getMessageResult.getNextBeginOffset());
        times.incrementAndGet();
        return offset.get() == 200L;
    });

    Assert.assertEquals(100 / times.get(), batchSize);
}
/**
 * Operator initialization: registers a processing-time internal timer service and
 * resets the set of seen keys before delegating to the superclass open().
 */
@Override
public void open() throws Exception {
    this.timerService =
            getInternalTimerService("processing timer", VoidNamespaceSerializer.INSTANCE, this);
    this.keySet = new HashSet<>();
    super.open();
}
// Verifies a two-output process operator: each input record goes unchanged to the main
// output and doubled (as long) to the side output identified by the OutputTag.
@Test
void testProcessRecord() throws Exception {
    OutputTag<Long> sideOutputTag = new OutputTag<Long>("side-output") {};

    KeyedTwoOutputProcessOperator<Integer, Integer, Integer, Long> processOperator =
            new KeyedTwoOutputProcessOperator<>(
                    new TwoOutputStreamProcessFunction<Integer, Integer, Long>() {
                        @Override
                        public void processRecord(
                                Integer record,
                                Collector<Integer> output1,
                                Collector<Long> output2,
                                PartitionedContext ctx) {
                            output1.collect(record);
                            output2.collect((long) (record * 2));
                        }
                    },
                    sideOutputTag);

    try (KeyedOneInputStreamOperatorTestHarness<Integer, Integer, Integer> testHarness =
            new KeyedOneInputStreamOperatorTestHarness<>(
                    processOperator,
                    (KeySelector<Integer, Integer>) value -> value,
                    Types.INT)) {
        testHarness.open();
        testHarness.processElement(new StreamRecord<>(1));
        testHarness.processElement(new StreamRecord<>(2));
        testHarness.processElement(new StreamRecord<>(3));

        Collection<StreamRecord<Integer>> firstOutput = testHarness.getRecordOutput();
        ConcurrentLinkedQueue<StreamRecord<Long>> secondOutput =
                testHarness.getSideOutput(sideOutputTag);
        assertThat(firstOutput)
                .containsExactly(
                        new StreamRecord<>(1), new StreamRecord<>(2), new StreamRecord<>(3));
        assertThat(secondOutput)
                .containsExactly(
                        new StreamRecord<>(2L), new StreamRecord<>(4L), new StreamRecord<>(6L));
    }
}
/** Returns the number of variadic argument values collected. */
public int size() {
    return values.size();
}
// Verifies that a VariadicArgs built from an empty list reports size 0.
@Test
public void shouldCreateEmptyVarArgs() {
    final VariadicArgs<Integer> varArgs = new VariadicArgs<>(ImmutableList.of());
    assertEquals(0, varArgs.size());
}
/**
 * Normalizes a path: converts Windows separators to "/", resolves "." and ".."
 * segments, and preserves any URL-style prefix (e.g. "file:") and a leading slash.
 * Unresolvable ".." segments (more ".." than preceding elements) are retained at
 * the front. Returns the input unchanged when it is null/empty.
 *
 * @param path the path to normalize (may be null)
 * @return the normalized path
 */
public static String cleanPath(String path) {
    if (!hasLength(path)) {
        return path;
    }

    String normalizedPath = replace(path, WINDOWS_FOLDER_SEPARATOR, FOLDER_SEPARATOR);
    String pathToUse = normalizedPath;

    // Shortcut if there is no work to do
    if (pathToUse.indexOf(DOT) == -1) {
        return pathToUse;
    }

    // Strip prefix from path to analyze, to not treat it as part of the
    // first path element. This is necessary to correctly parse paths like
    // "file:core/../core/io/Resource.class", where the ".." should just
    // strip the first "core" directory while keeping the "file:" prefix.
    int prefixIndex = pathToUse.indexOf(':');
    String prefix = "";
    if (prefixIndex != -1) {
        prefix = pathToUse.substring(0, prefixIndex + 1);
        // A "/" inside the candidate prefix means the ":" is part of a path element,
        // not a scheme prefix (e.g. "file/:path") -- treat the whole path normally.
        if (prefix.contains(FOLDER_SEPARATOR)) {
            prefix = "";
        } else {
            pathToUse = pathToUse.substring(prefixIndex + 1);
        }
    }
    if (pathToUse.startsWith(FOLDER_SEPARATOR)) {
        prefix = prefix + FOLDER_SEPARATOR;
        pathToUse = pathToUse.substring(1);
    }

    String[] pathArray = delimitedListToStringArray(pathToUse, FOLDER_SEPARATOR);
    // we never require more elements than pathArray and in the common case the same number
    Deque<String> pathElements = new ArrayDeque<>(pathArray.length);
    int tops = 0;

    // Walk the elements backwards so a ".." can cancel the element before it.
    for (int i = pathArray.length - 1; i >= 0; i--) {
        String element = pathArray[i];
        if (DOT.equals(element)) {
            // Points to current directory - drop it.
        } else if (TOP_PATH.equals(element)) {
            // Registering top path found.
            tops++;
        } else {
            if (tops > 0) {
                // Merging path element with element corresponding to top path.
                tops--;
            } else {
                // Normal path element found.
                pathElements.addFirst(element);
            }
        }
    }

    // All path elements stayed the same - shortcut
    if (pathArray.length == pathElements.size()) {
        return normalizedPath;
    }
    // Remaining top paths need to be retained.
    for (int i = 0; i < tops; i++) {
        pathElements.addFirst(TOP_PATH);
    }
    // If nothing else left, at least explicitly point to current path.
    if (pathElements.size() == 1 && pathElements.getLast().isEmpty() && !prefix.endsWith(FOLDER_SEPARATOR)) {
        pathElements.addFirst(DOT);
    }

    final String joined = collectionToDelimitedString(pathElements, FOLDER_SEPARATOR);
    // avoid string concatenation with empty prefix
    return prefix.isEmpty() ? joined : prefix + joined;
}
// Exercises cleanPath across its cases: empty input, separator conversion, "."/".."
// resolution, scheme prefixes ("file:"), leading slashes, and ":" embedded in a path.
@Test
void testCleanPath() {
    // Test case 1: path with no length
    String path1 = "";
    String expected1 = "";
    assertEquals(expected1, StringUtils.cleanPath(path1));

    // Test case 2: normal path
    String path2 = "path/to/file";
    String expected2 = "path/to/file";
    assertEquals(expected2, StringUtils.cleanPath(path2));

    // Test case 3: path with Windows folder separator
    String path3 = "path\\to\\文件";
    String expected3 = "path/to/文件";
    assertEquals(expected3, StringUtils.cleanPath(path3));

    // Test case 4: path with dot
    String path4 = "path/..";
    String expected4 = "";
    assertEquals(expected4, StringUtils.cleanPath(path4));

    // Test case 5: path with top path
    String path5 = "path/../top";
    String expected5 = "top";
    assertEquals(expected5, StringUtils.cleanPath(path5));

    // Test case 6: path with multiple top path
    String path6 = "path/../../top";
    String expected6 = "../top";
    assertEquals(expected6, StringUtils.cleanPath(path6));

    // Test case 7: path with leading colon
    String path7 = "file:../top";
    String expected7 = "file:../top";
    assertEquals(expected7, StringUtils.cleanPath(path7));

    // Test case 8: path with leading slash
    String path8 = "file:/path/../file";
    String expected8 = "file:/file";
    assertEquals(expected8, StringUtils.cleanPath(path8));

    // Test case 9: path with empty prefix
    String path9 = "file:path/../file";
    String expected9 = "file:file";
    assertEquals(expected9, StringUtils.cleanPath(path9));

    // Test case 10: prefix contain separator
    String path10 = "file/:path/../file";
    String expected10 = "file/file";
    assertEquals(expected10, StringUtils.cleanPath(path10));

    // Test case 11: dot in file name
    String path11 = "file:/path/to/file.txt";
    String expected11 = "file:/path/to/file.txt";
    assertEquals(expected11, StringUtils.cleanPath(path11));

    // Test case 12: dot in path
    String path12 = "file:/path/./file.txt";
    String expected12 = "file:/path/file.txt";
    assertEquals(expected12, StringUtils.cleanPath(path12));

    // Test case 13: path with dot and slash
    String path13 = "file:aaa/../";
    String expected13 = "file:./";
    assertEquals(expected13, StringUtils.cleanPath(path13));
}
/**
 * Schedules a timeout check for the given slot-request bulk. The bulk is wrapped
 * with a timestamp and immediately marked unfulfillable at the current relative
 * clock time, so the periodic check can cancel it once the timeout elapses
 * without the bulk becoming fulfillable.
 */
@Override
public void schedulePendingRequestBulkTimeoutCheck(
        final PhysicalSlotRequestBulk bulk, Time timeout) {
    PhysicalSlotRequestBulkWithTimestamp bulkWithTimestamp =
            new PhysicalSlotRequestBulkWithTimestamp(bulk);
    bulkWithTimestamp.markUnfulfillable(clock.relativeTimeMillis());
    schedulePendingRequestBulkWithTimestampCheck(bulkWithTimestamp, timeout);
}
// Verifies that a bulk which stays unfulfillable past the timeout is cancelled:
// advancing the manual clock beyond TIMEOUT completes the cancellation future
// with the pending slot request's id.
@Test
void testUnfulfillableBulkIsCancelled() {
    final CompletableFuture<SlotRequestId> cancellationFuture = new CompletableFuture<>();
    final SlotRequestId slotRequestId = new SlotRequestId();
    final PhysicalSlotRequestBulk bulk =
            createPhysicalSlotRequestBulkWithCancellationFuture(
                    cancellationFuture, slotRequestId);
    bulkChecker.schedulePendingRequestBulkTimeoutCheck(bulk, TIMEOUT);
    clock.advanceTime(TIMEOUT.toMilliseconds() + 1L, TimeUnit.MILLISECONDS);
    assertThat(cancellationFuture.join()).isEqualTo(slotRequestId);
}
/**
 * Selects the discovery processor for the given mode (case-insensitive).
 * LOCAL uses the local processor; ZOOKEEPER/ETCD/NACOS share the default
 * processor; EUREKA uses the AP processor.
 *
 * @param mode the discovery mode name
 * @return the matching processor
 * @throws NotImplementedException for unsupported modes
 */
public DiscoveryProcessor chooseProcessor(final String mode) {
    if (DiscoveryMode.LOCAL.name().equalsIgnoreCase(mode)) {
        return localDiscoveryProcessor;
    }
    if (DiscoveryMode.ZOOKEEPER.name().equalsIgnoreCase(mode)
            || DiscoveryMode.ETCD.name().equalsIgnoreCase(mode)
            || DiscoveryMode.NACOS.name().equalsIgnoreCase(mode)) {
        return defaultDiscoveryProcessor;
    }
    if (DiscoveryMode.EUREKA.name().equalsIgnoreCase(mode)) {
        return apDiscoveryProcessor;
    }
    throw new NotImplementedException("shenyu discovery mode current didn't support " + mode);
}
@Test
void chooseProcessor() {
    // Local mode resolves to the local processor.
    assertEquals(localProcessor, processorHolder.chooseProcessor(DiscoveryMode.LOCAL.name()));
    // Registry-backed modes all share the default processor.
    assertEquals(defaultProcessor, processorHolder.chooseProcessor(DiscoveryMode.ZOOKEEPER.name()));
    assertEquals(defaultProcessor, processorHolder.chooseProcessor(DiscoveryMode.ETCD.name()));
    assertEquals(defaultProcessor, processorHolder.chooseProcessor(DiscoveryMode.NACOS.name()));
    assertEquals(defaultProcessor, processorHolder.chooseProcessor(DiscoveryMode.EUREKA.name()));
    // Unsupported modes fail fast.
    assertThrows(NotImplementedException.class, () -> processorHolder.chooseProcessor("UNKNOWN_MODE"));
}
/**
 * Async overload: describes the topics referenced by the given partitions and then
 * delegates to the synchronous leader-check filter, keeping only the requested
 * partitions. failOnUnknownLeader controls whether a missing leader raises an error.
 */
private Mono<Collection<TopicPartition>> filterPartitionsWithLeaderCheck(Collection<TopicPartition> partitions,
                                                                        boolean failOnUnknownLeader) {
    var targetTopics = partitions.stream().map(TopicPartition::topic).collect(Collectors.toSet());
    return describeTopicsImpl(targetTopics)
        .map(descriptions ->
            filterPartitionsWithLeaderCheck(
                descriptions.values(), partitions::contains, failOnUnknownLeader));
}
// Verifies the leader-check filter: a topic with any leaderless partition is dropped
// entirely, predicate-rejected topics are skipped, and only fully-led topics remain.
@Test
void filterPartitionsWithLeaderCheckSkipsPartitionsFromTopicWhereSomePartitionsHaveNoLeader() {
    var filteredPartitions = ReactiveAdminClient.filterPartitionsWithLeaderCheck(
        List.of(
            // contains partitions with no leader
            new TopicDescription("noLeaderTopic", false,
                List.of(
                    new TopicPartitionInfo(0, new Node(1, "n1", 9092), List.of(), List.of()),
                    new TopicPartitionInfo(1, null, List.of(), List.of()))),
            // should be skipped by predicate
            new TopicDescription("skippingByPredicate", false,
                List.of(
                    new TopicPartitionInfo(0, new Node(1, "n1", 9092), List.of(), List.of()))),
            // good topic
            new TopicDescription("good", false,
                List.of(
                    new TopicPartitionInfo(0, new Node(1, "n1", 9092), List.of(), List.of()),
                    new TopicPartitionInfo(1, new Node(2, "n2", 9092), List.of(), List.of())))),
        p -> !p.topic().equals("skippingByPredicate"),
        false
    );

    assertThat(filteredPartitions)
        .containsExactlyInAnyOrder(
            new TopicPartition("good", 0),
            new TopicPartition("good", 1)
        );
}
/**
 * Queries the broker for the consume time spans of each queue of the given topic
 * consumed by the given group.
 *
 * @param addr          broker address
 * @param topic         topic to query
 * @param group         consumer group
 * @param timeoutMillis RPC timeout
 * @return the per-queue time spans reported by the broker
 * @throws MQBrokerException if the broker replies with a non-success code
 */
public List<QueueTimeSpan> queryConsumeTimeSpan(final String addr, final String topic, final String group,
    final long timeoutMillis)
    throws RemotingConnectException, RemotingSendRequestException, RemotingTimeoutException,
    InterruptedException, MQBrokerException {
    QueryConsumeTimeSpanRequestHeader requestHeader = new QueryConsumeTimeSpanRequestHeader();
    requestHeader.setTopic(topic);
    requestHeader.setGroup(group);

    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.QUERY_CONSUME_TIME_SPAN, requestHeader);
    RemotingCommand response = this.remotingClient.invokeSync(
        MixAll.brokerVIPChannel(this.clientConfig.isVipChannelEnabled(), addr), request, timeoutMillis);
    switch (response.getCode()) {
        case ResponseCode.SUCCESS: {
            // decode(byte[], Class) is a static helper inherited from RemotingSerializable;
            // call it via the actual body type instead of the unrelated GroupList class the
            // previous code used, which was misleading (it worked only because decode is static).
            QueryConsumeTimeSpanBody consumeTimeSpanBody =
                QueryConsumeTimeSpanBody.decode(response.getBody(), QueryConsumeTimeSpanBody.class);
            return consumeTimeSpanBody.getConsumeTimeSpanSet();
        }
        default:
            break;
    }

    throw new MQBrokerException(response.getCode(), response.getRemark(), addr);
}
// Verifies the success path: a mocked broker response carrying one QueueTimeSpan
// decodes into a single-element result list.
@Test
public void assertQueryConsumeTimeSpan() throws RemotingException, InterruptedException, MQBrokerException {
    mockInvokeSync();
    QueryConsumeTimeSpanBody responseBody = new QueryConsumeTimeSpanBody();
    responseBody.getConsumeTimeSpanSet().add(new QueueTimeSpan());
    setResponseBody(responseBody);
    List<QueueTimeSpan> actual = mqClientAPI.queryConsumeTimeSpan(defaultBrokerAddr, defaultTopic, group, defaultTimeout);
    assertNotNull(actual);
    assertEquals(1, actual.size());
}
public BoundingBox increaseByScale(final float pBoundingboxPaddingRelativeScale) { if (pBoundingboxPaddingRelativeScale <= 0) throw new IllegalArgumentException("pBoundingboxPaddingRelativeScale must be positive"); final TileSystem tileSystem = org.osmdroid.views.MapView.getTileSystem(); // out-of-bounds latitude will be clipped final double latCenter = getCenterLatitude(); final double latSpanHalf = getLatitudeSpan() / 2 * pBoundingboxPaddingRelativeScale; final double latNorth = tileSystem.cleanLatitude(latCenter + latSpanHalf); final double latSouth = tileSystem.cleanLatitude(latCenter - latSpanHalf); // out-of-bounds longitude will be wrapped around final double lonCenter = getCenterLongitude(); final double lonSpanHalf = getLongitudeSpanWithDateLine() / 2 * pBoundingboxPaddingRelativeScale; final double latEast = tileSystem.cleanLongitude(lonCenter + lonSpanHalf); final double latWest = tileSystem.cleanLongitude(lonCenter - lonSpanHalf); return new BoundingBox(latNorth, latEast, latSouth, latWest); }
// Verifies symmetric scaling: a (10,20,0,0) box scaled by 1.2 grows each span by 20%
// around its center, yielding (11, 22, -1, -2).
@Test
public void increaseByScale() {
    BoundingBox bb = new BoundingBox(10, 20, 0, 0).increaseByScale(1.2f);
    assertEquals(11, bb.getLatNorth(), TOLERANCE);
    assertEquals(22, bb.getLonEast(), TOLERANCE);
    assertEquals(-1, bb.getLatSouth(), TOLERANCE);
    assertEquals(-2, bb.getLonWest(), TOLERANCE);
}
/**
 * Returns the consumer's offset store.
 * NOTE(review): deprecated accessor -- the intended replacement is not visible
 * from this chunk; check the class-level deprecation notes.
 */
@Deprecated
public OffsetStore getOffsetStore() {
    return offsetStore;
}
// Verifies that the consumer exposes a non-null offset store once started.
@Test
public void testStart_OffsetShouldNotNUllAfterStart() {
    Assert.assertNotNull(pullConsumer.getOffsetStore());
}
/**
 * Enriches raw job stats into extended stats (with processing estimations).
 * Updates are guarded by a non-blocking tryLock: when another thread holds the
 * lock, the previously computed extended stats are returned unchanged.
 *
 * @param jobStats the latest raw stats sample
 * @return the (possibly just refreshed) extended stats
 */
public JobStatsExtended enrich(JobStats jobStats) {
    JobStats latestJobStats = getLatestJobStats(jobStats, previousJobStats);
    if (lock.tryLock()) {
        // unlock in finally: previously an exception in the setters would have
        // left the lock held forever, blocking all future updates.
        try {
            setFirstRelevantJobStats(latestJobStats);
            setJobStatsExtended(latestJobStats);
            setPreviousJobStats(latestJobStats);
        } finally {
            lock.unlock();
        }
    }
    return jobStatsExtended;
}
// Verifies the finish-time estimation: with 1 job succeeding per 10 seconds and 99
// left, the estimated finish is ~990 seconds away (within a 10-second tolerance).
@Test
void estimatedTimeProcessingIsCalculated4() {
    JobStats firstJobStats = getJobStats(now().minusSeconds(10), 100L, 0L, 0L, 100L);
    JobStats secondJobStats = getJobStats(now(), 99L, 0L, 0L, 101L);

    JobStatsExtended jobStatsExtended = enrich(firstJobStats, secondJobStats);

    assertThat(jobStatsExtended.getEstimation().isProcessingDone()).isFalse();
    assertThat(jobStatsExtended.getEstimation().isEstimatedProcessingFinishedInstantAvailable()).isTrue();
    assertThat(Duration.between(now(), jobStatsExtended.getEstimation().getEstimatedProcessingFinishedAt()).toSeconds()).isCloseTo(990L, Offset.offset(10L));
}
/**
 * Reloads node definitions from the backing properties file and diffs them against
 * the currently held properties. Nodes without a URL are skipped with a warning;
 * username/password are optional. When differences are found, the new properties
 * are installed and the corresponding events are returned.
 *
 * @return the node events produced by the diff (possibly empty)
 */
@Override
public List<NodeEvent> refresh() {
    Properties originalProperties = PropertiesUtils.loadProperties(file);
    List<String> nameList = PropertiesUtils.loadNameList(originalProperties, getPrefix());
    Properties properties = new Properties();
    for (String n : nameList) {
        String url = originalProperties.getProperty(n + ".url");
        if (url == null || url.isEmpty()) {
            // A node without a URL is unusable -- skip it entirely.
            LOG.warn(n + ".url is EMPTY! IGNORE!");
            continue;
        }
        properties.setProperty(n + ".url", url);
        copyOptionalProperty(originalProperties, properties, n + ".username");
        copyOptionalProperty(originalProperties, properties, n + ".password");
    }
    List<NodeEvent> events = NodeEvent.getEventsByDiffProperties(getProperties(), properties);
    if (events != null && !events.isEmpty()) {
        LOG.info(events.size() + " different(s) detected.");
        setProperties(properties);
    }
    return events;
}

/**
 * Copies an optional property from source to target; logs a debug hint when it is
 * missing or empty (credentials may legitimately be absent).
 */
private void copyOptionalProperty(Properties source, Properties target, String key) {
    String value = source.getProperty(key);
    if (value == null || value.isEmpty()) {
        if (LOG.isDebugEnabled()) {
            LOG.debug(key + " is EMPTY. Maybe you should check the config.");
        }
    } else {
        target.setProperty(key, value);
    }
}
// Verifies that the first refresh of a prefixed properties file loads the node's
// url/username/password and emits a single ADD event for the new node.
@Test
public void testRefresh() {
    String file = "/com/alibaba/druid/pool/ha/ha-with-prefix-datasource.properties";
    FileNodeListener listener = new FileNodeListener();
    listener.setFile(file);
    listener.setPrefix("prefix1");

    List<NodeEvent> list = listener.refresh();

    Properties properties = listener.getProperties();
    assertEquals(3, properties.size());
    assertEquals("jdbc:derby:memory:foo1;create=true", properties.getProperty("prefix1.foo.url"));
    assertEquals(1, list.size());
    NodeEvent event = list.get(0);
    assertEquals(NodeEventTypeEnum.ADD, event.getType());
    assertEquals("prefix1.foo", event.getNodeName());
    assertEquals("jdbc:derby:memory:foo1;create=true", event.getUrl());
}
/**
 * Builds a DubboUpstream with default weight and warmup for the given address.
 * NOTE(review): upstreamHost is hard-coded to LOCALHOST while the supplied host is
 * used only for the upstream URL and the status flag -- presumably intentional, but
 * worth confirming against the sibling build*Upstream helpers.
 */
public static DubboUpstream buildDefaultDubboUpstream(final String host, final Integer port) {
    return DubboUpstream.builder().upstreamHost(LOCALHOST)
            .protocol("dubbo://").upstreamUrl(buildUrl(host, port))
            .weight(DEFAULT_WEIGHT).warmup(Constants.WARMUP_TIME)
            .timestamp(System.currentTimeMillis())
            // The upstream is marked "up" only when both host and port are supplied.
            .status(Objects.nonNull(port) && StringUtils.isNotBlank(host))
            .build();
}
// Verifies that the built upstream carries the host:port URL and the dubbo protocol.
@Test
public void buildDefaultDubboUpstream() {
    DubboUpstream dubboUpstream = CommonUpstreamUtils.buildDefaultDubboUpstream(HOST, PORT);
    Assert.assertNotNull(dubboUpstream);
    Assert.assertEquals(HOST + ":" + PORT, dubboUpstream.getUpstreamUrl());
    Assert.assertEquals("dubbo://", dubboUpstream.getProtocol());
}
/**
 * REST endpoint listing the timeline entities of all containers belonging to a
 * given application attempt. All query parameters are optional filters/projections
 * and are forwarded unchanged to the cluster-scoped overload (cluster id null, i.e.
 * the default cluster).
 */
@GET
@Path("/apps/{appid}/appattempts/{appattemptid}/containers")
@Produces(MediaType.APPLICATION_JSON)
public Set<TimelineEntity> getContainers(@Context HttpServletRequest req,
    @Context HttpServletResponse res,
    @PathParam("appid") String appId,
    @PathParam("appattemptid") String appattemptId,
    @QueryParam("userid") String userId,
    @QueryParam("flowname") String flowName,
    @QueryParam("flowrunid") String flowRunId,
    @QueryParam("limit") String limit,
    @QueryParam("createdtimestart") String createdTimeStart,
    @QueryParam("createdtimeend") String createdTimeEnd,
    @QueryParam("relatesto") String relatesTo,
    @QueryParam("isrelatedto") String isRelatedTo,
    @QueryParam("infofilters") String infofilters,
    @QueryParam("conffilters") String conffilters,
    @QueryParam("metricfilters") String metricfilters,
    @QueryParam("eventfilters") String eventfilters,
    @QueryParam("confstoretrieve") String confsToRetrieve,
    @QueryParam("metricstoretrieve") String metricsToRetrieve,
    @QueryParam("fields") String fields,
    @QueryParam("metricslimit") String metricsLimit,
    @QueryParam("metricstimestart") String metricsTimeStart,
    @QueryParam("metricstimeend") String metricsTimeEnd,
    @QueryParam("fromid") String fromId) {
    // null cluster id selects the default cluster in the delegated overload.
    return getContainers(req, res, null, appId, appattemptId, userId, flowName,
        flowRunId, limit, createdTimeStart, createdTimeEnd, relatesTo, isRelatedTo,
        infofilters, conffilters, metricfilters, eventfilters, confsToRetrieve,
        metricsToRetrieve, fields, metricsLimit, metricsTimeStart, metricsTimeEnd,
        fromId);
}
/**
 * End-to-end check of the containers endpoints: querying all YARN_CONTAINER entities
 * for app1 returns 3 containers; querying per attempt returns 1 container for
 * app-attempt-1 and 2 for app-attempt-2, and the per-attempt totals sum to the
 * application-wide total. Also asserts the JSON content type on each response.
 */
@Test void testGetContainers() throws Exception { Client client = createClient(); try { // total 3 containers in a application. URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/app1/entities/YARN_CONTAINER"); ClientResponse resp = getResponse(client, uri); Set<TimelineEntity> entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() { }); assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8, resp.getType().toString()); assertNotNull(entities); int totalEntities = entities.size(); assertEquals(3, totalEntities); assertTrue( entities.contains(newEntity( TimelineEntityType.YARN_CONTAINER.toString(), "container_1_1")), "Entity with container_1_1 should have been present in response."); assertTrue( entities.contains(newEntity( TimelineEntityType.YARN_CONTAINER.toString(), "container_2_1")), "Entity with container_2_1 should have been present in response."); assertTrue( entities.contains(newEntity( TimelineEntityType.YARN_CONTAINER.toString(), "container_2_2")), "Entity with container_2_2 should have been present in response."); // for app-attempt1 1 container has run uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/app1/" + "appattempts/app-attempt-1/containers"); resp = getResponse(client, uri); entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() { }); assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType()); assertNotNull(entities); int retrievedEntity = entities.size(); assertEquals(1, retrievedEntity); assertTrue( entities.contains(newEntity( TimelineEntityType.YARN_CONTAINER.toString(), "container_1_1")), "Entity with container_1_1 should have been present in response."); // for app-attempt2 2 containers has run uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" + "timeline/clusters/cluster1/apps/app1/" + "appattempts/app-attempt-2/containers"); resp = getResponse(client, uri); entities = resp.getEntity(new 
GenericType<Set<TimelineEntity>>() { }); assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType()); assertNotNull(entities); retrievedEntity += entities.size(); assertEquals(2, entities.size()); assertTrue( entities.contains(newEntity( TimelineEntityType.YARN_CONTAINER.toString(), "container_2_1")), "Entity with container_2_1 should have been present in response."); assertTrue( entities.contains(newEntity( TimelineEntityType.YARN_CONTAINER.toString(), "container_2_2")), "Entity with container_2_2 should have been present in response."); assertEquals(totalEntities, retrievedEntity); } finally { client.destroy(); } }
/**
 * Desensitizes a Chinese full name by delegating to firstMask, which keeps the
 * first character visible and masks the remainder.
 *
 * @param fullName the full name to mask
 * @return the masked name
 */
public static String chineseName(String fullName) {
    final String masked = firstMask(fullName);
    return masked;
}
// Masking a three-character Chinese name keeps only the first character visible.
@Test public void chineseNameTest() { assertEquals("段**", DesensitizedUtil.chineseName("段正淳")); }
public String abbreviate(String fqClassName) { StringBuilder buf = new StringBuilder(targetLength); if (fqClassName == null) { throw new IllegalArgumentException("Class name may not be null"); } int inLen = fqClassName.length(); if (inLen < targetLength) { return fqClassName; } int[] dotIndexesArray = new int[ClassicConstants.MAX_DOTS]; // a.b.c contains 2 dots but 2+1 parts. // see also http://jira.qos.ch/browse/LBCLASSIC-110 int[] lengthArray = new int[ClassicConstants.MAX_DOTS + 1]; int dotCount = computeDotIndexes(fqClassName, dotIndexesArray); // System.out.println(); // System.out.println("Dot count for [" + className + "] is " + dotCount); // if there are not dots than abbreviation is not possible if (dotCount == 0) { return fqClassName; } // printArray("dotArray: ", dotArray); computeLengthArray(fqClassName, dotIndexesArray, lengthArray, dotCount); // printArray("lengthArray: ", lengthArray); for (int i = 0; i <= dotCount; i++) { if (i == 0) { buf.append(fqClassName.substring(0, lengthArray[i] - 1)); } else { buf.append(fqClassName.substring(dotIndexesArray[i - 1], dotIndexesArray[i - 1] + lengthArray[i])); } // System.out.println("i=" + i + ", buf=" + buf); } return buf.toString(); }
/**
 * Names already shorter than the target length must pass through unchanged,
 * with or without a package separator.
 */
@Test
public void testShortName() {
    for (String name : new String[] { "hello", "hello.world" }) {
        TargetLengthBasedClassNameAbbreviator abbreviator = new TargetLengthBasedClassNameAbbreviator(100);
        assertEquals(name, abbreviator.abbreviate(name));
    }
}
/**
 * Records {@code count} cache hits by incrementing the underlying hit-count metric.
 */
@Override public void recordHits(int count) { hitCount.inc(count); }
// Recording 2 hits must increment the registry's ".hits" counter to 2.
@Test public void hit() { stats.recordHits(2); assertThat(registry.counter(PREFIX + ".hits").getCount()).isEqualTo(2); }
/**
 * Generates code for the given table configuration, returning a map of
 * generated file path to file content.
 *
 * @param tableId id of the table whose code should be generated
 * @return generated file path -> content, produced by the codegen engine
 */
@Override
public Map<String, String> generationCodes(Long tableId) {
    // Validate that the table configuration exists
    CodegenTableDO table = codegenTableMapper.selectById(tableId);
    if (table == null) {
        throw exception(CODEGEN_TABLE_NOT_EXISTS);
    }
    // Validate that the table has at least one column configured
    List<CodegenColumnDO> columns = codegenColumnMapper.selectListByTableId(tableId);
    if (CollUtil.isEmpty(columns)) {
        throw exception(CODEGEN_COLUMN_NOT_EXISTS);
    }
    // For master templates, load the corresponding sub-table information
    List<CodegenTableDO> subTables = null;
    List<List<CodegenColumnDO>> subColumnsList = null;
    if (CodegenTemplateTypeEnum.isMaster(table.getTemplateType())) {
        // Validate that at least one sub table exists for this master table
        subTables = codegenTableMapper.selectListByTemplateTypeAndMasterTableId(
                CodegenTemplateTypeEnum.SUB.getType(), tableId);
        if (CollUtil.isEmpty(subTables)) {
            throw exception(CODEGEN_MASTER_GENERATION_FAIL_NO_SUB_TABLE);
        }
        // Validate that each sub table contains its configured join column
        subColumnsList = new ArrayList<>();
        for (CodegenTableDO subTable : subTables) {
            List<CodegenColumnDO> subColumns = codegenColumnMapper.selectListByTableId(subTable.getId());
            if (CollUtil.findOne(subColumns, column -> column.getId().equals(subTable.getSubJoinColumnId())) == null) {
                throw exception(CODEGEN_SUB_COLUMN_NOT_EXISTS, subTable.getId());
            }
            subColumnsList.add(subColumns);
        }
    }
    // Run the generation
    return codegenEngine.execute(table, columns, subTables, subColumnsList);
}
/**
 * Generation must fail with CODEGEN_SUB_COLUMN_NOT_EXISTS when a sub table's
 * configured join column is not among that sub table's columns.
 */
@Test
public void testGenerationCodes_sub_columnNotExists() {
    // mock data (CodegenTableDO)
    CodegenTableDO table = randomPojo(CodegenTableDO.class, o -> o.setScene(CodegenSceneEnum.ADMIN.getScene())
            .setTemplateType(CodegenTemplateTypeEnum.MASTER_NORMAL.getType()));
    codegenTableMapper.insert(table);
    // mock data (CodegenColumnDO)
    CodegenColumnDO column01 = randomPojo(CodegenColumnDO.class, o -> o.setTableId(table.getId()));
    codegenColumnMapper.insert(column01);
    // mock data (sub CodegenTableDO) — no join column is inserted for it
    CodegenTableDO subTable = randomPojo(CodegenTableDO.class, o -> o.setScene(CodegenSceneEnum.ADMIN.getScene())
            .setTemplateType(CodegenTemplateTypeEnum.SUB.getType())
            .setMasterTableId(table.getId()));
    codegenTableMapper.insert(subTable);
    // prepare the argument
    Long tableId = table.getId();
    // invoke and assert the expected service exception
    assertServiceException(() -> codegenService.generationCodes(tableId),
            CODEGEN_SUB_COLUMN_NOT_EXISTS, subTable.getId());
}
/**
 * Returns a defensive copy of the sampled values so callers cannot mutate
 * the snapshot's internal state.
 *
 * @return a fresh copy of the underlying values array
 */
@Override
public long[] getValues() {
    return values.clone();
}
// A snapshot built from a Collection<Long> must expose exactly those values (order-insensitive).
@Test public void canAlsoBeCreatedFromACollectionOfLongs() throws Exception { final Snapshot other = new UniformSnapshot(asList(5L, 1L, 2L, 3L, 4L)); assertThat(other.getValues()) .containsOnly(1, 2, 3, 4, 5); }
/**
 * Handles one MySQL command round-trip: resets per-command state, reads a single
 * protocol packet from the channel, dispatches it, and finalizes the response,
 * finally marking the connection idle (COM_SLEEP).
 *
 * @throws IOException if reading from the channel fails
 */
public void processOnce() throws IOException {
    // set status of query to OK.
    ctx.getState().reset();
    executor = null;

    // reset sequence id of MySQL protocol
    final MysqlChannel channel = ctx.getMysqlChannel();
    channel.setSequenceId(0);

    // read packet from channel
    try {
        packetBuf = channel.fetchOnePacket();
        if (packetBuf == null) {
            throw new RpcException(ctx.getRemoteIP(), "Error happened when receiving packet.");
        }
    } catch (AsynchronousCloseException e) {
        // when this happened, timeout checker close this channel
        // killed flag in ctx has been already set, just return
        return;
    }

    // dispatch
    dispatch();
    // finalize
    finalizeCommand();

    ctx.setCommand(MysqlCommand.COM_SLEEP);
}
/**
 * A session-configured custom query id must be visible on the context while the
 * statement executes, and both the context's and the session variable's custom
 * query id must be cleared once the query finishes.
 */
@Test public void testQueryWithCustomQueryId(@Mocked StmtExecutor executor) throws Exception { ConnectContext ctx = initMockContext(mockChannel(queryPacket), GlobalStateMgr.getCurrentState()); ctx.getSessionVariable().setCustomQueryId("a_custom_query_id"); ConnectProcessor processor = new ConnectProcessor(ctx); AtomicReference<String> customQueryId = new AtomicReference<>(); new MockUp<StmtExecutor>() { @Mock public void execute() throws Exception { customQueryId.set(ctx.getCustomQueryId()); } @Mock public PQueryStatistics getQueryStatisticsForAuditLog() { return null; } }; processor.processOnce(); Assert.assertEquals(MysqlCommand.COM_QUERY, myContext.getCommand()); // verify customQueryId is set during query execution Assert.assertEquals("a_custom_query_id", customQueryId.get()); // customQueryId is cleared after query finished Assert.assertEquals("", ctx.getCustomQueryId()); Assert.assertEquals("", ctx.getSessionVariable().getCustomQueryId()); }
/**
 * Collapses a SQL statement onto a single line: every whitespace token becomes a
 * single space and the result is trimmed. Non-whitespace tokens (including quoted
 * identifiers) are emitted verbatim.
 *
 * @param sql the statement to squeeze
 * @return the single-line form of {@code sql}
 */
public static String squeezeStatement(String sql) {
    TokenSource tokenSource = getLexer(sql, ImmutableSet.of());
    StringBuilder squeezed = new StringBuilder();
    for (Token token = tokenSource.nextToken(); token.getType() != Token.EOF; token = tokenSource.nextToken()) {
        if (token.getType() == SqlBaseLexer.WS) {
            squeezed.append(' ');
        } else {
            squeezed.append(token.getText());
        }
    }
    return squeezed.toString().trim();
}
// Backquoted identifiers (including escaped backquotes and inner spaces) must survive squeezing unchanged.
@Test public void testSqueezeStatementWithBackquote() { String sql = "select ` f``o o`` ` from dual"; assertEquals(squeezeStatement(sql), "select ` f``o o`` ` from dual"); }
/**
 * Sets the custom banner text printed at startup.
 *
 * @param bannerText the banner text to display
 * @return this Blade instance, for fluent chaining
 */
public Blade bannerText(String bannerText) { this.bannerText = bannerText; return this; }
// The configured banner text must be readable back from the Blade instance.
@Test public void testBannerText() { Blade blade = Blade.create().bannerText("qq"); assertEquals("qq", blade.bannerText()); }
/**
 * Closes the underlying reader, releasing its resources.
 *
 * @throws IOException if closing the reader fails
 */
public void close() throws IOException { reader.close(); }
// Streaming a plain file (not a valid application package) must raise InternalServerException on unpack.
@Test(expected = InternalServerException.class) public void require_that_invalid_application_returns_error_when_unpacked() throws Exception { File app = new File("src/test/resources/deploy/validapp/services.xml"); streamFromTarGz(app).close(); }
/**
 * Extracts the algorithm OID from a PKCS#8-encoded key: descends into the outer
 * SEQUENCE, skips the version INTEGER, then reads the OID from the
 * AlgorithmIdentifier SEQUENCE.
 *
 * @param encoded the PKCS#8 DER encoding; must not be null
 * @return the algorithm OID as a dotted string
 * @throws NoSuchAlgorithmException if {@code encoded} is null or cannot be parsed
 */
static String getOidFromPkcs8Encoded(byte[] encoded) throws NoSuchAlgorithmException {
    if (encoded == null) {
        throw new NoSuchAlgorithmException("encoding is null");
    }
    try {
        SimpleDERReader derReader = new SimpleDERReader(encoded);
        // Enter the outer PrivateKeyInfo SEQUENCE.
        derReader.resetInput(derReader.readSequenceAsByteArray());
        // Skip the version INTEGER.
        derReader.readInt();
        // Enter the AlgorithmIdentifier SEQUENCE and read its OID.
        derReader.resetInput(derReader.readSequenceAsByteArray());
        return derReader.readOid();
    } catch (IOException e) {
        Log.w(TAG, "Could not read OID", e);
        throw new NoSuchAlgorithmException("Could not read key", e);
    }
}
// A null PKCS#8 encoding must be rejected with NoSuchAlgorithmException.
@Test public void getOidFromPkcs8Encoded_Null_Failure() throws Exception { try { PubkeyUtils.getOidFromPkcs8Encoded(null); fail("Should throw NoSuchAlgorithmException"); } catch (NoSuchAlgorithmException expected) { } }
/**
 * Converts an int array into a boxed List, preserving element order.
 *
 * @param array the array to convert; may be null
 * @return a mutable list with the same elements, or null when {@code array} is null
 */
public static List<Integer> toList(int[] array) {
    if (array == null) {
        return null;
    }
    // Presize to avoid intermediate growth.
    List<Integer> boxed = new ArrayList<>(array.length);
    for (int index = 0; index < array.length; index++) {
        boxed.add(array[index]);
    }
    return boxed;
}
// toList must box arrays in order, return an empty list for Replicas.NONE, and handle singletons.
@Test public void testToList() { assertEquals(Arrays.asList(1, 2, 3, 4), Replicas.toList(new int[] {1, 2, 3, 4})); assertEquals(Collections.emptyList(), Replicas.toList(Replicas.NONE)); assertEquals(Collections.singletonList(2), Replicas.toList(new int[] {2})); }
/**
 * Decodes the given cookie header into a set of cookies sorted by the natural
 * ordering of {@link Cookie} (backed by a TreeSet).
 *
 * @param header the cookie header to parse
 * @return the decoded, sorted cookies
 */
public Set<Cookie> decode(String header) {
    Set<Cookie> decoded = new TreeSet<>();
    decode(decoded, header);
    return decoded;
}
/**
 * A single quoted cookie with a very long value (containing '=', '$', '%', etc.)
 * must decode to exactly one cookie whose value is the unquoted literal content.
 */
@Test public void testDecodingLongValue() { String longValue = "b___$Q__$ha__<NC=MN(F__%#4__<NC=MN(F__2_d____#=IvZB__2_F____'=KqtH__2-9____" + "'=IvZM__3f:____$=HbQW__3g'____%=J^wI__3g-____%=J^wI__3g1____$=HbQW__3g2____" + "$=HbQW__3g5____%=J^wI__3g9____$=HbQW__3gT____$=HbQW__3gX____#=J^wI__3gY____" + "#=J^wI__3gh____$=HbQW__3gj____$=HbQW__3gr____$=HbQW__3gx____#=J^wI__3h_____" + "$=HbQW__3h$____#=J^wI__3h'____$=HbQW__3h_____$=HbQW__3h0____%=J^wI__3h1____" + "#=J^wI__3h2____$=HbQW__3h4____$=HbQW__3h7____$=HbQW__3h8____%=J^wI__3h:____" + "#=J^wI__3h@____%=J^wI__3hB____$=HbQW__3hC____$=HbQW__3hL____$=HbQW__3hQ____" + "$=HbQW__3hS____%=J^wI__3hU____$=HbQW__3h[____$=HbQW__3h^____$=HbQW__3hd____" + "%=J^wI__3he____%=J^wI__3hf____%=J^wI__3hg____$=HbQW__3hh____%=J^wI__3hi____" + "%=J^wI__3hv____$=HbQW__3i/____#=J^wI__3i2____#=J^wI__3i3____%=J^wI__3i4____" + "$=HbQW__3i7____$=HbQW__3i8____$=HbQW__3i9____%=J^wI__3i=____#=J^wI__3i>____" + "%=J^wI__3iD____$=HbQW__3iF____#=J^wI__3iH____%=J^wI__3iM____%=J^wI__3iS____" + "#=J^wI__3iU____%=J^wI__3iZ____#=J^wI__3i]____%=J^wI__3ig____%=J^wI__3ij____" + "%=J^wI__3ik____#=J^wI__3il____$=HbQW__3in____%=J^wI__3ip____$=HbQW__3iq____" + "$=HbQW__3it____%=J^wI__3ix____#=J^wI__3j_____$=HbQW__3j%____$=HbQW__3j'____" + "%=J^wI__3j(____%=J^wI__9mJ____'=KqtH__=SE__<NC=MN(F__?VS__<NC=MN(F__Zw`____" + "%=KqtH__j+C__<NC=MN(F__j+M__<NC=MN(F__j+a__<NC=MN(F__j_.__<NC=MN(F__n>M____" + "'=KqtH__s1X____$=MMyc__s1_____#=MN#O__ypn____'=KqtH__ypr____'=KqtH_#%h_____" + "%=KqtH_#%o_____'=KqtH_#)H6__<NC=MN(F_#*%'____%=KqtH_#+k(____'=KqtH_#-E_____" + "'=KqtH_#1)w____'=KqtH_#1)y____'=KqtH_#1*M____#=KqtH_#1*p____'=KqtH_#14Q__<N" + "C=MN(F_#14S__<NC=MN(F_#16I__<NC=MN(F_#16N__<NC=MN(F_#16X__<NC=MN(F_#16k__<N" + "C=MN(F_#17@__<NC=MN(F_#17A__<NC=MN(F_#1Cq____'=KqtH_#7)_____#=KqtH_#7)b____" + "#=KqtH_#7Ww____'=KqtH_#?cQ____'=KqtH_#His____'=KqtH_#Jrh____'=KqtH_#O@M__<N" + "C=MN(F_#O@O__<NC=MN(F_#OC6__<NC=MN(F_#Os.____#=KqtH_#YOW____#=H/Li_#Zat____" + 
"'=KqtH_#ZbI____%=KqtH_#Zbc____'=KqtH_#Zbs____%=KqtH_#Zby____'=KqtH_#Zce____" + "'=KqtH_#Zdc____%=KqtH_#Zea____'=KqtH_#ZhI____#=KqtH_#ZiD____'=KqtH_#Zis____" + "'=KqtH_#Zj0____#=KqtH_#Zj1____'=KqtH_#Zj[____'=KqtH_#Zj]____'=KqtH_#Zj^____" + "'=KqtH_#Zjb____'=KqtH_#Zk_____'=KqtH_#Zk6____#=KqtH_#Zk9____%=KqtH_#Zk<____" + "'=KqtH_#Zl>____'=KqtH_#]9R____$=H/Lt_#]I6____#=KqtH_#]Z#____%=KqtH_#^*N____" + "#=KqtH_#^:m____#=KqtH_#_*_____%=J^wI_#`-7____#=KqtH_#`T>____'=KqtH_#`T?____" + "'=KqtH_#`TA____'=KqtH_#`TB____'=KqtH_#`TG____'=KqtH_#`TP____#=KqtH_#`U_____" + "'=KqtH_#`U/____'=KqtH_#`U0____#=KqtH_#`U9____'=KqtH_#aEQ____%=KqtH_#b<)____" + "'=KqtH_#c9-____%=KqtH_#dxC____%=KqtH_#dxE____%=KqtH_#ev$____'=KqtH_#fBi____" + "#=KqtH_#fBj____'=KqtH_#fG)____'=KqtH_#fG+____'=KqtH_#g<d____'=KqtH_#g<e____" + "'=KqtH_#g=J____'=KqtH_#gat____#=KqtH_#s`D____#=J_#p_#sg?____#=J_#p_#t<a____" + "#=KqtH_#t<c____#=KqtH_#trY____$=JiYj_#vA$____'=KqtH_#xs_____'=KqtH_$$rO____" + "#=KqtH_$$rP____#=KqtH_$(_%____'=KqtH_$)]o____%=KqtH_$_@)____'=KqtH_$_k]____" + "'=KqtH_$1]+____%=KqtH_$3IO____%=KqtH_$3J#____'=KqtH_$3J.____'=KqtH_$3J:____" + "#=KqtH_$3JH____#=KqtH_$3JI____#=KqtH_$3JK____%=KqtH_$3JL____'=KqtH_$3JS____" + "'=KqtH_$8+M____#=KqtH_$99d____%=KqtH_$:Lw____#=LK+x_$:N@____#=KqtG_$:NC____" + "#=KqtG_$:hW____'=KqtH_$:i[____'=KqtH_$:ih____'=KqtH_$:it____'=KqtH_$:kO____" + "'=KqtH_$>*B____'=KqtH_$>hD____+=J^x0_$?lW____'=KqtH_$?ll____'=KqtH_$?lm____" + "%=KqtH_$?mi____'=KqtH_$?mx____'=KqtH_$D7]____#=J_#p_$D@T____#=J_#p_$V<g____" + "'=KqtH"; Set<Cookie> cookies = ServerCookieDecoder.STRICT.decode("bh=\"" + longValue + "\";"); assertEquals(1, cookies.size()); Cookie c = cookies.iterator().next(); assertEquals("bh", c.name()); assertEquals(longValue, c.value()); }
@Override public Map<String, Object> load(String configKey) { if (targetUri == null) { return null; } // Check for new file every so often int currentTimeSecs = Time.currentTimeSecs(); if (lastReturnedValue != null && ((currentTimeSecs - lastReturnedTime) < artifactoryPollTimeSecs)) { LOG.debug("currentTimeSecs: {}; lastReturnedTime {}; artifactoryPollTimeSecs: {}. Returning our last map.", currentTimeSecs, lastReturnedTime, artifactoryPollTimeSecs); return (Map<String, Object>) lastReturnedValue.get(configKey); } try { Map<String, Object> raw = loadFromUri(targetUri); if (raw != null) { return (Map<String, Object>) raw.get(configKey); } } catch (Exception e) { LOG.error("Failed to load from uri {}", targetUri); } return null; }
/**
 * Exercises the loader's caching behavior against a mocked artifactory dir:
 * an initial load returns the configured pools map; an updated artifact is NOT
 * picked up until the poll interval elapses (simulated time), after which both
 * a fresh loader and one serving from its cached file see the new "four" entry.
 */
@Test public void testArtifactUpdate() { // This is a test where we are configured to point right at an artifact dir Config conf = new Config(); conf.put(DaemonConfig.SCHEDULER_CONFIG_LOADER_URI, ARTIFACTORY_HTTP_SCHEME_PREFIX + "bogushost.yahoo.com:9999/location/of/test/dir"); conf.put(Config.STORM_LOCAL_DIR, tmpDirPath.toString()); try (SimulatedTime ignored = new SimulatedTime()) { ArtifactoryConfigLoaderMock loaderMock = new ArtifactoryConfigLoaderMock(conf); loaderMock.setData("Anything", "/location/of/test/dir", "{\"children\" : [ { \"uri\" : \"/20160621204337.yaml\", \"folder\" : false }]}"); loaderMock.setData(null, null, "{ \"" + DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS + "\": {one: 1, two: 2, three: 3}}"); Map<String, Object> ret = loaderMock.load(DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS); assertNotNull(ret, "Unexpectedly returned null"); assertEquals(1, ret.get("one")); assertEquals(2, ret.get("two")); assertEquals(3, ret.get("three")); assertNull(ret.get("four"), "Unexpectedly contained \"four\""); // Now let's load w/o setting up gets, and we should still get valid map back ArtifactoryConfigLoaderMock tc2 = new ArtifactoryConfigLoaderMock(conf); Map<String, Object> ret2 = tc2.load(DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS); assertNotNull(ret2, "Unexpectedly returned null"); assertEquals(1, ret2.get("one")); assertEquals(2, ret2.get("two")); assertEquals(3, ret2.get("three")); assertNull(ret2.get("four"), "Unexpectedly did not return null"); // Now let's update it, but not advance time. Should get old map again. 
loaderMock.setData("Anything", "/location/of/test/dir", "{\"children\" : [ { \"uri\" : \"/20160621204999.yaml\", \"folder\" : false }]}"); loaderMock .setData(null, null, "{ \"" + DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS + "\": {one: 1, two: 2, three: 3, four : 4}}"); ret = loaderMock.load(DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS); assertNotNull(ret, "Unexpectedly returned null"); assertEquals(1, ret.get("one")); assertEquals(2, ret.get("two")); assertEquals(3, ret.get("three")); assertNull(ret.get("four"), "Unexpectedly did not return null, not enough time passed!"); // Re-load from cached' file. ret2 = tc2.load(DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS); assertNotNull(ret2, "Unexpectedly returned null"); assertEquals(1, ret2.get("one")); assertEquals(2, ret2.get("two")); assertEquals(3, ret2.get("three")); assertNull(ret2.get("four"), "Unexpectedly did not return null, last cached result should not have \"four\""); // Now, let's advance time. Time.advanceTime(11 * 60 * 1000); ret = loaderMock.load(DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS); assertNotNull(ret, "Unexpectedly returned null"); assertEquals(1, ret.get("one")); assertEquals(2, ret.get("two")); assertEquals(3, ret.get("three")); assertEquals(4, ret.get("four")); // Re-load from cached' file. ret2 = tc2.load(DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS); assertNotNull(ret2, "Unexpectedly returned null"); assertEquals(1, ret2.get("one")); assertEquals(2, ret2.get("two")); assertEquals(3, ret2.get("three")); assertEquals(4, ret2.get("four")); } }