Dataset columns:
  focal_method — string, lengths 13 to 60.9k characters
  test_case — string, lengths 25 to 109k characters
/**
 * Convenience overload: creates or looks up the {@link Hedge} registered under
 * {@code name}, delegating to the full overload with this registry's default
 * configuration and an empty tag map.
 *
 * @param name the hedge name
 * @return the hedge instance
 */
@Override
public Hedge hedge(final String name) {
    return hedge(name, getDefaultConfig(), emptyMap());
}
// Creating a hedge with a null config Supplier must fail fast with an NPE
// carrying the message "Supplier must not be null".
@Test
public void hedgeNewWithNullConfigSupplier() {
    exception.expect(NullPointerException.class);
    exception.expectMessage("Supplier must not be null");
    HedgeRegistry registry = HedgeRegistry.builder().withDefaultConfig(config).build();
    registry.hedge("name", (Supplier<HedgeConfig>) null);
}
/**
 * Attempts to terminate a queued workflow instance inside a retryable
 * transaction; on a successful single-row update, publishes a
 * {@code WorkflowInstanceUpdateJobEvent} so listeners observe the change.
 *
 * @param instance the workflow instance to terminate
 * @param status   the terminal status to record
 * @param reason   human-readable reason for the termination
 * @return true if exactly one queued row was terminated (and the event was
 *     published), false otherwise
 */
public boolean tryTerminateQueuedInstance(
    WorkflowInstance instance, WorkflowInstance.Status status, String reason) {
  return withMetricLogError(
      () ->
          withRetryableTransaction(
              conn -> {
                int res = terminateQueuedInstance(conn, instance, status, reason);
                // SUCCESS_WRITE_SIZE: exactly one row updated — only then emit the event.
                if (res == SUCCESS_WRITE_SIZE) {
                  publisher.publishOrThrow(
                      WorkflowInstanceUpdateJobEvent.create(
                          instance, status, System.currentTimeMillis()),
                      "Failed sending job events when terminating queued instance");
                  return true;
                }
                return false;
              }),
      "tryTerminateQueuedInstance",
      "Failed to terminate the queued workflow instance {}",
      instance.getIdentity());
}
// Happy path: terminating a queued instance returns true, publishes exactly
// one job event, and persists the status, timeline message, end time and
// modify time.
@Test
public void testTryTerminateQueuedInstance() {
    boolean res = instanceDao.tryTerminateQueuedInstance(wfi, WorkflowInstance.Status.STOPPED, "test-reason");
    assertTrue(res);
    verify(publisher, times(1)).publishOrThrow(any(), any());
    WorkflowInstance updated = instanceDao.getLatestWorkflowInstanceRun(wfi.getWorkflowId(), wfi.getWorkflowInstanceId());
    assertEquals(WorkflowInstance.Status.STOPPED, updated.getStatus());
    assertEquals(
        "Workflow instance status becomes [STOPPED] due to reason [test-reason]",
        updated.getTimeline().getTimelineEvents().get(0).getMessage());
    assertNotNull(updated.getEndTime());
    assertNotNull(updated.getModifyTime());
}
/**
 * Maps a consumed fraction of this bounded offset range onto an absolute
 * position via linear interpolation, rounding down.
 *
 * @param fraction fraction of the range consumed, expected in [0.0, 1.0]
 * @return floor(startOffset + fraction * (stopOffset - startOffset))
 * @throws IllegalArgumentException if the range is unbounded
 */
public synchronized long getPositionForFractionConsumed(double fraction) {
    if (stopOffset == OFFSET_INFINITY) {
        throw new IllegalArgumentException(
            "getPositionForFractionConsumed is not applicable to an unbounded range: " + this);
    }
    final double interpolated = startOffset + fraction * (stopOffset - startOffset);
    return (long) Math.floor(interpolated);
}
// Verifies getPositionForFractionConsumed maps consumed fractions of the
// range [3, 6) onto the expected absolute offsets.
@Test
public void testGetPositionForFractionDense() throws Exception {
    // Tracker over offsets [3, 6), i.e. positions 3, 4, 5.
    OffsetRangeTracker tracker = new OffsetRangeTracker(3, 6);
    // Fractions in [0, 1/3) land on position 3 (floor of the interpolation).
    assertEquals(3, tracker.getPositionForFractionConsumed(0.0));
    assertEquals(3, tracker.getPositionForFractionConsumed(1.0 / 6));
    assertEquals(3, tracker.getPositionForFractionConsumed(0.333));
    // Fractions in [1/3, 2/3) land on position 4.
    assertEquals(4, tracker.getPositionForFractionConsumed(0.334));
    assertEquals(4, tracker.getPositionForFractionConsumed(0.666));
    // Fractions in [2/3, 1) land on position 5.
    assertEquals(5, tracker.getPositionForFractionConsumed(0.667));
    assertEquals(5, tracker.getPositionForFractionConsumed(0.999));
    // The whole [3, 6) is consumed for fraction 1.
    assertEquals(6, tracker.getPositionForFractionConsumed(1.0));
}
/**
 * Reads lines from the terminal, handling CLI-specific commands as they
 * appear, and returns the first line that is not a CLI-specific command.
 *
 * @return the first non-CLI-command line read from the terminal
 */
public String nextNonCliCommand() {
    while (true) {
        final String line = terminal.readLine();
        if (!maybeHandleCliSpecificCommands(line)) {
            return line;
        }
    }
}
// A CLI command whose quoted argument is terminated with a semicolon should
// still be recognized, with quotes and the trailing semicolon stripped from
// the argument passed to the command.
@Test
public void shouldSupportCmdWithQuotedArgBeingTerminatedWithSemiColon() {
    // Given: a CLI command line followed by a non-CLI line.
    when(lineSupplier.get())
        .thenReturn(CLI_CMD_NAME + WHITE_SPACE + "'Arg0';")
        .thenReturn("not a CLI command;");
    // When:
    console.nextNonCliCommand();
    // Then:
    verify(cliCommand).execute(eq(ImmutableList.of("Arg0")), any());
}
/**
 * Client side of the OAUTHBEARER SASL exchange, driven as a two-step state
 * machine by server challenges.
 *
 * <p>In SEND_CLIENT_FIRST_MESSAGE the challenge must be empty and the token is
 * obtained from the callback handler and sent as the initial response. In
 * RECEIVE_SERVER_FIRST_MESSAGE, a non-empty challenge is a JSON error from the
 * server and is answered with a single %x01 byte; an empty challenge means
 * success. Any failure transitions the client to FAILED.
 *
 * @param challenge the server challenge bytes (may be null or empty)
 * @return the next response to send, or null when authentication completed
 * @throws SaslException on protocol or callback errors
 */
@Override
public byte[] evaluateChallenge(byte[] challenge) throws SaslException {
    try {
        OAuthBearerTokenCallback callback = new OAuthBearerTokenCallback();
        switch (state) {
            case SEND_CLIENT_FIRST_MESSAGE:
                if (challenge != null && challenge.length != 0)
                    throw new SaslException("Expected empty challenge");
                // Ask the handler for the bearer token, then send the initial response.
                callbackHandler().handle(new Callback[] {callback});
                SaslExtensions extensions = retrieveCustomExtensions();
                setState(State.RECEIVE_SERVER_FIRST_MESSAGE);
                return new OAuthBearerClientInitialResponse(callback.token().value(), extensions).toBytes();
            case RECEIVE_SERVER_FIRST_MESSAGE:
                if (challenge != null && challenge.length != 0) {
                    // Non-empty challenge here is a JSON error document from the server.
                    String jsonErrorResponse = new String(challenge, StandardCharsets.UTF_8);
                    if (log.isDebugEnabled())
                        log.debug("Sending %%x01 response to server after receiving an error: {}", jsonErrorResponse);
                    setState(State.RECEIVE_SERVER_MESSAGE_AFTER_FAILURE);
                    return new byte[] {BYTE_CONTROL_A};
                }
                callbackHandler().handle(new Callback[] {callback});
                if (log.isDebugEnabled())
                    log.debug("Successfully authenticated as {}", callback.token().principalName());
                setState(State.COMPLETE);
                return null;
            default:
                throw new IllegalSaslStateException("Unexpected challenge in Sasl client state " + state);
        }
    } catch (SaslException e) {
        // Already a SaslException: record failure and rethrow as-is.
        setState(State.FAILED);
        throw e;
    } catch (IOException | UnsupportedCallbackException e) {
        setState(State.FAILED);
        throw new SaslException(e.getMessage(), e);
    }
}
// The client's initial response must carry the custom SASL extensions supplied
// by the callback handler.
@Test
public void testAttachesExtensionsToFirstClientMessage() throws Exception {
    String expectedToken = new String(new OAuthBearerClientInitialResponse("", testExtensions).toBytes(), StandardCharsets.UTF_8);
    OAuthBearerSaslClient client = new OAuthBearerSaslClient(new ExtensionsCallbackHandler(false));
    // An empty byte array is a valid (empty) first challenge.
    String message = new String(client.evaluateChallenge("".getBytes()), StandardCharsets.UTF_8);
    assertEquals(expectedToken, message);
}
/**
 * Convenience overload that instantiates the configured metrics reporters,
 * delegating to the full overload with an empty override map.
 *
 * @param config the configuration holding reporter settings
 * @return the instantiated metrics reporters
 */
public static List<MetricsReporter> metricsReporters(AbstractConfig config) {
    return metricsReporters(Collections.emptyMap(), config);
}
// Exercises the deprecated reporter-loading path: default JMX reporter
// inclusion, opting out via AUTO_INCLUDE_JMX_REPORTER_CONFIG, an explicit
// JmxReporter entry, and multiple explicit reporter classes.
@Test
@SuppressWarnings("deprecation")
public void testMetricsReporters() {
    TestConfig config = new TestConfig(Collections.emptyMap());
    List<MetricsReporter> reporters = CommonClientConfigs.metricsReporters("clientId", config);
    assertEquals(1, reporters.size());
    assertInstanceOf(JmxReporter.class, reporters.get(0));
    // Disabling auto-inclusion yields no reporters at all.
    config = new TestConfig(Collections.singletonMap(CommonClientConfigs.AUTO_INCLUDE_JMX_REPORTER_CONFIG, "false"));
    reporters = CommonClientConfigs.metricsReporters("clientId", config);
    assertTrue(reporters.isEmpty());
    config = new TestConfig(Collections.singletonMap(CommonClientConfigs.METRIC_REPORTER_CLASSES_CONFIG, JmxReporter.class.getName()));
    reporters = CommonClientConfigs.metricsReporters("clientId", config);
    assertEquals(1, reporters.size());
    assertInstanceOf(JmxReporter.class, reporters.get(0));
    Map<String, String> props = new HashMap<>();
    props.put(CommonClientConfigs.METRIC_REPORTER_CLASSES_CONFIG, JmxReporter.class.getName() + "," + MyJmxReporter.class.getName());
    config = new TestConfig(props);
    reporters = CommonClientConfigs.metricsReporters("clientId", config);
    assertEquals(2, reporters.size());
}
/**
 * Builds the Thrift headers for the given session. When identity propagation
 * is enabled via session properties, the session user is forwarded under
 * {@code PRESTO_CONNECTOR_IDENTITY_USER}; otherwise no headers are sent.
 */
@Override
public Map<String, String> getHeaders(ConnectorSession session) {
    if (!isUseIdentityThriftHeader(session)) {
        return ImmutableMap.of();
    }
    return ImmutableMap.of(PRESTO_CONNECTOR_IDENTITY_USER, session.getUser());
}
// With the identity header disabled in session properties, the provider must
// produce no headers at all.
@Test
public void testWithoutIdentityHeaders() {
    ThriftConnectorConfig config = new ThriftConnectorConfig().setUseIdentityThriftHeader(false);
    ThriftSessionProperties properties = new ThriftSessionProperties(config);
    TestingConnectorSession session = new TestingConnectorSession(properties.getSessionProperties());
    DefaultThriftHeaderProvider headerProvider = new DefaultThriftHeaderProvider();
    Map<String, String> headers = headerProvider.getHeaders(session);
    assertTrue(headers.isEmpty());
}
/**
 * Incrementally decodes SPDY frames from {@code buffer}, dispatching each
 * decoded frame (or frame error) to {@code delegate}.
 *
 * <p>Implemented as a resumable state machine: {@code state} records which
 * part of a frame is expected next, and the method returns whenever the buffer
 * does not yet hold enough bytes for the current state, resuming on the next
 * call. NOTE(review): assumes {@code state}, {@code streamId}, {@code flags},
 * {@code length} and {@code numSettings} are instance fields persisted across
 * calls — confirm against the enclosing class.
 */
public void decode(ByteBuf buffer) {
    boolean last;
    int statusCode;
    while (true) {
        switch (state) {
        case READ_COMMON_HEADER:
            // Need the full common header before anything can be decoded.
            if (buffer.readableBytes() < SPDY_HEADER_SIZE) {
                return;
            }
            int frameOffset = buffer.readerIndex();
            int flagsOffset = frameOffset + SPDY_HEADER_FLAGS_OFFSET;
            int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET;
            buffer.skipBytes(SPDY_HEADER_SIZE);
            // High bit of the first byte distinguishes control from data frames.
            boolean control = (buffer.getByte(frameOffset) & 0x80) != 0;
            int version;
            int type;
            if (control) {
                // Decode control frame common header
                version = getUnsignedShort(buffer, frameOffset) & 0x7FFF;
                type = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET);
                streamId = 0; // Default to session Stream-ID
            } else {
                // Decode data frame common header
                version = spdyVersion; // Default to expected version
                type = SPDY_DATA_FRAME;
                streamId = getUnsignedInt(buffer, frameOffset);
            }
            flags = buffer.getByte(flagsOffset);
            length = getUnsignedMedium(buffer, lengthOffset);
            // Check version first then validity
            if (version != spdyVersion) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SPDY Version");
            } else if (!isValidFrameHeader(streamId, type, flags, length)) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid Frame Error");
            } else {
                state = getNextState(type, length);
            }
            break;
        case READ_DATA_FRAME:
            if (length == 0) {
                // Zero-length data frame: emit immediately with an empty buffer.
                state = State.READ_COMMON_HEADER;
                delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0));
                break;
            }
            // Generate data frames that do not exceed maxChunkSize
            int dataLength = Math.min(maxChunkSize, length);
            // Wait until entire frame is readable
            if (buffer.readableBytes() < dataLength) {
                return;
            }
            ByteBuf data = buffer.alloc().buffer(dataLength);
            data.writeBytes(buffer, dataLength);
            length -= dataLength;
            if (length == 0) {
                state = State.READ_COMMON_HEADER;
            }
            // FIN is only reported on the final chunk of the frame.
            last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN);
            delegate.readDataFrame(streamId, last, data);
            break;
        case READ_SYN_STREAM_FRAME:
            if (buffer.readableBytes() < 10) {
                return;
            }
            int offset = buffer.readerIndex();
            streamId = getUnsignedInt(buffer, offset);
            int associatedToStreamId = getUnsignedInt(buffer, offset + 4);
            byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07);
            last = hasFlag(flags, SPDY_FLAG_FIN);
            boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL);
            buffer.skipBytes(10);
            length -= 10;
            if (streamId == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SYN_STREAM Frame");
            } else {
                state = State.READ_HEADER_BLOCK;
                delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional);
            }
            break;
        case READ_SYN_REPLY_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            last = hasFlag(flags, SPDY_FLAG_FIN);
            buffer.skipBytes(4);
            length -= 4;
            if (streamId == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SYN_REPLY Frame");
            } else {
                state = State.READ_HEADER_BLOCK;
                delegate.readSynReplyFrame(streamId, last);
            }
            break;
        case READ_RST_STREAM_FRAME:
            if (buffer.readableBytes() < 8) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
            buffer.skipBytes(8);
            if (streamId == 0 || statusCode == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid RST_STREAM Frame");
            } else {
                state = State.READ_COMMON_HEADER;
                delegate.readRstStreamFrame(streamId, statusCode);
            }
            break;
        case READ_SETTINGS_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR);
            numSettings = getUnsignedInt(buffer, buffer.readerIndex());
            buffer.skipBytes(4);
            length -= 4;
            // Validate frame length against number of entries. Each ID/Value entry is 8 bytes.
            if ((length & 0x07) != 0 || length >> 3 != numSettings) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SETTINGS Frame");
            } else {
                state = State.READ_SETTING;
                delegate.readSettingsFrame(clear);
            }
            break;
        case READ_SETTING:
            if (numSettings == 0) {
                state = State.READ_COMMON_HEADER;
                delegate.readSettingsEnd();
                break;
            }
            if (buffer.readableBytes() < 8) {
                return;
            }
            byte settingsFlags = buffer.getByte(buffer.readerIndex());
            int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
            int value = getSignedInt(buffer, buffer.readerIndex() + 4);
            boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE);
            boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED);
            buffer.skipBytes(8);
            --numSettings;
            delegate.readSetting(id, value, persistValue, persisted);
            break;
        case READ_PING_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            int pingId = getSignedInt(buffer, buffer.readerIndex());
            buffer.skipBytes(4);
            state = State.READ_COMMON_HEADER;
            delegate.readPingFrame(pingId);
            break;
        case READ_GOAWAY_FRAME:
            if (buffer.readableBytes() < 8) {
                return;
            }
            int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex());
            statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
            buffer.skipBytes(8);
            state = State.READ_COMMON_HEADER;
            delegate.readGoAwayFrame(lastGoodStreamId, statusCode);
            break;
        case READ_HEADERS_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            last = hasFlag(flags, SPDY_FLAG_FIN);
            buffer.skipBytes(4);
            length -= 4;
            if (streamId == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid HEADERS Frame");
            } else {
                state = State.READ_HEADER_BLOCK;
                delegate.readHeadersFrame(streamId, last);
            }
            break;
        case READ_WINDOW_UPDATE_FRAME:
            if (buffer.readableBytes() < 8) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4);
            buffer.skipBytes(8);
            if (deltaWindowSize == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid WINDOW_UPDATE Frame");
            } else {
                state = State.READ_COMMON_HEADER;
                delegate.readWindowUpdateFrame(streamId, deltaWindowSize);
            }
            break;
        case READ_HEADER_BLOCK:
            if (length == 0) {
                state = State.READ_COMMON_HEADER;
                delegate.readHeaderBlockEnd();
                break;
            }
            if (!buffer.isReadable()) {
                return;
            }
            // Forward whatever compressed header-block bytes are currently available.
            int compressedBytes = Math.min(buffer.readableBytes(), length);
            ByteBuf headerBlock = buffer.alloc().buffer(compressedBytes);
            headerBlock.writeBytes(buffer, compressedBytes);
            length -= compressedBytes;
            delegate.readHeaderBlock(headerBlock);
            break;
        case DISCARD_FRAME:
            int numBytes = Math.min(buffer.readableBytes(), length);
            buffer.skipBytes(numBytes);
            length -= numBytes;
            if (length == 0) {
                state = State.READ_COMMON_HEADER;
                break;
            }
            return;
        case FRAME_ERROR:
            // After an error, drain everything that arrives.
            buffer.skipBytes(buffer.readableBytes());
            return;
        default:
            throw new Error("Shouldn't reach here.");
        }
    }
}
// GOAWAY frames must ignore the reserved high bit of the last-good-stream-id
// field: the decoder should report the id with the reserved bit masked off.
@Test
public void testReservedSpdyGoAwayFrameBits() throws Exception {
    short type = 7; // GOAWAY control frame type
    byte flags = 0;
    int length = 8;
    int lastGoodStreamId = RANDOM.nextInt() & 0x7FFFFFFF;
    int statusCode = RANDOM.nextInt() | 0x01; // forces a non-zero status code
    ByteBuf buf = Unpooled.buffer(SPDY_HEADER_SIZE + length);
    encodeControlFrameHeader(buf, type, flags, length);
    buf.writeInt(lastGoodStreamId | 0x80000000); // should ignore reserved bit
    buf.writeInt(statusCode);
    decoder.decode(buf);
    verify(delegate).readGoAwayFrame(lastGoodStreamId, statusCode);
    assertFalse(buf.isReadable());
    buf.release();
}
/**
 * Retrieves all entities from the provider that satisfy this querier's
 * criteria, optionally sorted and paged, and returns copies of them.
 *
 * @param entities provider of candidate entities; must not be null
 * @param pageable optional paging specification
 * @param clazz    entity class used by the sorter to resolve sort properties
 * @param sort     optional dynamic sort; a configured static sort takes
 *                 precedence
 * @return a copied, filtered (and possibly sorted/paged) list of entities
 */
protected List<T> getEntities(
    @Nonnull final EntityProvider<T, ?> entities,
    @Nullable final Pageable pageable,
    @Nullable final Class<T> clazz,
    @Nullable final Sort sort) {
    Objects.requireNonNull(entities);
    Stream<? extends T> entityStream = entities
        .stream()
        .filter(this.criteria::evaluate);
    // A statically configured sort wins over the caller-supplied one.
    final Sort sortToUse = this.staticSort.orElse(sort);
    if (sortToUse != null) {
        entityStream = EntitySorter.sortEntitiesStream(clazz, sortToUse, entityStream);
    }
    entityStream = this.pageEntityStream(pageable, entityStream);
    // Copy so callers cannot mutate the stored instances.
    final List<T> result = this.copyEntities(entityStream);
    if (LOG.isTraceEnabled()) {
        LOG.trace("Found {} entries.", result.size());
    }
    return result;
}
// With an empty criteria node and no paging, supplying Sort.by("firstName")
// must return the customers ordered by first name.
@Test
void getEntities_EmptyCriteria_NoPageable_Sortable_IsSortedByFirstName() {
    final PageableSortableCollectionQuerier<Customer> querier = new PageableSortableCollectionQuerier<>(
        new DummyWorkingCopier<>(),
        new CriteriaSingleNode<>()
    );
    final List<Customer> sortedCustomers =
        querier.getEntities(DATA_CUSTOMERS_DABC_ABCD, Pageable.unpaged(), Customer.class, Sort.by("firstName"));
    Assertions.assertEquals("A", sortedCustomers.get(0).firstName);
    Assertions.assertEquals("B", sortedCustomers.get(1).firstName);
    Assertions.assertEquals("C", sortedCustomers.get(2).firstName);
    Assertions.assertEquals("D", sortedCustomers.get(3).firstName);
}
/**
 * Reports every stored item whose envelope intersects the query rectangle.
 *
 * <p>Iterative depth-first traversal of a packed R-tree: node envelopes live
 * in one flat array with one contiguous region per level ({@code levelOffsets}),
 * and leaf envelopes at level 0 map directly onto {@code items}.
 *
 * @param query    the rectangle to probe
 * @param consumer receives each intersecting item
 */
public void findIntersections(Rectangle query, Consumer<T> consumer) {
    // Explicit work stacks instead of recursion; capacity sized for one full
    // fan-out per tree level.
    IntArrayList todoNodes = new IntArrayList(levelOffsets.length * degree);
    IntArrayList todoLevels = new IntArrayList(levelOffsets.length * degree);
    int rootLevel = levelOffsets.length - 1;
    int rootIndex = levelOffsets[rootLevel];
    if (doesIntersect(query, rootIndex)) {
        todoNodes.push(rootIndex);
        todoLevels.push(rootLevel);
    }
    while (!todoNodes.isEmpty()) {
        int nodeIndex = todoNodes.popInt();
        int level = todoLevels.popInt();
        if (level == 0) {
            // This is a leaf node
            consumer.accept(items[nodeIndex / ENVELOPE_SIZE]);
        } else {
            // Interior node: test each child envelope before descending.
            int childrenOffset = getChildrenOffset(nodeIndex, level);
            for (int i = 0; i < degree; i++) {
                int childIndex = childrenOffset + ENVELOPE_SIZE * i;
                if (doesIntersect(query, childIndex)) {
                    todoNodes.push(childIndex);
                    todoLevels.push(level - 1);
                }
            }
        }
    }
}
// Property-style check: for random build/probe rectangles, the R-tree must
// report exactly the same intersections as a brute-force scan.
@Test(dataProvider = "rectangle-counts")
public void testRectangleCollection(int numBuildRectangles, int numProbeRectangles, int seed) {
    Random random = new Random(seed);
    List<Rectangle> buildRectangles = makeRectangles(random, numBuildRectangles);
    List<Rectangle> probeRectangles = makeRectangles(random, numProbeRectangles);
    Flatbush<Rectangle> rtree = new Flatbush<>(buildRectangles.toArray(new Rectangle[] {}));
    for (Rectangle query : probeRectangles) {
        List<Rectangle> actual = findIntersections(rtree, query);
        // Brute-force reference result.
        List<Rectangle> expected = buildRectangles.stream()
            .filter(rect -> rect.intersects(query))
            .collect(toList());
        assertEqualsSorted(actual, expected, RECTANGLE_COMPARATOR);
    }
}
/**
 * Parses game/spam chat messages to keep tracked item-charge counts in sync
 * and to fire break/activation notifications for charged items (dodgy
 * necklace, binding necklace, ring of forging, amulets, bracelets, chronicle,
 * blood essence, bracelet of clay).
 *
 * <p>Exactly one branch of the if/else-if chain is taken per message; branch
 * order matters because some patterns could overlap.
 */
@Subscribe
public void onChatMessage(ChatMessage event) {
    if (event.getType() == ChatMessageType.GAMEMESSAGE || event.getType() == ChatMessageType.SPAM) {
        String message = Text.removeTags(event.getMessage());
        // One matcher per recognized item-charge message pattern.
        Matcher dodgyCheckMatcher = DODGY_CHECK_PATTERN.matcher(message);
        Matcher dodgyProtectMatcher = DODGY_PROTECT_PATTERN.matcher(message);
        Matcher dodgyBreakMatcher = DODGY_BREAK_PATTERN.matcher(message);
        Matcher bindingNecklaceCheckMatcher = BINDING_CHECK_PATTERN.matcher(message);
        Matcher bindingNecklaceUsedMatcher = BINDING_USED_PATTERN.matcher(message);
        Matcher ringOfForgingCheckMatcher = RING_OF_FORGING_CHECK_PATTERN.matcher(message);
        Matcher amuletOfChemistryCheckMatcher = AMULET_OF_CHEMISTRY_CHECK_PATTERN.matcher(message);
        Matcher amuletOfChemistryUsedMatcher = AMULET_OF_CHEMISTRY_USED_PATTERN.matcher(message);
        Matcher amuletOfChemistryBreakMatcher = AMULET_OF_CHEMISTRY_BREAK_PATTERN.matcher(message);
        Matcher amuletOfBountyCheckMatcher = AMULET_OF_BOUNTY_CHECK_PATTERN.matcher(message);
        Matcher amuletOfBountyUsedMatcher = AMULET_OF_BOUNTY_USED_PATTERN.matcher(message);
        Matcher chronicleAddMatcher = CHRONICLE_ADD_PATTERN.matcher(message);
        Matcher chronicleUseAndCheckMatcher = CHRONICLE_USE_AND_CHECK_PATTERN.matcher(message);
        Matcher slaughterActivateMatcher = BRACELET_OF_SLAUGHTER_ACTIVATE_PATTERN.matcher(message);
        Matcher slaughterCheckMatcher = BRACELET_OF_SLAUGHTER_CHECK_PATTERN.matcher(message);
        Matcher expeditiousActivateMatcher = EXPEDITIOUS_BRACELET_ACTIVATE_PATTERN.matcher(message);
        Matcher expeditiousCheckMatcher = EXPEDITIOUS_BRACELET_CHECK_PATTERN.matcher(message);
        Matcher bloodEssenceCheckMatcher = BLOOD_ESSENCE_CHECK_PATTERN.matcher(message);
        Matcher bloodEssenceExtractMatcher = BLOOD_ESSENCE_EXTRACT_PATTERN.matcher(message);
        Matcher braceletOfClayCheckMatcher = BRACELET_OF_CLAY_CHECK_PATTERN.matcher(message);
        if (message.contains(RING_OF_RECOIL_BREAK_MESSAGE)) {
            notifier.notify(config.recoilNotification(), "Your Ring of Recoil has shattered");
        } else if (dodgyBreakMatcher.find()) {
            // On break, reset the counter to a fresh necklace's charge count.
            notifier.notify(config.dodgyNotification(), "Your dodgy necklace has crumbled to dust.");
            updateDodgyNecklaceCharges(MAX_DODGY_CHARGES);
        } else if (dodgyCheckMatcher.find()) {
            updateDodgyNecklaceCharges(Integer.parseInt(dodgyCheckMatcher.group(1)));
        } else if (dodgyProtectMatcher.find()) {
            updateDodgyNecklaceCharges(Integer.parseInt(dodgyProtectMatcher.group(1)));
        } else if (amuletOfChemistryCheckMatcher.find()) {
            updateAmuletOfChemistryCharges(Integer.parseInt(amuletOfChemistryCheckMatcher.group(1)));
        } else if (amuletOfChemistryUsedMatcher.find()) {
            // The game spells out "one" instead of the digit 1.
            final String match = amuletOfChemistryUsedMatcher.group(1);
            int charges = 1;
            if (!match.equals("one")) {
                charges = Integer.parseInt(match);
            }
            updateAmuletOfChemistryCharges(charges);
        } else if (amuletOfChemistryBreakMatcher.find()) {
            notifier.notify(config.amuletOfChemistryNotification(), "Your amulet of chemistry has crumbled to dust.");
            updateAmuletOfChemistryCharges(MAX_AMULET_OF_CHEMISTRY_CHARGES);
        } else if (amuletOfBountyCheckMatcher.find()) {
            updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyCheckMatcher.group(1)));
        } else if (amuletOfBountyUsedMatcher.find()) {
            updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyUsedMatcher.group(1)));
        } else if (message.equals(AMULET_OF_BOUNTY_BREAK_TEXT)) {
            updateAmuletOfBountyCharges(MAX_AMULET_OF_BOUNTY_CHARGES);
        } else if (message.contains(BINDING_BREAK_TEXT)) {
            notifier.notify(config.bindingNotification(), BINDING_BREAK_TEXT);
            // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
            updateBindingNecklaceCharges(MAX_BINDING_CHARGES + 1);
        } else if (bindingNecklaceUsedMatcher.find()) {
            final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
            if (equipment.contains(ItemID.BINDING_NECKLACE)) {
                updateBindingNecklaceCharges(getItemCharges(ItemChargeConfig.KEY_BINDING_NECKLACE) - 1);
            }
        } else if (bindingNecklaceCheckMatcher.find()) {
            final String match = bindingNecklaceCheckMatcher.group(1);
            int charges = 1;
            if (!match.equals("one")) {
                charges = Integer.parseInt(match);
            }
            updateBindingNecklaceCharges(charges);
        } else if (ringOfForgingCheckMatcher.find()) {
            final String match = ringOfForgingCheckMatcher.group(1);
            int charges = 1;
            if (!match.equals("one")) {
                charges = Integer.parseInt(match);
            }
            updateRingOfForgingCharges(charges);
        } else if (message.equals(RING_OF_FORGING_USED_TEXT) || message.equals(RING_OF_FORGING_VARROCK_PLATEBODY)) {
            final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);
            final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
            // Determine if the player smelted with a Ring of Forging equipped.
            if (equipment == null) {
                return;
            }
            if (equipment.contains(ItemID.RING_OF_FORGING) && (message.equals(RING_OF_FORGING_USED_TEXT) || inventory.count(ItemID.IRON_ORE) > 1)) {
                int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_RING_OF_FORGING) - 1, 0, MAX_RING_OF_FORGING_CHARGES);
                updateRingOfForgingCharges(charges);
            }
        } else if (message.equals(RING_OF_FORGING_BREAK_TEXT)) {
            notifier.notify(config.ringOfForgingNotification(), "Your ring of forging has melted.");
            // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync
            updateRingOfForgingCharges(MAX_RING_OF_FORGING_CHARGES + 1);
        } else if (chronicleAddMatcher.find()) {
            final String match = chronicleAddMatcher.group(1);
            if (match.equals("one")) {
                setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
            } else {
                setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(match));
            }
        } else if (chronicleUseAndCheckMatcher.find()) {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(chronicleUseAndCheckMatcher.group(1)));
        } else if (message.equals(CHRONICLE_ONE_CHARGE_TEXT)) {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1);
        } else if (message.equals(CHRONICLE_EMPTY_TEXT) || message.equals(CHRONICLE_NO_CHARGES_TEXT)) {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 0);
        } else if (message.equals(CHRONICLE_FULL_TEXT)) {
            setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1000);
        } else if (slaughterActivateMatcher.find()) {
            // A null capture group means the bracelet broke (no count in message).
            final String found = slaughterActivateMatcher.group(1);
            if (found == null) {
                updateBraceletOfSlaughterCharges(MAX_SLAYER_BRACELET_CHARGES);
                notifier.notify(config.slaughterNotification(), BRACELET_OF_SLAUGHTER_BREAK_TEXT);
            } else {
                updateBraceletOfSlaughterCharges(Integer.parseInt(found));
            }
        } else if (slaughterCheckMatcher.find()) {
            updateBraceletOfSlaughterCharges(Integer.parseInt(slaughterCheckMatcher.group(1)));
        } else if (expeditiousActivateMatcher.find()) {
            // Same null-group convention as the slaughter bracelet above.
            final String found = expeditiousActivateMatcher.group(1);
            if (found == null) {
                updateExpeditiousBraceletCharges(MAX_SLAYER_BRACELET_CHARGES);
                notifier.notify(config.expeditiousNotification(), EXPEDITIOUS_BRACELET_BREAK_TEXT);
            } else {
                updateExpeditiousBraceletCharges(Integer.parseInt(found));
            }
        } else if (expeditiousCheckMatcher.find()) {
            updateExpeditiousBraceletCharges(Integer.parseInt(expeditiousCheckMatcher.group(1)));
        } else if (bloodEssenceCheckMatcher.find()) {
            updateBloodEssenceCharges(Integer.parseInt(bloodEssenceCheckMatcher.group(1)));
        } else if (bloodEssenceExtractMatcher.find()) {
            updateBloodEssenceCharges(getItemCharges(ItemChargeConfig.KEY_BLOOD_ESSENCE) - Integer.parseInt(bloodEssenceExtractMatcher.group(1)));
        } else if (message.contains(BLOOD_ESSENCE_ACTIVATE_TEXT)) {
            updateBloodEssenceCharges(MAX_BLOOD_ESSENCE_CHARGES);
        } else if (braceletOfClayCheckMatcher.find()) {
            updateBraceletOfClayCharges(Integer.parseInt(braceletOfClayCheckMatcher.group(1)));
        } else if (message.equals(BRACELET_OF_CLAY_USE_TEXT) || message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN)) {
            final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT);
            // Determine if the player mined with a Bracelet of Clay equipped.
            if (equipment != null && equipment.contains(ItemID.BRACELET_OF_CLAY)) {
                final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY);
                // Charge is not used if only 1 inventory slot is available when mining in Prifddinas
                boolean ignore = inventory != null && inventory.count() == 27 && message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN);
                if (!ignore) {
                    int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_BRACELET_OF_CLAY) - 1, 0, MAX_BRACELET_OF_CLAY_CHARGES);
                    updateBraceletOfClayCharges(charges);
                }
            }
        } else if (message.equals(BRACELET_OF_CLAY_BREAK_TEXT)) {
            notifier.notify(config.braceletOfClayNotification(), "Your bracelet of clay has crumbled to dust");
            updateBraceletOfClayCharges(MAX_BRACELET_OF_CLAY_CHARGES);
        }
    }
}
// Mining in Prifddinas with more than one free inventory slot must consume a
// bracelet of clay charge (13 -> 12); a full-but-one inventory would be ignored.
@Test
public void testBraceletOfClayUseTrahaearn() {
    // Set bracelet of clay charges to 13
    when(configManager.getRSProfileConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_BRACELET_OF_CLAY, Integer.class))
        .thenReturn(13);
    // Equip bracelet of clay
    ItemContainer equipmentItemContainer = mock(ItemContainer.class);
    when(client.getItemContainer(InventoryID.EQUIPMENT)).thenReturn(equipmentItemContainer);
    when(equipmentItemContainer.contains(ItemID.BRACELET_OF_CLAY)).thenReturn(true);
    when(equipmentItemContainer.getItems()).thenReturn(new Item[0]);
    // Set inventory to 2 free slots
    ItemContainer inventoryItemContainer = mock(ItemContainer.class);
    when(inventoryItemContainer.count()).thenReturn(26);
    when(client.getItemContainer(InventoryID.INVENTORY)).thenReturn(inventoryItemContainer);
    // Verify bracelet of clay charges decreased
    ChatMessage chatMessage = new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", USED_BRACELET_OF_CLAY_TRAHAEARN, "", 0);
    itemChargePlugin.onChatMessage(chatMessage);
    verify(configManager).setRSProfileConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_BRACELET_OF_CLAY, 12);
}
/**
 * Injection setter for TABLE_NAME_DEFINED_IN_FIELD: a case-insensitive "Y"
 * enables the flag, any other value disables it.
 */
@Injection( name = "TABLE_NAME_DEFINED_IN_FIELD" )
public void metaSetTableNameDefinedInField( String value ) {
    final boolean definedInField = "Y".equalsIgnoreCase( value );
    setTableNameInField( definedInField );
}
// The injection setter must accept only a case-insensitive "Y" as true; any
// other value (including the near-miss "Ynot") maps to false.
@Test
public void metaSetTableNameDefinedInField() {
    TableOutputMeta tableOutputMeta = new TableOutputMeta();
    tableOutputMeta.metaSetTableNameDefinedInField( "Y" );
    assertTrue( tableOutputMeta.isTableNameInField() );
    tableOutputMeta.metaSetTableNameDefinedInField( "N" );
    assertFalse( tableOutputMeta.isTableNameInField() );
    tableOutputMeta.metaSetTableNameDefinedInField( "Ynot" );
    assertFalse( tableOutputMeta.isTableNameInField() );
}
/**
 * Determines which of the given ZooKeeper pods is the leader.
 *
 * <p>Trivial cases are resolved without network I/O: an empty pod set yields
 * {@code UNKNOWN_LEADER} and a single pod is its own leader. Otherwise the
 * pods are probed over TLS with backoff.
 *
 * @param reconciliation  the reconciliation this lookup belongs to
 * @param pods            names of the candidate ZooKeeper pods
 * @param coTlsPemIdentity TLS identity used to connect to the pods
 * @return a future completing with the leader pod name, or UNKNOWN_LEADER
 */
Future<String> findZookeeperLeader(Reconciliation reconciliation, Set<String> pods, TlsPemIdentity coTlsPemIdentity) {
    if (pods.isEmpty()) {
        // Nothing to query: the leader cannot be determined.
        return Future.succeededFuture(UNKNOWN_LEADER);
    } else if (pods.size() == 1) {
        // A single-node ensemble is trivially its own leader.
        return Future.succeededFuture(pods.iterator().next());
    }
    try {
        NetClientOptions netClientOptions = clientOptions(coTlsPemIdentity.pemTrustSet(), coTlsPemIdentity.pemAuthIdentity());
        return zookeeperLeaderWithBackoff(reconciliation, pods, netClientOptions);
    } catch (Throwable e) {
        // Surface synchronous setup failures through the returned future
        // instead of letting them propagate to the caller.
        return Future.failedFuture(e);
    }
}
// When every ZooKeeper port is closed (simulating network failure), the finder
// must resolve to UNKNOWN_LEADER and never reach any fake ZK node.
@Test
public void testReturnUnknownLeaderDuringNetworkExceptions(VertxTestContext context) throws InterruptedException {
    int[] ports = startMockZks(context, 2, (id, attempt) -> false);
    // Close ports to ensure closed ports are used so as to mock network problems
    stopZks();
    ZookeeperLeaderFinder finder = new TestingZookeeperLeaderFinder(this::backoff, ports);
    Checkpoint a = context.checkpoint();
    finder.findZookeeperLeader(Reconciliation.DUMMY_RECONCILIATION, treeSet(createPodWithId(0), createPodWithId(1)), DUMMY_IDENTITY)
        .onComplete(context.succeeding(leader -> context.verify(() -> {
            assertThat(leader, is(ZookeeperLeaderFinder.UNKNOWN_LEADER));
            // No fake ZK node should have seen a single connection attempt.
            for (FakeZk zk : zks) {
                assertThat("Unexpected number of attempts for node " + zk.id, zk.attempts.get(), is(0));
            }
            a.flag();
        })));
}
/**
 * Deletes the member level with the given id after validating that it exists
 * and that no users are still assigned to it.
 *
 * @param id the id of the level to delete
 */
@Override
public void deleteLevel(Long id) {
    // Validate that the level exists
    validateLevelExists(id);
    // Validate that no users are still assigned to this level
    validateLevelHasUser(id);
    // Delete the record
    memberLevelMapper.deleteById(id);
}
// Deleting a level that does not exist must raise the LEVEL_NOT_EXISTS
// service exception.
@Test
public void testDeleteLevel_notExists() {
    // Prepare a random id that has no backing record
    Long id = randomLongId();
    // Invoke and assert the expected service exception
    assertServiceException(() -> levelService.deleteLevel(id), LEVEL_NOT_EXISTS);
}
public static boolean isStorageSpaceError(final Throwable error) { Throwable cause = error; while (null != cause) { if (cause instanceof IOException) { final String msg = cause.getMessage(); if ("No space left on device".equals(msg) || "There is not enough space on the disk".equals(msg)) { return true; } } cause = cause.getCause(); } return false; }
// A null throwable has no cause chain, so it can never be a storage-space error.
@Test
void isStorageSpaceErrorReturnsFalseIfNull() {
    assertFalse(isStorageSpaceError(null));
}
/**
 * UDF: converts a JSON object string into a map of field name to string value.
 * Text nodes are unwrapped to their raw text; every other value type is
 * rendered as its JSON representation. Returns null for null input, for
 * non-object JSON, and for input that parses to a missing node.
 */
@Udf
public Map<String, String> records(@UdfParameter final String jsonObj) {
    if (jsonObj == null) {
        return null;
    }
    final JsonNode node = UdfJsonMapper.parseJson(jsonObj);
    if (node.isMissingNode() || !node.isObject()) {
        return null;
    }
    final Map<String, String> result = new HashMap<>(node.size());
    node.fieldNames().forEachRemaining(fieldName -> {
        final JsonNode value = node.get(fieldName);
        result.put(fieldName, value instanceof TextNode ? value.textValue() : value.toString());
    });
    return result;
}
// An empty JSON object must yield an empty (but non-null) map.
@Test
public void shouldReturnEmptyMapForEmptyObject() {
    assertEquals(Collections.emptyMap(), udf.records("{}"));
}
/**
 * Synchronizes the given step across shared steps, using the step's own name
 * as the lookup key.
 */
public synchronized void synchronizeSteps( StepMeta step ) {
    final String lookupName = step.getName();
    synchronizeSteps( step, lookupName );
}
// Synchronizing a shared step must propagate changes to shared steps of the
// same name in other transformations, while leaving unshared namesakes alone.
@Test
public void synchronizeSteps_sync_shared_only() throws Exception {
    final String stepName = "Step";
    // Transformation 1: shared step — should receive the sync.
    TransMeta transformarion1 = createTransMeta();
    StepMeta step1 = createStepMeta( stepName, true );
    transformarion1.addStep( step1 );
    spoonDelegates.trans.addTransformation( transformarion1 );
    // Transformation 2: unshared step with the same name — must be untouched.
    TransMeta transformarion2 = createTransMeta();
    StepMeta unsharedStep2 = createStepMeta( stepName, false );
    transformarion2.addStep( unsharedStep2 );
    spoonDelegates.trans.addTransformation( transformarion2 );
    // Transformation 3: the shared step that is modified and synchronized.
    TransMeta transformarion3 = createTransMeta();
    StepMeta step3 = createStepMeta( stepName, true );
    transformarion3.addStep( step3 );
    spoonDelegates.trans.addTransformation( transformarion3 );
    step3.setDescription( AFTER_SYNC_VALUE );
    sharedUtil.synchronizeSteps( step3 );
    assertThat( step1.getDescription(), equalTo( AFTER_SYNC_VALUE ) );
    assertThat( unsharedStep2.getDescription(), equalTo( BEFORE_SYNC_VALUE ) );
}
/**
 * Tests whether the given coordinates fall inside the (multi)polygon described
 * by the polygon string, honoring holes: hole geometries are removed from the
 * outer polygons before the union, so a point inside a hole is reported as
 * outside.
 *
 * @param polygonInString JSON array describing one or more polygons
 * @param coordinates     the point to test (latitude/longitude)
 * @return true if the point lies inside the combined geometry
 * @throws IllegalArgumentException if the polygon string is blank
 */
public static synchronized boolean contains(@NonNull String polygonInString, @NonNull Coordinates coordinates) {
    // isBlank() subsumes isEmpty(); a null argument would already have thrown
    // NPE above, so the message no longer mentions null.
    if (polygonInString.isBlank()) {
        throw new IllegalArgumentException("Polygon string can't be empty or blank!");
    }
    JsonArray polygonsJson = normalizePolygonsJson(JsonParser.parseString(polygonInString).getAsJsonArray());
    List<Geometry> polygons = buildPolygonsFromJson(polygonsJson);
    // Holes are modeled as separate geometries; subtract them from the set.
    Set<Geometry> holes = extractHolesFrom(polygons);
    polygons.removeIf(holes::contains);
    Geometry globalGeometry = unionToGlobalGeometry(polygons, holes);
    var point = jtsCtx.getShapeFactory().getGeometryFactory()
        .createPoint(new Coordinate(coordinates.getLatitude(), coordinates.getLongitude()));
    return globalGeometry.contains(point);
}
// Covers non-convex ("sand clock") polygons with and without a central hole:
// points inside, near the border, inside the hole, and outside must all be
// classified correctly.
@Test public void testPointsInComplexPolygons() { Assertions.assertTrue(GeoUtil.contains(SAND_CLOCK, POINT_INSIDE_SAND_CLOCK_CENTER), "Polygon " + SAND_CLOCK + " must contain the dot " + POINT_INSIDE_SAND_CLOCK_CENTER ); Assertions.assertTrue(GeoUtil.contains(SAND_CLOCK, POINT_INSIDE_SAND_CLOCK_NEAR_BORDER), "Polygon " + SAND_CLOCK + " must contain the dot " + POINT_INSIDE_SAND_CLOCK_NEAR_BORDER ); Assertions.assertTrue(GeoUtil.contains(SAND_CLOCK_WITH_HOLE_IN_CENTER, POINT_INSIDE_SAND_CLOCK_NEAR_BORDER), "Polygon " + SAND_CLOCK_WITH_HOLE_IN_CENTER + " must contain the dot " + POINT_INSIDE_SAND_CLOCK_NEAR_BORDER ); Assertions.assertFalse(GeoUtil.contains(SAND_CLOCK, POINT_OUTSIDE_SAND_CLOCK_1), "Polygon " + SAND_CLOCK + " must not contain the dot " + POINT_OUTSIDE_SAND_CLOCK_1 ); Assertions.assertFalse(GeoUtil.contains(SAND_CLOCK, POINT_OUTSIDE_SAND_CLOCK_2), "Polygon " + SAND_CLOCK + " must not contain the dot " + POINT_OUTSIDE_SAND_CLOCK_2 ); Assertions.assertFalse(GeoUtil.contains(SAND_CLOCK_WITH_HOLE_IN_CENTER, POINT_INSIDE_SAND_CLOCK_CENTER), "Polygon " + SAND_CLOCK_WITH_HOLE_IN_CENTER + " must not contain the dot " + POINT_INSIDE_SAND_CLOCK_CENTER ); Assertions.assertFalse(GeoUtil.contains(SAND_CLOCK_WITH_HOLE_IN_CENTER, POINT_OUTSIDE_SAND_CLOCK_1), "Polygon " + SAND_CLOCK_WITH_HOLE_IN_CENTER + " must not contain the dot " + POINT_OUTSIDE_SAND_CLOCK_1 ); Assertions.assertFalse(GeoUtil.contains(SAND_CLOCK_WITH_HOLE_IN_CENTER, POINT_OUTSIDE_SAND_CLOCK_2), "Polygon " + SAND_CLOCK_WITH_HOLE_IN_CENTER + " must not contain the dot " + POINT_OUTSIDE_SAND_CLOCK_2 ); }
/**
 * Interceptor hook: before the load balancer resolves instances, filter the
 * supplier's instance list through the custom load-balancer service and skip
 * the original call with the filtered result.
 */
@Override public ExecuteContext before(ExecuteContext context) { Object object = context.getObject(); String serviceId = getServiceId(object).orElse(null); if (StringUtils.isBlank(serviceId)) { return context; } Object obj = context.getMemberFieldValue("serviceInstances"); if (obj instanceof Flux<?>) { List<Object> instances = getInstances((Flux<Object>) obj, object); // No instances to filter: fall through to the original behavior. if (CollectionUtils.isEmpty(instances)) { return context; } RequestData requestData = ThreadLocalUtils.getRequestData(); List<Object> targetInstances = loadBalancerService.getTargetInstances(serviceId, instances, requestData); // Replace the original call's result with the filtered instance list. context.skip(Flux.just(targetInstances)); } return context; }
// With an empty instance list the interceptor must not skip the call: the
// supplier still yields an empty (non-null) list.
@Test public void testBeforeWithEmptyInstances() { supplier.setServiceInstances(Collections.emptyList()); ThreadLocalUtils.setRequestData(new RequestData(Collections.emptyMap(), "", "")); interceptor.before(context); ServiceInstanceListSupplier supplier = (ServiceInstanceListSupplier) context.getObject(); List<ServiceInstance> instances = supplier.get().blockFirst(); Assert.assertNotNull(instances); Assert.assertEquals(0, instances.size()); }
/**
 * Runs the downstream chain under the Resilience4J rate limiter configured by
 * the rule; on rejection falls back (no fallback URI configured here), and any
 * remaining error is surfaced via the executor's no-fallback path.
 */
private Mono<Void> rateLimiter(final ServerWebExchange exchange, final ShenyuPluginChain chain, final RuleData rule) { return ratelimiterExecutor.run( chain.execute(exchange), fallback(ratelimiterExecutor, exchange, null), Resilience4JBuilder.build(rule)) .onErrorResume(throwable -> ratelimiterExecutor.withoutFallback(exchange, throwable)); }
// Drives the plugin with a chain that errors with RequestNotPermitted and
// expects the subscription to terminate with an error (rate limit rejection).
@Test @SuppressWarnings({"rawtypes", "unchecked"}) public void rateLimiterTest() { RuleData data = mock(RuleData.class); data.setSelectorId("SHENYU"); data.setId("SHENYU"); Resilience4JHandle resilience4JHandle = GsonUtils.getGson().fromJson(HANDLER, Resilience4JHandle.class); Resilience4JHandler.CACHED_HANDLE.get().cachedHandle(CacheKeyUtils.INST.getKey(data), resilience4JHandle); CombinedExecutor combinedExecutor = mock(CombinedExecutor.class); resilience4JPlugin = new Resilience4JPlugin(combinedExecutor, new RateLimiterExecutor()); Mono mono = Mono.error(RequestNotPermitted.createRequestNotPermitted(rateLimiter)).onErrorResume(Mono::error); when(data.getHandle()).thenReturn(HANDLER); when(chain.execute(exchange)).thenReturn(mono); SelectorData selectorData = mock(SelectorData.class); StepVerifier.create(resilience4JPlugin.doExecute(exchange, chain, selectorData, data)).expectSubscription().expectError().verify(); }
/**
 * Convenience overload: returns the namespace edits directories with
 * duplicate-checking enabled.
 */
public static List<URI> getNamespaceEditsDirs(Configuration conf) throws IOException { return getNamespaceEditsDirs(conf, true); }
// Three configured edit dirs contain one internal duplicate;
// getNamespaceEditsDirs must de-duplicate down to two URIs.
@Test public void testUniqueEditDirs() throws IOException { Configuration config = new Configuration(); config.set(DFS_NAMENODE_EDITS_DIR_KEY, "file://edits/dir, " + "file://edits/dir1,file://edits/dir1"); // overlapping internally // getNamespaceEditsDirs removes duplicates Collection<URI> editsDirs = FSNamesystem.getNamespaceEditsDirs(config); assertEquals(2, editsDirs.size()); }
/**
 * Fetches (or implicitly creates) the cluster-event collection, ensures the
 * compound (timestamp, producer, consumers) index exists, and sets a journaled
 * write concern so events survive a crash.
 */
@VisibleForTesting static DBCollection prepareCollection(final MongoConnection mongoConnection) { final DB db = mongoConnection.getDatabase(); DBCollection coll = db.getCollection(COLLECTION_NAME); coll.createIndex(DBSort .asc("timestamp") .asc("producer") .asc("consumers")); coll.setWriteConcern(WriteConcern.JOURNALED); return coll; }
// Dropping all secondary indexes leaves only the default _id index (size 1);
// prepareCollection must add the compound index back (size 2) and set the
// journaled write concern.
@Test public void prepareCollectionCreatesIndexesOnExistingCollection() throws Exception { @SuppressWarnings("deprecation") DBCollection original = mongoConnection.getDatabase().getCollection(ClusterEventPeriodical.COLLECTION_NAME); original.dropIndexes(); assertThat(original.getName()).isEqualTo(ClusterEventPeriodical.COLLECTION_NAME); assertThat(original.getIndexInfo()).hasSize(1); DBCollection collection = ClusterEventPeriodical.prepareCollection(mongoConnection); assertThat(collection.getName()).isEqualTo(ClusterEventPeriodical.COLLECTION_NAME); assertThat(collection.getIndexInfo()).hasSize(2); assertThat(collection.getWriteConcern()).isEqualTo(WriteConcern.JOURNALED); }
/** Creates a fresh tracker bound to this sampler. */
public ExecutionStateTracker create() { return new ExecutionStateTracker(); }
// error() should report true only once per activation for the most recently
// activated state; after reset(), the still-active state1 reports the error.
@Test public void testErrorState() throws Exception { MillisProvider clock = mock(MillisProvider.class); ExecutionStateSampler sampler = new ExecutionStateSampler( PipelineOptionsFactory.fromArgs("--experiments=state_sampling_period_millis=10") .create(), clock); ExecutionStateTracker tracker = sampler.create(); ExecutionState state1 = tracker.create("shortId1", "ptransformId1", "ptransformIdName1", "process"); ExecutionState state2 = tracker.create("shortId2", "ptransformId2", "ptransformIdName2", "process"); state1.activate(); state2.activate(); assertTrue(state2.error()); assertFalse(state2.error()); state2.deactivate(); assertFalse(state2.error()); tracker.reset(); assertTrue(state1.error()); }
// Hash is derived from both the original block and its duplicates;
// deepHashCode also handles nested array contents correctly.
@Override public int hashCode() { return Arrays.deepHashCode(new Object[] {original, duplicates}); }
// Renamed: the assertions below verify that the hash depends on BOTH the
// original block and the duplicates (different duplicates or swapped
// original/duplicates produce different hashes), contradicting the old
// "original only" name.
@Test
public void hashcode_is_based_on_original_and_duplicates() {
    Duplication duplication = new Duplication(SOME_ORIGINAL_TEXTBLOCK, Arrays.asList(new InnerDuplicate(TEXT_BLOCK_1)));

    // Identical original + duplicates => identical hash.
    assertThat(duplication).hasSameHashCodeAs(new Duplication(SOME_ORIGINAL_TEXTBLOCK, Arrays.asList(new InnerDuplicate(TEXT_BLOCK_1))));
    // Different duplicates change the hash.
    assertThat(duplication.hashCode()).isNotEqualTo(new Duplication(SOME_ORIGINAL_TEXTBLOCK, Arrays.asList(new InnerDuplicate(TEXT_BLOCK_2))).hashCode());
    // Swapping original and duplicate changes the hash.
    assertThat(duplication.hashCode()).isNotEqualTo(new Duplication(TEXT_BLOCK_1, Arrays.asList(new InnerDuplicate(SOME_ORIGINAL_TEXTBLOCK))).hashCode());
}
/**
 * Updates a tenant after validating existence, name/website uniqueness and
 * that the target package is enabled; if the package changed, the tenant's
 * role menu permissions are refreshed accordingly.
 */
@Override @DSTransactional // multiple data sources: @DSTransactional guarantees the local transaction and the data-source switch public void updateTenant(TenantSaveReqVO updateReqVO) { // validate the tenant exists TenantDO tenant = validateUpdateTenant(updateReqVO.getId()); // validate the tenant name is not duplicated validTenantNameDuplicate(updateReqVO.getName(), updateReqVO.getId()); // validate the tenant website is not duplicated validTenantWebsiteDuplicate(updateReqVO.getWebsite(), updateReqVO.getId()); // validate the target package is not disabled TenantPackageDO tenantPackage = tenantPackageService.validTenantPackage(updateReqVO.getPackageId()); // update the tenant record TenantDO updateObj = BeanUtils.toBean(updateReqVO, TenantDO.class); tenantMapper.updateById(updateObj); // if the package changed, update the tenant roles' menu permissions if (ObjectUtil.notEqual(tenant.getPackageId(), updateReqVO.getPackageId())) { updateTenantRoleMenu(tenant.getId(), tenantPackage.getMenuIds()); } }
// A tenant on the reserved system package must not be updatable: expect the
// TENANT_CAN_NOT_UPDATE_SYSTEM business exception.
@Test public void testUpdateTenant_system() { // mock data TenantDO dbTenant = randomPojo(TenantDO.class, o -> o.setPackageId(PACKAGE_ID_SYSTEM)); tenantMapper.insert(dbTenant);// @Sql: insert an existing record first // prepare the request TenantSaveReqVO reqVO = randomPojo(TenantSaveReqVO.class, o -> { o.setId(dbTenant.getId()); // set the ID being updated }); // invoke and assert the business exception assertServiceException(() -> tenantService.updateTenant(reqVO), TENANT_CAN_NOT_UPDATE_SYSTEM); }
/**
 * Computes the HMAC-SHA256 of the value with the given key and returns it as
 * a lowercase hex string.
 */
public static String hmacSha256Hex(final String key, final String valueToDigest) { return getHmacHex(HmacAlgorithms.HMAC_SHA_256, key, valueToDigest); }
// Pins the HMAC-SHA256 hex digest of a known key/value pair.
// Fixed: JUnit assertEquals takes (expected, actual) — the arguments were swapped,
// which produced a misleading failure message on regression.
@Test
public void testHmacSha256Hex() {
    assertEquals("c52d1ebe5e779f5b337dc8f515bf594bd44a7007cb3f4ab1f6c5a15149bed793",
        HmacHexUtils.hmacSha256Hex("testKey", "testValue"));
}
/**
 * Accumulates a new sample: records the entity identity and appends the
 * incoming data table into the running total.
 */
@Override public void accept(MeterEntity entity, DataTable value) { setEntityId(entity.id()); setServiceId(entity.serviceId()); this.total.append(value); }
// After one sample the value equals table1; after a second accept the value
// equals the two tables appended. NOTE(review): the final assertion calls
// table1.append(table2), which presumably mutates table1 — confirm append
// semantics if this test is refactored.
@Test public void testAccept() { function.accept(MeterEntity.newService("sum_sync_time", Layer.GENERAL), table1); function.calculate(); assertThat(function.getValue()).isEqualTo(table1); function.accept(MeterEntity.newService("sum_sync_time", Layer.GENERAL), table2); function.calculate(); assertThat(function.getValue()).isEqualTo(table1.append(table2)); }
/**
 * Adds a source node consuming the given topics with default offset reset,
 * timestamp extractor and deserializers (the nulls delegate those choices to
 * the topology's configuration). Returns this topology for chaining.
 */
public synchronized Topology addSource(final String name, final String... topics) { internalTopologyBuilder.addSource(null, name, null, null, null, topics); return this; }
// A null topics varargs array must be rejected with a NullPointerException.
@Test public void shouldNotAllowNullTopicsWhenAddingSourceWithTopic() { assertThrows(NullPointerException.class, () -> topology.addSource("source", (String[]) null)); }
/** Creates a new HttpClient backed by a fresh default connection provider. */
public static HttpClient create() { return new HttpClientConnect(new HttpConnectionProvider()); }
// Exercises the shared client-context scenario against a pooled (default) client.
@Test void testClientContext_WithPool() throws Exception { doTestClientContext(HttpClient.create()); }
/**
 * Returns a ThreadFactory for the given pool name, lazily creating and caching
 * a PooledFactory per name. Synchronized to keep the cache consistent.
 */
public static synchronized ThreadFactory getThreadFactory(String name) {
    // computeIfAbsent replaces the manual get/null-check/put sequence.
    return factory.computeIfAbsent(name, PooledFactory::new).getFactory(false);
}
// Thread names follow "<pool>-<factoryIndex>-thread-<threadIndex>": the
// factory index increments per getThreadFactory call per pool name, and the
// thread index increments per newThread call on the same factory instance.
@Test public void requireThatFactoryCreatesCorrectlyNamedThreads() { Thread thread = ThreadFactoryFactory.getThreadFactory("a").newThread(new Runner()); assertEquals("a-1-thread-1", thread.getName()); thread = ThreadFactoryFactory.getThreadFactory("a").newThread(new Runner()); assertEquals("a-2-thread-1", thread.getName()); thread = ThreadFactoryFactory.getThreadFactory("b").newThread(new Runner()); assertEquals("b-1-thread-1", thread.getName()); ThreadFactory factory = ThreadFactoryFactory.getThreadFactory("a"); thread = factory.newThread(new Runner()); assertEquals("a-3-thread-1", thread.getName()); thread = factory.newThread(new Runner()); assertEquals("a-3-thread-2", thread.getName()); thread = factory.newThread(new Runner()); assertEquals("a-3-thread-3", thread.getName()); }
/**
 * Queues a merge task for the given config coordinates if merging is allowed
 * in the current deployment mode (see canExecute()); otherwise it is a no-op.
 */
public void addMergeTask(String dataId, String groupId, String tenant, String clientIp) { if (!canExecute()) { return; } MergeDataTask task = new MergeDataTask(dataId, groupId, tenant, clientIp); mergeTasks.addTask(task.getId(), task); }
// Embedded storage + standalone mode: addMergeTask must still enqueue exactly
// one MergeDataTask on the task manager.
@Test void testAddMergeTaskEmbeddedAndStandAloneModel() { DatasourceConfiguration.setEmbeddedStorage(true); envUtilMockedStatic.when(() -> EnvUtil.getStandaloneMode()).thenReturn(true); TaskManager mockTasker = Mockito.mock(TaskManager.class); ReflectionTestUtils.setField(mergeDatumService, "mergeTasks", mockTasker); String dataId = "dataId12345"; String group = "group123"; String tenant = "tenant1234"; String clientIp = "127.0.0.1"; mergeDatumService.addMergeTask(dataId, group, tenant, clientIp); Mockito.verify(mockTasker, times(1)).addTask(anyString(), any(MergeDataTask.class)); }
/**
 * Generates code for the given table: validates the table and its columns
 * exist; for master/sub templates additionally loads and validates every sub
 * table and its join column; then delegates to the codegen engine.
 *
 * @return map of generated file path to file content
 */
@Override public Map<String, String> generationCodes(Long tableId) { // validate the table exists CodegenTableDO table = codegenTableMapper.selectById(tableId); if (table == null) { throw exception(CODEGEN_TABLE_NOT_EXISTS); } List<CodegenColumnDO> columns = codegenColumnMapper.selectListByTableId(tableId); if (CollUtil.isEmpty(columns)) { throw exception(CODEGEN_COLUMN_NOT_EXISTS); } // for a master table, load the corresponding sub-table information List<CodegenTableDO> subTables = null; List<List<CodegenColumnDO>> subColumnsList = null; if (CodegenTemplateTypeEnum.isMaster(table.getTemplateType())) { // validate the sub tables exist subTables = codegenTableMapper.selectListByTemplateTypeAndMasterTableId( CodegenTemplateTypeEnum.SUB.getType(), tableId); if (CollUtil.isEmpty(subTables)) { throw exception(CODEGEN_MASTER_GENERATION_FAIL_NO_SUB_TABLE); } // validate each sub table's join column exists subColumnsList = new ArrayList<>(); for (CodegenTableDO subTable : subTables) { List<CodegenColumnDO> subColumns = codegenColumnMapper.selectListByTableId(subTable.getId()); if (CollUtil.findOne(subColumns, column -> column.getId().equals(subTable.getSubJoinColumnId())) == null) { throw exception(CODEGEN_SUB_COLUMN_NOT_EXISTS, subTable.getId()); } subColumnsList.add(subColumns); } } // run the generation return codegenEngine.execute(table, columns, subTables, subColumnsList); }
// A master-template table without any sub tables must fail generation with
// CODEGEN_MASTER_GENERATION_FAIL_NO_SUB_TABLE.
@Test public void testGenerationCodes_sub_tableNotExists() { // mock data (CodegenTableDO) CodegenTableDO table = randomPojo(CodegenTableDO.class, o -> o.setScene(CodegenSceneEnum.ADMIN.getScene()) .setTemplateType(CodegenTemplateTypeEnum.MASTER_NORMAL.getType())); codegenTableMapper.insert(table); // mock data (CodegenColumnDO) CodegenColumnDO column01 = randomPojo(CodegenColumnDO.class, o -> o.setTableId(table.getId())); codegenColumnMapper.insert(column01); // prepare the argument Long tableId = table.getId(); // invoke and assert assertServiceException(() -> codegenService.generationCodes(tableId), CODEGEN_MASTER_GENERATION_FAIL_NO_SUB_TABLE); }
/**
 * Dispatches authorization checks by statement type (queries, inserts,
 * create-as-select, print-topic, create-source). Statement types not listed
 * here require no topic-level authorization and pass through silently.
 */
@Override public void checkAuthorization( final KsqlSecurityContext securityContext, final MetaStore metaStore, final Statement statement ) { if (statement instanceof Query) { validateQuery(securityContext, metaStore, (Query)statement); } else if (statement instanceof InsertInto) { validateInsertInto(securityContext, metaStore, (InsertInto)statement); } else if (statement instanceof CreateAsSelect) { validateCreateAsSelect(securityContext, metaStore, (CreateAsSelect)statement); } else if (statement instanceof PrintTopic) { validatePrintTopic(securityContext, (PrintTopic)statement); } else if (statement instanceof CreateSource) { validateCreateSource(securityContext, (CreateSource)statement); } }
// A join whose left-side topic lacks READ permission must fail authorization
// and name the denied topic in the message.
@Test public void shouldThrowWhenJoinWitOneLeftTopicWithReadPermissionsDenied() { // Given: givenTopicAccessDenied(KAFKA_TOPIC, AclOperation.READ); final Statement statement = givenStatement(String.format( "SELECT * FROM %s A JOIN %s B ON A.F1 = B.F1;", KAFKA_STREAM_TOPIC, AVRO_STREAM_TOPIC) ); // When: final Exception e = assertThrows( KsqlTopicAuthorizationException.class, () -> authorizationValidator.checkAuthorization(securityContext, metaStore, statement) ); // Then: assertThat(e.getMessage(), containsString(String.format( "Authorization denied to Read on topic(s): [%s]", KAFKA_TOPIC ))); }
/** Returns the detected CI system's name. */
@Override public String getCiName() { return ciName; }
// The configured CI name is returned verbatim.
@Test public void getName_for_detected_ci() { assertThat(new CiConfigurationImpl(null, "test").getCiName()).isEqualTo("test"); }
/**
 * FEEL decimal(n, scale): rounds n to the given scale using banker's rounding
 * (HALF_EVEN). Null arguments and an out-of-range scale are reported as FEEL
 * invalid-parameter errors rather than thrown.
 */
public FEELFnResult<BigDecimal> invoke(@ParameterName( "n" ) BigDecimal n, @ParameterName( "scale" ) BigDecimal scale) { if ( n == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "n", "cannot be null")); } if ( scale == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "scale", "cannot be null")); } // Based on Table 76: Semantics of numeric functions, the scale is in range −6111 .. 6176 if (scale.compareTo(BigDecimal.valueOf(-6111)) < 0 || scale.compareTo(BigDecimal.valueOf(6176)) > 0) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "scale", "must be in range between -6111 to 6176.")); } return FEELFnResult.ofResult( n.setScale( scale.intValue(), RoundingMode.HALF_EVEN ) ); }
// 10.24 rounded to one decimal place rounds down to 10.2.
@Test void invokeRoundingDown() { FunctionTestUtil.assertResult(decimalFunction.invoke(BigDecimal.valueOf(10.24), BigDecimal.ONE), BigDecimal.valueOf(10.2)); }
/**
 * Puts the mapping only if the key is absent, keeping the inverse map in sync.
 *
 * Fixed: the inverse map was updated unconditionally, so calling
 * putIfAbsent with an already-present key polluted the inverse map with a
 * value->key entry that the forward map never accepted. The inverse is now
 * only updated when the forward put actually took effect.
 *
 * @return the previous value for the key, or null if the mapping was added
 */
@Override
public V putIfAbsent(K key, V value) {
    final V previous = super.putIfAbsent(key, value);
    if (null == previous && null != this.inverse) {
        this.inverse.putIfAbsent(value, key);
    }
    return previous;
}
// putIfAbsent on a missing key must add the mapping to both directions of the BiMap.
@Test
public void putIfAbsentTest(){
    final BiMap<String, Integer> biMap = new BiMap<>(new HashMap<>());
    biMap.put("aaa", 111);
    biMap.put("bbb", 222);

    biMap.putIfAbsent("ccc", 333);
    // Integer.valueOf replaces the deprecated new Integer(...) boxing constructor.
    assertEquals(Integer.valueOf(333), biMap.get("ccc"));
    assertEquals("ccc", biMap.getKey(333));
}
/**
 * Creates a reader for a bounded split by delegating to the wrapped Beam
 * BoundedSource with the configured pipeline options.
 */
@Override protected Source.Reader<T> createReader(@Nonnull FlinkSourceSplit<T> sourceSplit) throws IOException { Source<T> beamSource = sourceSplit.getBeamSplitSource(); return ((BoundedSource<T>) beamSource).createReader(pipelineOptions); }
// Reads half the records, snapshots, then restores a fresh reader from the
// snapshot and drains the remainder — all records must be seen exactly once.
@Test public void testSnapshotStateAndRestore() throws Exception { final int numSplits = 2; final int numRecordsPerSplit = 10; List<FlinkSourceSplit<KV<Integer, Integer>>> splits = createSplits(numSplits, numRecordsPerSplit, 0); RecordsValidatingOutput validatingOutput = new RecordsValidatingOutput(splits); List<FlinkSourceSplit<KV<Integer, Integer>>> snapshot; // Create a reader, take a snapshot. try (SourceReader<WindowedValue<KV<Integer, Integer>>, FlinkSourceSplit<KV<Integer, Integer>>> reader = createReader()) { // Only poll half of the records in the first split. pollAndValidate(reader, splits, validatingOutput, numRecordsPerSplit / 2); snapshot = reader.snapshotState(0L); } // Create another reader, add the snapshot splits back. try (SourceReader<WindowedValue<KV<Integer, Integer>>, FlinkSourceSplit<KV<Integer, Integer>>> reader = createReader()) { pollAndValidate(reader, snapshot, validatingOutput, Integer.MAX_VALUE); } }
/**
 * Positioned read that prefers Alluxio's page cache. When per-thread caching
 * is disabled, reads go straight to the external stream (with metrics). When
 * a cache read fails and fallback is enabled, the read is retried against the
 * external stream; otherwise the failure propagates.
 *
 * @return the number of bytes read
 */
@Override public int positionedRead(long pos, byte[] b, int off, int len) throws IOException { if (!CachePerThreadContext.get().getCacheEnabled()) { MetricsSystem.meter(MetricKey.CLIENT_CACHE_BYTES_REQUESTED_EXTERNAL.getName()) .mark(len); MetricsSystem.counter(MetricKey.CLIENT_CACHE_EXTERNAL_REQUESTS.getName()).inc(); len = getExternalFileInStream().positionedRead(pos, b, off, len); MultiDimensionalMetricsSystem.EXTERNAL_DATA_READ.inc(len); return len; } try { return readInternal(new ByteArrayTargetBuffer(b, off), off, len, ReadType.READ_INTO_BYTE_ARRAY, pos, true); } catch (IOException | RuntimeException e) { // Cache path failed; optionally fall back to the external stream. LOG.warn("Failed to read from Alluxio's page cache.", e); if (mFallbackEnabled) { MetricsSystem.counter(MetricKey.CLIENT_CACHE_POSITION_READ_FALLBACK.getName()).inc(); len = getExternalFileInStream().positionedRead(pos, b, off, len); MultiDimensionalMetricsSystem.EXTERNAL_DATA_READ.inc(len); return len; } throw e; } }
// With fallback enabled, a failing page-cache read must be served from the
// external stream and bump the fallback counter.
// NOTE(review): positionedRead(0, new byte[10], 100, 100) passes an offset/len
// beyond the buffer — presumably intended to force the cache path to fail;
// confirm against readInternal's argument handling.
@Test public void testPageDataFileCorrupted() throws Exception { int pages = 10; int fileSize = mPageSize * pages; byte[] testData = BufferUtils.getIncreasingByteArray(fileSize); ByteArrayCacheManager manager = new ByteArrayCacheManager(); //by default local cache fallback is not enabled, the read should fail for any error LocalCacheFileInStream streamWithOutFallback = setupWithSingleFile(testData, manager); sConf.set(PropertyKey.USER_CLIENT_CACHE_FALLBACK_ENABLED, true); LocalCacheFileInStream streamWithFallback = setupWithSingleFile(testData, manager); Assert.assertEquals(100, streamWithFallback.positionedRead(0, new byte[10], 100, 100)); Assert.assertEquals(1, MetricsSystem.counter(MetricKey.CLIENT_CACHE_POSITION_READ_FALLBACK.getName()).getCount()); }
/**
 * Discovers the unique Factory of the requested type and identifier on the
 * classpath. Fails with a descriptive ValidationException when no factory of
 * the type exists, when no factory matches the identifier (listing the
 * available identifiers), or when the identifier is ambiguous (listing the
 * clashing classes).
 */
@SuppressWarnings("unchecked") public static <T extends Factory> T discoverFactory( ClassLoader classLoader, Class<T> factoryClass, String factoryIdentifier) { final List<Factory> factories = discoverFactories(classLoader); // Narrow to factories implementing the requested type. final List<Factory> foundFactories = factories.stream() .filter(f -> factoryClass.isAssignableFrom(f.getClass())) .collect(Collectors.toList()); if (foundFactories.isEmpty()) { throw new ValidationException( String.format( "Could not find any factories that implement '%s' in the classpath.", factoryClass.getName())); } // Narrow further to the requested identifier. final List<Factory> matchingFactories = foundFactories.stream() .filter(f -> f.factoryIdentifier().equals(factoryIdentifier)) .collect(Collectors.toList()); if (matchingFactories.isEmpty()) { throw new ValidationException( String.format( "Could not find any factory for identifier '%s' that implements '%s' in the classpath.\n\n" + "Available factory identifiers are:\n\n" + "%s", factoryIdentifier, factoryClass.getName(), foundFactories.stream() .map(Factory::factoryIdentifier) .filter(identifier -> !DEFAULT_IDENTIFIER.equals(identifier)) .distinct() .sorted() .collect(Collectors.joining("\n")))); } if (matchingFactories.size() > 1) { throw new ValidationException( String.format( "Multiple factories for identifier '%s' that implement '%s' found in the classpath.\n\n" + "Ambiguous factory classes are:\n\n" + "%s", factoryIdentifier, factoryClass.getName(), matchingFactories.stream() .map(f -> f.getClass().getName()) .sorted() .collect(Collectors.joining("\n")))); } return (T) matchingFactories.get(0); }
// Discovering factories through an already-closed user-code classloader must
// surface the leaked-classloader IllegalStateException in the cause chain.
@Test void testDiscoverFactoryFromClosedClassLoader() throws Exception { MutableURLClassLoader classLoader = FlinkUserCodeClassLoaders.create( new URL[0], FactoryUtilTest.class.getClassLoader(), new Configuration()); classLoader.close(); assertThatThrownBy(() -> FactoryUtil.discoverFactory(classLoader, Factory.class, "test")) .satisfies( FlinkAssertions.anyCauseMatches( IllegalStateException.class, "Trying to access closed classloader. Please check if you store classloaders directly " + "or indirectly in static fields. If the stacktrace suggests that the leak occurs in a third " + "party library and cannot be fixed immediately, you can disable this check with the " + "configuration 'classloader.check-leaked-classloader'")); }
/**
 * FEEL matches(input, pattern, flags): returns whether the pattern matches
 * anywhere in the input. Supported flags are validated by checkFlags; the "U"
 * flag is always appended (when flags are given) and the combined flags are
 * embedded inline as a "(?flags)" prefix. Null input/pattern raise
 * InvalidParameterException.
 */
static FEELFnResult<Boolean> matchFunctionWithFlags(String input, String pattern, String flags) { log.debug("Input: {} , Pattern: {}, Flags: {}", input, pattern, flags); if ( input == null ) { throw new InvalidParameterException("input"); } if ( pattern == null ) { throw new InvalidParameterException("pattern"); } final String flagsString; if (flags != null && !flags.isEmpty()) { checkFlags(flags); // Ensure "U" is present, then render flags as an inline-pattern prefix. if(!flags.contains("U")){ flags += "U"; } flagsString = String.format("(?%s)", flags); } else { flagsString = ""; } log.debug("flagsString: {}", flagsString); String stringToBeMatched = flagsString + pattern; log.debug("stringToBeMatched: {}", stringToBeMatched); Pattern p=Pattern.compile(stringToBeMatched); Matcher m = p.matcher( input ); boolean matchFound=m.find(); log.debug("matchFound: {}", matchFound); return FEELFnResult.ofResult(matchFound); }
// Flags outside the supported set (g, p, X, whitespace, or a combination
// containing an unsupported flag) must be rejected with IllegalArgumentException.
@Test void invokeUnsupportedFlags() { assertThrows(IllegalArgumentException.class, () -> MatchesFunction.matchFunctionWithFlags("foobar", "fo.bar", "g")); assertThrows(IllegalArgumentException.class, () -> MatchesFunction.matchFunctionWithFlags("abracadabra", "bra", "p")); assertThrows(IllegalArgumentException.class, () -> MatchesFunction.matchFunctionWithFlags("abracadabra", "bra", "X")); assertThrows(IllegalArgumentException.class, () -> MatchesFunction.matchFunctionWithFlags("abracadabra", "bra", " ")); assertThrows(IllegalArgumentException.class, () -> MatchesFunction.matchFunctionWithFlags("abracadabra", "bra", "iU")); }
/**
 * Precondition guard: throws IllegalArgumentException with the supplied
 * message when the expression is false; otherwise does nothing.
 *
 * @param expression   the condition that must hold
 * @param errorMessage the exception message used on failure
 */
public static void checkTrue(boolean expression, String errorMessage) {
    if (expression) {
        return;
    }
    throw new IllegalArgumentException(errorMessage);
}
// checkTrue(false, msg) must throw IllegalArgumentException carrying the very
// same message instance (assertSame, not just equals).
@Test public void test_checkTrue_whenFalse() { String errorMessage = "foobar"; try { checkTrue(false, errorMessage); fail(); } catch (IllegalArgumentException e) { assertSame(errorMessage, e.getMessage()); } }
/**
 * Returns the most recent backup replay file if one exists, otherwise creates
 * a new one. IO failures are wrapped in a RuntimeException.
 */
@VisibleForTesting BackupReplayFile openOrCreateReplayFile() { try { final Optional<BackupReplayFile> backupReplayFile = latestReplayFile(); if (backupReplayFile.isPresent()) { return backupReplayFile.get(); } return newReplayFile(); } catch (final IOException e) { throw new RuntimeException(e); } }
// When backup files for other command topics are present, only the file
// matching this backup's command topic must be opened.
@Test public void shouldOpenLatestReplayFileWhenDifferentCommandTopicNamesExist() throws IOException { // Given: backupLocation.newFile("backup_command_topic_111"); backupLocation.newFile("backup_other_command_topic_222"); // When: final BackupReplayFile replayFile = commandTopicBackup.openOrCreateReplayFile(); // Then: assertThat(replayFile.getPath(), is(String.format( "%s/backup_command_topic_111", backupLocation.getRoot().getAbsolutePath() ))); }
/**
 * Netty entry point: delegates to the channel-based overload with broker-side
 * processing enabled (the trailing true flag).
 */
@Override public RemotingCommand processRequest(final ChannelHandlerContext ctx, RemotingCommand request) throws RemotingCommandException { return this.processRequest(ctx.channel(), request, true); }
// Both a negative queue id and one beyond the valid range must be rejected as
// MESSAGE_ILLEGAL with the offending id named in the remark.
@Test public void testSingleAck_QueueCheck() throws RemotingCommandException { { int qId = -1; AckMessageRequestHeader requestHeader = new AckMessageRequestHeader(); requestHeader.setTopic(topic); requestHeader.setQueueId(qId); requestHeader.setOffset(0L); RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.ACK_MESSAGE, requestHeader); request.makeCustomHeaderToNet(); RemotingCommand response = ackMessageProcessor.processRequest(handlerContext, request); assertThat(response.getCode()).isEqualTo(ResponseCode.MESSAGE_ILLEGAL); assertThat(response.getRemark()).contains("queueId[" + qId + "] is illegal"); } { int qId = 17; AckMessageRequestHeader requestHeader = new AckMessageRequestHeader(); requestHeader.setTopic(topic); requestHeader.setQueueId(qId); requestHeader.setOffset(0L); RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.ACK_MESSAGE, requestHeader); request.makeCustomHeaderToNet(); RemotingCommand response = ackMessageProcessor.processRequest(handlerContext, request); assertThat(response.getCode()).isEqualTo(ResponseCode.MESSAGE_ILLEGAL); assertThat(response.getRemark()).contains("queueId[" + qId + "] is illegal"); } }
/**
 * HTTP entry point: dispatches GET/DELETE/PUT to dedicated handlers, rejects
 * other methods with 405, maps IllegalArgumentException to 400, and logs and
 * wraps any other runtime failure as 500.
 */
@Override public HttpResponse handle(HttpRequest request) { try { switch (request.getMethod()) { case GET: return handleGET(request); case DELETE: return handleDELETE(request); case PUT: return handlePUT(request); default: return ErrorResponse.methodNotAllowed("Method '" + request.getMethod() + "' is not supported"); } } catch (IllegalArgumentException e) { return ErrorResponse.badRequest(Exceptions.toMessageString(e)); } catch (RuntimeException e) { log.log(Level.WARNING, "Unexpected error handling '" + request.getUri() + "'", e); return ErrorResponse.internalServerError(Exceptions.toMessageString(e)); } }
// PUT on an unknown flag fails unless force=true; even forced PUTs still
// validate the payload (empty body, missing flag id, wrong value type), and a
// well-formed forced PUT succeeds with an empty 200 body.
@Test void testForcing() { assertTrue(handle(Method.PUT, "/data/" + new FlagId("undef"), "", 400).contains("There is no flag 'undef'")); assertTrue(handle(Method.PUT, "/data/" + new FlagId("undef") + "?force=true", "", 400). contains("No content to map due to end-of-input")); assertTrue(handle(Method.PUT, "/data/" + FLAG1.id(), "{}", 400). contains("Flag ID missing")); assertTrue(handle(Method.PUT, "/data/" + FLAG1.id(), "{\"id\": \"id1\",\"rules\": [{\"value\":\"string\"}]}", 400). contains("Wrong type of JsonNode: STRING")); assertEquals(handle(Method.PUT, "/data/" + FLAG1.id() + "?force=true", "{\"id\": \"id1\",\"rules\": [{\"value\":\"string\"}]}", 200), ""); }
/**
 * Returns the lazily-initialized singleton registry. Synchronized on a
 * dedicated lock so concurrent first calls create exactly one instance.
 */
public static BackgroundActionRegistry global() { synchronized(lock) { if(null == global) { global = new BackgroundActionRegistry(); } return global; } }
// global() must always hand back the same singleton instance.
@Test public void testGlobal() { assertSame(BackgroundActionRegistry.global(), BackgroundActionRegistry.global()); }
// TINYINT -> INTEGER cast. Both are carried as Java long in the engine, and
// every tinyint value fits in an integer, so this is a plain pass-through.
@ScalarOperator(CAST) @SqlType(StandardTypes.INTEGER) public static long castToInteger(@SqlType(StandardTypes.TINYINT) long value) { return value; }
// Casting tinyint literals to integer must preserve the value.
@Test public void testCastToInteger() { assertFunction("cast(TINYINT'37' as integer)", INTEGER, 37); assertFunction("cast(TINYINT'17' as integer)", INTEGER, 17); }
/**
 * CLI command "diff file": prints how a single file (by file ID) changed
 * across a range of commits, optionally including the archived timeline, with
 * limit/sort/order/header-only formatting options.
 */
@ShellMethod(key = "diff file", value = "Check how file differs across range of commits") public String diffFile( @ShellOption(value = {"--fileId"}, help = "File ID to diff across range of commits") String fileId, @ShellOption(value = {"--startTs"}, help = "start time for compactions, default: now - 10 days", defaultValue = ShellOption.NULL) String startTs, @ShellOption(value = {"--endTs"}, help = "end time for compactions, default: now - 1 day", defaultValue = ShellOption.NULL) String endTs, @ShellOption(value = {"--limit"}, help = "Limit compactions", defaultValue = "-1") final Integer limit, @ShellOption(value = {"--sortBy"}, help = "Sorting Field", defaultValue = "") final String sortByField, @ShellOption(value = {"--desc"}, help = "Ordering", defaultValue = "false") final boolean descending, @ShellOption(value = {"--headeronly"}, help = "Print Header Only", defaultValue = "false") final boolean headerOnly, @ShellOption(value = {"--includeArchivedTimeline"}, help = "Include archived commits as well", defaultValue = "false") final boolean includeArchivedTimeline) throws IOException { HoodieDefaultTimeline timeline = CLIUtils.getTimelineInRange(startTs, endTs, includeArchivedTimeline); return printCommitsWithMetadataForFileId(timeline, limit, sortByField, descending, headerOnly, "", fileId); }
// End-to-end: creates a COW table with four commits touching two file IDs,
// runs "diff file" for the first ID, and compares the normalized output to
// the expected per-commit metadata for that file.
@Test public void testDiffFile() throws Exception { // create COW table. new TableCommand().createTable( tablePath, tableName, HoodieTableType.COPY_ON_WRITE.name(), "", TimelineLayoutVersion.VERSION_1, HoodieAvroPayload.class.getName()); StorageConfiguration<?> conf = HoodieCLI.conf; HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient(); String fileId1 = UUID.randomUUID().toString(); String fileId2 = UUID.randomUUID().toString(); HoodieStorage storage = HoodieStorageUtils.getStorage(basePath(), storageConf()); HoodieTestDataGenerator.writePartitionMetadataDeprecated(storage, HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS, tablePath); // Create four commits Set<String> commits = new HashSet<>(); for (int i = 100; i < 104; i++) { String timestamp = String.valueOf(i); commits.add(timestamp); // Requested Compaction HoodieTestCommitMetadataGenerator.createCompactionAuxiliaryMetadata(tablePath, new HoodieInstant(HoodieInstant.State.REQUESTED, HoodieTimeline.COMPACTION_ACTION, timestamp), conf); // Inflight Compaction HoodieTestCommitMetadataGenerator.createCompactionAuxiliaryMetadata(tablePath, new HoodieInstant(HoodieInstant.State.INFLIGHT, HoodieTimeline.COMPACTION_ACTION, timestamp), conf); Map<String, String> extraCommitMetadata = Collections.singletonMap(HoodieCommitMetadata.SCHEMA_KEY, HoodieTestTable.PHONY_TABLE_SCHEMA); HoodieTestCommitMetadataGenerator.createCommitFileWithMetadata(tablePath, timestamp, conf, fileId1, fileId2, Option.empty(), Option.empty(), extraCommitMetadata, false); } HoodieTableMetaClient.reload(metaClient); Object result = shell.evaluate(() -> String.format("diff file --fileId %s", fileId1)); assertTrue(ShellEvaluationResultUtil.isSuccess(result)); String expected = generateExpectDataWithExtraMetadata(commits, fileId1, HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH); expected = removeNonWordAndStripSpace(expected); String got = removeNonWordAndStripSpace(result.toString()); assertEquals(expected, got); }
/**
 * Exports the lookup data adapter identified by the descriptor as a content
 * pack entity; empty when no adapter with that ID exists.
 */
@Override public Optional<Entity> exportEntity(EntityDescriptor entityDescriptor, EntityDescriptorIds entityDescriptorIds) { final ModelId modelId = entityDescriptor.id(); return dataAdapterService.get(modelId.id()).map(dataAdapterDto -> exportNativeEntity(dataAdapterDto, entityDescriptorIds)); }
// Exports the fixture's data adapter and checks the entity envelope (id, type,
// V1 format) as well as the adapter payload fields (name, title, description).
@Test @MongoDBFixtures("LookupDataAdapterFacadeTest.json") public void exportEntityDescriptor() { final EntityDescriptor descriptor = EntityDescriptor.create("5adf24a04b900a0fdb4e52c8", ModelTypes.LOOKUP_ADAPTER_V1); final EntityDescriptorIds entityDescriptorIds = EntityDescriptorIds.of(descriptor); final Entity entity = facade.exportEntity(descriptor, entityDescriptorIds).orElseThrow(AssertionError::new); assertThat(entity).isInstanceOf(EntityV1.class); assertThat(entity.id()).isEqualTo(ModelId.of(entityDescriptorIds.get(descriptor).orElse(null))); assertThat(entity.type()).isEqualTo(ModelTypes.LOOKUP_ADAPTER_V1); final EntityV1 entityV1 = (EntityV1) entity; final LookupDataAdapterEntity lookupDataAdapterEntity = objectMapper.convertValue(entityV1.data(), LookupDataAdapterEntity.class); assertThat(lookupDataAdapterEntity.name()).isEqualTo(ValueReference.of("http-dsv")); assertThat(lookupDataAdapterEntity.title()).isEqualTo(ValueReference.of("HTTP DSV")); assertThat(lookupDataAdapterEntity.description()).isEqualTo(ValueReference.of("HTTP DSV")); }
/**
 * Delivers the payload to the webhook's URL and records the outcome. Never
 * throws: invalid URLs and transport failures are captured on the delivery
 * record (error + duration) instead. Credentials in the effective URL are
 * obfuscated before being stored.
 */
@Override public WebhookDelivery call(Webhook webhook, WebhookPayload payload) { WebhookDelivery.Builder builder = new WebhookDelivery.Builder(); long startedAt = system.now(); builder .setAt(startedAt) .setPayload(payload) .setWebhook(webhook); try { HttpUrl url = HttpUrl.parse(webhook.getUrl()); if (url == null) { throw new IllegalArgumentException("Webhook URL is not valid: " + webhook.getUrl()); } builder.setEffectiveUrl(HttpUrlHelper.obfuscateCredentials(webhook.getUrl(), url)); Request request = buildHttpRequest(url, webhook, payload); try (Response response = execute(request)) { builder.setHttpStatus(response.code()); } } catch (Exception e) { // Failures are reported on the delivery record, not propagated. builder.setError(e); } return builder .setDurationInMs((int) (system.now() - startedAt)) .build(); }
// Verifies that a webhook pointing at a local network interface is rejected without throwing:
// the delivery carries an IllegalArgumentException error, no HTTP status, and a non-negative duration.
@Test public void silently_catch_error_when_url_is_local_network_interface() throws Exception { String url = "https://localhost"; InetAddress inetAddress = InetAddress.getByName(HttpUrl.parse(url).host()); when(networkInterfaceProvider.getNetworkInterfaceAddresses()) .thenReturn(ImmutableList.of(inetAddress)); Webhook webhook = new Webhook(WEBHOOK_UUID, PROJECT_UUID, CE_TASK_UUID, randomAlphanumeric(40), "my-webhook", url, null); WebhookDelivery delivery = newSender(true).call(webhook, PAYLOAD); assertThat(delivery.getHttpStatus()).isEmpty(); assertThat(delivery.getDurationInMs().get()).isNotNegative(); assertThat(delivery.getError().get()).isInstanceOf(IllegalArgumentException.class); assertThat(delivery.getErrorMessage()).contains("Invalid URL: loopback and wildcard addresses are not allowed for webhooks."); assertThat(delivery.getAt()).isEqualTo(NOW); assertThat(delivery.getWebhook()).isSameAs(webhook); assertThat(delivery.getPayload()).isSameAs(PAYLOAD); }
// Convenience overload: delegates to the three-argument variant with the boolean flag fixed to
// false (presumably "do not preserve/force some extraction option" — confirm against that overload's
// parameter name before relying on the exact semantics).
public static void extract(Path source, Path destination) throws IOException { extract(source, destination, false); }
// Verifies that extracting a tar containing symlinks reproduces directories, regular files,
// and both directory- and file-level symbolic links on disk.
@Test public void testExtract_symlinks() throws URISyntaxException, IOException { Path source = Paths.get(Resources.getResource("core/symlinks.tar").toURI()); Path destination = temporaryFolder.getRoot().toPath(); TarExtractor.extract(source, destination); Assert.assertTrue(Files.isDirectory(destination.resolve("directory1"))); Assert.assertTrue(Files.isDirectory(destination.resolve("directory2"))); Assert.assertTrue(Files.isRegularFile(destination.resolve("directory2/regular"))); Assert.assertTrue(Files.isSymbolicLink(destination.resolve("directory-symlink"))); Assert.assertTrue(Files.isSymbolicLink(destination.resolve("directory1/file-symlink"))); }
// Parses one migration SQL statement: first validates the statement type is supported, then
// substitutes variables into the parsed statement, then RE-parses the substituted text (the
// substitution can change the token stream, so the second parse is required). Statements classified
// as plain STATEMENTs carry no AST; everything else is built into an AST via AstBuilder.
// Parse failures are wrapped in MigrationException with the offending SQL in the message.
public static ParsedCommand parse( // CHECKSTYLE_RULES.ON: CyclomaticComplexity final String sql, final Map<String, String> variables) { validateSupportedStatementType(sql); final String substituted; try { substituted = VariableSubstitutor.substitute(KSQL_PARSER.parse(sql).get(0), variables); } catch (ParseFailedException e) { throw new MigrationException(String.format( "Failed to parse the statement. Statement: %s. Reason: %s", sql, e.getMessage())); } final SqlBaseParser.SingleStatementContext statementContext = KSQL_PARSER.parse(substituted) .get(0).getStatement(); final boolean isStatement = StatementType.get(statementContext.statement().getClass()) == StatementType.STATEMENT; return new ParsedCommand(substituted, isStatement ? Optional.empty() : Optional.of(new AstBuilder(TypeRegistry.EMPTY) .buildStatement(statementContext))); }
// Verifies that an ALTER STREAM statement parses to a single command with no AST
// (getStatement() is empty) and the original text preserved as the command string.
@Test public void shouldParseAlterSourceStatement() { // When: List<CommandParser.ParsedCommand> commands = parse("alter stream foo add column new_col string;"); // Then: assertThat(commands.size(), is(1)); assertThat(commands.get(0).getStatement().isPresent(), is (false)); assertThat(commands.get(0).getCommand(), is("alter stream foo add column new_col string;")); }
// Straight delegation to the repository; returns whatever the repository yields for the
// username (possibly null when the user is unknown — TODO confirm repository contract).
@Override public UserInfo getByUsername(String username) { return userInfoRepository.getByUsername(username); }
// Verifies that the service resolves the admin user via the repository stub and that the
// returned user's subject matches the expected admin subject.
// NOTE(review): assertEquals(user.getSub(), adminSub) reads actual-first. If this test uses
// JUnit (expected-first convention) the arguments are swapped, which only affects the failure
// message; if TestNG (actual-first) the order is correct — confirm the assertion framework.
@Test public void loadByUsername_admin_success() { Mockito.when(userInfoRepository.getByUsername(adminUsername)).thenReturn(userInfoAdmin); UserInfo user = service.getByUsername(adminUsername); assertEquals(user.getSub(), adminSub); }
/**
 * Adds a filtered block to the chain via the superclass, and on success records how many
 * transactions the block's filter matched for bloom-filter statistics.
 *
 * @return whatever the superclass add returned (true when the block was connected)
 */
@Override
public boolean add(FilteredBlock block) throws VerificationException, PrunedException {
    final boolean accepted = super.add(block);
    if (!accepted) {
        return false;
    }
    trackFilteredTransactions(block.getTransactionCount());
    return true;
}
// Builds a deliberately easy-target block on top of block 2 and verifies the chain rejects it
// with a VerificationException whose cause complains about an out-of-range difficulty target.
@Test public void badDifficultyTarget() throws Exception { assertTrue(testNetChain.add(getBlock1())); Block b2 = getBlock2(); assertTrue(testNetChain.add(b2)); Block bad = new Block(Block.BLOCK_VERSION_GENESIS); // Merkle root can be anything here, doesn't matter. bad.setMerkleRoot(Sha256Hash.wrap("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")); // Nonce was just some number that made the hash < difficulty limit set below, it can be anything. bad.setNonce(140548933); bad.setTime(Instant.ofEpochSecond(1279242649)); bad.setPrevBlockHash(b2.getHash()); // We're going to make this block so easy 50% of solutions will pass, and check it gets rejected for having a // bad difficulty target. Unfortunately the encoding mechanism means we cannot make one that accepts all // solutions. bad.setDifficultyTarget(Block.EASIEST_DIFFICULTY_TARGET); try { testNetChain.add(bad); // The difficulty target above should be rejected on the grounds of being easier than the networks // allowable difficulty. fail(); } catch (VerificationException e) { assertTrue(e.getMessage(), e.getCause().getMessage().contains("Difficulty target is out of range")); } }
/**
 * Decodes a JSON-RPC quantity string into a {@link BigInteger}.
 * <p>
 * Plain decimal long values are accepted directly; otherwise the value must be a
 * {@code 0x}-prefixed hexadecimal quantity, which is parsed via {@code parsePaddedNumberHex}.
 *
 * @param value the quantity string to decode
 * @return the decoded value
 * @throws MessageDecodingException if the value is neither a decimal long nor a valid hex quantity
 */
public static BigInteger decodeQuantity(String value) {
    if (isLongValue(value)) {
        return BigInteger.valueOf(Long.parseLong(value));
    }
    if (!isValidHexQuantity(value)) {
        throw new MessageDecodingException("Value must be in format 0x[0-9a-fA-F]+");
    }
    try {
        return parsePaddedNumberHex(value);
    } catch (NumberFormatException e) {
        // The previous message here was the truncated fragment "Negative "; report the same
        // format requirement as above and keep the cause for diagnosis.
        throw new MessageDecodingException("Value must be in format 0x[0-9a-fA-F]+", e);
    }
}
// Verifies that a bare "0x" prefix with no hex digits is rejected with MessageDecodingException.
@Test public void testQuantityDecodeMissingValue() { assertThrows(MessageDecodingException.class, () -> Numeric.decodeQuantity("0x")); }
// Returns true when the given changelog partition is also consumed as a source topic
// (i.e. it is in the set of source partitions registered with this state manager).
boolean changelogAsSource(final TopicPartition partition) { return sourcePartitions.contains(partition); }
// Verifies that partitions passed as source partitions to the state manager are reported as
// changelog-as-source, while other registered changelog partitions are not.
@Test public void shouldReportChangelogAsSource() { final ProcessorStateManager stateMgr = new ProcessorStateManager( taskId, Task.TaskType.STANDBY, false, logContext, stateDirectory, changelogReader, mkMap( mkEntry(persistentStoreName, persistentStoreTopicName), mkEntry(persistentStoreTwoName, persistentStoreTwoTopicName), mkEntry(nonPersistentStoreName, nonPersistentStoreTopicName) ), mkSet(persistentStorePartition, nonPersistentStorePartition), false); assertTrue(stateMgr.changelogAsSource(persistentStorePartition)); assertTrue(stateMgr.changelogAsSource(nonPersistentStorePartition)); assertFalse(stateMgr.changelogAsSource(persistentStoreTwoPartition)); }
// RPC entry point for NFS LOOKUP: resolves the caller's security handler and remote address
// from the RpcInfo and delegates to the three-argument overload that does the real work.
@Override public LOOKUP3Response lookup(XDR xdr, RpcInfo info) { return lookup(xdr, getSecurityHandler(info), info.remoteAddress()); }
// Verifies LOOKUP access control: an unprivileged security handler gets NFS3ERR_ACCES,
// while a privileged one gets NFS3_OK for the same serialized request.
@Test(timeout = 60000) public void testLookup() throws Exception { HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir); long dirId = status.getFileId(); int namenodeId = Nfs3Utils.getNamenodeId(config); FileHandle handle = new FileHandle(dirId, namenodeId); LOOKUP3Request lookupReq = new LOOKUP3Request(handle, "bar"); XDR xdr_req = new XDR(); lookupReq.serialize(xdr_req); // Attempt by an unpriviledged user should fail. LOOKUP3Response response1 = nfsd.lookup(xdr_req.asReadOnlyWrap(), securityHandlerUnpriviledged, new InetSocketAddress("localhost", 1234)); assertEquals("Incorrect return code", Nfs3Status.NFS3ERR_ACCES, response1.getStatus()); // Attempt by a priviledged user should pass. LOOKUP3Response response2 = nfsd.lookup(xdr_req.asReadOnlyWrap(), securityHandler, new InetSocketAddress("localhost", 1234)); assertEquals("Incorrect return code", Nfs3Status.NFS3_OK, response2.getStatus()); }
// Opens a download stream for the file: generates a one-time download URL via the Nodes API,
// then issues a plain HTTP GET against it. For resumed transfers (status.isAppend()) a Range
// header is added and gzip is disabled so byte offsets stay exact. On 404 the cached node id is
// invalidated before falling through to the generic HTTP error mapping (the missing "break" before
// "default" is intentional — see the "Break through" comment). API, HTTP, and I/O failures are each
// mapped to the corresponding BackgroundException.
@Override public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException { try { final SDSApiClient client = session.getClient(); final DownloadTokenGenerateResponse token = new NodesApi(session.getClient()).generateDownloadUrl(Long.valueOf(nodeid.getVersionId(file)), StringUtils.EMPTY); final HttpUriRequest request = new HttpGet(token.getDownloadUrl()); request.addHeader("X-Sds-Auth-Token", StringUtils.EMPTY); if(status.isAppend()) { final HttpRange range = HttpRange.withStatus(status); final String header; if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) { header = String.format("bytes=%d-", range.getStart()); } else { header = String.format("bytes=%d-%d", range.getStart(), range.getEnd()); } if(log.isDebugEnabled()) { log.debug(String.format("Add range header %s for file %s", header, file)); } request.addHeader(new BasicHeader(HttpHeaders.RANGE, header)); // Disable compression request.addHeader(new BasicHeader(HttpHeaders.ACCEPT_ENCODING, "identity")); } final HttpResponse response = client.getClient().execute(request); switch(response.getStatusLine().getStatusCode()) { case HttpStatus.SC_OK: case HttpStatus.SC_PARTIAL_CONTENT: return new HttpMethodReleaseInputStream(response, status); case HttpStatus.SC_NOT_FOUND: nodeid.cache(file, null); // Break through default: throw new DefaultHttpResponseExceptionMappingService().map("Download {0} failed", new HttpResponseException( response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()), file); } } catch(ApiException e) { throw new SDSExceptionMappingService(nodeid).map("Download {0} failed", e, file); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map("Download {0} failed", e, file); } }
// Uploads 1023 random bytes, then reads from offset 100 with append/range semantics and
// verifies the streamed bytes equal the tail of the original content; cleans up the room.
@Test public void testReadRange() throws Exception { final SDSNodeIdProvider nodeid = new SDSNodeIdProvider(session); final Path room = new SDSDirectoryFeature(session, nodeid).mkdir( new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); final Path test = new Path(room, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new SDSTouchFeature(session, nodeid).touch(test, new TransferStatus()); final Local local = new Local(System.getProperty("java.io.tmpdir"), new AlphanumericRandomStringService().random()); final byte[] content = RandomUtils.nextBytes(1023); final OutputStream out = local.getOutputStream(false); assertNotNull(out); IOUtils.write(content, out); out.close(); final TransferStatus upload = new TransferStatus().withLength(content.length); upload.setExists(true); new DefaultUploadFeature<>(new SDSDirectS3MultipartWriteFeature(session, nodeid)).upload( test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED), new DisabledStreamListener(), upload, new DisabledConnectionCallback()); final TransferStatus status = new TransferStatus(); status.setLength(content.length); status.setAppend(true); status.setOffset(100L); final InputStream in = new SDSReadFeature(session, nodeid).read(test, status.withLength(content.length - 100), new DisabledConnectionCallback()); assertNotNull(in); final ByteArrayOutputStream buffer = new ByteArrayOutputStream(content.length - 100); new StreamCopier(status, status).transfer(in, buffer); final byte[] reference = new byte[content.length - 100]; System.arraycopy(content, 100, reference, 0, content.length - 100); assertArrayEquals(reference, buffer.toByteArray()); in.close(); new SDSDeleteFeature(session, nodeid).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
// Kicks off the add-on search flow by showing the LEAVE confirmation dialog
// (the actual market/store navigation happens when the user confirms that dialog).
public void searchForAddOns() { mDialogController.showDialog(LEAVE); }
// Happy path: searching shows the "Leaving AnySoftKeyboard" dialog; confirming it dismisses the
// dialog and fires an ACTION_VIEW intent to the market search URI for the requested add-on.
@Test public void testSearchHappyPath() { Application context = ApplicationProvider.getApplicationContext(); ShadowApplication shadowApplication = Shadows.shadowOf(context); final AddOnStoreSearchController underTest = new AddOnStoreSearchController(context, "add on"); underTest.searchForAddOns(); var leaveDialog = GeneralDialogTestUtil.getLatestShownDialog(); Assert.assertEquals( "Leaving AnySoftKeyboard", GeneralDialogTestUtil.getTitleFromDialog(leaveDialog)); var button = leaveDialog.getButton(DialogInterface.BUTTON_POSITIVE); Shadows.shadowOf(button).getOnClickListener().onClick(button); Assert.assertSame( GeneralDialogTestUtil.NO_DIALOG, GeneralDialogTestUtil.getLatestShownDialog()); var intent = shadowApplication.getNextStartedActivity(); Assert.assertEquals(Intent.ACTION_VIEW, intent.getAction()); Assert.assertEquals( Uri.parse("market://search?q=AnySoftKeyboard%20add%20on"), intent.getData()); }
// Maps a SeaTunnel column definition back to a PostgreSQL type definition. Each SQL type is
// translated to its PG column/data type; DECIMAL, TIME, and TIMESTAMP additionally clamp
// precision/scale into PostgreSQL's legal ranges (logging a warning whenever a value is adjusted),
// DECIMAL preserves a source "money" type, STRING chooses varchar vs text by column length, and
// ARRAY dispatches on the element type. Unknown types raise a convertToConnectorTypeError.
// NOTE(review): the code below is kept byte-identical (including the log-message string literals
// that this file's line-flattening has split mid-word) — do not reflow without re-joining them.
@Override public BasicTypeDefine reconvert(Column column) { BasicTypeDefine.BasicTypeDefineBuilder builder = BasicTypeDefine.builder() .name(column.getName()) .nullable(column.isNullable()) .comment(column.getComment()) .defaultValue(column.getDefaultValue()); switch (column.getDataType().getSqlType()) { case BOOLEAN: builder.columnType(PG_BOOLEAN); builder.dataType(PG_BOOLEAN); break; case TINYINT: case SMALLINT: builder.columnType(PG_SMALLINT); builder.dataType(PG_SMALLINT); break; case INT: builder.columnType(PG_INTEGER); builder.dataType(PG_INTEGER); break; case BIGINT: builder.columnType(PG_BIGINT); builder.dataType(PG_BIGINT); break; case FLOAT: builder.columnType(PG_REAL); builder.dataType(PG_REAL); break; case DOUBLE: builder.columnType(PG_DOUBLE_PRECISION); builder.dataType(PG_DOUBLE_PRECISION); break; case DECIMAL: if (column.getSourceType() != null && column.getSourceType().equalsIgnoreCase(PG_MONEY)) { builder.columnType(PG_MONEY); builder.dataType(PG_MONEY); } else { DecimalType decimalType = (DecimalType) column.getDataType(); long precision = decimalType.getPrecision(); int scale = decimalType.getScale(); if (precision <= 0) { precision = DEFAULT_PRECISION; scale = DEFAULT_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is precision less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (precision > MAX_PRECISION) { scale = (int) Math.max(0, scale - (precision - MAX_PRECISION)); precision = MAX_PRECISION; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum precision of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_PRECISION, precision, scale); } if (scale < 0) { scale = 0; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is scale less than 0, " + "it 
will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (scale > MAX_SCALE) { scale = MAX_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_SCALE, precision, scale); } builder.columnType(String.format("%s(%s,%s)", PG_NUMERIC, precision, scale)); builder.dataType(PG_NUMERIC); builder.precision(precision); builder.scale(scale); } break; case BYTES: builder.columnType(PG_BYTEA); builder.dataType(PG_BYTEA); break; case STRING: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType(PG_TEXT); builder.dataType(PG_TEXT); } else if (column.getColumnLength() <= MAX_VARCHAR_LENGTH) { builder.columnType( String.format("%s(%s)", PG_VARCHAR, column.getColumnLength())); builder.dataType(PG_VARCHAR); } else { builder.columnType(PG_TEXT); builder.dataType(PG_TEXT); } break; case DATE: builder.columnType(PG_DATE); builder.dataType(PG_DATE); break; case TIME: Integer timeScale = column.getScale(); if (timeScale != null && timeScale > MAX_TIME_SCALE) { timeScale = MAX_TIME_SCALE; log.warn( "The time column {} type time({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to time({})", column.getName(), column.getScale(), MAX_SCALE, timeScale); } if (timeScale != null && timeScale > 0) { builder.columnType(String.format("%s(%s)", PG_TIME, timeScale)); } else { builder.columnType(PG_TIME); } builder.dataType(PG_TIME); builder.scale(timeScale); break; case TIMESTAMP: Integer timestampScale = column.getScale(); if (timestampScale != null && timestampScale > MAX_TIMESTAMP_SCALE) { timestampScale = MAX_TIMESTAMP_SCALE; log.warn( "The timestamp column {} type timestamp({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be 
converted to timestamp({})", column.getName(), column.getScale(), MAX_TIMESTAMP_SCALE, timestampScale); } if (timestampScale != null && timestampScale > 0) { builder.columnType(String.format("%s(%s)", PG_TIMESTAMP, timestampScale)); } else { builder.columnType(PG_TIMESTAMP); } builder.dataType(PG_TIMESTAMP); builder.scale(timestampScale); break; case ARRAY: ArrayType arrayType = (ArrayType) column.getDataType(); SeaTunnelDataType elementType = arrayType.getElementType(); switch (elementType.getSqlType()) { case BOOLEAN: builder.columnType(PG_BOOLEAN_ARRAY); builder.dataType(PG_BOOLEAN_ARRAY); break; case TINYINT: case SMALLINT: builder.columnType(PG_SMALLINT_ARRAY); builder.dataType(PG_SMALLINT_ARRAY); break; case INT: builder.columnType(PG_INTEGER_ARRAY); builder.dataType(PG_INTEGER_ARRAY); break; case BIGINT: builder.columnType(PG_BIGINT_ARRAY); builder.dataType(PG_BIGINT_ARRAY); break; case FLOAT: builder.columnType(PG_REAL_ARRAY); builder.dataType(PG_REAL_ARRAY); break; case DOUBLE: builder.columnType(PG_DOUBLE_PRECISION_ARRAY); builder.dataType(PG_DOUBLE_PRECISION_ARRAY); break; case BYTES: builder.columnType(PG_BYTEA); builder.dataType(PG_BYTEA); break; case STRING: builder.columnType(PG_TEXT_ARRAY); builder.dataType(PG_TEXT_ARRAY); break; default: throw CommonError.convertToConnectorTypeError( DatabaseIdentifier.POSTGRESQL, elementType.getSqlType().name(), column.getName()); } break; default: throw CommonError.convertToConnectorTypeError( DatabaseIdentifier.POSTGRESQL, column.getDataType().getSqlType().name(), column.getName()); } return builder.build(); }
// Verifies TIME reconversion: no scale yields plain "time"; an in-range scale (3) yields
// "time(3)" with the scale carried through to the type definition.
@Test public void testReconvertTime() { Column column = PhysicalColumn.builder() .name("test") .dataType(LocalTimeType.LOCAL_TIME_TYPE) .build(); BasicTypeDefine typeDefine = PostgresTypeConverter.INSTANCE.reconvert(column); Assertions.assertEquals(column.getName(), typeDefine.getName()); Assertions.assertEquals(PostgresTypeConverter.PG_TIME, typeDefine.getColumnType()); Assertions.assertEquals(PostgresTypeConverter.PG_TIME, typeDefine.getDataType()); column = PhysicalColumn.builder() .name("test") .dataType(LocalTimeType.LOCAL_TIME_TYPE) .scale(3) .build(); typeDefine = PostgresTypeConverter.INSTANCE.reconvert(column); Assertions.assertEquals(column.getName(), typeDefine.getName()); Assertions.assertEquals( String.format("%s(%s)", PostgresTypeConverter.PG_TIME, column.getScale()), typeDefine.getColumnType()); Assertions.assertEquals(PostgresTypeConverter.PG_TIME, typeDefine.getDataType()); Assertions.assertEquals(column.getScale(), typeDefine.getScale()); }
/**
 * Recursively deletes a file or directory tree.
 * <p>
 * The tree is walked depth-first and entries are removed children-first (reverse order of
 * {@link Files#walk}). If the walk fails, deletion is deferred to JVM exit via
 * {@link File#deleteOnExit()} and {@code false} is returned.
 *
 * @param file the file or directory to delete; {@code null} is tolerated and returns {@code false}
 * @return {@code true} when the walk-and-delete completed without an {@link IOException}
 */
public static boolean delete(@Nullable File file) {
    if (file == null) {
        LOGGER.warn("cannot delete null File");
        return false;
    }
    try (Stream<Path> walk = Files.walk(file.toPath())) {
        // Reverse order guarantees children are deleted before their parent directories.
        walk.sorted(Comparator.reverseOrder())
                .map(Path::toFile)
                .forEach(File::delete);
        return true;
    } catch (IOException ex) {
        LOGGER.trace(ex.getMessage(), ex);
        LOGGER.debug("Failed to delete file: {} (error message: {}); attempting to delete on exit.", file.getPath(), ex.getMessage());
        file.deleteOnExit();
        return false;
    }
}
// Creates a real temp file and verifies delete() both reports success and actually removes it.
@Test public void testDelete() throws Exception { File file = File.createTempFile("tmp", "deleteme", getSettings().getTempDirectory()); if (!file.exists()) { fail("Unable to create a temporary file."); } boolean status = FileUtils.delete(file); assertTrue("delete returned a failed status", status); assertFalse("Temporary file exists after attempting deletion", file.exists()); }
/**
 * Casts a boxed integer to a decimal with the given precision and scale, delegating to the
 * {@code long} overload. A {@code null} input yields {@code null}.
 *
 * @param value     the value to cast, possibly {@code null}
 * @param precision target decimal precision
 * @param scale     target decimal scale
 * @return the decimal representation, or {@code null} when {@code value} is {@code null}
 */
public static BigDecimal cast(final Integer value, final int precision, final int scale) {
    return value == null ? null : cast(value.longValue(), precision, scale);
}
// Verifies that casting -1 with precision 2 / scale 1 produces the scaled decimal -1.0.
@Test public void shouldCastIntNegative() { // When: final BigDecimal decimal = DecimalUtil.cast(-1, 2, 1); // Then: assertThat(decimal, is(new BigDecimal("-1.0"))); }
// Tries each registered finder in order, returning the first non-null descriptor from an
// applicable finder. Exceptions from intermediate finders are logged at debug level and the next
// finder is tried; only the LAST finder's exception is logged as an error. If no finder produces
// a descriptor, a PluginRuntimeException is thrown.
@Override public PluginDescriptor find(Path pluginPath) { for (PluginDescriptorFinder finder : finders) { if (finder.isApplicable(pluginPath)) { log.debug("'{}' is applicable for plugin '{}'", finder, pluginPath); try { PluginDescriptor pluginDescriptor = finder.find(pluginPath); if (pluginDescriptor != null) { return pluginDescriptor; } } catch (Exception e) { if (finders.indexOf(finder) == finders.size() - 1) { // it's the last finder log.error(e.getMessage(), e); } else { // log the exception and continue with the next finder log.debug(e.getMessage()); log.debug("Try to continue with the next finder"); } } } else { log.debug("'{}' is not applicable for plugin '{}'", finder, pluginPath); } } throw new PluginRuntimeException("No PluginDescriptorFinder for plugin '{}'", pluginPath); }
// An empty compound finder (no delegates registered) must throw PluginRuntimeException.
@Test public void testNotFound() { PluginDescriptorFinder descriptorFinder = new CompoundPluginDescriptorFinder(); assertThrows(PluginRuntimeException.class, () -> descriptorFinder.find(pluginsPath.resolve("test-plugin-3"))); }
// Constructs the consumer for a GitHub branch: stores the branch to poll and the SHA from
// which to start consuming commits, delegating scheduling to the superclass.
public CommitConsumer(GitHubEndpoint endpoint, Processor processor, String branchName, String startingSha) throws Exception { super(endpoint, processor); this.branchName = branchName; this.startingSha = startingSha; }
// Adds two commits to the stubbed commit service and verifies the consumer route delivers
// both commit messages (order-independent) to the mock endpoint.
@Test public void commitConsumerTest() throws Exception { mockResultEndpoint.expectedMessageCount(2); RepositoryCommit commit1 = commitService.addRepositoryCommit("test-1"); RepositoryCommit commit2 = commitService.addRepositoryCommit("test-2"); mockResultEndpoint.expectedBodiesReceivedInAnyOrder(commit1.getCommit().getMessage(), commit2.getCommit().getMessage()); mockResultEndpoint.assertIsSatisfied(); }
/**
 * Builds the Graphite reporter for the given registry, choosing the sender implementation from
 * the configured transport: "udp" (case-insensitive) selects {@link GraphiteUDP}, anything else
 * falls back to the TCP {@link Graphite} sender.
 */
@Override
public ScheduledReporter build(MetricRegistry registry) {
    final GraphiteReporter.Builder reporterBuilder = builder(registry);
    if ("udp".equalsIgnoreCase(transport)) {
        return reporterBuilder.build(new GraphiteUDP(host, port));
    }
    return reporterBuilder.build(new Graphite(host, port));
}
@Test void testNoAddressResolutionForGraphite() { graphiteReporterFactory.build(new MetricRegistry()); final ArgumentCaptor<Graphite> argument = ArgumentCaptor.forClass(Graphite.class); verify(builderSpy).build(argument.capture()); final Graphite graphite = argument.getValue(); assertThat(graphite) .satisfies(g -> assertThat(g).extracting("hostname").isEqualTo("localhost")) .satisfies(g -> assertThat(g).extracting("port").isEqualTo(2003)) .satisfies(g -> assertThat(g).extracting("address").isNull()); }
// Checks whether the current principal owns the transaction. Must run on the channel's event
// loop (asserted), and the continuation is pinned back onto ctx.executor() via thenComposeAsync.
// Non-owners may still be allowed if both authentication and authorization are enabled AND the
// principal is a super user; otherwise ownership is denied.
private CompletableFuture<Boolean> verifyTxnOwnership(TxnID txnID) { assert ctx.executor().inEventLoop(); return service.pulsar().getTransactionMetadataStoreService() .verifyTxnOwnership(txnID, getPrincipal()) .thenComposeAsync(isOwner -> { if (isOwner) { return CompletableFuture.completedFuture(true); } if (service.isAuthenticationEnabled() && service.isAuthorizationEnabled()) { return isSuperUser(); } else { return CompletableFuture.completedFuture(false); } }, ctx.executor()); }
// When the topic's endTxn future fails, the broker must still reply to EndTxnOnPartition with
// the original request/txn ids and surface the failure message in the response.
@Test(timeOut = 30000) public void sendEndTxnOnPartitionResponseFailed() throws Exception { final TransactionMetadataStoreService txnStore = mock(TransactionMetadataStoreService.class); when(txnStore.getTxnMeta(any())).thenReturn(CompletableFuture.completedFuture(mock(TxnMeta.class))); when(txnStore.verifyTxnOwnership(any(), any())).thenReturn(CompletableFuture.completedFuture(true)); when(txnStore.endTransaction(any(TxnID.class), anyInt(), anyBoolean())) .thenReturn(CompletableFuture.completedFuture(null)); when(pulsar.getTransactionMetadataStoreService()).thenReturn(txnStore); svcConfig.setTransactionCoordinatorEnabled(true); resetChannel(); setChannelConnected(); Topic topic = mock(Topic.class); doReturn(CompletableFuture.failedFuture(new RuntimeException("server error"))).when(topic) .endTxn(any(TxnID.class), anyInt(), anyLong()); doReturn(CompletableFuture.completedFuture(Optional.of(topic))).when(brokerService) .getTopicIfExists(any(String.class)); ByteBuf clientCommand = Commands.newEndTxnOnPartition(89L, 1L, 12L, successTopicName, TxnAction.COMMIT, 1L); channel.writeInbound(clientCommand); CommandEndTxnOnPartitionResponse response = (CommandEndTxnOnPartitionResponse) getResponse(); assertEquals(response.getRequestId(), 89L); assertEquals(response.getTxnidLeastBits(), 1L); assertEquals(response.getTxnidMostBits(), 12L); assertEquals(response.getError().getValue(), 0); assertEquals(response.getMessage(), "server error"); channel.finish(); }
// Accessor for the singleton collector instance held in the static INSTANCE field.
public static LogCollector<ShenyuRequestLog> getInstance() { return INSTANCE; }
// Verifies the collector lifecycle through the private "started" flag: true after start()
// (collect() may then run) and false again after close().
@Test public void testAbstractLogCollector() throws Exception { AliyunSlsLogCollector.getInstance().start(); Field field1 = AbstractLogCollector.class.getDeclaredField("started"); field1.setAccessible(true); Assertions.assertEquals(field1.get(AliyunSlsLogCollector.getInstance()).toString(), "true"); AliyunSlsLogCollector.getInstance().collect(shenyuRequestLog); AliyunSlsLogCollector.getInstance().close(); Field field2 = AbstractLogCollector.class.getDeclaredField("started"); field2.setAccessible(true); Assertions.assertEquals(field2.get(AliyunSlsLogCollector.getInstance()).toString(), "false"); }
/**
 * Parses the token list into a node tree via the grammar entry point {@code E()}.
 *
 * @return the root node, or {@code null} when there are no tokens to parse
 * @throws ScanException propagated from the grammar productions
 */
public Node parse() throws ScanException {
    final boolean nothingToParse = (tokenList == null) || tokenList.isEmpty();
    return nothingToParse ? null : E();
}
// The ":-" default-separator has no special meaning outside a ${...} variable: "{a:-b}" must
// parse as a chain of plain literal nodes.
@Test public void defaultSeparatorOutsideOfAVariable() throws ScanException { Tokenizer tokenizer = new Tokenizer("{a:-b}"); Parser parser = new Parser(tokenizer.tokenize()); Node node = parser.parse(); dump(node); Node witness = new Node(Node.Type.LITERAL, "{"); Node t = witness.next = new Node(Node.Type.LITERAL, "a"); t.next = new Node(Node.Type.LITERAL, ":-"); t = t.next; t.next = new Node(Node.Type.LITERAL, "b"); t = t.next; t.next = new Node(Node.Type.LITERAL, "}"); assertEquals(witness, node); }
// Decodes an Aztec symbol: extracts the raw bit stream from the detector's bit matrix, applies
// error correction, then converts the corrected bits to both raw bytes and the decoded text.
// The result also carries the EC level (as a percentage string), the number of decoded bits,
// and the count of corrected errors. Throws FormatException when correction fails.
public DecoderResult decode(AztecDetectorResult detectorResult) throws FormatException { ddata = detectorResult; BitMatrix matrix = detectorResult.getBits(); boolean[] rawbits = extractBits(matrix); CorrectedBitsResult correctedBits = correctBits(rawbits); byte[] rawBytes = convertBoolArrayToByteArray(correctedBits.correctBits); String result = getEncodedData(correctedBits.correctBits); DecoderResult decoderResult = new DecoderResult(rawBytes, result, null, String.format("%d%%", correctedBits.ecLevel)); decoderResult.setNumBits(correctedBits.correctBits.length); decoderResult.setErrorsCorrected(correctedBits.errorsCorrected); return decoderResult; }
// A compact Aztec matrix corrupted beyond the error-correction capacity must fail decoding
// with FormatException (asserted via the expected attribute).
@Test(expected = FormatException.class) public void testDecodeTooManyErrors2() throws FormatException { BitMatrix matrix = BitMatrix.parse("" + ". X X . . X . X X . . . X . . X X X . . . X X . X X . \n" + "X X . X X . . X . . . X X . . . X X . X X X . X . X X \n" + ". . . . X . . . X X X . X X . X X X X . X X . . X . . \n" + "X . X X . . X . . . X X . X X . X . X X . . . . . X . \n" + "X X . X . . X . X X . . . . . X X . . . . . X . . . X \n" + "X . . X . . . . . . X . . . X . X X X X X X X . . . X \n" + "X . . X X . . X . . X X . . . . . X . . . . . X X X . \n" + ". . X X X X . X . . . . . X X X X X X . . . . . . X X \n" + "X . . . X . X X X X X X . . X X X . X . X X X X X X . \n" + "X . . X X X . X X X X X X X X X X X X X . . . X . X X \n" + ". . . . X X . . . X . . . . . . . X X . . . X X . X . \n" + ". . . X X X . . X X . X X X X X . X . . X . . . . . . \n" + "X . . . . X . X . X . X . . . X . X . X X . X X . X X \n" + "X . X . . X . X . X . X . X . X . X . . . . . X . X X \n" + "X . X X X . . X . X . X . . . X . X . X X X . . . X X \n" + "X X X X X X X X . X . X X X X X . X . X . X . X X X . \n" + ". . . . . . . X . X . . . . . . . X X X X . . . X X X \n" + "X X . . X . . X . X X X X X X X X X X X X X . . X . X \n" + "X X X . X X X X . . X X X X . . X . . . . X . . X X X \n" + ". . X X X X X . X . . . . X X X X . . X X X . X . X . \n" + ". . X X . X . X . . . X . X X . X X . . . . X X . . . \n" + "X . . . X . X . X X X X X X . . X . X X X X X . X . . \n" + ". X . . . X X X . . . . . X . . . . . X X X X X . X . \n" + "X . . X . X X X X . X . X . . . . X . X X . X . . X . \n" + "X . . . X X . X . X X X X X X X X . X X X X . . X X . \n" + ". X X X X . . X . . X X X . X X . . X . . . . X X X . \n" + "X X . . . X X . . X . X . . . . X X . X . . X . X . X \n", "X ", ". "); AztecDetectorResult r = new AztecDetectorResult(matrix, NO_POINTS, true, 16, 4); new Decoder().decode(r); }
/**
 * Returns a future that completes when any one of the given futures completes, with that
 * future's result — a thin collection-friendly wrapper over {@link CompletableFuture#anyOf}.
 *
 * @param futures the futures to race; an empty collection yields a future that never completes
 * @return a future completed with the first available result (or failure)
 */
public static CompletableFuture<Object> waitForAny(Collection<? extends CompletableFuture<?>> futures) {
    final CompletableFuture<?>[] racers = futures.toArray(new CompletableFuture[0]);
    return CompletableFuture.anyOf(racers);
}
// Exercises the predicate-based waitForAny overload: matching value is returned, a non-matching
// set yields empty, remaining futures are cancelled once a match is found, and exceptional
// futures surface as a CompletionException wrapping the RuntimeException.
@Test public void testWaitForAny() { CompletableFuture<String> f1 = new CompletableFuture<>(); CompletableFuture<String> f2 = new CompletableFuture<>(); CompletableFuture<String> f3 = new CompletableFuture<>(); CompletableFuture<String> f4 = new CompletableFuture<>(); f1.complete("1"); f2.complete("2"); f3.complete("3"); f4.complete("4"); CompletableFuture<Optional<Object>> ret = FutureUtil.waitForAny(Lists.newArrayList(f1, f2, f3, f4), p -> p.equals("3")); assertEquals(ret.join().get(), "3"); // test not matched predicate result CompletableFuture<String> f5 = new CompletableFuture<>(); CompletableFuture<String> f6 = new CompletableFuture<>(); f5.complete("5"); f6.complete("6"); ret = FutureUtil.waitForAny(Lists.newArrayList(f5, f6), p -> p.equals("3")); assertFalse(ret.join().isPresent()); // test one complete, others are cancelled. CompletableFuture<String> f55 = new CompletableFuture<>(); CompletableFuture<String> f66 = new CompletableFuture<>(); f55.complete("55"); ret = FutureUtil.waitForAny(Lists.newArrayList(f55, f66), p -> p.equals("55")); assertTrue(ret.join().isPresent()); assertTrue(f66.isCancelled()); // test with exception CompletableFuture<String> f7 = new CompletableFuture<>(); CompletableFuture<String> f8 = new CompletableFuture<>(); f8.completeExceptionally(new RuntimeException("f7 exception")); f8.completeExceptionally(new RuntimeException("f8 exception")); ret = FutureUtil.waitForAny(Lists.newArrayList(f7, f8), p -> p.equals("3")); try { ret.join(); fail("Should have failed"); } catch (CompletionException ex) { assertTrue(ex.getCause() instanceof RuntimeException); } }
/**
 * Computes project-measure statistics for support purposes: builds the search request,
 * executes it against the search client, and aggregates the response into statistics.
 */
public ProjectMeasuresStatistics searchSupportStatistics() {
    final SearchRequest request = buildProjectMeasureSearchRequest();
    final SearchResponse response = client.search(request);
    return buildProjectMeasuresStatistics(response);
}
// Indexes two projects and two applications; the statistics must count only the projects and
// aggregate language counts / ncloc distributions from the project documents alone.
@Test public void search_statistics_should_ignore_applications() { es.putDocuments(TYPE_PROJECT_MEASURES, // insert projects newDoc(ComponentTesting.newPrivateProjectDto(), "lines", 10, "coverage", 80) .setLanguages(Arrays.asList("java", "cs", "js")) .setNclocLanguageDistributionFromMap(ImmutableMap.of("java", 200, "cs", 250, "js", 50)), newDoc(ComponentTesting.newPrivateProjectDto(), "lines", 20, "coverage", 80) .setLanguages(Arrays.asList("java", "python", "kotlin")) .setNclocLanguageDistributionFromMap(ImmutableMap.of("java", 300, "python", 100, "kotlin", 404)), // insert applications newDoc(ComponentTesting.newApplication(), "lines", 1000, "coverage", 70) .setLanguages(Arrays.asList("java", "python", "kotlin")) .setNclocLanguageDistributionFromMap(ImmutableMap.of("java", 300, "python", 100, "kotlin", 404)), newDoc(ComponentTesting.newApplication(), "lines", 20, "coverage", 80) .setLanguages(Arrays.asList("java", "python", "kotlin")) .setNclocLanguageDistributionFromMap(ImmutableMap.of("java", 300, "python", 100, "kotlin", 404))); ProjectMeasuresStatistics result = underTest.searchSupportStatistics(); assertThat(result.getProjectCount()).isEqualTo(2); assertThat(result.getProjectCountByLanguage()).containsOnly( entry("java", 2L), entry("cs", 1L), entry("js", 1L), entry("python", 1L), entry("kotlin", 1L)); assertThat(result.getNclocByLanguage()).containsOnly( entry("java", 500L), entry("cs", 250L), entry("js", 50L), entry("python", 100L), entry("kotlin", 404L)); }
// Converts a persisted MeasureDto into a domain Measure, dispatching on the metric's value type.
// A null dto yields empty; the metric itself must be non-null. Each branch interprets the dto's
// numeric value and/or data string according to the target type; an unknown value type is a
// programming error and raises IllegalArgumentException.
public Optional<Measure> toMeasure(@Nullable MeasureDto measureDto, Metric metric) { requireNonNull(metric); if (measureDto == null) { return Optional.empty(); } Double value = measureDto.getValue(); String data = measureDto.getData(); switch (metric.getType().getValueType()) { case INT: return toIntegerMeasure(measureDto, value, data); case LONG: return toLongMeasure(measureDto, value, data); case DOUBLE: return toDoubleMeasure(measureDto, value, data); case BOOLEAN: return toBooleanMeasure(measureDto, value, data); case STRING: return toStringMeasure(measureDto, data); case LEVEL: return toLevelMeasure(measureDto, data); case NO_VALUE: return toNoValueMeasure(measureDto); default: throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType()); } }
// A Level value with wrong casing ("waRn") must not be interpreted as a quality gate status.
@Test
public void toMeasure_returns_no_QualityGateStatus_if_alertStatus_has_data_in_wrong_case_for_Level_Metric() {
  Optional<Measure> measure = underTest.toMeasure(new MeasureDto().setData("waRn"), SOME_STRING_METRIC);

  assertThat(measure).isPresent();
  assertThat(measure.get().hasQualityGateStatus()).isFalse();
}
/**
 * Resolves the region an instance belongs to.
 *
 * <p>Falls back to the local region when no data center info is available, maps the
 * availability zone to a region for Amazon instances, and returns {@code null} when the
 * region cannot be determined.
 */
@Nullable
public String getInstanceRegion(InstanceInfo instanceInfo) {
    final DataCenterInfo dataCenterInfo = instanceInfo.getDataCenterInfo();
    if (dataCenterInfo == null || dataCenterInfo.getName() == null) {
        logger.warn("Cannot get region for instance id:{}, app:{} as dataCenterInfo is null. Returning local:{} by default",
                instanceInfo.getId(), instanceInfo.getAppName(), localRegion);
        return localRegion;
    }
    if (DataCenterInfo.Name.Amazon.equals(dataCenterInfo.getName())) {
        final String zone = ((AmazonInfo) dataCenterInfo).getMetadata()
                .get(AmazonInfo.MetaDataKey.availabilityZone.getName());
        if (zone != null) {
            return azToRegionMapper.getRegionForAvailabilityZone(zone);
        }
    }
    return null;
}
// An availability zone ("us-east-x") that is not in the configured AZ list and does not
// follow the standard naming format must resolve to a null region.
@Test
public void testNotMappedAZNotFollowingFormat() throws Exception {
    ConfigurationManager.getConfigInstance().setProperty("eureka.us-east-1.availabilityZones", "abc,def");
    PropertyBasedAzToRegionMapper azToRegionMapper = new PropertyBasedAzToRegionMapper(new DefaultEurekaClientConfig());
    InstanceRegionChecker checker = new InstanceRegionChecker(azToRegionMapper, "us-east-1");
    azToRegionMapper.setRegionsToFetch(new String[]{"us-east-1"});
    AmazonInfo dcInfo = AmazonInfo.Builder.newBuilder().addMetadata(AmazonInfo.MetaDataKey.availabilityZone, "us-east-x").build();
    InstanceInfo instanceInfo = InstanceInfo.Builder.newBuilder().setAppName("abc").setDataCenterInfo(dcInfo).build();
    String instanceRegion = checker.getInstanceRegion(instanceInfo);
    Assert.assertNull("Invalid instance region.", instanceRegion);
}
/**
 * Scans the term buffer between the rebuild position and the high-water mark for a gap in
 * received data, activating loss handling when a new gap is found.
 *
 * @param termBuffer          buffer holding the current term.
 * @param rebuildPosition     position up to which data has been rebuilt.
 * @param hwmPosition         highest position for which data has been observed.
 * @param nowNs               current time in nanoseconds, used for gap timers.
 * @param termLengthMask      mask for converting a position to a term offset.
 * @param positionBitsToShift shift for converting a position to a term count.
 * @param initialTermId       term id at the start of the stream.
 * @return packed value of the rebuild offset and whether a new loss was found.
 */
public long scan(
    final UnsafeBuffer termBuffer,
    final long rebuildPosition,
    final long hwmPosition,
    final long nowNs,
    final int termLengthMask,
    final int positionBitsToShift,
    final int initialTermId) {
    boolean lossFound = false;
    int rebuildOffset = (int)rebuildPosition & termLengthMask;

    if (rebuildPosition < hwmPosition) {
        final int rebuildTermCount = (int)(rebuildPosition >>> positionBitsToShift);
        final int hwmTermCount = (int)(hwmPosition >>> positionBitsToShift);

        final int rebuildTermId = initialTermId + rebuildTermCount;
        final int hwmTermOffset = (int)hwmPosition & termLengthMask;
        // If the HWM is in a later term, scan to the end of the current term.
        final int limitOffset = rebuildTermCount == hwmTermCount ? hwmTermOffset : termLengthMask + 1;

        rebuildOffset = scanForGap(termBuffer, rebuildTermId, rebuildOffset, limitOffset, this);
        if (rebuildOffset < limitOffset) {
            // A gap was found; only (re)activate when it differs from the currently tracked gap.
            if (scannedTermOffset != activeTermOffset || scannedTermId != activeTermId) {
                activateGap(nowNs);
                lossFound = true;
            }

            checkTimerExpiry(nowNs);
        }
    }

    return pack(rebuildOffset, lossFound);
}
// With a persistent gap (frame 1 missing between frames 0 and 2), repeated scans over time
// must re-signal the gap to the loss handler at least twice (initial NAK plus retransmit).
@Test
void shouldRetransmitNakForMissingData() {
    final long rebuildPosition = ACTIVE_TERM_POSITION;
    final long hwmPosition = ACTIVE_TERM_POSITION + (ALIGNED_FRAME_LENGTH * 3L);

    insertDataFrame(offsetOfMessage(0));
    insertDataFrame(offsetOfMessage(2));

    lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID);
    currentTime = TimeUnit.MILLISECONDS.toNanos(30);
    lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID);
    currentTime = TimeUnit.MILLISECONDS.toNanos(60);
    lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID);

    verify(lossHandler, atLeast(2)).onGapDetected(TERM_ID, offsetOfMessage(1), gapLength());
}
/**
 * Assigns a request id to the current request when a {@link RequestIdGenerator} is
 * available, exposing it both as the "ID" request attribute and in the MDC for the
 * duration of the downstream filter chain. Without a generator, the chain runs untouched.
 */
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
  var generator = platform.getContainer().getOptionalComponentByType(RequestIdGenerator.class);
  if (generator.isPresent()) {
    String requestId = generator.get().generate();
    try (RequestIdMDCStorage mdcStorage = new RequestIdMDCStorage(requestId)) {
      request.setAttribute("ID", requestId);
      chain.doFilter(request, response);
    }
  } else {
    chain.doFilter(request, response);
  }
}
// The filter must pass the request through without error when the container provides no
// RequestIdGenerator component.
@Test
public void filter_does_not_fail_when_there_is_no_RequestIdGenerator_in_container() throws IOException, ServletException {
  ExtensionContainer container = mock(ExtensionContainer.class);
  when(container.getOptionalComponentByType(RequestIdGenerator.class)).thenReturn(Optional.empty());
  when(platform.getContainer()).thenReturn(container);
  RequestIdFilter underTest = new RequestIdFilter(platform);

  underTest.doFilter(servletRequest, servletResponse, filterChain);
}
/**
 * Creates a deep copy of this tree: the node itself is cloned and its children are
 * recursively cloned and attached to the copy.
 *
 * @return a new tree equal in structure to this one but sharing no nodes with it
 */
public Tree<T> cloneTree() {
	final Tree<T> copy = ObjectUtil.clone(this);
	copy.setChildren(cloneChildren());
	return copy;
}
// A cloned tree must contain the same nodes as the original: walking it yields all 7 ids.
@Test
public void cloneTreeTest() {
	final Tree<String> tree = TreeUtil.buildSingle(nodeList, "0");
	final Tree<String> cloneTree = tree.cloneTree();

	List<String> ids = new ArrayList<>();
	cloneTree.walk((tr) -> ids.add(tr.getId()));

	assertEquals(7, ids.size());
}
/**
 * Returns a human-readable representation containing the target address.
 * Option fields are not yet included — see TODO below.
 */
@Override
public String toString() {
    return toStringHelper(getClass())
            .add("targetAddress", Arrays.toString(targetAddress))
            .toString();
    // TODO: need to handle options
}
// Smoke test: deserializing a neighbor solicitation and calling toString() must not throw.
// Content assertions are pending toString() support for target address and options.
@Test
public void testToStringNS() throws Exception {
    NeighborSolicitation ns = deserializer.deserialize(bytePacket, 0, bytePacket.length);
    String str = ns.toString();

    // TODO: need to handle TARGET_ADDRESS and Options
}
// Delegates to the legacy source thread: this task is complete when the source thread is.
@Override
protected CompletableFuture<Void> getCompletionFuture() {
    return sourceThread.getCompletionFuture();
}
// A source task restored in FINISHED state must skip executing the user source entirely:
// only MAX_WATERMARK and EndOfData are emitted and no source lifecycle method is called.
@Test
void testClosedOnRestoreSourceSkipExecution() throws Exception {
    LifeCycleMonitorSource testSource = new LifeCycleMonitorSource();
    List<Object> output = new ArrayList<>();
    try (StreamTaskMailboxTestHarness<String> harness =
            new StreamTaskMailboxTestHarnessBuilder<>(SourceStreamTask::new, STRING_TYPE_INFO)
                    .setTaskStateSnapshot(1, TaskStateSnapshot.FINISHED_ON_RESTORE)
                    .addAdditionalOutput(
                            new RecordOrEventCollectingResultPartitionWriter<StreamElement>(
                                    output,
                                    new StreamElementSerializer<>(IntSerializer.INSTANCE)) {
                                @Override
                                public void notifyEndOfData(StopMode mode) throws IOException {
                                    broadcastEvent(new EndOfData(mode), false);
                                }
                            })
                    .setupOperatorChain(new StreamSource<>(testSource))
                    .chain(new TestFinishedOnRestoreStreamOperator(), StringSerializer.INSTANCE)
                    .finish()
                    .build()) {
        harness.getStreamTask().invoke();
        harness.processAll();
        harness.streamTask.getCompletionFuture().get();

        assertThat(output)
                .containsExactly(Watermark.MAX_WATERMARK, new EndOfData(StopMode.DRAIN));
        LifeCycleMonitorSource source =
                (LifeCycleMonitorSource)
                        ((StreamSource<?, ?>) harness.getStreamTask().getMainOperator())
                                .getUserFunction();
        // None of the source's lifecycle phases should have run.
        source.getLifeCycleMonitor()
                .assertCallTimes(
                        0,
                        LifeCyclePhase.OPEN,
                        LifeCyclePhase.PROCESS_ELEMENT,
                        LifeCyclePhase.CLOSE);
    }
}
/**
 * Removes the authentication cookie by sending back an expired, valueless replacement
 * (HTTP-only, max-age 0) so the browser discards it.
 */
@Override
public void delete(HttpRequest request, HttpResponse response) {
  Cookie expiredCookie = newCookieBuilder(request)
    .setName(AUTHENTICATION_COOKIE_NAME)
    .setValue(null)
    .setHttpOnly(true)
    .setExpiry(0)
    .build();
  response.addCookie(expiredCookie);
}
// Deleting must replace the auth cookie with a null-valued, zero-max-age cookie on "/".
@Test
public void delete() {
  when(request.getCookies()).thenReturn(new Cookie[]{wrapCookie(AUTHENTICATION_COOKIE_NAME, "{\"return_to\":\"/admin/settings\"}")});

  underTest.delete(request, response);

  verify(response).addCookie(cookieArgumentCaptor.capture());
  Cookie updatedCookie = cookieArgumentCaptor.getValue();
  assertThat(updatedCookie.getName()).isEqualTo(AUTHENTICATION_COOKIE_NAME);
  assertThat(updatedCookie.getValue()).isNull();
  assertThat(updatedCookie.getPath()).isEqualTo("/");
  assertThat(updatedCookie.getMaxAge()).isZero();
}
/**
 * Merges pending runtime updates (timeline events and artifacts) into this summary.
 *
 * <p>Two DEFAULT artifacts with the same key are merged by combining their data maps;
 * any other combination replaces the existing artifact wholesale. Whenever anything
 * changed, the summary is marked out-of-sync and the modify time is refreshed.
 *
 * @param pendingTimeline  timeline events to append.
 * @param pendingArtifacts artifacts to merge in; may be null or empty for no-op.
 */
public void mergeRuntimeUpdate(
    List<TimelineEvent> pendingTimeline, Map<String, Artifact> pendingArtifacts) {
  if (timeline.addAll(pendingTimeline)) {
    synced = false;
  }
  if (pendingArtifacts != null && !pendingArtifacts.isEmpty()) {
    for (Map.Entry<String, Artifact> entry : pendingArtifacts.entrySet()) {
      String key = entry.getKey();
      // Skip artifacts that are unchanged compared to what is already stored.
      if (!entry.getValue().equals(artifacts.get(key))) {
        if (artifacts.containsKey(key)
            && artifacts.get(key).getType() == Artifact.Type.DEFAULT
            && entry.getValue().getType() == Artifact.Type.DEFAULT) {
          // DEFAULT + DEFAULT: merge data maps rather than replacing the artifact.
          artifacts.get(key).asDefault().getData().putAll(entry.getValue().asDefault().getData());
        } else {
          artifacts.put(entry.getKey(), entry.getValue());
        }
        synced = false;
      }
    }
  }
  if (!synced) {
    runtimeState.setModifyTime(System.currentTimeMillis());
  }
}
// A FOREACH artifact is a non-DEFAULT type, so merging must REPLACE the stored artifact
// (overview becomes null) rather than merging fields; the summary must round-trip through
// JSON identically afterwards and be flagged as unsynced.
@Test
public void testMergeForeachArtifact() throws Exception {
  StepRuntimeSummary summary =
      loadObject(
          "fixtures/execution/sample-step-runtime-summary-3.json", StepRuntimeSummary.class);
  ForeachArtifact artifact = summary.getArtifacts().get(Artifact.Type.FOREACH.key()).asForeach();
  assertEquals("inline-wf", artifact.getForeachWorkflowId());
  assertEquals("foo", artifact.getForeachIdentity());
  assertEquals(10, artifact.getTotalLoopCount());
  assertEquals(0, artifact.getNextLoopIndex());
  assertEquals(
      5L,
      artifact.getForeachOverview().getStats().get(WorkflowInstance.Status.CREATED).longValue());
  assertEquals(
      1L,
      artifact
          .getForeachOverview()
          .getStats()
          .get(WorkflowInstance.Status.SUCCEEDED)
          .longValue());

  ForeachArtifact artifact1 = new ForeachArtifact();
  artifact1.setForeachWorkflowId("updated");
  assertTrue(summary.isSynced());
  summary.mergeRuntimeUpdate(
      null, Collections.singletonMap(Artifact.Type.FOREACH.key(), artifact1));
  assertFalse(summary.isSynced());

  // Serialization round-trip must be stable after the merge.
  String ser1 = MAPPER.writeValueAsString(summary);
  StepRuntimeSummary actual = MAPPER.readValue(ser1, StepRuntimeSummary.class);
  String ser2 = MAPPER.writeValueAsString(actual);
  assertEquals(summary, actual);
  assertEquals(ser1, ser2);
  assertEquals(
      "updated",
      summary.getArtifacts().get(Artifact.Type.FOREACH.key()).asForeach().getForeachWorkflowId());
  assertNull(
      summary.getArtifacts().get(Artifact.Type.FOREACH.key()).asForeach().getForeachOverview());
}
// Convenience overload: checks strict (no-duplicates) ordering under natural ordering.
public void isInStrictOrder() {
  isInStrictOrder(Ordering.natural());
}
// Strict-order assertion with a custom comparator: empty and singleton iterables pass,
// and numeric comparison must order "10" after "2" (unlike lexicographic ordering).
@Test
public void iterableIsInStrictOrderWithComparator() {
  Iterable<String> emptyStrings = asList();
  assertThat(emptyStrings).isInStrictOrder(COMPARE_AS_DECIMAL);
  assertThat(asList("1")).isInStrictOrder(COMPARE_AS_DECIMAL);
  // Note: Use "10" and "20" to distinguish numerical and lexicographical ordering.
  assertThat(asList("1", "2", "10", "20")).isInStrictOrder(COMPARE_AS_DECIMAL);
}
/**
 * Selects the partitions that are eligible for compaction right now: their scheduled
 * next-compaction time has arrived, their table is not excluded, and they satisfy
 * {@link #isReadyForCompaction}.
 */
@Override
@NotNull
public List<PartitionStatistics> select(Collection<PartitionStatistics> statistics, Set<Long> excludeTables) {
    long now = System.currentTimeMillis();
    return statistics.stream()
            .filter(stats -> stats.getNextCompactionTime() <= now
                    && !excludeTables.contains(stats.getPartition().getTableId())
                    && isReadyForCompaction(stats, now))
            .collect(Collectors.toList());
}
// A partition whose accumulated version count is below MIN_COMPACTION_VERSIONS must not
// be selected for compaction.
@Test
public void testVersionCountNotReached() {
    List<PartitionStatistics> statisticsList = new ArrayList<>();
    final PartitionIdentifier partitionIdentifier = new PartitionIdentifier(1, 2, 3);
    PartitionStatistics statistics = new PartitionStatistics(partitionIdentifier);
    statistics.setCurrentVersion(new PartitionVersion(MIN_COMPACTION_VERSIONS - 1, System.currentTimeMillis()));
    statistics.setCompactionVersion(new PartitionVersion(1, 0));
    statisticsList.add(statistics);

    Assert.assertEquals(0, selector.select(statisticsList, new HashSet<Long>()).size());
}
// Builds into a fresh Config instance; see build(Config) for the actual population logic.
@Override
public Config build() {
    return build(new Config());
}
// Building from a file path must record the configuration file (and leave the URL unset).
@Override
@Test
public void testConfigurationWithFileName() throws Exception {
    File file = createTempFile("foo", "bar");
    file.deleteOnExit();

    String xml = HAZELCAST_START_TAG
            + "    <cluster-name>foobar</cluster-name>\n"
            + HAZELCAST_END_TAG;
    Writer writer = new PrintWriter(file, StandardCharsets.UTF_8);
    writer.write(xml);
    writer.close();

    String path = file.getAbsolutePath();
    Config config = new XmlConfigBuilder(path).build();
    assertEquals(path, config.getConfigurationFile().getAbsolutePath());
    assertNull(config.getConfigurationUrl());
}
/**
 * Ensures every ParDo instruction in the MapTask has MultiOutputInfos consistent with its
 * declared number of outputs. A single-output ParDo with no tags gets a generated tag;
 * any other mismatch is rejected.
 *
 * @throws IllegalArgumentException if a multi-output ParDo declares a different number of
 *     outputs than it has tags for.
 */
@Override
public MapTask apply(MapTask input) {
  for (ParallelInstruction instruction : Apiary.listOrEmpty(input.getInstructions())) {
    ParDoInstruction parDoInstruction = instruction.getParDo();
    if (parDoInstruction != null) {
      int numOutputs = Apiary.intOrZero(parDoInstruction.getNumOutputs());
      List<MultiOutputInfo> multiOutputInfos =
          Apiary.listOrEmpty(parDoInstruction.getMultiOutputInfos());
      // Use the already-normalized list instead of re-fetching (and re-normalizing) it.
      if (numOutputs != multiOutputInfos.size()) {
        if (numOutputs == 1) {
          // Legacy single-output ParDos may omit tags; synthesize one.
          parDoInstruction.setMultiOutputInfos(
              ImmutableList.of(new MultiOutputInfo().setTag(idGenerator.getId())));
        } else {
          throw new IllegalArgumentException(
              String.format(
                  "Invalid ParDoInstruction %s, %d outputs specified, found %s tags.",
                  instruction.getSystemName(), numOutputs, multiOutputInfos));
        }
      }
    }
  }
  return input;
}
// When the declared output count already matches the provided tags, the MapTask must pass
// through unchanged (no tags generated or replaced).
@Test
public void testExistingMultiOutputInfosAreUnmodified() {
  FixMultiOutputInfosOnParDoInstructions function =
      new FixMultiOutputInfosOnParDoInstructions(IdGenerators.decrementingLongs());
  MapTask output = function.apply(createMapTaskWithParDo(2, "5", "6"));
  assertEquals(createMapTaskWithParDo(2, "5", "6"), output);
}
// REST endpoint: deletes an asset profile by id. Restricted to TENANT_ADMIN; the profile
// is permission-checked for the DELETE operation before the service-layer delete runs.
@ApiOperation(value = "Delete asset profile (deleteAssetProfile)",
        notes = "Deletes the asset profile. Referencing non-existing asset profile Id will cause an error. " +
                "Can't delete the asset profile if it is referenced by existing assets." + TENANT_AUTHORITY_PARAGRAPH)
@PreAuthorize("hasAuthority('TENANT_ADMIN')")
@RequestMapping(value = "/assetProfile/{assetProfileId}", method = RequestMethod.DELETE)
@ResponseStatus(value = HttpStatus.OK)
public void deleteAssetProfile(
        @Parameter(description = ASSET_PROFILE_ID_PARAM_DESCRIPTION)
        @PathVariable(ASSET_PROFILE_ID) String strAssetProfileId) throws ThingsboardException {
    checkParameter(ASSET_PROFILE_ID, strAssetProfileId);
    AssetProfileId assetProfileId = new AssetProfileId(toUUID(strAssetProfileId));
    AssetProfile assetProfile = checkAssetProfileId(assetProfileId, Operation.DELETE);
    tbAssetProfileService.delete(assetProfile, getCurrentUser());
}
// End-to-end delete: DELETE returns 200, a DELETED state-change event is emitted once,
// and a subsequent GET returns 404 with a "not found" reason.
@Test
public void testDeleteAssetProfile() throws Exception {
    AssetProfile assetProfile = this.createAssetProfile("Asset Profile");
    AssetProfile savedAssetProfile = doPost("/api/assetProfile", assetProfile, AssetProfile.class);

    Mockito.reset(tbClusterService, auditLogService);

    doDelete("/api/assetProfile/" + savedAssetProfile.getId().getId().toString())
            .andExpect(status().isOk());

    String savedAssetProfileIdStr = savedAssetProfile.getId().getId().toString();
    testNotifyEntityBroadcastEntityStateChangeEventOneTime(savedAssetProfile, savedAssetProfile.getId(), savedAssetProfile.getId(),
            savedTenant.getId(), tenantAdmin.getCustomerId(), tenantAdmin.getId(), tenantAdmin.getEmail(),
            ActionType.DELETED, savedAssetProfileIdStr);

    doGet("/api/assetProfile/" + savedAssetProfile.getId().getId().toString())
            .andExpect(status().isNotFound())
            .andExpect(statusReason(containsString(msgErrorNoFound("Asset profile", savedAssetProfileIdStr))));
}
/**
 * Persists a detached object graph with PERSIST cascade semantics.
 *
 * <p>The identity map passed down tracks objects already persisted during this call so
 * that cyclic references in the graph are handled exactly once.
 *
 * @param detachedObject the detached object to persist; must not be null.
 * @return the attached (live) proxy for the persisted object.
 */
@Override
public <T> T persist(T detachedObject) {
    // Diamond operator: the target type fully determines the type arguments.
    Map<Object, Object> alreadyPersisted = new HashMap<>();
    return persist(detachedObject, alreadyPersisted, RCascadeType.PERSIST);
}
// Boolean "is"-style accessors must survive the live-object persist round-trip.
@Test
public void testIsAccessor() {
    HasIsAccessor o = new HasIsAccessor();
    o.setGood(true);
    o = redisson.getLiveObjectService().persist(o);
    assertThat(o.isGood()).isEqualTo(true);
}
/**
 * Mutation is not supported by this implementation.
 *
 * @throws UnsupportedOperationException always — include a message so failures are
 *     self-explanatory instead of a bare stack trace.
 */
@Override
public void addAll(PartitionIdSet other) {
    throw new UnsupportedOperationException("addAll is not supported by this PartitionIdSet");
}
// addAll must always reject mutation with UnsupportedOperationException.
@Test(expected = UnsupportedOperationException.class)
public void test_addAllPartitionIdSet() {
    set.addAll(new PartitionIdSet(11, asList(6, 8, 9)));
}
/**
 * Validates a user name against the configured user-name regex.
 *
 * @param userName candidate name; null-handling is delegated to regexChecks
 *     (the test suite expects null to be rejected).
 * @return true if the name matches {@code Constants.REGEX_USER_NAME}.
 */
public static boolean checkUserName(String userName) {
    return regexChecks(userName, Constants.REGEX_USER_NAME);
}
// Valid alphanumeric names pass; null and names with illegal characters ('@') fail.
@Test
public void testCheckUserName() {
    Assertions.assertTrue(CheckUtils.checkUserName("test01"));

    Assertions.assertFalse(CheckUtils.checkUserName(null));

    Assertions.assertFalse(CheckUtils.checkUserName("test01@abc"));
}
/**
 * Capitalizes the first letter of each whitespace-delimited word and lowercases the rest,
 * preserving the original whitespace. Returns null for null input.
 */
@Udf(description = "Returns the string with the first letter"
    + " of each word capitalized and the rest lowercased")
public String initcap(
    @UdfParameter(
        description = "The source string."
            + " If null, then function returns null.") final String str) {
  if (str == null) {
    return null;
  }
  // NOTE(review): ideally this Pattern would be a static final constant; it is compiled
  // per call here only to keep the change local to this method.
  final Pattern pattern = Pattern.compile("[^\\s]+\\s*");
  // NOTE(review): default-locale lowercasing — assumes locale-sensitive behavior is
  // acceptable for this UDF; confirm before switching to Locale.ROOT.
  final Matcher matcher = pattern.matcher(str.toLowerCase());
  // Build with StringBuilder instead of repeated String.concat (which is O(n^2)).
  final StringBuilder initCapped = new StringBuilder(str.length());
  while (matcher.find()) {
    final String part = matcher.group();
    initCapped.append(part.substring(0, 1).toUpperCase()).append(part.substring(1));
  }
  return initCapped.toString();
}
// Null input must propagate as null (SQL-style null semantics for the UDF).
@Test
public void shouldHandleNull() {
  assertThat(udf.initcap(null), isEmptyOrNullString());
}
/**
 * Returns the XmlPullParserFactory to use: an explicitly configured factory if one was
 * set, otherwise the first factory discovered via the Service Provider Interface.
 *
 * @throws IllegalStateException if no factory was configured and none is registered via
 *     SPI (i.e. neither smack-xmlparser-xpp3 nor smack-xmlparser-stax is on the classpath).
 */
public static XmlPullParserFactory getXmlPullParserFactory() {
    // Read the field once so the null check and the return observe the same value.
    final XmlPullParserFactory xmlPullParserFactory = SmackXmlParser.xmlPullParserFactory;
    if (xmlPullParserFactory != null) {
        return xmlPullParserFactory;
    }

    Iterator<XmlPullParserFactory> iterator = xmlPullParserFactoryServiceLoader.iterator();
    if (!iterator.hasNext()) {
        throw new IllegalStateException(
                        "No XmlPullParserFactory registered with Service Provider Interface (SPI). Is smack-xmlparser-xpp3 or smack-xmlparser-stax in classpath?");
    }
    return iterator.next();
}
// Without any SPI-provided parser factory on the classpath, lookup must fail loudly.
@Test
public void testSmackXmlParser() {
    assertThrows(IllegalStateException.class, () -> SmackXmlParser.getXmlPullParserFactory());
}
// Forwards the resource manager's heartbeat to the heartbeat manager; this task executor
// sends no payload back (hence the null payload).
@Override
public CompletableFuture<Void> heartbeatFromResourceManager(ResourceID resourceID) {
    return resourceManagerHeartbeatManager.requestHeartbeat(resourceID, null);
}
// When heartbeat RPCs to the resource manager fail with RecipientUnreachableException,
// the task executor must eventually disconnect from it.
@Test
void testResourceManagerBecomesUnreachableTriggersDisconnect() throws Exception {
    runResourceManagerHeartbeatTest(
            failedRpcEnabledHeartbeatServices,
            (rmGateway) ->
                    rmGateway.setTaskExecutorHeartbeatFunction(
                            (resourceID, taskExecutorHeartbeatPayload) ->
                                    FutureUtils.completedExceptionally(
                                            new RecipientUnreachableException(
                                                    "sender",
                                                    "recipient",
                                                    "resource manager is unreachable."))),
            (taskExecutorGateway, rmResourceId, taskExecutorDisconnectFuture) ->
                    CommonTestUtils.waitUntilCondition(
                            () -> {
                                // request heartbeats until the disconnect future is completed
                                taskExecutorGateway.heartbeatFromResourceManager(rmResourceId);
                                return taskExecutorDisconnectFuture.isDone();
                            },
                            50L));
}
/**
 * Decodes an ABI-encoded fixed-length array from the input string.
 *
 * @param input         hex-encoded ABI data.
 * @param offset        character offset into the input where the array starts.
 * @param typeReference reference carrying the element type.
 * @param length        declared array length.
 * @return the decoded static array instance.
 * @throws UnsupportedOperationException for zero-length arrays, which the ABI
 *     type system does not permit.
 */
@SuppressWarnings("unchecked")
public static <T extends Type> T decodeStaticArray(
        String input, int offset, TypeReference<T> typeReference, int length) {

    // Invoked once the elements have been decoded; rejects the degenerate empty case.
    BiFunction<List<T>, String, T> function =
            (elements, typeName) -> {
                if (elements.isEmpty()) {
                    throw new UnsupportedOperationException(
                            "Zero length fixed array is invalid type");
                } else {
                    return instantiateStaticArray(elements, length);
                }
            };

    return decodeArrayElements(input, offset, typeReference, length, function);
}
// Decoding a zero-length static array must be rejected as an invalid ABI type.
@Test
public void testEmptyStaticArray() {
    assertThrows(
            UnsupportedOperationException.class,
            () ->
                    TypeDecoder.decodeStaticArray(
                            "0000000000000000000000000000000000000000000000000000000000000000",
                            0,
                            new TypeReference.StaticArrayTypeReference<StaticArray<Uint256>>(
                                    0) {},
                            0));
}
/**
 * Builds the Kafka-backed log used for the config topic: derives producer, consumer and
 * admin client properties from the worker config, enforces read-committed consumption
 * when exactly-once source support is on, and describes the (compacted, single-partition)
 * config topic so it can be created if missing.
 */
KafkaBasedLog<String, byte[]> setupAndCreateKafkaBasedLog(String topic, final WorkerConfig config) {
    String clusterId = config.kafkaClusterId();
    Map<String, Object> originals = config.originals();

    Map<String, Object> producerProps = new HashMap<>(baseProducerProps);
    producerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId);

    Map<String, Object> consumerProps = new HashMap<>(originals);
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    consumerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId);
    ConnectUtils.addMetricsContextProperties(consumerProps, config, clusterId);
    if (config.exactlyOnceSourceEnabled()) {
        // EOS requires the config topic consumer to only see committed records.
        ConnectUtils.ensureProperty(
                consumerProps, ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.toString(),
                "for the worker's config topic consumer when exactly-once source support is enabled",
                true
        );
    }

    Map<String, Object> adminProps = new HashMap<>(originals);
    ConnectUtils.addMetricsContextProperties(adminProps, config, clusterId);
    adminProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId);

    Map<String, Object> topicSettings = config instanceof DistributedConfig
            ? ((DistributedConfig) config).configStorageTopicSettings()
            : Collections.emptyMap();
    NewTopic topicDescription = TopicAdmin.defineTopic(topic)
            .config(topicSettings) // first so that we override user-supplied settings as needed
            .compacted()
            .partitions(1)
            .replicationFactor(config.getShort(DistributedConfig.CONFIG_STORAGE_REPLICATION_FACTOR_CONFIG))
            .build();

    return createKafkaBasedLog(topic, producerProps, consumerProps, new ConsumeCallback(), topicDescription, topicAdminSupplier, config, time);
}
// With exactly-once source enabled and no explicit isolation level, the config topic
// consumer must default to read_committed.
@Test
public void testConsumerPropertiesInsertedByDefaultWithExactlyOnceSourceEnabled() {
    props.put(EXACTLY_ONCE_SOURCE_SUPPORT_CONFIG, "enabled");
    props.remove(ISOLATION_LEVEL_CONFIG);
    createStore();

    configStorage.setupAndCreateKafkaBasedLog(TOPIC, config);
    verifyConfigure();

    assertEquals(
            IsolationLevel.READ_COMMITTED.toString(),
            capturedConsumerProps.getValue().get(ISOLATION_LEVEL_CONFIG)
    );
}
// Entry point for the Cassandra read transform; all options (hosts, keyspace, table,
// entity, coder, …) are supplied via the returned builder's with* methods.
public static <T> Read<T> read() {
  return new AutoValue_CassandraIO_Read.Builder<T>().build();
}
// Full-table read: all rows are returned, and rows distribute evenly across the 10
// distinct scientist names (plus the count assertion per key).
@Test
public void testRead() throws Exception {
  PCollection<Scientist> output =
      pipeline.apply(
          CassandraIO.<Scientist>read()
              .withHosts(Collections.singletonList(CASSANDRA_HOST))
              .withPort(cassandraPort)
              .withKeyspace(CASSANDRA_KEYSPACE)
              .withTable(CASSANDRA_TABLE)
              .withMinNumberOfSplits(50)
              .withCoder(SerializableCoder.of(Scientist.class))
              .withEntity(Scientist.class));

  PAssert.thatSingleton(output.apply("Count", Count.globally())).isEqualTo(NUM_ROWS);

  PCollection<KV<String, Integer>> mapped =
      output.apply(
          MapElements.via(
              new SimpleFunction<Scientist, KV<String, Integer>>() {
                @Override
                public KV<String, Integer> apply(Scientist scientist) {
                  return KV.of(scientist.name, scientist.id);
                }
              }));
  PAssert.that(mapped.apply("Count occurrences per scientist", Count.perKey()))
      .satisfies(
          input -> {
            int count = 0;
            for (KV<String, Long> element : input) {
              count++;
              assertEquals(element.getKey(), NUM_ROWS / 10, element.getValue().longValue());
            }
            assertEquals(11, count);
            return null;
          });

  pipeline.run();
}
/**
 * Evaluates every MapParameter in the given groups of step dependency/output parameters,
 * resolving references against step outputs, workflow params and the current step's params.
 */
public void evaluateStepDependenciesOrOutputsParameters(
    Map<String, Map<String, Object>> allStepOutputData,
    Map<String, Parameter> workflowParams,
    Map<String, Parameter> stepParams,
    Collection<List<MapParameter>> parameters,
    String stepId) {
  for (List<MapParameter> group : parameters) {
    for (MapParameter mapParameter : group) {
      parseStepParameter(allStepOutputData, workflowParams, stepParams, mapParameter, stepId);
    }
  }
}
// Exercises signal-parameter evaluation across four scenarios: step-scoped references
// (${step1__foo}), plain references (${foo}), literal values, and SEL expressions —
// both for plain string params and SignalParamDefinition-wrapped params.
@Test
public void testEvaluateSignalParameters() {
  // Scenario 1: step-scoped references "${step1__foo}" resolve via the step's own params.
  Map<String, ParamDefinition> paramDefMap = new LinkedHashMap<>();
  paramDefMap.put(
      "name", StringParamDefinition.builder().name("name").value("signal ${step1__foo}").build());
  paramDefMap.put(
      "bar",
      SignalParamDefinition.builder()
          .operator(SignalOperator.EQUALS_TO)
          .parameter(
              StringParamDefinition.builder().name("bar").value("test ${step1__foo}").build())
          .build());
  MapParamDefinition mapParamDefinition = MapParamDefinition.builder().value(paramDefMap).build();
  MapParameter mapParameter = (MapParameter) mapParamDefinition.toParameter();
  paramEvaluator.evaluateStepDependenciesOrOutputsParameters(
      Collections.emptyMap(),
      Collections.emptyMap(),
      Collections.singletonMap("foo", StringParameter.builder().name("foo").value("123").build()),
      Collections.singletonList(Collections.singletonList(mapParameter)),
      "step1");
  assertEquals("test 123", mapParameter.getEvaluatedParam("bar").getEvaluatedResult());
  assertEquals("signal 123", mapParameter.getEvaluatedParam("name").getEvaluatedResultString());

  // Scenario 2: unscoped references "${foo}" also resolve against the step's params.
  paramDefMap = new LinkedHashMap<>();
  paramDefMap.put(
      "name", StringParamDefinition.builder().name("name").value("signal ${foo}").build());
  paramDefMap.put(
      "bar",
      SignalParamDefinition.builder()
          .operator(SignalOperator.EQUALS_TO)
          .parameter(StringParamDefinition.builder().name("bar").value("test ${foo}").build())
          .build());
  mapParamDefinition = MapParamDefinition.builder().value(paramDefMap).build();
  mapParameter = (MapParameter) mapParamDefinition.toParameter();
  paramEvaluator.evaluateStepDependenciesOrOutputsParameters(
      Collections.emptyMap(),
      Collections.emptyMap(),
      Collections.singletonMap("foo", StringParameter.builder().name("foo").value("123").build()),
      Collections.singletonList(Collections.singletonList(mapParameter)),
      "step1");
  assertEquals("test 123", mapParameter.getEvaluatedParam("bar").getEvaluatedResult());

  // Scenario 3: plain literals pass through unchanged.
  paramDefMap = new LinkedHashMap<>();
  paramDefMap.put(
      "name",
      StringParamDefinition.builder().name("name").value("signal123").build());
  paramDefMap.put(
      "bar",
      SignalParamDefinition.builder()
          .operator(SignalOperator.EQUALS_TO)
          .parameter(StringParamDefinition.builder().name("bar").value("test12").build())
          .build());
  mapParamDefinition = MapParamDefinition.builder().value(paramDefMap).build();
  mapParameter = (MapParameter) mapParamDefinition.toParameter();
  paramEvaluator.evaluateStepDependenciesOrOutputsParameters(
      Collections.emptyMap(),
      Collections.emptyMap(),
      Collections.singletonMap("foo", StringParameter.builder().name("foo").value("123").build()),
      Collections.singletonList(Collections.singletonList(mapParameter)),
      "step1");
  assertEquals("test12", mapParameter.getEvaluatedParam("bar").getEvaluatedResult());
  assertEquals("signal123", mapParameter.getEvaluatedParam("name").getEvaluatedResult());

  // Scenario 4: SEL expressions ('signal' + foo) evaluate against the step params; also
  // covers passing the parameter groups via a typed map's values() view.
  paramDefMap = new LinkedHashMap<>();
  paramDefMap.put(
      "name", StringParamDefinition.builder().name("name").expression("'signal' + foo").build());
  paramDefMap.put(
      "bar",
      SignalParamDefinition.builder()
          .operator(SignalOperator.EQUALS_TO)
          .parameter(
              StringParamDefinition.builder().name("bar").expression("'test' + foo").build())
          .build());
  mapParamDefinition = MapParamDefinition.builder().value(paramDefMap).build();
  mapParameter = (MapParameter) mapParamDefinition.toParameter();
  paramEvaluator.evaluateStepDependenciesOrOutputsParameters(
      Collections.emptyMap(),
      Collections.emptyMap(),
      Collections.singletonMap("foo", StringParameter.builder().name("foo").value("123").build()),
      Collections.singletonMap(StepDependencyType.SIGNAL, Collections.singletonList(mapParameter))
          .values(),
      "step1");
  assertEquals("test123", mapParameter.getEvaluatedParam("bar").getEvaluatedResult());
  assertEquals("signal123", mapParameter.getEvaluatedParam("name").getEvaluatedResult());

  // Scenario 5: bare-identifier expressions resolve directly to the param value.
  paramDefMap = new LinkedHashMap<>();
  paramDefMap.put("name", StringParamDefinition.builder().name("name").expression("foo").build());
  paramDefMap.put(
      "bar",
      SignalParamDefinition.builder()
          .operator(SignalOperator.EQUALS_TO)
          .parameter(StringParamDefinition.builder().name("bar").expression("foo").build())
          .build());
  mapParamDefinition = MapParamDefinition.builder().value(paramDefMap).build();
  mapParameter = (MapParameter) mapParamDefinition.toParameter();
  paramEvaluator.evaluateStepDependenciesOrOutputsParameters(
      Collections.emptyMap(),
      Collections.emptyMap(),
      Collections.singletonMap("foo", StringParameter.builder().name("foo").value("123").build()),
      Collections.singletonList(Collections.singletonList(mapParameter)),
      "step1");
  assertEquals("123", mapParameter.getEvaluatedParam("bar").getEvaluatedResult());
  assertEquals("123", mapParameter.getEvaluatedParam("name").getEvaluatedResult());
}
/**
 * Heuristic sanity check of this cron spec: warns about "every minute" patterns where a
 * lower field is sparse, short cycles in the day-of-month field, and specs that could
 * spread load better with the H (hash) syntax.
 *
 * @return a human-readable warning message, or null when the spec looks sane.
 */
public @CheckForNull String checkSanity() {
    OUTER:
    for (int i = 0; i < 5; i++) {
        // Rank order: minutes, hours, day-of-month, month, then day-of-week (stored apart).
        long bitMask = i < 4 ? bits[i] : (long) dayOfWeek;
        for (int j = BaseParser.LOWER_BOUNDS[i]; j <= BaseParser.UPPER_BOUNDS[i]; j++) {
            if (!checkBits(bitMask, j)) {
                // this rank has a sparse entry.
                // if we have a sparse rank, one of them better be the left-most.
                if (i > 0)
                    return Messages.CronTab_do_you_really_mean_every_minute_when_you(spec,
                            "H " + spec.substring(spec.indexOf(' ') + 1));
                // once we find a sparse rank, upper ranks don't matter
                break OUTER;
            }
        }
    }

    // Count how many days of the month are selected; a mid-range count suggests an
    // unintended short cycle (e.g. */3) that drifts at month boundaries.
    int daysOfMonth = 0;
    for (int i = 1; i < 31; i++) {
        if (checkBits(bits[2], i)) {
            daysOfMonth++;
        }
    }
    if (daysOfMonth > 5 && daysOfMonth < 28) { // a bit arbitrary
        return Messages.CronTab_short_cycles_in_the_day_of_month_field_w();
    }

    // Suggest the hash (H) form when it would spread scheduled load more evenly.
    String hashified = hashify(spec);
    if (hashified != null) {
        return Messages.CronTab_spread_load_evenly_by_using_rather_than_(hashified, spec);
    }

    return null;
}
// Exercises the checkSanity() heuristics: "every minute" warnings, hash-promotion
// suggestions, short day-of-month cycles, and specs that are accepted as-is.
@Test
public void checkSanity() throws Exception {
    assertNull(new CronTab("@hourly").checkSanity());
    assertEquals(Messages.CronTab_do_you_really_mean_every_minute_when_you("* * * * *", "H * * * *"), new CronTab("* * * * *").checkSanity());
    assertEquals(Messages.CronTab_do_you_really_mean_every_minute_when_you("*/1 * * * *", "H * * * *"), new CronTab("*/1 * * * *").checkSanity());
    assertNull(new CronTab("H H(0-2) * * *", Hash.from("stuff")).checkSanity());
    assertEquals(Messages.CronTab_do_you_really_mean_every_minute_when_you("* 0 * * *", "H 0 * * *"), new CronTab("* 0 * * *").checkSanity());
    assertEquals(Messages.CronTab_do_you_really_mean_every_minute_when_you("* 6,18 * * *", "H 6,18 * * *"), new CronTab("* 6,18 * * *").checkSanity());
    // dubious; could be improved:
    assertEquals(Messages.CronTab_do_you_really_mean_every_minute_when_you("* * 3 * *", "H * 3 * *"), new CronTab("* * 3 * *").checkSanity());
    // promote hashes:
    assertEquals(Messages.CronTab_spread_load_evenly_by_using_rather_than_("H/15 * * * *", "*/15 * * * *"), new CronTab("*/15 * * * *").checkSanity());
    assertEquals(Messages.CronTab_spread_load_evenly_by_using_rather_than_("H/15 * * * *", "0,15,30,45 * * * *"), new CronTab("0,15,30,45 * * * *").checkSanity());
    assertEquals(Messages.CronTab_spread_load_evenly_by_using_rather_than_("H * * * *", "0 * * * *"), new CronTab("0 * * * *").checkSanity());
    assertEquals(Messages.CronTab_spread_load_evenly_by_using_rather_than_("H * * * *", "5 * * * *"), new CronTab("5 * * * *").checkSanity());
    // if the user specifically asked for 3:00 AM, probably we should stick to 3:00–3:59
    assertEquals(Messages.CronTab_spread_load_evenly_by_using_rather_than_("H 3 * * *", "0 3 * * *"), new CronTab("0 3 * * *").checkSanity());
    assertEquals(Messages.CronTab_spread_load_evenly_by_using_rather_than_("H 22 * * 6", "00 22 * * 6"), new CronTab("00 22 * * 6").checkSanity());
    assertNull(new CronTab("H/15 * 1 1 *").checkSanity());
    assertNull(new CronTab("0 3 H/15 * *").checkSanity());
    // short day-of-month cycles are flagged:
    assertEquals(Messages.CronTab_short_cycles_in_the_day_of_month_field_w(), new CronTab("0 3 H/3 * *").checkSanity());
    assertEquals(Messages.CronTab_short_cycles_in_the_day_of_month_field_w(), new CronTab("0 3 */5 * *").checkSanity());
}
/**
 * Runs the project-measures support query against Elasticsearch and aggregates the
 * response into {@link ProjectMeasuresStatistics}.
 */
public ProjectMeasuresStatistics searchSupportStatistics() {
  SearchResponse searchResponse = client.search(buildProjectMeasureSearchRequest());
  return buildProjectMeasuresStatistics(searchResponse);
}
// Statistics aggregation must stay exact at scale: with 25k project documents, project
// counts per language and summed ncloc per language must match the generated data.
@Test
public void search_statistics_for_large_instances() {
  int nbProjects = 25000;
  int javaLocByProjects = 100;
  int jsLocByProjects = 900;
  int csLocByProjects = 2;

  ProjectMeasuresDoc[] documents = IntStream.range(0, nbProjects).mapToObj(i -> newDoc("lines", 10, "coverage", 80)
    .setLanguages(asList("java", "cs", "js"))
    .setNclocLanguageDistributionFromMap(ImmutableMap.of("java", javaLocByProjects, "cs", csLocByProjects, "js", jsLocByProjects))).toArray(ProjectMeasuresDoc[]::new);

  es.putDocuments(TYPE_PROJECT_MEASURES, documents);

  ProjectMeasuresStatistics result = underTest.searchSupportStatistics();

  assertThat(result.getProjectCount()).isEqualTo(nbProjects);
  assertThat(result.getProjectCountByLanguage())
    .hasSize(3)
    .containsEntry("java", (long) nbProjects)
    .containsEntry("cs", (long) nbProjects)
    .containsEntry("js", (long) nbProjects);
  assertThat(result.getNclocByLanguage())
    .hasSize(3)
    .containsEntry("java", (long) nbProjects * javaLocByProjects)
    .containsEntry("cs", (long) nbProjects * csLocByProjects)
    .containsEntry("js", (long) nbProjects * jsLocByProjects);
}
// Trims optional whitespace (OWS) from the queried value before delegating, mirroring how
// combined header values are stored after trimming.
@Override
public boolean containsValue(CharSequence name, CharSequence value, boolean ignoreCase) {
    return super.containsValue(name, StringUtil.trimOws(value), ignoreCase);
}
// Values are OWS-trimmed on insert and on containsValue lookups; quoted combined values
// ("c, d") match only as a whole, never split at the embedded comma.
@Test
public void owsTrimming() {
    final CombinedHttpHeaders headers = newCombinedHttpHeaders();
    headers.set(HEADER_NAME, Arrays.asList("\ta", "   ", "  b ", "\t \t"));
    headers.add(HEADER_NAME, " c, d \t");

    assertEquals(Arrays.asList("a", "", "b", "", "c, d"), headers.getAll(HEADER_NAME));
    assertEquals("a,,b,,\"c, d\"", headers.get(HEADER_NAME));

    assertTrue(headers.containsValue(HEADER_NAME, "a", true));
    assertTrue(headers.containsValue(HEADER_NAME, " a ", true));
    assertTrue(headers.containsValue(HEADER_NAME, "a", true));
    assertFalse(headers.containsValue(HEADER_NAME, "a,b", true));

    assertFalse(headers.containsValue(HEADER_NAME, " c, d ", true));
    assertFalse(headers.containsValue(HEADER_NAME, "c, d", true));
    assertTrue(headers.containsValue(HEADER_NAME, " c ", true));
    assertTrue(headers.containsValue(HEADER_NAME, "d", true));

    assertTrue(headers.containsValue(HEADER_NAME, "\t", true));
    assertTrue(headers.containsValue(HEADER_NAME, "", true));

    assertFalse(headers.containsValue(HEADER_NAME, "e", true));

    HttpHeaders copiedHeaders = newCombinedHttpHeaders().add(headers);
    assertEquals(Arrays.asList("a", "", "b", "", "c, d"), copiedHeaders.getAll(HEADER_NAME));
}
/**
 * Creates the database home directory (taken from the required data-path
 * setting) if it does not yet exist, then starts the embedded server in it.
 *
 * @throws IllegalStateException if the data directory cannot be created
 */
@Override
public void start() {
    File dbHome = new File(getRequiredSetting(PATH_DATA.getKey()));
    // mkdirs() returns false on failure; the original ignored that result,
    // letting startServer fail later with a less clear error. The second
    // exists() check makes this safe against a concurrent creation race.
    if (!dbHome.exists() && !dbHome.mkdirs() && !dbHome.exists()) {
        throw new IllegalStateException("Unable to create database directory " + dbHome.getAbsolutePath());
    }
    startServer(dbHome);
}
/**
 * Starting with a JDBC URL set but no username/password configured must
 * still bring the embedded database up, reachable with empty credentials.
 */
@Test
public void start_ignores_URL_to_create_database_and_uses_empty_username_and_password_when_then_are_not_set() throws IOException {
    int availablePort = NetworkUtilsImpl.INSTANCE.getNextLoopbackAvailablePort();
    settings
        .setProperty(PATH_DATA.getKey(), temporaryFolder.newFolder().getAbsolutePath())
        .setProperty(JDBC_URL.getKey(), "jdbc url")
        .setProperty(JDBC_EMBEDDED_PORT.getKey(), String.valueOf(availablePort));

    underTest.start();

    // Empty username and password: the configured URL must have been ignored.
    checkDbIsUp(availablePort, "", "");
}
/**
 * Returns the process-wide singleton, creating it lazily on first use.
 *
 * <p>The original used double-checked locking, which is only safe when the
 * {@code instance} field is declared {@code volatile} — a declaration not
 * visible in this file section. This version synchronizes the whole method
 * (same monitor as {@code synchronized (RaftRegistryServiceImpl.class)}),
 * which is plainly correct; the lock is uncontended after initialization,
 * so the overhead is negligible for a registry accessor.
 */
static synchronized RaftRegistryServiceImpl getInstance() {
    if (instance == null) {
        instance = new RaftRegistryServiceImpl();
    }
    return instance;
}
/**
 * Exercises the private refreshToken(String) method via reflection against a
 * mocked HTTP layer returning a "Login failed" payload (success=false), and
 * expects the invocation to throw.
 */
@Test
public void testLoginFailed() throws IOException, NoSuchMethodException {
    String jwtToken = "null";
    String responseBody = "{\"code\":\"401\",\"message\":\"Login failed\",\"data\":\"" + jwtToken + "\",\"success\":false}";
    try (MockedStatic<HttpClientUtil> mockedStatic = Mockito.mockStatic(HttpClientUtil.class)) {
        CloseableHttpResponse mockResponse = mock(CloseableHttpResponse.class);
        StatusLine mockStatusLine = mock(StatusLine.class);
        // HTTP 200 with success=false in the body: the failure is signalled
        // by the payload, not by the status code.
        when(mockResponse.getEntity()).thenReturn(new StringEntity(responseBody));
        when(mockResponse.getStatusLine()).thenReturn(mockStatusLine);
        when(mockStatusLine.getStatusCode()).thenReturn(HttpStatus.SC_OK);
        // Every doPost call, regardless of arguments, returns the mocked response.
        when(HttpClientUtil.doPost(any(String.class), any(Map.class), any(Map.class), any(int.class)))
                .thenReturn(mockResponse);
        // Use reflection to access and invoke the private method
        Method refreshTokenMethod = RaftRegistryServiceImpl.class.getDeclaredMethod("refreshToken", String.class);
        refreshTokenMethod.setAccessible(true);
        // Reflection wraps the method's failure — presumably in an
        // InvocationTargetException; the broad Exception expectation covers it.
        assertThrows(Exception.class,
                () -> refreshTokenMethod.invoke(RaftRegistryServiceImpl.getInstance(), "127.0.0.1:8092"));
    }
}
/**
 * Rewrites {@code inputFile} to {@code outputFile} with the given columns
 * pruned from the schema.
 *
 * @param conf Hadoop configuration used to access both files
 * @param inputFile source Parquet file
 * @param outputFile destination Parquet file
 * @param cols dotted column paths to remove
 * @throws IOException if reading the source or writing the destination fails
 */
public void pruneColumns(Configuration conf, Path inputFile, Path outputFile, List<String> cols) throws IOException {
    RewriteOptions options = new RewriteOptions.Builder(conf, inputFile, outputFile)
        .prune(cols)
        .build();
    ParquetRewriter rewriter = new ParquetRewriter(options);
    // Close in a finally block so the underlying output stream is released
    // even when processBlocks() throws; the original leaked the handle on
    // failure.
    try {
        rewriter.processBlocks();
    } finally {
        rewriter.close();
    }
}
@Test public void testPruneMultiColumns() throws Exception { // Create Parquet file String inputFile = createParquetFile("input"); String outputFile = createTempFile("output"); // Remove columns String cargs[] = {inputFile, outputFile, "Name", "Gender"}; List<String> cols = Arrays.asList("Name", "Gender"); columnPruner.pruneColumns(conf, new Path(inputFile), new Path(outputFile), cols); // Verify the schema are not changed for the columns not pruned ParquetMetadata pmd = ParquetFileReader.readFooter(conf, new Path(outputFile), ParquetMetadataConverter.NO_FILTER); MessageType schema = pmd.getFileMetaData().getSchema(); List<Type> fields = schema.getFields(); assertEquals(fields.size(), 2); assertEquals(fields.get(0).getName(), "DocId"); assertEquals(fields.get(1).getName(), "Links"); List<Type> subFields = fields.get(1).asGroupType().getFields(); assertEquals(subFields.size(), 2); assertEquals(subFields.get(0).getName(), "Backward"); assertEquals(subFields.get(1).getName(), "Forward"); // Verify the data are not changed for the columns not pruned List<String> prunePaths = Arrays.asList("Name", "Gender"); validateColumns(inputFile, prunePaths); }