focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Serializes this UDP datagram (header + payload) into a byte array and, when the
 * stored checksum is zero, computes the Internet checksum over the pseudo header
 * (IPv4 or IPv6 parent) plus the datagram itself, writing it back at header offset 6.
 */
@Override
public byte[] serialize() {
    // Serialize the payload first so the total length is known; setting the parent
    // lets the payload derive fields that depend on its enclosing header.
    byte[] payloadData = null;
    if (this.payload != null) {
        this.payload.setParent(this);
        payloadData = this.payload.serialize();
    }
    // UDP header is fixed at 8 bytes.
    this.length = (short) (8 + (payloadData == null ? 0 : payloadData.length));

    final byte[] data = new byte[this.length];
    final ByteBuffer bb = ByteBuffer.wrap(data);

    // Header layout: source port, destination port, length, checksum.
    bb.putShort((short) (this.sourcePort & 0xffff));
    bb.putShort((short) (this.destinationPort & 0xffff));
    bb.putShort(this.length);
    bb.putShort(this.checksum);
    if (payloadData != null) {
        bb.put(payloadData);
    }

    // Make sure an IPv4 parent advertises UDP as its payload protocol.
    if (this.parent != null && this.parent instanceof IPv4) {
        ((IPv4) this.parent).setProtocol(IPv4.PROTOCOL_UDP);
    }

    // compute checksum if needed (a stored checksum of 0 means "not yet computed")
    if (this.checksum == 0) {
        bb.rewind();
        int accumulation = 0;

        // compute pseudo header mac
        if (this.parent != null) {
            if (this.parent instanceof IPv4) {
                // IPv4 pseudo header: src addr, dst addr, protocol, UDP length.
                final IPv4 ipv4 = (IPv4) this.parent;
                accumulation += (ipv4.getSourceAddress() >> 16 & 0xffff)
                        + (ipv4.getSourceAddress() & 0xffff);
                accumulation += (ipv4.getDestinationAddress() >> 16 & 0xffff)
                        + (ipv4.getDestinationAddress() & 0xffff);
                accumulation += ipv4.getProtocol() & 0xff;
                accumulation += length & 0xffff;
            } else if (this.parent instanceof IPv6) {
                // IPv6 pseudo header: src addr, dst addr, padded next-header, length.
                final IPv6 ipv6 = (IPv6) this.parent;
                final int bbLength = Ip6Address.BYTE_LENGTH * 2 // IPv6 src, dst
                        + 2 // nextHeader (with padding)
                        + 4; // length
                final ByteBuffer bbChecksum = ByteBuffer.allocate(bbLength);
                bbChecksum.put(ipv6.getSourceAddress());
                bbChecksum.put(ipv6.getDestinationAddress());
                bbChecksum.put((byte) 0); // padding
                bbChecksum.put(ipv6.getNextHeader());
                bbChecksum.putInt(length);
                bbChecksum.rewind();
                for (int i = 0; i < bbLength / 2; ++i) {
                    accumulation += 0xffff & bbChecksum.getShort();
                }
            }
        }

        // Sum the datagram itself as 16-bit words.
        for (int i = 0; i < this.length / 2; ++i) {
            accumulation += 0xffff & bb.getShort();
        }
        // pad to an even number of shorts
        if (this.length % 2 > 0) {
            accumulation += (bb.get() & 0xff) << 8;
        }

        // Fold the carry bits and take the one's complement.
        accumulation = (accumulation >> 16 & 0xffff) + (accumulation & 0xffff);
        this.checksum = (short) (~accumulation & 0xffff);
        // Checksum field sits at byte offset 6 of the UDP header.
        bb.putShort(6, this.checksum);
    }
    return data;
}
@Test
public void testSerialize() {
    // Serialize the same datagram over both an IPv4 and an IPv6 parent header.
    final UDP datagram = new UDP();
    datagram.setSourcePort(0x50);
    datagram.setDestinationPort(0x60);

    datagram.setParent(ipv4);
    assertArrayEquals(bytePacketUDP4, datagram.serialize());

    // Clear the cached checksum before switching parents so it is recomputed.
    datagram.resetChecksum();
    datagram.setParent(ipv6);
    assertArrayEquals(bytePacketUDP6, datagram.serialize());
}
/**
 * Deletes the file or directory located at the given path string.
 * Delegates to {@code del(File)} after resolving the path; see that overload
 * for the exact deletion semantics and return value.
 *
 * @param fullFileOrDirPath absolute or relative path of the file or directory to delete
 * @return the result of the {@code del(File)} overload
 * @throws IORuntimeException when the underlying deletion fails
 */
public static boolean del(String fullFileOrDirPath) throws IORuntimeException {
    return del(file(fullFileOrDirPath));
}
@Test
@Disabled
public void delTest2() {
    // Deleting a non-existent file is expected to return true.
    final boolean result = FileUtil.del(Paths.get("e:/Hutool_test_3434543533409843.txt"));
    assertTrue(result);
}
/**
 * Routes incoming client events: verify failures are re-synced to the failing
 * peer only, every other client event is replicated to all servers.
 */
@Override
public void onEvent(Event event) {
    // Replication is meaningless in standalone mode.
    if (EnvUtil.getStandaloneMode()) {
        return;
    }
    if (event instanceof ClientEvent.ClientVerifyFailedEvent) {
        // A peer rejected our verify request: push the data to that peer again.
        syncToVerifyFailedServer((ClientEvent.ClientVerifyFailedEvent) event);
        return;
    }
    // Broadcast any other client change to every server.
    syncToAllServer((ClientEvent) event);
}
@Test
void testOnClientVerifyFailedEventWithPersistentClient() {
    // A persistent (non-ephemeral) client must never be re-synced by Distro.
    client = mock(Client.class);
    when(client.isEphemeral()).thenReturn(false);
    when(clientManager.getClient(CLIENT_ID)).thenReturn(client);

    final ClientEvent.ClientVerifyFailedEvent event =
            new ClientEvent.ClientVerifyFailedEvent(CLIENT_ID, MOCK_TARGET_SERVER);
    distroClientDataProcessor.onEvent(event);

    verify(distroProtocol, never()).syncToTarget(any(), any(), anyString(), anyLong());
    verify(distroProtocol, never()).sync(any(), any());
}
/**
 * {@inheritDoc}
 *
 * @return the single JDBC URL prefix recognized for Hive ({@code jdbc:hive2:})
 */
@Override
public Collection<String> getJdbcUrlPrefixes() {
    final String hivePrefix = "jdbc:hive2:";
    return Collections.singleton(hivePrefix);
}
@Test
void assertGetJdbcUrlPrefixes() {
    // The Hive database type advertises exactly one JDBC URL prefix.
    final DatabaseType databaseType = TypedSPILoader.getService(DatabaseType.class, "Hive");
    assertThat(databaseType.getJdbcUrlPrefixes(), is(Collections.singleton("jdbc:hive2:")));
}
/**
 * Loads a value through the delegate store while recording the call's latency
 * into the load probe, whether the delegate succeeds or throws.
 */
@Override
public T load(Long key) {
    final long begin = Timer.nanos();
    try {
        return delegate.load(key);
    } finally {
        // Record elapsed time even on exception paths.
        loadProbe.recordValue(Timer.nanosElapsed(begin));
    }
}
@Test
public void load() {
    final Long key = 1L;
    final String expected = "someValue";
    when(delegate.load(key)).thenReturn(expected);

    final String actual = queueStore.load(key);

    // The wrapper must return the delegate's value and record exactly one probe sample.
    assertEquals(expected, actual);
    assertProbeCalledOnce("load");
}
/**
 * Parses a CGM Specific Ops Control Point indication: validates length and optional
 * CRC, then dispatches to the typed callback matching the response Op Code.
 * Malformed packets go to {@code onInvalidDataReceived}; CRC mismatches go to
 * {@code onCGMSpecificOpsResponseReceivedWithCrcError}.
 */
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
    super.onDataReceived(device, data);

    // Minimum packet is Op Code (1 byte) plus at least one operand byte.
    if (data.size() < 2) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Read the Op Code
    final int opCode = data.getIntValue(Data.FORMAT_UINT8, 0);

    // Estimate the expected operand size based on the Op Code
    int expectedOperandSize;
    switch (opCode) {
        case OP_CODE_COMMUNICATION_INTERVAL_RESPONSE ->
                // UINT8
                expectedOperandSize = 1;
        case OP_CODE_CALIBRATION_VALUE_RESPONSE ->
                // Calibration Value
                expectedOperandSize = 10;
        case OP_CODE_PATIENT_HIGH_ALERT_LEVEL_RESPONSE,
             OP_CODE_PATIENT_LOW_ALERT_LEVEL_RESPONSE,
             OP_CODE_HYPO_ALERT_LEVEL_RESPONSE,
             OP_CODE_HYPER_ALERT_LEVEL_RESPONSE,
             OP_CODE_RATE_OF_DECREASE_ALERT_LEVEL_RESPONSE,
             OP_CODE_RATE_OF_INCREASE_ALERT_LEVEL_RESPONSE ->
                // SFLOAT
                expectedOperandSize = 2;
        case OP_CODE_RESPONSE_CODE ->
                // Request Op Code (UINT8), Response Code Value (UINT8)
                expectedOperandSize = 2;
        default -> {
            // Unknown Op Code: cannot size the operand, reject the packet.
            onInvalidDataReceived(device, data);
            return;
        }
    }

    // Verify packet length: exact operand size, optionally followed by a 2-byte CRC.
    if (data.size() != 1 + expectedOperandSize && data.size() != 1 + expectedOperandSize + 2) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Verify CRC if present
    final boolean crcPresent = data.size() == 1 + expectedOperandSize + 2; // opCode + expected operand + CRC
    if (crcPresent) {
        final int expectedCrc = data.getIntValue(Data.FORMAT_UINT16_LE, 1 + expectedOperandSize);
        final int actualCrc = CRC16.MCRF4XX(data.getValue(), 0, 1 + expectedOperandSize);
        if (expectedCrc != actualCrc) {
            onCGMSpecificOpsResponseReceivedWithCrcError(device, data);
            return;
        }
    }

    // Op Codes whose operands are not a single SFLOAT are handled (and returned from) here.
    switch (opCode) {
        case OP_CODE_COMMUNICATION_INTERVAL_RESPONSE -> {
            final int interval = data.getIntValue(Data.FORMAT_UINT8, 1);
            onContinuousGlucoseCommunicationIntervalReceived(device, interval, crcPresent);
            return;
        }
        case OP_CODE_CALIBRATION_VALUE_RESPONSE -> {
            // Calibration record: SFLOAT concentration, times, packed type/location, record #, status.
            final float glucoseConcentrationOfCalibration = data.getFloatValue(Data.FORMAT_SFLOAT, 1);
            final int calibrationTime = data.getIntValue(Data.FORMAT_UINT16_LE, 3);
            final int calibrationTypeAndSampleLocation = data.getIntValue(Data.FORMAT_UINT8, 5);
            @SuppressLint("WrongConstant")
            // Low nibble = calibration type, high nibble = sample location.
            final int calibrationType = calibrationTypeAndSampleLocation & 0x0F;
            final int calibrationSampleLocation = calibrationTypeAndSampleLocation >> 4;
            final int nextCalibrationTime = data.getIntValue(Data.FORMAT_UINT16_LE, 6);
            final int calibrationDataRecordNumber = data.getIntValue(Data.FORMAT_UINT16_LE, 8);
            final int calibrationStatus = data.getIntValue(Data.FORMAT_UINT8, 10);
            onContinuousGlucoseCalibrationValueReceived(device, glucoseConcentrationOfCalibration,
                    calibrationTime, nextCalibrationTime, calibrationType, calibrationSampleLocation,
                    calibrationDataRecordNumber, new CGMCalibrationStatus(calibrationStatus), crcPresent);
            return;
        }
        case OP_CODE_RESPONSE_CODE -> {
            final int requestCode = data.getIntValue(Data.FORMAT_UINT8, 1); // ignore
            final int responseCode = data.getIntValue(Data.FORMAT_UINT8, 2);
            if (responseCode == CGM_RESPONSE_SUCCESS) {
                onCGMSpecificOpsOperationCompleted(device, requestCode, crcPresent);
            } else {
                onCGMSpecificOpsOperationError(device, requestCode, responseCode, crcPresent);
            }
            return;
        }
    }

    // Read SFLOAT value — all remaining Op Codes carry a single SFLOAT alert level.
    final float value = data.getFloatValue(Data.FORMAT_SFLOAT, 1);
    switch (opCode) {
        case OP_CODE_PATIENT_HIGH_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucosePatientHighAlertReceived(device, value, crcPresent);
        case OP_CODE_PATIENT_LOW_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucosePatientLowAlertReceived(device, value, crcPresent);
        case OP_CODE_HYPO_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucoseHypoAlertReceived(device, value, crcPresent);
        case OP_CODE_HYPER_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucoseHyperAlertReceived(device, value, crcPresent);
        case OP_CODE_RATE_OF_DECREASE_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucoseRateOfDecreaseAlertReceived(device, value, crcPresent);
        case OP_CODE_RATE_OF_INCREASE_ALERT_LEVEL_RESPONSE ->
                onContinuousGlucoseRateOfIncreaseAlertReceived(device, value, crcPresent);
    }
}
@Test
public void onContinuousGlucosePatientLowAlertReceived() {
    // Op code 12 (patient-low response) followed by an SFLOAT operand and a 2-byte CRC.
    final byte[] packet = { 12, 11, (byte) 0b111000000, (byte) 0x34, (byte) 0xBE };
    callback.onDataReceived(null, new Data(packet));

    assertEquals("Level", 0.0011f, patientLowAlertLevel, 0.0001);
    assertTrue(secured);
}
/**
 * Validates an update of an existing function configuration and produces the merged
 * result. Identity fields (tenant, namespace, name) and several structural settings
 * (input topics, output serde/schema, processing guarantees, ordering, runtime,
 * autoAck, subscription name) may not change; everything else in {@code newConfig}
 * overrides the existing value when non-empty/non-null.
 *
 * @param existingConfig configuration currently stored
 * @param newConfig      requested update
 * @return a new config combining existing values with the permitted updates
 * @throws IllegalArgumentException when an immutable field is altered
 */
public static FunctionConfig validateUpdate(FunctionConfig existingConfig, FunctionConfig newConfig) {
    // Start from a copy of the existing config and selectively merge updatable fields into it.
    FunctionConfig mergedConfig = existingConfig.toBuilder().build();
    if (!existingConfig.getTenant().equals(newConfig.getTenant())) {
        throw new IllegalArgumentException("Tenants differ");
    }
    if (!existingConfig.getNamespace().equals(newConfig.getNamespace())) {
        throw new IllegalArgumentException("Namespaces differ");
    }
    if (!existingConfig.getName().equals(newConfig.getName())) {
        throw new IllegalArgumentException("Function Names differ");
    }
    if (!StringUtils.isEmpty(newConfig.getClassName())) {
        mergedConfig.setClassName(newConfig.getClassName());
    }
    if (!StringUtils.isEmpty(newConfig.getJar())) {
        mergedConfig.setJar(newConfig.getJar());
    }
    // Make sure both sides have a mutable inputSpecs map to work with.
    if (newConfig.getInputSpecs() == null) {
        newConfig.setInputSpecs(new HashMap<>());
    }
    if (mergedConfig.getInputSpecs() == null) {
        mergedConfig.setInputSpecs(new HashMap<>());
    }
    // Normalize every input declaration (inputs, topicsPattern, custom serde/schema)
    // into inputSpecs entries before comparing them with the existing ones.
    if (newConfig.getInputs() != null) {
        newConfig.getInputs().forEach((topicName -> {
            newConfig.getInputSpecs().put(topicName,
                    ConsumerConfig.builder().isRegexPattern(false).build());
        }));
    }
    if (newConfig.getTopicsPattern() != null && !newConfig.getTopicsPattern().isEmpty()) {
        newConfig.getInputSpecs().put(newConfig.getTopicsPattern(),
                ConsumerConfig.builder()
                        .isRegexPattern(true)
                        .build());
    }
    if (newConfig.getCustomSerdeInputs() != null) {
        newConfig.getCustomSerdeInputs().forEach((topicName, serdeClassName) -> {
            newConfig.getInputSpecs().put(topicName,
                    ConsumerConfig.builder()
                            .serdeClassName(serdeClassName)
                            .isRegexPattern(false)
                            .build());
        });
    }
    if (newConfig.getCustomSchemaInputs() != null) {
        newConfig.getCustomSchemaInputs().forEach((topicName, schemaClassname) -> {
            newConfig.getInputSpecs().put(topicName,
                    ConsumerConfig.builder()
                            .schemaType(schemaClassname)
                            .isRegexPattern(false)
                            .build());
        });
    }
    // Input topics may only be refined: no new topics, no regex/non-regex flips.
    if (!newConfig.getInputSpecs().isEmpty()) {
        newConfig.getInputSpecs().forEach((topicName, consumerConfig) -> {
            if (!existingConfig.getInputSpecs().containsKey(topicName)) {
                throw new IllegalArgumentException("Input Topics cannot be altered");
            }
            if (consumerConfig.isRegexPattern() != existingConfig.getInputSpecs().get(topicName).isRegexPattern()) {
                throw new IllegalArgumentException(
                        "isRegexPattern for input topic " + topicName + " cannot be altered");
            }
            mergedConfig.getInputSpecs().put(topicName, consumerConfig);
        });
    }
    if (!StringUtils.isEmpty(newConfig.getOutputSerdeClassName()) && !newConfig.getOutputSerdeClassName()
            .equals(existingConfig.getOutputSerdeClassName())) {
        throw new IllegalArgumentException("Output Serde mismatch");
    }
    if (!StringUtils.isEmpty(newConfig.getOutputSchemaType()) && !newConfig.getOutputSchemaType()
            .equals(existingConfig.getOutputSchemaType())) {
        throw new IllegalArgumentException("Output Schema mismatch");
    }
    if (!StringUtils.isEmpty(newConfig.getLogTopic())) {
        mergedConfig.setLogTopic(newConfig.getLogTopic());
    }
    if (newConfig.getProcessingGuarantees() != null && !newConfig.getProcessingGuarantees()
            .equals(existingConfig.getProcessingGuarantees())) {
        throw new IllegalArgumentException("Processing Guarantees cannot be altered");
    }
    if (newConfig.getRetainOrdering() != null && !newConfig.getRetainOrdering()
            .equals(existingConfig.getRetainOrdering())) {
        throw new IllegalArgumentException("Retain Ordering cannot be altered");
    }
    if (newConfig.getRetainKeyOrdering() != null && !newConfig.getRetainKeyOrdering()
            .equals(existingConfig.getRetainKeyOrdering())) {
        throw new IllegalArgumentException("Retain Key Ordering cannot be altered");
    }
    if (!StringUtils.isEmpty(newConfig.getOutput())) {
        mergedConfig.setOutput(newConfig.getOutput());
    }
    if (newConfig.getUserConfig() != null) {
        mergedConfig.setUserConfig(newConfig.getUserConfig());
    }
    if (newConfig.getSecrets() != null) {
        mergedConfig.setSecrets(newConfig.getSecrets());
    }
    if (newConfig.getRuntime() != null && !newConfig.getRuntime().equals(existingConfig.getRuntime())) {
        throw new IllegalArgumentException("Runtime cannot be altered");
    }
    if (newConfig.getAutoAck() != null && !newConfig.getAutoAck().equals(existingConfig.getAutoAck())) {
        throw new IllegalArgumentException("AutoAck cannot be altered");
    }
    if (newConfig.getMaxMessageRetries() != null) {
        mergedConfig.setMaxMessageRetries(newConfig.getMaxMessageRetries());
    }
    if (!StringUtils.isEmpty(newConfig.getDeadLetterTopic())) {
        mergedConfig.setDeadLetterTopic(newConfig.getDeadLetterTopic());
    }
    if (!StringUtils.isEmpty(newConfig.getSubName()) && !newConfig.getSubName()
            .equals(existingConfig.getSubName())) {
        throw new IllegalArgumentException("Subscription Name cannot be altered");
    }
    if (newConfig.getParallelism() != null) {
        mergedConfig.setParallelism(newConfig.getParallelism());
    }
    if (newConfig.getResources() != null) {
        // Resources are merged field-by-field rather than replaced wholesale.
        mergedConfig
                .setResources(ResourceConfigUtils.merge(existingConfig.getResources(), newConfig.getResources()));
    }
    if (newConfig.getWindowConfig() != null) {
        mergedConfig.setWindowConfig(newConfig.getWindowConfig());
    }
    if (newConfig.getTimeoutMs() != null) {
        mergedConfig.setTimeoutMs(newConfig.getTimeoutMs());
    }
    if (newConfig.getCleanupSubscription() != null) {
        mergedConfig.setCleanupSubscription(newConfig.getCleanupSubscription());
    }
    if (!StringUtils.isEmpty(newConfig.getRuntimeFlags())) {
        mergedConfig.setRuntimeFlags(newConfig.getRuntimeFlags());
    }
    if (!StringUtils.isEmpty(newConfig.getCustomRuntimeOptions())) {
        mergedConfig.setCustomRuntimeOptions(newConfig.getCustomRuntimeOptions());
    }
    if (newConfig.getProducerConfig() != null) {
        mergedConfig.setProducerConfig(newConfig.getProducerConfig());
    }
    return mergedConfig;
}
@Test(expectedExceptions = IllegalArgumentException.class,
        expectedExceptionsMessageRegExp = "Namespaces differ")
public void testMergeDifferentNamespace() {
    // Changing the namespace is forbidden; validateUpdate must reject the update.
    final FunctionConfig existing = createFunctionConfig();
    final FunctionConfig update = createUpdatedFunctionConfig("namespace", "Different");
    FunctionConfigUtils.validateUpdate(existing, update);
}
/**
 * Adds the entry as a submenu or a plain action item, skipping entries whose
 * "allowed" attribute is explicitly {@code Boolean.FALSE}.
 */
@Override
public void visit(Entry entry) {
    // Only an explicit FALSE blocks the entry; null or any other value passes.
    if (Boolean.FALSE.equals(entry.getAttribute("allowed"))) {
        return;
    }
    if (containsSubmenu(entry)) {
        addSubmenu(entry);
    } else {
        addActionItem(entry);
    }
}
@Test
public void whenPopupMenuBecomesVisible_itsOwnPopupListenerIsCalled() {
    // Skipped on macOS where the screen menu bar changes popup behavior.
    if (Compat.isMacOsX()) {
        return;
    }
    menuEntry.addChild(actionEntry);
    menuActionGroupBuilder.visit(menuEntry);

    final JMenu builtMenu = (JMenu) new EntryAccessor().getComponent(menuEntry);
    builtMenu.getPopupMenu().setVisible(true);

    verify(popupListener).childEntriesWillBecomeVisible(menuEntry);
}
/**
 * Static factory for a {@code ProtectedTransportEndpointDescription}.
 *
 * @param paths       underlying transport endpoint descriptions
 * @param peer        {@code DeviceId} of the peer endpoint
 * @param fingerprint fingerprint string identifying this protected endpoint
 * @return a new description wrapping the given values
 */
public static final ProtectedTransportEndpointDescription of(List<TransportEndpointDescription> paths,
        DeviceId peer, String fingerprint) {
    return new ProtectedTransportEndpointDescription(paths, peer, fingerprint);
}
@Test
public void testEquals() {
    List<TransportEndpointDescription> paths = ImmutableList.of();
    DeviceId peer = NetTestTools.did("d1");
    String fingerprint = "aaa";
    ProtectedTransportEndpointDescription a = ProtectedTransportEndpointDescription.of(paths, peer, fingerprint);
    ProtectedTransportEndpointDescription b = ProtectedTransportEndpointDescription.of(paths, peer, fingerprint);
    // NOTE(review): a and b are built from identical arguments yet placed in
    // separate equality groups, so the test asserts they are NOT equal — i.e. it
    // expects identity-based equality (no equals/hashCode override). Confirm intentional.
    new EqualsTester()
            .addEqualityGroup(a)
            .addEqualityGroup(b)
            .testEquals();
}
@VisibleForTesting String importSingleAlbum(UUID jobId, TokensAndUrlAuthData authData, MediaAlbum inputAlbum) throws IOException, InvalidTokenException, PermissionDeniedException, UploadErrorException { // Set up album GoogleAlbum googleAlbum = new GoogleAlbum(); googleAlbum.setTitle(GooglePhotosImportUtils.cleanAlbumTitle(inputAlbum.getName())); GoogleAlbum responseAlbum = getOrCreatePhotosInterface(jobId, authData).createAlbum(googleAlbum); return responseAlbum.getId(); }
@Test
public void retrieveAlbumStringOnlyOnce()
        throws PermissionDeniedException, InvalidTokenException, IOException, UploadErrorException {
    // Fixture album to import twice.
    String albumId = "Album Id";
    String albumName = "Album Name";
    String albumDescription = "Album Description";
    MediaAlbum albumModel = new MediaAlbum(albumId, albumName, albumDescription);
    // Job store returns a job with an Italian locale for this uuid.
    PortabilityJob portabilityJob = mock(PortabilityJob.class);
    Mockito.when(portabilityJob.userLocale()).thenReturn("it");
    JobStore jobStore = mock(JobStore.class);
    Mockito.when(jobStore.findJob(uuid)).thenReturn(portabilityJob);
    GoogleAlbum responseAlbum = new GoogleAlbum();
    responseAlbum.setId(NEW_ALBUM_ID);
    Mockito.when(googlePhotosInterface.createAlbum(any(GoogleAlbum.class)))
            .thenReturn(responseAlbum);
    PhotosLibraryClient photosLibraryClient = mock(PhotosLibraryClient.class);
    GoogleMediaImporter sut = new GoogleMediaImporter(
            null, /*credentialFactory*/
            jobStore,
            null, /*jsonFactory*/
            new HashMap<>(), /*photosInterfacesMap*/
            new HashMap<>(), /*photosLibraryClientMap*/
            appCredentials,
            googlePhotosInterface,
            connectionProvider,
            monitor,
            1.0 /*writesPerSecond*/);
    // Importing the same album twice must hit the job store at most once.
    sut.importSingleAlbum(uuid, null, albumModel);
    sut.importSingleAlbum(uuid, null, albumModel);
    Mockito.verify(jobStore, atMostOnce()).findJob(uuid);
}
/**
 * Consumes one line of an ARIN whois response, accumulating organization,
 * country, and redirect information into this parser's state.
 */
@Override
public void readLine(String line) {
    // Comment and blank lines carry no data.
    if (line.startsWith("#") || line.isEmpty()) {
        return;
    }
    // In some cases, ARIN may have multiple results with different NetType values. When that happens,
    // we want to use the data from the entry with the data closest to the customer actually using the IP.
    if (line.startsWith("NetType:")) {
        prevNetworkType = currNetworkType;
        currNetworkType = NetworkType.getEnum(lineValue(line));
        // A more specific block supersedes previously captured org/country.
        if (null != currNetworkType && currNetworkType.isMoreSpecificThan(prevNetworkType)) {
            this.organization = null;
            this.countryCode = null;
        }
    }
    // First Organization/Customer value wins until reset by a more specific NetType.
    if((line.startsWith("Organization:") || line.startsWith("Customer:")) && this.organization == null) {
        this.organization = lineValue(line);
    }
    if(line.startsWith("Country:") && this.countryCode == null) {
        this.countryCode = lineValue(line);
    }
    // A ResourceLink that is not an http URL names another registry's whois server.
    if(line.startsWith("ResourceLink") && !line.contains("http")) {
        this.isRedirect = true;
        registryRedirect = findRegistryFromWhoisServer(lineValue(line));
    }
}
@Test
public void testRunDirectMatch() throws Exception {
    final ARINResponseParser parser = new ARINResponseParser();
    // Feed the fixture response line by line, as the parser would receive it.
    final String[] whoisLines = MATCH.split("\n");
    for (final String whoisLine : whoisLines) {
        parser.readLine(whoisLine);
    }

    assertFalse(parser.isRedirect());
    assertNull(parser.getRegistryRedirect());
    assertEquals("US", parser.getCountryCode());
    assertEquals("AT&T Internet Services (SIS-80)", parser.getOrganization());
}
/**
 * Converts a scanner-report measure into a core {@code Measure}, dispatching on
 * the metric's value type. An absent report measure yields {@code Optional.empty()}.
 *
 * @throws IllegalArgumentException when the metric's value type is unsupported
 */
public Optional<Measure> toMeasure(@Nullable ScannerReport.Measure batchMeasure, Metric metric) {
    Objects.requireNonNull(metric);
    if (batchMeasure == null) {
        return Optional.empty();
    }
    Measure.NewMeasureBuilder builder = Measure.newMeasureBuilder();
    switch (metric.getType().getValueType()) {
        case INT:
            return toIntegerMeasure(builder, batchMeasure);
        case LONG:
            return toLongMeasure(builder, batchMeasure);
        case DOUBLE:
            return toDoubleMeasure(builder, batchMeasure);
        case BOOLEAN:
            return toBooleanMeasure(builder, batchMeasure);
        case STRING:
            return toStringMeasure(builder, batchMeasure);
        case LEVEL:
            return toLevelMeasure(builder, batchMeasure);
        case NO_VALUE:
            return toNoValueMeasure(builder);
        default:
            throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType());
    }
}
@Test
public void toMeasure_maps_data_and_alert_properties_in_dto_for_Double_Metric() {
    final ScannerReport.Measure batchMeasure = ScannerReport.Measure.newBuilder()
            .setDoubleValue(DoubleValue.newBuilder().setValue(10.6395d).setData(SOME_DATA))
            .build();

    final Optional<Measure> measure = underTest.toMeasure(batchMeasure, SOME_DOUBLE_METRIC);

    assertThat(measure).isPresent();
    final Measure converted = measure.get();
    assertThat(converted.getValueType()).isEqualTo(Measure.ValueType.DOUBLE);
    assertThat(converted.getDoubleValue()).isEqualTo(10.6395d);
    assertThat(converted.getData()).isEqualTo(SOME_DATA);
}
public List<Favorite> search(String userId, String appId, Pageable page) { boolean isUserIdEmpty = Strings.isNullOrEmpty(userId); boolean isAppIdEmpty = Strings.isNullOrEmpty(appId); if (isAppIdEmpty && isUserIdEmpty) { throw new BadRequestException("user id and app id can't be empty at the same time"); } if (!isUserIdEmpty) { UserInfo loginUser = userInfoHolder.getUser(); //user can only search his own favorite app if (!Objects.equals(loginUser.getUserId(), userId)) { userId = loginUser.getUserId(); } } //search by userId if (isAppIdEmpty && !isUserIdEmpty) { return favoriteRepository.findByUserIdOrderByPositionAscDataChangeCreatedTimeAsc(userId, page); } //search by appId if (!isAppIdEmpty && isUserIdEmpty) { return favoriteRepository.findByAppIdOrderByPositionAscDataChangeCreatedTimeAsc(appId, page); } //search by userId and appId return Collections.singletonList(favoriteRepository.findByUserIdAndAppId(userId, appId)); }
@Test
@Sql(scripts = "/sql/favorites/favorites.sql", executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
@Sql(scripts = "/sql/cleanup.sql", executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
public void testSearchByAppId() {
    // Search by app id only; the SQL fixture seeds three favorites for this app.
    final List<Favorite> found = favoriteService.search(null, "test0621-04", PageRequest.of(0, 10));
    Assert.assertEquals(3, found.size());
}
/**
 * Finds sessions for the given key within the given time bounds, delegating to
 * the wrapped store in serialized key space and wrapping the result so entries
 * are deserialized lazily and iteration is metered.
 *
 * @throws NullPointerException if {@code key} is null
 */
@Override
public KeyValueIterator<Windowed<K>, V> findSessions(final K key,
                                                     final long earliestSessionEndTime,
                                                     final long latestSessionStartTime) {
    Objects.requireNonNull(key, "key cannot be null");
    // Query the inner store with the serialized (Bytes) form of the key.
    final Bytes bytesKey = keyBytes(key);
    // The metered iterator records fetch latency and tracks open iterators.
    return new MeteredWindowedKeyValueIterator<>(
        wrapped().findSessions(
            bytesKey,
            earliestSessionEndTime,
            latestSessionStartTime),
        fetchSensor,
        iteratorDurationSensor,
        streamsMetrics,
        serdes::keyFrom,
        serdes::valueFrom,
        time,
        numOpenIterators,
        openIterators);
}
@Test
public void shouldThrowNullPointerOnFindSessionsRangeIfToIsNull() {
    setUpWithoutContext();
    // A null key bound must be rejected before the inner store is touched.
    assertThrows(NullPointerException.class, () -> store.findSessions("a", null, 0, 0));
}
public void send(SlackMessage message, String webhookUrl) throws TemporaryEventNotificationException, PermanentEventNotificationException, JsonProcessingException { final Request request = new Request.Builder() .url(webhookUrl) .post(RequestBody.create(MediaType.parse(APPLICATION_JSON), objectMapper.writeValueAsString(message))) .build(); LOG.debug("Posting to webhook url <{}> the payload is <{}>", webhookUrl, ""); try (final Response r = httpClient.newCall(request).execute()) { if (!r.isSuccessful()) { //ideally this should not happen and the user is expected to fill the //right configuration , while setting up a notification throw new PermanentEventNotificationException( "Expected successful HTTP response [2xx] but got [" + r.code() + "]. " + webhookUrl); } } catch (IOException e) { throw new TemporaryEventNotificationException("Unable to send the slack Message. " + e.getMessage()); } }
@Test
public void doesNotFollowRedirects() {
    // First response is a 302; if the client followed it, the queued 200 would be consumed.
    server.enqueue(new MockResponse().setResponseCode(302)
            .setHeader("Location", server.url("/redirected")));
    server.enqueue(new MockResponse().setResponseCode(200));

    final SlackClient slackClient = new SlackClient(httpClient, objectMapper);

    assertThatThrownBy(() -> slackClient.send(getMessage(), server.url("/").toString()))
            .isInstanceOf(PermanentEventNotificationException.class)
            .hasMessageContaining("[2xx] but got [302]");
}
/**
 * Builds runtime options from the {@code @CucumberOptions} annotations found on the
 * given class and its superclasses, then fills in default feature path and glue
 * when none were specified anywhere in the hierarchy.
 */
public RuntimeOptionsBuilder parse(Class<?> clazz) {
    RuntimeOptionsBuilder args = new RuntimeOptionsBuilder();
    // Walk the hierarchy upwards: each level's annotation (if any) is applied in
    // subclass-to-superclass order.
    for (Class<?> classWithOptions = clazz; hasSuperClass(
        classWithOptions); classWithOptions = classWithOptions.getSuperclass()) {
        CucumberOptions options = requireNonNull(optionsProvider).getOptions(classWithOptions);
        if (options != null) {
            addDryRun(options, args);
            addMonochrome(options, args);
            addTags(classWithOptions, options, args);
            addPlugins(options, args);
            addPublish(options, args);
            addName(options, args);
            addSnippets(options, args);
            addGlue(options, args);
            addFeatures(options, args);
            addObjectFactory(options, args);
            addUuidGenerator(options, args);
        }
    }
    // Conventions kick in only when nothing was configured explicitly.
    addDefaultFeaturePathIfNoFeaturePathIsSpecified(args, clazz);
    addDefaultGlueIfNoOverridingGlueIsSpecified(args, clazz);
    return args;
}
@Test
void should_not_set_no_publish_formatter_when_plugin_option_false() {
    // A class without options plus enablePublishPlugin() must yield no plugins at all.
    final RuntimeOptions runtimeOptions = parser()
            .parse(WithoutOptions.class)
            .enablePublishPlugin()
            .build();

    assertThat(runtimeOptions.plugins(), empty());
}
/**
 * Wraps a {@link Callable} so that any exception it throws is translated into a
 * fallback value produced by {@code exceptionHandler} instead of propagating.
 * If the handler itself throws, that exception propagates to the caller.
 *
 * @param callable         the computation to protect
 * @param exceptionHandler maps the thrown exception to a substitute result
 * @return a callable that never throws unless the handler does
 */
public static <T> Callable<T> recover(Callable<T> callable, Function<Throwable, T> exceptionHandler) {
    return () -> {
        T result;
        try {
            result = callable.call();
        } catch (Exception exception) {
            result = exceptionHandler.apply(exception);
        }
        return result;
    };
}
@Test(expected = RuntimeException.class)
public void shouldRethrowException() throws Exception {
    final Callable<String> failing = () -> {
        throw new IOException("BAM!");
    };
    // A handler that itself throws must propagate its exception to the caller.
    final Callable<String> recovered = CallableUtils.recover(failing, (ex) -> {
        throw new RuntimeException();
    });
    recovered.call();
}
/**
 * Substitutes variable references in {@code val} using properties from {@code pc1}.
 * Convenience overload delegating to the two-container variant with no secondary
 * property container.
 *
 * @throws ScanException if the value cannot be parsed
 */
public static String substVars(String val, PropertyContainer pc1) throws ScanException {
    return substVars(val, pc1, null);
}
@Test
@Timeout(value = 1, unit = SECONDS)
public void detectCircularReferences4() throws ScanException {
    // A -> B -> C -> A forms a cycle; substitution must fail fast rather than loop.
    context.putProperty("A", "${B}");
    context.putProperty("B", "${C}");
    context.putProperty("C", "${A}");

    final Exception e = assertThrows(IllegalArgumentException.class,
            () -> OptionHelper.substVars("${C} and ${A}", context));

    final String expectedMessage = CIRCULAR_VARIABLE_REFERENCE_DETECTED + "${C} --> ${A} --> ${B} --> ${C}]";
    assertEquals(expectedMessage, e.getMessage());
}
/**
 * Parses the YAML produced by the AI template into request/response pairs for the
 * given service operation. Expects a top-level YAML array; each element holds a
 * {@code request} and a {@code response} node. Dispatch criteria are computed
 * according to the operation's dispatcher style.
 */
protected static List<RequestResponsePair> parseRequestResponseTemplateOutput(Service service,
        Operation operation, String content) throws Exception {
    List<RequestResponsePair> results = new ArrayList<>();
    JsonNode root = YAML_MAPPER.readTree(sanitizeYamlContent(content));
    // Anything other than a top-level array yields an empty result list.
    if (root.getNodeType() == JsonNodeType.ARRAY) {
        Iterator<JsonNode> examples = root.elements();
        while (examples.hasNext()) {
            JsonNode example = examples.next();
            // Deal with parsing request.
            JsonNode requestNode = example.path("request");
            Request request = new Request();
            JsonNode requestHeadersNode = requestNode.path(HEADERS_NODE);
            request.setHeaders(buildHeaders(requestHeadersNode));
            request.setContent(getRequestContent(requestHeadersNode, requestNode.path("body")));
            String url = requestNode.path("url").asText();
            if (url.contains("?")) {
                // Split the query string into name/value query parameters.
                String[] kvPairs = url.substring(url.indexOf("?") + 1).split("&");
                for (String kvPair : kvPairs) {
                    String[] kv = kvPair.split("=");
                    Parameter param = new Parameter();
                    param.setName(kv[0]);
                    param.setValue(kv[1]);
                    request.addQueryParameter(param);
                }
            }
            // Deal with parsing response.
            JsonNode responseNode = example.path("response");
            Response response = new Response();
            JsonNode responseHeadersNode = responseNode.path(HEADERS_NODE);
            response.setHeaders(buildHeaders(responseHeadersNode));
            response.setContent(getResponseContent(responseHeadersNode, responseNode.path("body")));
            response.setMediaType(responseHeadersNode.path("content-type").asText(null));
            response.setStatus(responseNode.path("code").asText("200"));
            // 4xx/5xx statuses mark the response as a fault.
            response.setFault(response.getStatus().startsWith("4") || response.getStatus().startsWith("5"));
            // Compute the dispatch criteria according to the operation's dispatcher style.
            String dispatchCriteria = null;
            if (DispatchStyles.URI_PARTS.equals(operation.getDispatcher())) {
                // Operation name is "<VERB> <path pattern>"; take the path part.
                String resourcePathPattern = operation.getName().split(" ")[1];
                dispatchCriteria = DispatchCriteriaHelper.extractFromURIPattern(operation.getDispatcherRules(),
                        resourcePathPattern, url);
            } else if (DispatchStyles.URI_PARAMS.equals(operation.getDispatcher())) {
                dispatchCriteria = DispatchCriteriaHelper.extractFromURIParams(operation.getDispatcherRules(), url);
            } else if (DispatchStyles.URI_ELEMENTS.equals(operation.getDispatcher())) {
                // URI_ELEMENTS combines path-pattern extraction with query-param extraction.
                String resourcePathPattern = operation.getName().split(" ")[1];
                dispatchCriteria = DispatchCriteriaHelper.extractFromURIPattern(operation.getDispatcherRules(),
                        resourcePathPattern, url);
                dispatchCriteria += DispatchCriteriaHelper.extractFromURIParams(operation.getDispatcherRules(), url);
            } else if (DispatchStyles.QUERY_ARGS.equals(operation.getDispatcher())) {
                // This dispatcher is used for GraphQL or gRPC
                if (ServiceType.GRAPHQL.equals(service.getType())) {
                    Map<String, String> variables = getGraphQLVariables(request.getContent());
                    dispatchCriteria = DispatchCriteriaHelper.extractFromParamMap(operation.getDispatcherRules(),
                            variables);
                } else if (ServiceType.GRPC.equals(service.getType())) {
                    // gRPC request body is a flat JSON object; read it into a sorted map.
                    Map<String, String> parameters = JSON_MAPPER.readValue(request.getContent(),
                            TypeFactory.defaultInstance().constructMapType(TreeMap.class, String.class, String.class));
                    dispatchCriteria = DispatchCriteriaHelper.extractFromParamMap(operation.getDispatcherRules(),
                            parameters);
                }
            }
            response.setDispatchCriteria(dispatchCriteria);
            if (service.getType() == ServiceType.GRAPHQL) {
                adaptGraphQLRequestContent(request);
            }
            results.add(new RequestResponsePair(request, response));
        }
    }
    return results;
}
@Test
void testParseRequestResponseOutputYaml() {
    // Two REST examples as the AI copilot would emit them.
    // NOTE(review): the text block's internal indentation was reconstructed from
    // the flattened source — verify against the original fixture.
    String aiResponse = """
        - example: 1
          request:
            url: /pastries/croissant
            headers:
              accept: application/json
            body:
          response:
            code: 200
            headers:
              content-type: application/json
            body:
              name: "Croissant"
              description: "A flaky, buttery pastry"
              size: "L"
              price: 2.5
              status: "available"
        - example: 2
          request:
            url: /pastries/donut
            headers:
              accept: application/json
            body:
          response:
            code: 200
            headers:
              content-type: application/json
            body:
              name: "Donut"
              description: "A delicious fried pastry"
              size: "M"
              price: 1.5
              status: "available"
        """;
    Service service = new Service();
    service.setType(ServiceType.REST);
    Operation operation = new Operation();
    operation.setName("GET /pastries/{name}");
    operation.setDispatcher(DispatchStyles.URI_PARTS);
    operation.setDispatcherRules("name");
    List<RequestResponsePair> results = null;
    try {
        results = AICopilotHelper.parseRequestResponseTemplateOutput(service, operation, aiResponse);
    } catch (Exception e) {
        fail("Exception should not be thrown here");
    }
    assertNotNull(results);
    assertEquals(2, results.size());
    // Check that request has been correctly parsed.
    RequestResponsePair example1 = results.get(0);
    assertNull(example1.getRequest().getContent());
    assertEquals(1, example1.getRequest().getHeaders().size());
    // Check that response has been correctly parsed.
    assertEquals("200", example1.getResponse().getStatus());
    assertNotNull(example1.getResponse().getContent());
    assertFalse(example1.getResponse().getContent().contains("\\n"));
    assertEquals(1, example1.getResponse().getHeaders().size());
}
/**
 * No-op override: profile properties are intentionally discarded by this
 * implementation, so no profile event is produced or dispatched.
 */
@Override
public void profileSet(JSONObject properties) {
}
@Test
public void testProfileSet() {
    // profileSet is a no-op in this build: any tracked event means a regression.
    mSensorsAPI.setTrackEventCallBack(new SensorsDataTrackEventCallBack() {
        @Override
        public boolean onTrackEvent(String eventName, JSONObject eventProperties) {
            Assert.fail();
            return false;
        }
    });

    final JSONObject profile = new JSONObject();
    try {
        profile.put("abcd", "123");
        profile.put("abcd2", "1232");
    } catch (JSONException e) {
        e.printStackTrace();
    }
    mSensorsAPI.profileSet(profile);
}
/**
 * Updates only the state of the given Datastream stream, leaving every other
 * field untouched (the update mask is restricted to the state field).
 *
 * @param streamId id of the stream to update
 * @param state    target state
 * @return the updated stream returned by the Datastream API
 * @throws DatastreamResourceManagerException if the update fails or the thread is interrupted
 */
public synchronized Stream updateStreamState(String streamId, Stream.State state) {
    LOG.info("Updating {}'s state to {} in project {}.", streamId, state.name(), projectId);
    try {
        Stream.Builder streamBuilder = Stream.newBuilder()
            .setName(StreamName.format(projectId, location, streamId))
            .setState(state);
        // Limit the update mask to the state field so no other settings change.
        FieldMask.Builder fieldMaskBuilder = FieldMask.newBuilder().addPaths(FIELD_STATE);
        UpdateStreamRequest request = UpdateStreamRequest.newBuilder()
            .setStream(streamBuilder)
            .setUpdateMask(fieldMaskBuilder)
            .build();
        Stream reference = datastreamClient.updateStreamAsync(request).get();
        LOG.info(
            "Successfully updated {}'s state to {} in project {}.", streamId, state.name(), projectId);
        return reference;
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can still observe the interruption
        // (the previous multi-catch silently swallowed it).
        Thread.currentThread().interrupt();
        throw new DatastreamResourceManagerException("Failed to update stream. ", e);
    } catch (ExecutionException e) {
        throw new DatastreamResourceManagerException("Failed to update stream. ", e);
    }
}
@Test
public void testUpdateStreamStateShouldCreateSuccessfully()
        throws ExecutionException, InterruptedException {
    Stream stream = Stream.getDefaultInstance();
    // NOTE(review): stubbing a call chain (updateStreamAsync(...).get()) only works
    // when the datastreamClient mock is configured with deep stubs — confirm the
    // mock's answer configuration elsewhere in this test class.
    when(datastreamClient.updateStreamAsync(any(UpdateStreamRequest.class)).get())
        .thenReturn(stream);
    assertThat(testManager.updateStreamState(STREAM_ID, State.RUNNING)).isEqualTo(stream);
}
/**
 * Returns the mail configuration, lazily creating one bound to the current
 * Camel context on first access.
 */
public MailConfiguration getConfiguration() {
    MailConfiguration current = configuration;
    if (current == null) {
        current = new MailConfiguration(getCamelContext());
        configuration = current;
    }
    return current;
}
// Verifies that an authenticator bound in the registry is resolved from the endpoint
// URI and that username/password fields stay null while the PasswordAuthentication
// carries the authenticator's credentials.
@Test
public void testAuthenticator() {
    DefaultAuthenticator auth1 = new DefaultAuthenticator("u1", "p1");
    context.getRegistry().bind("auth1", auth1);

    MailEndpoint endpoint = checkEndpoint("smtp://myhost:25/?authenticator=#auth1&to=james%40myhost");
    MailConfiguration config = endpoint.getConfiguration();
    assertEquals("smtp", config.getProtocol(), "getProtocol()");
    assertEquals("myhost", config.getHost(), "getHost()");
    assertEquals(25, config.getPort(), "getPort()");
    assertNull(config.getUsername(), "getUsername()");
    assertNotNull(config.getPasswordAuthentication(), "getPasswordAuthentication()");
    assertEquals("u1", config.getPasswordAuthentication().getUserName(),
            "getPasswordAuthentication().getUserName()");
    // Fixed copy-paste error: the failure message previously said getUserName().
    assertEquals("p1", config.getPasswordAuthentication().getPassword(),
            "getPasswordAuthentication().getPassword()");
    assertEquals("james@myhost", config.getRecipients().get(Message.RecipientType.TO),
            "getRecipients().get(Message.RecipientType.TO)");
    assertEquals("INBOX", config.getFolderName(), "folder");
    assertEquals("camel@localhost", config.getFrom(), "from");
    assertNull(config.getPassword(), "password");
    assertFalse(config.isDelete());
    assertFalse(config.isIgnoreUriScheme());
    assertEquals(-1, config.getFetchSize(), "fetchSize");
    assertFalse(config.isDebugMode());
}
/**
 * Deep-clones the given object, picking the cheapest strategy that is safe:
 * immutable Strings are returned as-is; collections/maps whose elements are not
 * Serializable go through JSON round-tripping with an explicit parametric type;
 * JsonNode uses its own deepCopy; Serializable objects use Java serialization,
 * falling back to JSON cloning if the object graph is not fully serializable.
 */
@Override
public <T> T clone(T object) {
    if (object instanceof String) {
        // Strings are immutable, so sharing the instance is safe.
        return object;
    } else if (object instanceof Collection) {
        // Inspect a representative element; non-Serializable elements force JSON cloning.
        Object firstElement = findFirstNonNullElement((Collection) object);
        if (firstElement != null && !(firstElement instanceof Serializable)) {
            JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), firstElement.getClass());
            return objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
        }
        // Otherwise fall through to the Serializable / JSON fallback below.
    } else if (object instanceof Map) {
        // Same idea for maps: a non-Serializable key or value forces JSON cloning.
        Map.Entry firstEntry = this.findFirstNonNullEntry((Map) object);
        if (firstEntry != null) {
            Object key = firstEntry.getKey();
            Object value = firstEntry.getValue();
            if (!(key instanceof Serializable) || !(value instanceof Serializable)) {
                JavaType type = TypeFactory.defaultInstance().constructParametricType(object.getClass(), key.getClass(), value.getClass());
                return (T) objectMapperWrapper.fromBytes(objectMapperWrapper.toBytes(object), type);
            }
        }
    } else if (object instanceof JsonNode) {
        return (T) ((JsonNode) object).deepCopy();
    }

    if (object instanceof Serializable) {
        try {
            return (T) SerializationHelper.clone((Serializable) object);
        } catch (SerializationException e) {
            //it is possible that object itself implements java.io.Serializable, but underlying structure does not
            //in this case we switch to the other JSON marshaling strategy which doesn't use the Java serialization
        }
    }

    return jsonClone(object);
}
// Verifies that a Serializable object with nested serializable collections is cloned
// to an equal but distinct instance (deep copy, not reference sharing).
@Test
public void should_clone_serializable_complex_object_with_serializable_nested_object() {
    Map<String, List<SerializableObject>> map = new LinkedHashMap<>();
    map.put("key1", Lists.newArrayList(new SerializableObject("name1")));
    map.put("key2", Lists.newArrayList(
            new SerializableObject("name2"),
            new SerializableObject("name3")
    ));
    Object original = new SerializableComplexObject(map);
    Object cloned = serializer.clone(original);
    assertEquals(original, cloned);
    assertNotSame(original, cloned);
}
/**
 * Loads the SMS channel and validates that it exists and is enabled.
 *
 * @param channelId id of the channel to validate
 * @return the channel when it exists and is enabled
 */
@VisibleForTesting
public SmsChannelDO validateSmsChannel(Long channelId) {
    SmsChannelDO channel = smsChannelService.getSmsChannel(channelId);
    // Reject missing channels first, then disabled ones.
    if (channel == null) {
        throw exception(SMS_CHANNEL_NOT_EXISTS);
    }
    if (CommonStatusEnum.isDisable(channel.getStatus())) {
        throw exception(SMS_CHANNEL_DISABLE);
    }
    return channel;
}
// Verifies that validating an unknown channel id raises SMS_CHANNEL_NOT_EXISTS.
@Test
public void testValidateSmsChannel_notExists() {
    // 准备参数 (prepare parameters)
    Long channelId = randomLongId();

    // 调用,校验异常 (invoke and verify the expected exception)
    assertServiceException(() -> smsTemplateService.validateSmsChannel(channelId),
            SMS_CHANNEL_NOT_EXISTS);
}
/**
 * Converts a Beam Row into a Kafka ProducerRecord for the table's single topic.
 * Key, event timestamp and headers are taken from their schema fields when
 * present; the payload is either raw bytes or serialized from a nested Row when
 * a payload serializer is configured.
 */
@SuppressWarnings("argument")
@VisibleForTesting
ProducerRecord<byte[], byte[]> transformOutput(Row row) {
  // Cast first so field lookups below use the table schema.
  row = castRow(row, row.getSchema(), schema);
  String topic = Iterables.getOnlyElement(getTopics());
  byte[] key = null;
  byte[] payload;
  List<Header> headers = ImmutableList.of();
  Long timestampMillis = null;
  if (schema.hasField(Schemas.MESSAGE_KEY_FIELD)) {
    key = row.getBytes(Schemas.MESSAGE_KEY_FIELD);
  }
  if (schema.hasField(Schemas.EVENT_TIMESTAMP_FIELD)) {
    ReadableDateTime time = row.getDateTime(Schemas.EVENT_TIMESTAMP_FIELD);
    if (time != null) {
      timestampMillis = time.getMillis();
    }
  }
  if (schema.hasField(Schemas.HEADERS_FIELD)) {
    // Each header row may carry multiple values for the same key; emit one
    // RecordHeader per value.
    Collection<Row> headerRows = checkArgumentNotNull(row.getArray(Schemas.HEADERS_FIELD));
    ImmutableList.Builder<Header> headersBuilder = ImmutableList.builder();
    headerRows.forEach(
        entry -> {
          String headerKey = checkArgumentNotNull(entry.getString(Schemas.HEADERS_KEY_FIELD));
          Collection<byte[]> values =
              checkArgumentNotNull(entry.getArray(Schemas.HEADERS_VALUES_FIELD));
          values.forEach(value -> headersBuilder.add(new RecordHeader(headerKey, value)));
        });
    headers = headersBuilder.build();
  }
  if (payloadSerializer == null) {
    payload = row.getBytes(Schemas.PAYLOAD_FIELD);
  } else {
    payload =
        payloadSerializer.serialize(checkArgumentNotNull(row.getRow(Schemas.PAYLOAD_FIELD)));
  }
  // Partition is left null so Kafka's partitioner decides.
  return new ProducerRecord<>(topic, null, timestampMillis, key, payload, headers);
}
// Verifies the two failure modes of transformOutput: a row that cannot be cast to
// the table schema, and a payload serializer that throws.
@Test
public void rowToRecordFailures() {
  Schema payloadSchema = Schema.builder().addStringField("def").build();
  Schema schema =
      Schema.builder()
          .addRowField(Schemas.PAYLOAD_FIELD, payloadSchema)
          .addField(Schemas.HEADERS_FIELD, Schemas.HEADERS_FIELD_TYPE.withNullable(true))
          .build();
  NestedPayloadKafkaTable table = newTable(schema, Optional.of(serializer));
  // badRow cannot be cast to schema
  Schema badRowSchema = Schema.builder().addStringField("xxx").build();
  Row badRow =
      Row.withSchema(badRowSchema).attachValues(Row.withSchema(badRowSchema).attachValues("abc"));
  assertThrows(IllegalArgumentException.class, () -> table.transformOutput(badRow));
  Row goodRow =
      Row.withSchema(schema)
          .withFieldValue(
              Schemas.PAYLOAD_FIELD,
              Row.withSchema(payloadSchema).withFieldValue("def", "abc").build())
          .build();
  // Even a well-formed row fails when the serializer itself throws.
  doThrow(new IllegalArgumentException("")).when(serializer).serialize(any());
  assertThrows(IllegalArgumentException.class, () -> table.transformOutput(goodRow));
}
/**
 * Copies every key/value pair from the source map into this map using the
 * source's cursor.
 */
@Override
public void putAll(Long2LongMap from) {
    LongLongCursor cursor = from.cursor();
    while (cursor.advance()) {
        put(cursor.key(), cursor.value());
    }
}
// Verifies putAll copies every entry from a populated source map: size matches and
// every copied key maps to the same value.
@Test
public void testPutAll() throws Exception {
    int count = 100;
    Long2LongMap entries = new Long2LongMapHsa(count, memMgr);
    for (int i = 0; i < count; i++) {
        long key = newKey();
        long value = newValue();
        entries.put(key, value);
    }

    map.putAll(entries);
    assertEquals(count, map.size());

    for (LongLongCursor cursor = map.cursor(); cursor.advance(); ) {
        assertEquals(map.get(cursor.key()), cursor.value());
    }
}
/**
 * Fetches quick stats for the given partitions concurrently on the background
 * executor, waiting up to the session's inline-build timeout. Partitions whose
 * fetch did not finish (or finished exceptionally) are reported with empty
 * stats so a later query can retry them. When quick stats are disabled, every
 * partition maps to empty stats immediately.
 */
public Map<String, PartitionStatistics> getQuickStats(ConnectorSession session, SemiTransactionalHiveMetastore metastore, SchemaTableName table, MetastoreContext metastoreContext, List<String> partitionIds)
{
    if (!isQuickStatsEnabled(session)) {
        return partitionIds.stream().collect(toMap(k -> k, v -> empty()));
    }

    // One future per partition, all scheduled on the background fetch executor.
    CompletableFuture<PartitionStatistics>[] partitionQuickStatCompletableFutures = new CompletableFuture[partitionIds.size()];
    for (int counter = 0; counter < partitionIds.size(); counter++) {
        String partitionId = partitionIds.get(counter);
        partitionQuickStatCompletableFutures[counter] = supplyAsync(() -> getQuickStats(session, metastore, table, metastoreContext, partitionId), backgroundFetchExecutor);
    }

    try {
        // Wait for all the partitions to get their quick stats
        // If this query is reading a partition for which we do not already have cached quick stats,
        // we will block the execution of the query until the stats are fetched for all such partitions,
        // or we time out waiting for the fetch
        allOf(partitionQuickStatCompletableFutures).get(getQuickStatsInlineBuildTimeoutMillis(session), MILLISECONDS);
    }
    catch (InterruptedException | ExecutionException e) {
        log.error(e);
        throw new RuntimeException(e);
    }
    catch (TimeoutException e) {
        // A timeout is not fatal: unfinished partitions simply get empty stats below.
        log.warn(e, "Timeout while building quick stats");
    }

    ImmutableMap.Builder<String, PartitionStatistics> result = ImmutableMap.builder();
    for (int counter = 0; counter < partitionQuickStatCompletableFutures.length; counter++) {
        String partitionId = partitionIds.get(counter);
        CompletableFuture<PartitionStatistics> future = partitionQuickStatCompletableFutures[counter];

        if (future.isDone() && !future.isCancelled() && !future.isCompletedExceptionally()) {
            try {
                result.put(partitionId, future.get());
            }
            catch (InterruptedException | ExecutionException e) {
                // This should not happen because we checked that the future was completed successfully
                log.error(e, "Failed to get value for a quick stats future which was completed successfully");
                throw new RuntimeException(e);
            }
        }
        else {
            // If a future did not finish, or finished exceptionally, we do not add it to the results
            // A new query for the same partition could trigger a successful quick stats fetch for this partition
            result.put(partitionId, empty());
        }
    }

    return result.build();
}
// Verifies read-through caching of quick stats: first request builds stats for each
// partition, repeated requests for the same partitions serve from the cache, and a
// mixed request only builds stats for the partitions not seen before.
@Test
public void testReadThruCaching()
{
    QuickStatsBuilder quickStatsBuilderMock = (session, metastore, table, metastoreContext, partitionId, files) -> mockPartitionQuickStats;

    QuickStatsProvider quickStatsProvider = new QuickStatsProvider(hdfsEnvironment, directoryListerMock, hiveClientConfig, new NamenodeStats(),
            ImmutableList.of(quickStatsBuilderMock));

    // Execute
    ImmutableList<String> testPartitions1 = ImmutableList.of("partition1", "partition2", "partition3");
    Map<String, PartitionStatistics> quickStats = quickStatsProvider.getQuickStats(SESSION, metastoreMock,
            new SchemaTableName(TEST_SCHEMA, TEST_TABLE), metastoreContext, testPartitions1);

    // Verify only one call was made for each test partition
    assertEquals(quickStats.entrySet().size(), testPartitions1.size());
    assertTrue(quickStats.keySet().containsAll(testPartitions1));
    quickStats.values().forEach(ps -> assertEquals(ps, expectedPartitionStats));

    // For subsequent calls for the same partitions that are already cached, no new calls are mode to the quick stats builder
    quickStatsProvider.getQuickStats(SESSION, metastoreMock,
            new SchemaTableName(TEST_SCHEMA, TEST_TABLE), metastoreContext, testPartitions1);

    // For subsequent calls with a mix of old and new partitions, we only see calls to the quick stats builder for the new partitions
    ImmutableList<String> testPartitions2 = ImmutableList.of("partition4", "partition5", "partition6");
    ImmutableList<String> testPartitionsMix = ImmutableList.<String>builder().addAll(testPartitions1).addAll(testPartitions2).build();
    quickStats = quickStatsProvider.getQuickStats(SESSION, metastoreMock,
            new SchemaTableName(TEST_SCHEMA, TEST_TABLE), metastoreContext, testPartitionsMix);

    assertEquals(quickStats.entrySet().size(), testPartitionsMix.size());
    assertTrue(quickStats.keySet().containsAll(testPartitionsMix));
    quickStats.values().forEach(ps -> assertEquals(ps, expectedPartitionStats));
}
/**
 * Validates a rule-builder step: SetField steps get parameter validation;
 * every other function is currently rejected with an empty message.
 * NOTE(review): actions.get(step.function()) may return null for an unknown
 * function, which would NPE on ruleFragment.descriptor() — confirm callers
 * guarantee the function is registered.
 */
@Override
public ValidationResult validate(RuleBuilderStep step) {
    final RuleFragment ruleFragment = actions.get(step.function());
    FunctionDescriptor<?> functionDescriptor = ruleFragment.descriptor();
    String functionName = functionDescriptor.name();
    if (functionName.equals(SetField.NAME)) {
        return validateSetField(step.parameters());
    }
    return new ValidationResult(false, "");
}
// Verifies that a SetField step whose field name contains spaces fails validation.
@Test
void validateSetFieldFunctionFailsWithSpaces() {
    HashMap<String, Object> parameters = new HashMap<>();
    parameters.put(FIELD_PARAM, WITH_SPACES);
    RuleBuilderStep invalidStep = RuleBuilderStep.builder()
            .parameters(parameters)
            .function(SetField.NAME)
            .build();
    ValidationResult result = classUnderTest.validate(invalidStep);
    assertThat(result.failed()).isTrue();
}
// Delegates to the backing map: the set's size is the number of stored entries.
@Override
public int size() {
    return map.size();
}
// Verifies size() tracks additions (1 then 2 elements) and is 0 for an empty set.
@Test
public void testSize() {
    ExtendedSet<TestValue> nonemptyset = new ExtendedSet<>(Maps.newConcurrentMap());
    TestValue val = new TestValue("foo", 1);
    assertTrue(nonemptyset.add(val));
    assertTrue(nonemptyset.contains(val));
    assertEquals(1, nonemptyset.size());
    TestValue secval = new TestValue("goo", 2);
    assertTrue(nonemptyset.add(secval));
    assertTrue(nonemptyset.contains(secval));
    assertEquals(2, nonemptyset.size());
    ExtendedSet<TestValue> emptyset = new ExtendedSet<>(Maps.newConcurrentMap());
    assertEquals(0, emptyset.size());
}
/**
 * Copies the set's values into the supplied array, following the
 * Collection.toArray(T[]) contract: a new array of the same component type is
 * allocated when {@code a} is too small.
 *
 * @throws ArrayStoreException if the array's component type cannot hold Integers
 */
@SuppressWarnings("unchecked")
public <T> T[] toArray(final T[] a)
{
    final Class<?> componentType = a.getClass().getComponentType();
    // Fail fast instead of letting the copy throw later.
    if (!componentType.isAssignableFrom(Integer.class))
    {
        throw new ArrayStoreException("cannot store Integers in array of type " + componentType);
    }

    @DoNotSub final int size = size();
    final T[] arrayCopy = a.length >= size ? a : (T[])Array.newInstance(componentType, size);
    copyValues(arrayCopy);

    return arrayCopy;
}
// Verifies toArray rejects an array whose component type cannot hold Integers.
@Test
@SuppressWarnings("SuspiciousToArrayCall")
void toArrayThrowsArrayStoreExceptionForWrongType()
{
    assertThrows(ArrayStoreException.class, () -> testSet.toArray(new String[1]));
}
/**
 * Returns true if replicating {@code topic} would create a replication cycle,
 * i.e. some cluster in the topic's source chain is this connector's target.
 * Recursively walks the chain via {@code replicationPolicy.upstreamTopic}.
 */
boolean isCycle(String topic) {
    String source = replicationPolicy.topicSource(topic);
    if (source == null) {
        // No source prefix: the topic did not originate from another cluster.
        return false;
    } else if (source.equals(sourceAndTarget.target())) {
        return true;
    } else {
        String upstreamTopic = replicationPolicy.upstreamTopic(topic);
        if (upstreamTopic == null || upstreamTopic.equals(topic)) {
            // Extra check for IdentityReplicationPolicy and similar impls that don't prevent cycles.
            return false;
        }
        return isCycle(upstreamTopic);
    }
}
// Verifies isCycle terminates (rather than recursing forever or throwing) when the
// replication policy returns null for the upstream topic.
@Test
public void testIsCycleWithNullUpstreamTopic() {
    class CustomReplicationPolicy extends DefaultReplicationPolicy {
        @Override
        public String upstreamTopic(String topic) {
            return null;
        }
    }
    MirrorSourceConnector connector = new MirrorSourceConnector(new SourceAndTarget("source", "target"),
            new CustomReplicationPolicy(), new DefaultTopicFilter(), new DefaultConfigPropertyFilter());
    assertDoesNotThrow(() -> connector.isCycle(".b"));
}
/**
 * Scatters a request across the hosts in {@code mappedKeys} by delegating to
 * the default implementation.
 *
 * @deprecated retained for backward compatibility; see the non-deprecated
 *     scatter API on this class.
 */
@Deprecated
@Override
public <K, T> List<RequestInfo> scatterRequest(Request<T> request, RequestContext requestContext,
        Map<URI, Set<K>> mappedKeys)
{
    return defaultScatterRequestImpl(request, requestContext, mappedKeys);
}
// Verifies a request is scattered into exactly two sub-requests, each carrying a
// target-host attribute in its request context that matches the expected host/request pair.
@Test(dataProvider = "scatterBatchRequestProvider")
public void testScatterRequest(Request<?> request, Request<?> firstRequest, URI firstHost,
        Request<?> secondRequest, URI secondHost)
{
    RequestContext requestContext = new RequestContext();
    List<RequestInfo> scatteredRequests = _sgStrategy.scatterRequest(request, requestContext, _mappingResult);
    Assert.assertNotNull(scatteredRequests);
    Assert.assertEquals(scatteredRequests.size(), 2);
    for (RequestInfo req : scatteredRequests)
    {
        RequestContext context = req.getRequestContext();
        Assert.assertNotNull(context.getLocalAttr(TARGET_HOST_KEY_NAME));
        if (context.getLocalAttr(TARGET_HOST_KEY_NAME).equals(firstHost))
        {
            Assert.assertEquals(req.getRequest(), firstRequest);
        }
        else if (context.getLocalAttr(TARGET_HOST_KEY_NAME).equals(secondHost))
        {
            Assert.assertEquals(req.getRequest(), secondRequest);
        }
        else
        {
            Assert.fail("Scattered request should have " + TARGET_HOST_KEY_NAME + " set in request context!");
        }
    }
}
/**
 * Removes all registered plugins under the write lock and stops each of them.
 */
@Override
public void clear() {
    mainLock.runWithWriteLock(() -> {
        // Snapshot the plugins first so they can still be stopped after the
        // registry and quick indexes have been emptied.
        Collection<ThreadPoolPlugin> plugins = new ArrayList<>(registeredPlugins.values());
        registeredPlugins.clear();
        forQuickIndexes(QuickIndex::clear);
        plugins.forEach(ThreadPoolPlugin::stop);
    });
}
// Verifies clear() removes a previously registered plugin, leaving no plugins.
@Test
public void testClear() {
    manager.register(new TestExecuteAwarePlugin());
    manager.clear();
    Assert.assertTrue(manager.getAllPlugins().isEmpty());
}
/**
 * Disables writes on the source path by saving its current permission and ACL
 * state, then setting an all-zero permission, and advances the procedure to
 * the FINAL_DISTCP stage.
 *
 * @throws IOException if the filesystem operations fail
 */
protected void disableWrite(FedBalanceContext fbcontext) throws IOException {
    // Save and cancel permission.
    FileStatus status = srcFs.getFileStatus(src);
    fPerm = status.getPermission();
    acl = srcFs.getAclStatus(src);
    srcFs.setPermission(src, FsPermission.createImmutable((short) 0));
    updateStage(Stage.FINAL_DISTCP);
}
// Verifies disableWrite zeroes the source directory's permission (from a nonzero
// initial value) as part of moving to the FINAL_DISTCP stage.
@Test
public void testDisableWrite() throws Exception {
    String testRoot = nnUri + "/user/foo/testdir." + getMethodName();
    DistributedFileSystem fs =
        (DistributedFileSystem) FileSystem.get(URI.create(nnUri), conf);
    createFiles(fs, testRoot, srcfiles);
    Path src = new Path(testRoot, SRCDAT);
    Path dst = new Path(testRoot, DSTDAT);
    FedBalanceContext context = buildContext(src, dst, MOUNT);
    DistCpProcedure dcProcedure =
        new DistCpProcedure("distcp-procedure", null, 1000, context);
    assertNotEquals(0, fs.getFileStatus(src).getPermission().toShort());
    executeProcedure(dcProcedure, Stage.FINAL_DISTCP,
        () -> dcProcedure.disableWrite(context));
    assertEquals(0, fs.getFileStatus(src).getPermission().toShort());
    cleanup(fs, new Path(testRoot));
}
/**
 * Factory creating an executor that records counters into the caller-supplied
 * (shared) CounterSet.
 */
public static IntrinsicMapTaskExecutor withSharedCounterSet(
    List<Operation> operations,
    CounterSet counters,
    ExecutionStateTracker executionStateTracker) {
  return new IntrinsicMapTaskExecutor(operations, counters, executionStateTracker);
}
// Verifies getOutputCounters exposes one ElementCount counter per operation with the
// expected value, and nothing else.
@Test
@SuppressWarnings("unchecked")
public void testGetOutputCounters() throws Exception {
  List<Operation> operations =
      Arrays.asList(
          new Operation[] {
            createOperation("o1", 1), createOperation("o2", 2), createOperation("o3", 3)
          });
  ExecutionStateTracker stateTracker = ExecutionStateTracker.newForTest();
  try (IntrinsicMapTaskExecutor executor =
      IntrinsicMapTaskExecutor.withSharedCounterSet(operations, counterSet, stateTracker)) {
    // Renamed from "counterSet" to avoid shadowing the test fixture's field of the same name.
    CounterSet outputCounters = executor.getOutputCounters();

    CounterUpdateExtractor<?> updateExtractor = Mockito.mock(CounterUpdateExtractor.class);
    outputCounters.extractUpdates(false, updateExtractor);
    verify(updateExtractor).longSum(eq(named("test-o1-ElementCount")), anyBoolean(), eq(1L));
    verify(updateExtractor).longSum(eq(named("test-o2-ElementCount")), anyBoolean(), eq(2L));
    verify(updateExtractor).longSum(eq(named("test-o3-ElementCount")), anyBoolean(), eq(3L));
    verifyNoMoreInteractions(updateExtractor);
  }
}
/**
 * Decides whether a parallelism change is worth rescaling for: any decrease in
 * some vertex's parallelism triggers a rescale, while an increase must reach
 * {@code minParallelismIncrease} for at least one vertex.
 */
@Override
public boolean shouldRescale(
        VertexParallelism currentParallelism, VertexParallelism newParallelism) {
    for (JobVertexID vertex : currentParallelism.getVertices()) {
        int parallelismChange =
                newParallelism.getParallelism(vertex) - currentParallelism.getParallelism(vertex);
        // Negative change = scale-down (always accepted); positive change must
        // meet the configured minimum increase.
        if (parallelismChange < 0 || parallelismChange >= minParallelismIncrease) {
            return true;
        }
    }
    return false;
}
// Verifies that a scale-down (2 -> 1) always triggers a rescale, even though it is
// smaller than the configured minimum increase of 2.
@Test
void testAlwaysScaleDown() {
    final RescalingController rescalingController =
            new EnforceMinimalIncreaseRescalingController(2);
    assertThat(rescalingController.shouldRescale(forParallelism(2), forParallelism(1)))
            .isTrue();
}
/**
 * Builds an IP address whose bytes form a network mask with the given prefix
 * length for the given IP version.
 */
public static IpAddress makeMaskPrefix(Version version, int prefixLength) {
    // Delegate mask construction to the array helper and wrap the result.
    return new IpAddress(version, makeMaskPrefixArray(version, prefixLength));
}
// Verifies a negative prefix length for IPv6 is rejected with IllegalArgumentException.
// (Removed the unused local variable; the call alone is what is under test.)
@Test(expected = IllegalArgumentException.class)
public void testInvalidMakeNegativeMaskPrefixIPv6() {
    IpAddress.makeMaskPrefix(IpAddress.Version.INET6, -1);
}
/**
 * Derives a human-readable simple name from the class's fully-qualified name,
 * optionally dropping enclosing-class prefixes. Anonymous classes are rejected
 * because they have no meaningful simple name.
 */
private static String approximateSimpleName(Class<?> clazz, boolean dropOuterClassNames) {
    checkArgument(!clazz.isAnonymousClass(), "Attempted to get simple name of anonymous class");
    return approximateSimpleName(clazz.getName(), dropOuterClassNames);
}
// Verifies the overload taking an explicit fallback name: an anonymous object yields
// the supplied custom name instead of a derived class name.
@Test
public void testApproximateSimpleNameCustomAnonymous() {
    Object overriddenName = new Object() {};
    assertEquals("CUSTOM_NAME", NameUtils.approximateSimpleName(overriddenName, "CUSTOM_NAME"));
}
/**
 * Returns true when the current cluster version is less than or equal to the
 * supplied version.
 */
protected boolean isClusterVersionLessOrEqual(Version version) {
    return getNodeEngine()
            .getClusterService()
            .getClusterVersion()
            .isLessOrEqual(version);
}
// Boundary case: a version equal to the current cluster version satisfies "less or equal".
@Test
public void testClusterVersion_isLessOrEqual_currentVersion() {
    assertTrue(object.isClusterVersionLessOrEqual(CURRENT_CLUSTER_VERSION));
}
/**
 * Builds the processing-log message for this deserialization error: a Struct
 * tagged with the DESERIALIZATION_ERROR type id and error details whose content
 * depends on the supplied config (e.g. whether the failed record is included).
 */
@Override
public SchemaAndValue get(final ProcessingLogConfig config) {
  final Struct struct = new Struct(ProcessingLogMessageSchema.PROCESSING_LOG_SCHEMA)
      .put(ProcessingLogMessageSchema.TYPE, MessageType.DESERIALIZATION_ERROR.getTypeId())
      .put(ProcessingLogMessageSchema.DESERIALIZATION_ERROR, deserializationError(config));
  return new SchemaAndValue(ProcessingLogMessageSchema.PROCESSING_LOG_SCHEMA, struct);
}
// Verifies that when INCLUDE_ROWS is false, the logged deserialization error omits
// the base64-encoded record (field is null) while still carrying the error type id.
@Test
public void shouldBuildDeserializationErrorWithNullRecordIfIncludeRowFalse() {
  // Given:
  final ProcessingLogConfig config = new ProcessingLogConfig(
      Collections.singletonMap(ProcessingLogConfig.INCLUDE_ROWS, false)
  );
  final DeserializationError deserError = new DeserializationError(
      error,
      Optional.of(record),
      "topic",
      false
  );

  // When:
  final SchemaAndValue msg = deserError.get(config);

  // Then:
  final Struct struct = (Struct) msg.value();
  assertThat(
      struct.get(ProcessingLogMessageSchema.TYPE),
      equalTo(MessageType.DESERIALIZATION_ERROR.getTypeId()));
  final Struct deserializationError = struct.getStruct(DESERIALIZATION_ERROR);
  assertThat(deserializationError.get(DESERIALIZATION_ERROR_FIELD_RECORD_B64), nullValue());
}
/**
 * Returns whether console logging is enabled via the LOG_CONSOLE property;
 * defaults to false when the property is absent.
 */
public boolean isAllLogsToConsoleEnabled(Props props) {
  return props.valueAsBoolean(LOG_CONSOLE.getKey(), false);
}
// Verifies that setting "sonar.log.console" to "true" enables console logging.
@Test
public void log_to_console_setting_enabled() {
  Properties properties = new Properties();
  properties.setProperty("sonar.log.console", "true");

  assertThat(underTest.isAllLogsToConsoleEnabled(new Props(properties))).isTrue();
}
/**
 * Creates an InetSocketAddress for the given hostname and port. Literal IP
 * strings are handled directly; otherwise the address is resolved or left
 * unresolved depending on {@code resolve}.
 *
 * @throws NullPointerException if hostname is null
 */
public static InetSocketAddress createInetSocketAddress(String hostname, int port, boolean resolve) {
    requireNonNull(hostname, "hostname");
    // A literal IP string never needs DNS resolution.
    InetSocketAddress literalAddress = createForIpString(hostname, port);
    if (literalAddress != null) {
        return literalAddress;
    }
    return resolve
            ? new InetSocketAddress(hostname, port)
            : InetSocketAddress.createUnresolved(hostname, port);
}
// Verifies input validation: a null hostname raises NPE with the parameter name,
// and an out-of-range port raises IllegalArgumentException.
@Test
void createInetSocketAddressBadValues() {
    assertThatExceptionOfType(NullPointerException.class)
            .isThrownBy(() -> AddressUtils.createInetSocketAddress(null, 0, true))
            .withMessage("hostname");

    assertThatExceptionOfType(IllegalArgumentException.class)
            .isThrownBy(() -> AddressUtils.createInetSocketAddress("hostname", -1, true))
            .withMessage("port out of range:-1");
}
/**
 * Evaluates a jq expression against a JSON string and returns the result as a
 * Long, delegating to the generic jq helper with an asLong extractor.
 */
public static Long jqLong(String value, String expression) {
    return H2Functions.jq(value, expression, JsonNode::asLong);
}
// Verifies jqLong extracts a long value at the boundary (Long.MAX_VALUE).
@Test
public void jqLong() {
    Long jqString = H2Functions.jqLong("{\"a\": 9223372036854775807}", ".a");
    assertThat(jqString, is(9223372036854775807L));
}
// Wraps the child iterators whose doc ids will be intersected (logical AND).
public AndDocIdIterator(BlockDocIdIterator[] docIdIterators) {
    _docIdIterators = docIdIterators;
}
// Verifies the AND iterator over three bitmaps: next() yields intersection members in
// order, advance(target) skips to the first member >= target, and exhaustion yields EOF.
@Test
public void testAndDocIdIterator() {
    // AND result: [2, 7, 13, 15, 16, 20]
    int[] docIds1 = new int[]{0, 1, 2, 3, 5, 7, 10, 12, 13, 15, 16, 18, 20};
    int[] docIds2 = new int[]{1, 2, 4, 5, 6, 7, 9, 11, 12, 13, 15, 16, 17, 19, 20};
    int[] docIds3 = new int[]{0, 2, 3, 4, 7, 8, 10, 11, 13, 15, 16, 19, 20};
    MutableRoaringBitmap bitmap1 = new MutableRoaringBitmap();
    bitmap1.add(docIds1);
    MutableRoaringBitmap bitmap2 = new MutableRoaringBitmap();
    bitmap2.add(docIds2);
    MutableRoaringBitmap bitmap3 = new MutableRoaringBitmap();
    bitmap3.add(docIds3);
    AndDocIdIterator andDocIdIterator = new AndDocIdIterator(new BlockDocIdIterator[]{
        new RangelessBitmapDocIdIterator(bitmap1), new RangelessBitmapDocIdIterator(bitmap2),
        new RangelessBitmapDocIdIterator(bitmap3)
    });
    assertEquals(andDocIdIterator.next(), 2);
    assertEquals(andDocIdIterator.next(), 7);
    assertEquals(andDocIdIterator.advance(10), 13);
    assertEquals(andDocIdIterator.advance(16), 16);
    assertEquals(andDocIdIterator.next(), 20);
    assertEquals(andDocIdIterator.next(), Constants.EOF);
}
/**
 * Walks the ledgers from newest to oldest, accumulating their sizes, and
 * returns the message id of the oldest ledger such that the total size from
 * that ledger to the newest stays within {@code sizeThreshold}. Returns null
 * when the entire topic fits inside the threshold.
 */
static MessageId findFirstLedgerWithinThreshold(List<PersistentTopicInternalStats.LedgerInfo> ledgers,
                                                long sizeThreshold) {
    long suffixSize = 0L;

    // Iterate newest-first; previousLedger remembers the last ledger that still fit.
    ledgers = Lists.reverse(ledgers);
    long previousLedger = ledgers.get(0).ledgerId;
    for (PersistentTopicInternalStats.LedgerInfo l : ledgers) {
        suffixSize += l.size;
        if (suffixSize > sizeThreshold) {
            return new MessageIdImpl(previousLedger, 0L, -1);
        }
        previousLedger = l.ledgerId;
    }
    return null;
}
// Verifies threshold boundaries: a huge threshold fits everything (null), a zero
// threshold points at the newest ledger, and middling thresholds pick the oldest
// ledger whose newer-suffix fits.
@Test
public void testFindFirstLedgerWithinThreshold() throws Exception {
    List<LedgerInfo> ledgers = new ArrayList<>();
    ledgers.add(newLedger(0, 10, 1000));
    ledgers.add(newLedger(1, 10, 2000));
    ledgers.add(newLedger(2, 10, 3000));

    // test huge threshold
    Assert.assertNull(CmdTopics.findFirstLedgerWithinThreshold(ledgers, Long.MAX_VALUE));

    // test small threshold
    Assert.assertEquals(CmdTopics.findFirstLedgerWithinThreshold(ledgers, 0),
            new MessageIdImpl(2, 0, -1));

    // test middling thresholds
    Assert.assertEquals(CmdTopics.findFirstLedgerWithinThreshold(ledgers, 1000),
            new MessageIdImpl(2, 0, -1));
    Assert.assertEquals(CmdTopics.findFirstLedgerWithinThreshold(ledgers, 5000),
            new MessageIdImpl(1, 0, -1));
}
/**
 * Routes an invocation through mock handling based on the method's "mock"
 * parameter: empty means a normal invoke; a value starting with "force" skips
 * the remote call and mocks directly; any other value invokes the remote first
 * and falls back to the mock on non-business RpcExceptions (business exceptions
 * are always rethrown to the caller).
 */
@Override
public Result invoke(Invocation invocation) throws RpcException {
    Result result;

    String value = getUrl().getMethodParameter(
                    RpcUtils.getMethodName(invocation), MOCK_KEY, Boolean.FALSE.toString())
            .trim();
    if (ConfigUtils.isEmpty(value)) {
        // no mock
        result = this.invoker.invoke(invocation);
    } else if (value.startsWith(FORCE_KEY)) {
        if (logger.isWarnEnabled()) {
            logger.warn(
                    CLUSTER_FAILED_MOCK_REQUEST,
                    "force mock",
                    "",
                    "force-mock: " + RpcUtils.getMethodName(invocation) + " force-mock enabled , url : "
                            + getUrl());
        }
        // force:direct mock
        result = doMockInvoke(invocation, null);
    } else {
        // fail-mock
        try {
            result = this.invoker.invoke(invocation);

            // fix:#4585 — an RpcException may come back inside the Result rather
            // than being thrown; apply the same biz/non-biz routing to it.
            if (result.getException() != null && result.getException() instanceof RpcException) {
                RpcException rpcException = (RpcException) result.getException();
                if (rpcException.isBiz()) {
                    throw rpcException;
                } else {
                    result = doMockInvoke(invocation, rpcException);
                }
            }

        } catch (RpcException e) {
            if (e.isBiz()) {
                throw e;
            }

            if (logger.isWarnEnabled()) {
                logger.warn(
                        CLUSTER_FAILED_MOCK_REQUEST,
                        "failed to mock invoke",
                        "",
                        "fail-mock: " + RpcUtils.getMethodName(invocation) + " fail-mock enabled , url : "
                                + getUrl(),
                        e);
            }
            result = doMockInvoke(invocation, e);
        }
    }
    return result;
}
// Verifies force-mock on an async method: with getUsersAsync.mock=force and a failing
// remote, the mocked CompletableFuture result is returned (two users, first named "Tommock").
@Test
void testMockInvokerFromOverride_Invoke_check_ListPojoAsync() throws ExecutionException, InterruptedException {
    URL url = URL.valueOf("remote://1.2.3.4/" + IHelloService.class.getName())
            .addParameter(
                    REFER_KEY,
                    URL.encode(PATH_KEY + "=" + IHelloService.class.getName() + "&" + "getUsersAsync.mock=force"))
            .addParameter("invoke_return_error", "true");
    Invoker<IHelloService> cluster = getClusterInvoker(url);
    // Configured with mock
    RpcInvocation invocation = new RpcInvocation();
    invocation.setMethodName("getUsersAsync");
    invocation.setReturnType(CompletableFuture.class);
    Result ret = cluster.invoke(invocation);
    CompletableFuture<List<User>> cf = null;
    try {
        cf = (CompletableFuture<List<User>>) ret.recreate();
    } catch (Throwable e) {
        e.printStackTrace();
    }
    Assertions.assertEquals(2, cf.get().size());
    Assertions.assertEquals("Tommock", cf.get().get(0).getName());
}
// Rewrites a matched ApplyNode by replacing it with its input, dropping the
// (unreferenced) subquery side.
@Override
public Result apply(ApplyNode applyNode, Captures captures, Context context) {
    return Result.ofPlanNode(applyNode.getInput());
}
// Negative case: an ApplyNode whose subquery assignment IS referenced (an IN
// predicate) must not be removed by the rule.
@Test
public void testDoesNotFire() {
    tester().assertThat(new RemoveUnreferencedScalarApplyNodes())
            .on(p -> p.apply(
                    assignment(p.variable("z"),
                            new InSubqueryExpression(Optional.empty(), p.variable("x"), p.variable("y"))),
                    ImmutableList.of(),
                    p.values(p.variable("x")),
                    p.values(p.variable("y"))))
            .doesNotFire();
}
/**
 * Two NiciraEncapEthSrc instances are equal when their encapsulated Ethernet
 * source values are equal (null-safe comparison).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof NiciraEncapEthSrc)) {
        return false;
    }
    NiciraEncapEthSrc other = (NiciraEncapEthSrc) obj;
    return Objects.equals(encapEthSrc, other.encapEthSrc);
}
// Equality contract check via Guava's EqualsTester: same-mac instances are equal,
// different-mac instances are not.
// NOTE(review): this test exercises NiciraEncapEthDst, while the equals() method shown
// alongside it belongs to NiciraEncapEthSrc — confirm a matching Src test exists.
@Test
public void testEquals() {
    final NiciraEncapEthDst encapEthDst1 = new NiciraEncapEthDst(mac1);
    final NiciraEncapEthDst sameAsEncapEthDst1 = new NiciraEncapEthDst(mac1);
    final NiciraEncapEthDst encapEthDst2 = new NiciraEncapEthDst(mac2);

    new EqualsTester().addEqualityGroup(encapEthDst1, sameAsEncapEthDst1).addEqualityGroup(encapEthDst2)
            .testEquals();
}
/**
 * Extracts the runtime value for a condition from the current exchange,
 * dispatching on the condition's parameter type and name.
 */
public String buildRealData(final ConditionData condition, final ServerWebExchange exchange) {
    return ParameterDataFactory.builderData(condition.getParamType(), condition.getParamName(), exchange);
}
// Verifies the QUERY parameter-type branch resolves the query-parameter value
// from the exchange.
@Test
public void testBuildRealDataQueryBranch() {
    conditionData.setParamType(ParamTypeEnum.QUERY.getName());
    assertEquals("shenyuQueryParam", abstractMatchStrategy.buildRealData(conditionData, exchange));
}
/**
 * Replaces ACL entries scope-by-scope: entries in the spec replace all existing
 * entries of the same scope (access or default), while untouched scopes are
 * carried over from the existing ACL. Masks are tracked separately and
 * recalculated, and defaults are filled in when needed.
 *
 * @throws AclException if the resulting ACL fails validation
 */
public static List<AclEntry> replaceAclEntries(List<AclEntry> existingAcl,
    List<AclEntry> inAclSpec) throws AclException {
  ValidatedAclSpec aclSpec = new ValidatedAclSpec(inAclSpec);
  ArrayList<AclEntry> aclBuilder = Lists.newArrayListWithCapacity(MAX_ENTRIES);
  // Replacement is done separately for each scope: access and default.
  EnumMap<AclEntryScope, AclEntry> providedMask =
    Maps.newEnumMap(AclEntryScope.class);
  EnumSet<AclEntryScope> maskDirty = EnumSet.noneOf(AclEntryScope.class);
  EnumSet<AclEntryScope> scopeDirty = EnumSet.noneOf(AclEntryScope.class);
  for (AclEntry aclSpecEntry: aclSpec) {
    scopeDirty.add(aclSpecEntry.getScope());
    if (aclSpecEntry.getType() == MASK) {
      // Masks are not added directly; they feed the mask calculation below.
      providedMask.put(aclSpecEntry.getScope(), aclSpecEntry);
      maskDirty.add(aclSpecEntry.getScope());
    } else {
      aclBuilder.add(aclSpecEntry);
    }
  }

  // Copy existing entries if the scope was not replaced.
  for (AclEntry existingEntry: existingAcl) {
    if (!scopeDirty.contains(existingEntry.getScope())) {
      if (existingEntry.getType() == MASK) {
        providedMask.put(existingEntry.getScope(), existingEntry);
      } else {
        aclBuilder.add(existingEntry);
      }
    }
  }

  copyDefaultsIfNeeded(aclBuilder);
  calculateMasks(aclBuilder, providedMask, maskDirty, scopeDirty);
  return buildAndValidateAcl(aclBuilder);
}
// Verifies that a spec containing only DEFAULT-scope entries leaves the ACCESS scope
// untouched and fills in the remaining default entries (user, group, mask, other).
@Test
public void testReplaceAclEntriesOnlyDefaults() throws AclException {
  List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
    .add(aclEntry(ACCESS, USER, ALL))
    .add(aclEntry(ACCESS, GROUP, READ))
    .add(aclEntry(ACCESS, OTHER, NONE))
    .build();
  List<AclEntry> aclSpec = Lists.newArrayList(
    aclEntry(DEFAULT, USER, "bruce", READ));
  List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
    .add(aclEntry(ACCESS, USER, ALL))
    .add(aclEntry(ACCESS, GROUP, READ))
    .add(aclEntry(ACCESS, OTHER, NONE))
    .add(aclEntry(DEFAULT, USER, ALL))
    .add(aclEntry(DEFAULT, USER, "bruce", READ))
    .add(aclEntry(DEFAULT, GROUP, READ))
    .add(aclEntry(DEFAULT, MASK, READ))
    .add(aclEntry(DEFAULT, OTHER, NONE))
    .build();
  assertEquals(expected, replaceAclEntries(existing, aclSpec));
}
/**
 * Builds a PTransform runner factory whose runner maps whole WindowedValues
 * using the supplied function factory (exploded per-window).
 */
public static <InputT, OutputT> PTransformRunnerFactory<?> forWindowedValueMapFnFactory(
    WindowedValueMapFnFactory<InputT, OutputT> fnFactory) {
  return new Factory<>(new ExplodedWindowedValueMapperFactory<>(fnFactory));
}
// Verifies a windowed-value map runner: no start/finish/teardown functions are
// registered, both PCollections are wired, and an input element is mapped ("abc" -> "ABC").
@Test
public void testFullWindowedValueMapping() throws Exception {
  PTransformRunnerFactoryTestContext context =
      PTransformRunnerFactoryTestContext.builder(EXPECTED_ID, EXPECTED_PTRANSFORM)
          .processBundleInstructionId("57")
          .pCollections(Collections.singletonMap("inputPC", INPUT_PCOLLECTION))
          .coders(Collections.singletonMap("coder-id", valueCoder))
          .build();
  List<WindowedValue<?>> outputConsumer = new ArrayList<>();
  context.addPCollectionConsumer("outputPC", outputConsumer::add);

  MapFnRunners.forWindowedValueMapFnFactory(this::createMapFunctionForPTransform)
      .createRunnerForPTransform(context);

  assertThat(context.getStartBundleFunctions(), empty());
  assertThat(context.getFinishBundleFunctions(), empty());
  assertThat(context.getTearDownFunctions(), empty());
  assertThat(
      context.getPCollectionConsumers().keySet(), containsInAnyOrder("inputPC", "outputPC"));

  context.getPCollectionConsumer("inputPC").accept(valueInGlobalWindow("abc"));

  assertThat(outputConsumer, contains(valueInGlobalWindow("ABC")));
}
/**
 * Coerces the left and right {@code TypedExpression}s of a comparison so that their
 * types are compatible, returning the (possibly rewritten) pair.
 *
 * <p>Identical types and unification expressions pass through untouched; otherwise a
 * chain of specific coercion rules is tried (numeric widening, literal rewriting,
 * string/date/boolean conversion). Throws {@code CoercedExpressionException} when the
 * two types cannot be made compatible.
 */
public CoercedExpressionResult coerce() {
    final Class<?> leftClass = left.getRawClass();
    final Class<?> nonPrimitiveLeftClass = toNonPrimitiveType(leftClass);
    final Class<?> rightClass = right.getRawClass();
    final Class<?> nonPrimitiveRightClass = toNonPrimitiveType(rightClass);

    boolean sameClass = leftClass == rightClass;
    boolean isUnificationExpression = left instanceof UnificationTypedExpression || right instanceof UnificationTypedExpression;

    // No coercion needed when the raw types already match or unification is involved.
    if (sameClass || isUnificationExpression) {
        return new CoercedExpressionResult(left, right);
    }

    if (!canCoerce()) {
        throw new CoercedExpressionException(new InvalidExpressionErrorResult("Comparison operation requires compatible types. Found " + leftClass + " and " + rightClass));
    }

    // Widen an int/long LEFT operand to double when compared against a Double.
    if ((nonPrimitiveLeftClass == Integer.class || nonPrimitiveLeftClass == Long.class) && nonPrimitiveRightClass == Double.class) {
        CastExpr castExpression = new CastExpr(PrimitiveType.doubleType(), this.left.getExpression());
        return new CoercedExpressionResult(
                new TypedExpression(castExpression, double.class, left.getType()),
                right,
                false);
    }

    final boolean leftIsPrimitive = leftClass.isPrimitive() || Number.class.isAssignableFrom( leftClass );
    final boolean canCoerceLiteralNumberExpr = canCoerceLiteralNumberExpr(leftClass);

    // rightAsStaticField marks coercions whose result should be hoisted into a
    // static field by the caller (the date/time parsing cases below).
    boolean rightAsStaticField = false;
    final Expression rightExpression = right.getExpression();
    final TypedExpression coercedRight;
    if (leftIsPrimitive && canCoerceLiteralNumberExpr && rightExpression instanceof LiteralStringValueExpr) {
        // Rewrite a numeric literal on the right to match the left's primitive type.
        final Expression coercedLiteralNumberExprToType = coerceLiteralNumberExprToType((LiteralStringValueExpr) right.getExpression(), leftClass);
        coercedRight = right.cloneWithNewExpression(coercedLiteralNumberExprToType);
        coercedRight.setType( leftClass );
    } else if (shouldCoerceBToString(left, right)) {
        coercedRight = coerceToString(right);
    } else if (isNotBinaryExpression(right) && canBeNarrowed(leftClass, rightClass) && right.isNumberLiteral()) {
        coercedRight = castToClass(leftClass);
    } else if (leftClass == long.class && rightClass == int.class) {
        // Plain numeric widening: (long) on the right-hand int.
        coercedRight = right.cloneWithNewExpression(new CastExpr(PrimitiveType.longType(), right.getExpression()));
    } else if (leftClass == Date.class && rightClass == String.class) {
        coercedRight = coerceToDate(right);
        rightAsStaticField = true;
    } else if (leftClass == LocalDate.class && rightClass == String.class) {
        coercedRight = coerceToLocalDate(right);
        rightAsStaticField = true;
    } else if (leftClass == LocalDateTime.class && rightClass == String.class) {
        coercedRight = coerceToLocalDateTime(right);
        rightAsStaticField = true;
    } else if (shouldCoerceBToMap()) {
        coercedRight = castToClass(toNonPrimitiveType(leftClass));
    } else if (isBoolean(leftClass) && !isBoolean(rightClass)) {
        coercedRight = coerceBoolean(right);
    } else {
        // No applicable rule: leave the right-hand side as-is.
        coercedRight = right;
    }

    final TypedExpression coercedLeft;
    // A char left operand compared against a String is promoted to String too.
    if (nonPrimitiveLeftClass == Character.class && shouldCoerceBToString(right, left)) {
        coercedLeft = coerceToString(left);
    } else {
        coercedLeft = left;
    }

    return new CoercedExpressionResult(coercedLeft, coercedRight, rightAsStaticField);
}
@Test
public void charToStringOnLeft() {
    // char on the left vs. String on the right: the char side must be stringified.
    final TypedExpression charExpr = expr(THIS_PLACEHOLDER + ".getCharPrimitive()", char.class);
    final TypedExpression stringExpr = expr("$c1", java.lang.String.class);

    final CoercedExpression.CoercedExpressionResult result =
            new CoercedExpression(charExpr, stringExpr, false).coerce();

    assertThat(result.getCoercedLeft())
            .isEqualTo(expr("String.valueOf(_this.getCharPrimitive())", String.class));
}
@Override public void appendDataInfluence(String entityName, String entityId, String fieldName, String fieldCurrentValue) { // might be if (traceContext.tracer() == null) { return; } if (traceContext.tracer().getActiveSpan() == null) { return; } String spanId = traceContext.tracer().getActiveSpan().spanId(); OpType type = traceContext.tracer().getActiveSpan().getOpType(); ApolloAuditLogDataInfluence.Builder builder = ApolloAuditLogDataInfluence.builder().spanId(spanId) .entityName(entityName).entityId(entityId).fieldName(fieldName); if (type == null) { return; } switch (type) { case CREATE: case UPDATE: builder.newVal(fieldCurrentValue); break; case DELETE: builder.oldVal(fieldCurrentValue); } dataInfluenceService.save(builder.build()); }
@Test
public void testAppendDataInfluenceCaseActiveSpanIsNull() {
    // With no active span, appendDataInfluence must return early without saving.
    Mockito.when(tracer.getActiveSpan()).thenReturn(null);

    api.appendDataInfluence(entityName, entityId, fieldName, fieldCurrentValue);

    // tracer() is consulted twice (tracer null-check + active-span lookup) and
    // the active span is fetched exactly once before the early return.
    Mockito.verify(traceContext, Mockito.times(2)).tracer();
    Mockito.verify(tracer, Mockito.times(1)).getActiveSpan();
}
/**
 * Rotates the key-encryption key (KEK) when the newest one has outlived the
 * configured rotation interval. No-op when encryption is disabled or no keys
 * have been loaded yet. Also publishes the key count metric when metrics are up.
 */
public void checkKeyRotation() {
    if (!isEncrypted()) {
        return;
    }
    final long rotationIntervalSec = Config.key_rotation_days * 24 * 3600;
    keysLock.writeLock().lock();
    try {
        if (idToKey.isEmpty()) {
            return;
        }
        // The highest-id entry is expected to be the newest KEK.
        EncryptionKey newestKek = idToKey.lastEntry().getValue();
        Preconditions.checkState(newestKek.isKEK(), "should be KEK:" + newestKek);
        long nowSec = System.currentTimeMillis() / 1000;
        if (newestKek.createTime + rotationIntervalSec <= nowSec) {
            // Newest KEK has expired: mint a fresh one.
            generateNewKEK();
        }
        if (MetricRepo.hasInit) {
            MetricRepo.GAUGE_ENCRYPTION_KEY_NUM.setValue((long) idToKey.size());
        }
    } finally {
        keysLock.writeLock().unlock();
    }
}
@Test
public void testCheckKeyRotation() {
    String oldConfig = Config.default_master_key;
    try {
        // Install a fixed plaintext AES-128 master key so the KeyMgr is "encrypted".
        Config.default_master_key = "plain:aes_128:enwSdCUAiCLLx2Bs9E/neQ==";
        KeyMgr keyMgr = new KeyMgr();
        // Replay a root (normal) key with id 1.
        EncryptionKeyPB pb = new EncryptionKeyPB();
        pb.id = 1L;
        pb.algorithm = EncryptionAlgorithmPB.AES_128;
        pb.encryptedKey = new byte[16];
        pb.type = EncryptionKeyTypePB.NORMAL_KEY;
        pb.createTime = 1L;
        keyMgr.replayAddKey(pb);
        Assert.assertEquals(1, keyMgr.numKeys());
        EncryptionKey root = keyMgr.getKeyById(1);
        // Give the root key concrete plaintext material so it can derive a KEK.
        byte[] plainKey = new byte[16];
        plainKey[0] = 1;
        plainKey[8] = 1;
        ((NormalKey) root).setPlainKey(plainKey);
        EncryptionKey kek = root.generateKey();
        kek.id = 2;
        EncryptionKeyPB pb2 = new EncryptionKeyPB();
        kek.toPB(pb2, keyMgr);
        // set time to 1 so rotation do happen
        pb2.createTime = 1L;
        keyMgr.replayAddKey(pb2);
        Assert.assertEquals(2, keyMgr.numKeys());
        // The stale KEK (created at t=1) must trigger generation of a new KEK.
        keyMgr.checkKeyRotation();
        Assert.assertEquals(3, keyMgr.numKeys());
    } finally {
        // Always restore the global config so other tests are unaffected.
        Config.default_master_key = oldConfig;
    }
}
/**
 * Convenience overload that delegates to the full {@code httpRequest} variant,
 * passing {@code null} for its last two arguments (presumably the optional
 * signing/session parameters — confirm against the full overload's signature).
 *
 * @param url             target URL of the request
 * @param method          HTTP method, e.g. GET/POST/PUT
 * @param headers         headers to send with the request
 * @param requestBodyData request body payload, serialized by the delegate
 * @param responseFormat  type reference used to deserialize the response body
 * @return the deserialized HTTP response
 */
public <T> HttpResponse<T> httpRequest(String url, String method, HttpHeaders headers,
                                       Object requestBodyData, TypeReference<T> responseFormat) {
    return httpRequest(url, method, headers, requestBodyData, responseFormat, null, null);
}
@Test
public void testSuccess() throws Exception {
    // Arrange a mocked HTTP exchange returning 200 OK with the test DTO as JSON.
    int expectedStatus = Response.Status.OK.getStatusCode();
    Request request = mock(Request.class);
    ContentResponse response = mock(ContentResponse.class);
    when(response.getContentAsString()).thenReturn(toJsonString(TEST_DTO));
    setupHttpClient(expectedStatus, request, response);

    RestClient.HttpResponse<TestDTO> result = httpRequest(
            httpClient, MOCK_URL, TEST_METHOD, TEST_TYPE, TEST_SIGNATURE_ALGORITHM);

    // Both status and the deserialized body must round-trip intact.
    assertEquals(expectedStatus, result.status());
    assertEquals(TEST_DTO, result.body());
}
/** Returns whether the JSON-validation option of this assertion is enabled. */
public boolean isJsonValidationBool() {
    return getPropertyAsBoolean(JSONVALIDATION);
}
@Test
void testIsJsonValidationBool() {
    // A freshly constructed assertion has JSON validation switched off by default.
    JSONPathAssertion assertion = new JSONPathAssertion();
    assertFalse(assertion.isJsonValidationBool());
}
/**
 * Registers the Flink managed-memory metrics under
 * {@code <parent>.<flink>.<memory>} of the given parent metric group.
 *
 * @param parentMetricGroup group under which the Flink/Memory sub-groups are added
 * @param taskSlotTable slot table used to derive per-slot memory usage
 * @param managedMemoryTotalSupplier supplies the total managed memory in bytes
 */
public static void instantiateFlinkMemoryMetricGroup(
        MetricGroup parentMetricGroup,
        TaskSlotTable<?> taskSlotTable,
        Supplier<Long> managedMemoryTotalSupplier) {
    checkNotNull(parentMetricGroup);
    checkNotNull(taskSlotTable);
    checkNotNull(managedMemoryTotalSupplier);

    // Nest the memory group under the "flink" namespace of the parent group.
    final MetricGroup memoryGroup =
            parentMetricGroup.addGroup(METRIC_GROUP_FLINK).addGroup(METRIC_GROUP_MEMORY);

    instantiateManagedMemoryMetrics(memoryGroup, taskSlotTable, managedMemoryTotalSupplier);
}
@Test
void testManagedMemoryMetricsInitialization()
        throws MemoryAllocationException, FlinkException {
    final int maxMemorySize = 16284;
    final int numberOfAllocatedPages = 2;
    final int pageSize = 4096;
    final Object owner = new Object();
    // Pre-allocate some pages so the "Used" gauge reports a non-zero value.
    final MemoryManager memoryManager = MemoryManager.create(maxMemorySize, pageSize);
    memoryManager.allocatePages(owner, numberOfAllocatedPages);
    final TaskManagerServices taskManagerServices =
            new TaskManagerServicesBuilder()
                    .setTaskSlotTable(
                            new TestingTaskSlotTable.TestingTaskSlotTableBuilder<Task>()
                                    .memoryManagerGetterReturns(memoryManager)
                                    .allActiveSlotAllocationIds(
                                            () -> Sets.newHashSet(new AllocationID()))
                                    .build())
                    .setManagedMemorySize(maxMemorySize)
                    .build();
    try {
        // Record every sub-group name registered under the intercepting group.
        List<String> actualSubGroupPath = new ArrayList<>();
        final InterceptingOperatorMetricGroup metricGroup =
                new InterceptingOperatorMetricGroup() {
                    @Override
                    public MetricGroup addGroup(String name) {
                        actualSubGroupPath.add(name);
                        return this;
                    }
                };
        MetricUtils.instantiateFlinkMemoryMetricGroup(
                metricGroup,
                taskManagerServices.getTaskSlotTable(),
                taskManagerServices::getManagedMemorySize);

        Gauge<Number> usedMetric = (Gauge<Number>) metricGroup.get("Used");
        Gauge<Number> maxMetric = (Gauge<Number>) metricGroup.get("Total");

        // "Used" reflects the allocated pages; "Total" the configured maximum.
        assertThat(usedMetric.getValue().intValue())
                .isEqualTo(numberOfAllocatedPages * pageSize);
        assertThat(maxMetric.getValue().intValue()).isEqualTo(maxMemorySize);

        assertThat(actualSubGroupPath)
                .containsAnyElementsOf(
                        Arrays.asList(
                                METRIC_GROUP_FLINK,
                                METRIC_GROUP_MEMORY,
                                METRIC_GROUP_MANAGED_MEMORY));
    } finally {
        taskManagerServices.shutDown();
    }
}
/**
 * Sets the current farming contract (or clears it when {@code null}), persists
 * the selection, and recomputes the derived contract state.
 *
 * @param contract the produce the contract targets, or {@code null} to clear
 */
public void setContract(@Nullable Produce contract) {
    this.contract = contract;
    setStoredContract(contract);  // persist the selection
    handleContractState();        // refresh summary/derived state
}
@Test public void redberriesContractRedberriesHarvestable() { final long unixNow = Instant.now().getEpochSecond(); // Get the bush patch final FarmingPatch patch = farmingGuildPatches.get(Varbits.FARMING_4772); assertNotNull(patch); // For berries, Harvestable means already checked when(farmingTracker.predictPatch(patch)) .thenReturn(new PatchPrediction(Produce.REDBERRIES, CropState.HARVESTABLE, unixNow, 3, 3)); farmingContractManager.setContract(Produce.REDBERRIES); assertEquals(SummaryState.OCCUPIED, farmingContractManager.getSummary()); }
/**
 * Handles a NodeManager registration request.
 *
 * <p>Validation happens in order: minimum NM version, hostname resolvability
 * (optional), include/exclude list membership, and minimum resource
 * allocations — any failure returns a SHUTDOWN response with a diagnostic.
 * On success the node is inserted (or reconnected), token master keys are
 * handed out, container statuses / node labels / node attributes from the
 * request are processed, and a NORMAL response is returned.
 */
@SuppressWarnings("unchecked")
@Override
public RegisterNodeManagerResponse registerNodeManager(
    RegisterNodeManagerRequest request) throws YarnException, IOException {
  NodeId nodeId = request.getNodeId();
  String host = nodeId.getHost();
  int cmPort = nodeId.getPort();
  int httpPort = request.getHttpPort();
  Resource capability = request.getResource();
  String nodeManagerVersion = request.getNMVersion();
  Resource physicalResource = request.getPhysicalResource();
  NodeStatus nodeStatus = request.getNodeStatus();

  RegisterNodeManagerResponse response = recordFactory
      .newRecordInstance(RegisterNodeManagerResponse.class);

  // Reject NodeManagers whose version is below the configured minimum.
  if (!minimumNodeManagerVersion.equals("NONE")) {
    if (minimumNodeManagerVersion.equals("EqualToRM")) {
      // "EqualToRM" means the minimum is the RM's own version.
      minimumNodeManagerVersion = YarnVersionInfo.getVersion();
    }

    if ((nodeManagerVersion == null) ||
        (VersionUtil.compareVersions(nodeManagerVersion,minimumNodeManagerVersion)) < 0) {
      String message = "Disallowed NodeManager Version " + nodeManagerVersion
          + ", is less than the minimum version "
          + minimumNodeManagerVersion + " sending SHUTDOWN signal to "
          + "NodeManager.";
      LOG.info(message);
      response.setDiagnosticsMessage(message);
      response.setNodeAction(NodeAction.SHUTDOWN);
      return response;
    }
  }

  // Optionally require that the reported hostname resolves to an IP address.
  if (checkIpHostnameInRegistration) {
    InetSocketAddress nmAddress =
        NetUtils.createSocketAddrForHost(host, cmPort);
    InetAddress inetAddress = Server.getRemoteIp();
    if (inetAddress != null && nmAddress.isUnresolved()) {
      // Reject registration of unresolved nm to prevent resourcemanager
      // getting stuck at allocations.
      final String message = "hostname cannot be resolved (ip="
          + inetAddress.getHostAddress() + ", hostname=" + host + ")";
      LOG.warn("Unresolved nodemanager registration: " + message);
      response.setDiagnosticsMessage(message);
      response.setNodeAction(NodeAction.SHUTDOWN);
      return response;
    }
  }

  // Check if this node is a 'valid' node
  if (!this.nodesListManager.isValidNode(host) &&
      !isNodeInDecommissioning(nodeId)) {
    String message = "Disallowed NodeManager from " + host
        + ", Sending SHUTDOWN signal to the NodeManager.";
    LOG.info(message);
    response.setDiagnosticsMessage(message);
    response.setNodeAction(NodeAction.SHUTDOWN);
    return response;
  }

  // check if node's capacity is load from dynamic-resources.xml
  String nid = nodeId.toString();
  Resource dynamicLoadCapability = loadNodeResourceFromDRConfiguration(nid);
  if (dynamicLoadCapability != null) {
    LOG.debug("Resource for node: {} is adjusted from: {} to: {} due to" +
        " settings in dynamic-resources.xml.", nid, capability,
        dynamicLoadCapability);
    capability = dynamicLoadCapability;
    // sync back with new resource.
    response.setResource(capability);
  }

  // Check if this node has minimum allocations
  if (capability.getMemorySize() < minAllocMb
      || capability.getVirtualCores() < minAllocVcores) {
    String message = "NodeManager from " + host
        + " doesn't satisfy minimum allocations, Sending SHUTDOWN"
        + " signal to the NodeManager. Node capabilities are " + capability
        + "; minimums are " + minAllocMb + "mb and " + minAllocVcores
        + " vcores";
    LOG.info(message);
    response.setDiagnosticsMessage(message);
    response.setNodeAction(NodeAction.SHUTDOWN);
    return response;
  }

  // Hand the current secret-manager master keys to the node.
  response.setContainerTokenMasterKey(containerTokenSecretManager
      .getCurrentKey());
  response.setNMTokenMasterKey(nmTokenSecretManager
      .getCurrentKey());

  RMNode rmNode = new RMNodeImpl(nodeId, rmContext, host, cmPort, httpPort,
      resolve(host), capability, nodeManagerVersion, physicalResource);

  RMNode oldNode = this.rmContext.getRMNodes().putIfAbsent(nodeId, rmNode);
  if (oldNode == null) {
    // Brand-new node: fire a started event carrying its container/app state.
    RMNodeStartedEvent startEvent = new RMNodeStartedEvent(nodeId,
        request.getNMContainerStatuses(),
        request.getRunningApplications(), nodeStatus);
    if (request.getLogAggregationReportsForApps() != null
        && !request.getLogAggregationReportsForApps().isEmpty()) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Found the number of previous cached log aggregation "
            + "status from nodemanager:" + nodeId + " is :"
            + request.getLogAggregationReportsForApps().size());
      }
      startEvent.setLogAggregationReportsForApps(request
          .getLogAggregationReportsForApps());
    }
    this.rmContext.getDispatcher().getEventHandler().handle(
        startEvent);
  } else {
    LOG.info("Reconnect from the node at: " + host);
    this.nmLivelinessMonitor.unregister(nodeId);

    if (CollectionUtils.isEmpty(request.getRunningApplications())
        && rmNode.getState() != NodeState.DECOMMISSIONING
        && rmNode.getHttpPort() != oldNode.getHttpPort()) {
      // Reconnected node differs, so replace old node and start new node
      switch (rmNode.getState()) {
      case RUNNING:
        ClusterMetrics.getMetrics().decrNumActiveNodes();
        break;
      case UNHEALTHY:
        ClusterMetrics.getMetrics().decrNumUnhealthyNMs();
        break;
      default:
        LOG.debug("Unexpected Rmnode state");
      }
      this.rmContext.getDispatcher().getEventHandler()
          .handle(new NodeRemovedSchedulerEvent(rmNode));

      this.rmContext.getRMNodes().put(nodeId, rmNode);
      this.rmContext.getDispatcher().getEventHandler()
          .handle(new RMNodeStartedEvent(nodeId, null, null, nodeStatus));
    } else {
      // Reset heartbeat ID since node just restarted.
      oldNode.resetLastNodeHeartBeatResponse();

      this.rmContext.getDispatcher().getEventHandler()
          .handle(new RMNodeReconnectEvent(nodeId, rmNode,
              request.getRunningApplications(),
              request.getNMContainerStatuses()));
    }
  }
  // On every node manager register we will be clearing NMToken keys if
  // present for any running application.
  this.nmTokenSecretManager.removeNodeKey(nodeId);
  this.nmLivelinessMonitor.register(nodeId);

  // Handle received container status, this should be processed after new
  // RMNode inserted
  if (!rmContext.isWorkPreservingRecoveryEnabled()) {
    if (!request.getNMContainerStatuses().isEmpty()) {
      LOG.info("received container statuses on node manager register :"
          + request.getNMContainerStatuses());
      for (NMContainerStatus status : request.getNMContainerStatuses()) {
        handleNMContainerStatus(status, nodeId);
      }
    }
  }

  // Update node's labels to RM's NodeLabelManager.
  Set<String> nodeLabels = NodeLabelsUtils.convertToStringSet(
      request.getNodeLabels());
  if (isDistributedNodeLabelsConf && nodeLabels != null) {
    try {
      updateNodeLabelsFromNMReport(nodeLabels, nodeId);
      response.setAreNodeLabelsAcceptedByRM(true);
    } catch (IOException ex) {
      // Ensure the exception is captured in the response
      response.setDiagnosticsMessage(ex.getMessage());
      response.setAreNodeLabelsAcceptedByRM(false);
    }
  } else if (isDelegatedCentralizedNodeLabelsConf) {
    this.rmContext.getRMDelegatedNodeLabelsUpdater().updateNodeLabels(nodeId);
  }

  // Update node's attributes to RM's NodeAttributesManager.
  if (request.getNodeAttributes() != null) {
    try {
      // update node attributes if necessary then update heartbeat response
      updateNodeAttributesIfNecessary(nodeId, request.getNodeAttributes());
      response.setAreNodeAttributesAcceptedByRM(true);
    } catch (IOException ex) {
      //ensure the error message is captured and sent across in response
      String errorMsg =
          response.getDiagnosticsMessage() == null ? ex.getMessage()
              : response.getDiagnosticsMessage() + "\n" + ex.getMessage();
      response.setDiagnosticsMessage(errorMsg);
      response.setAreNodeAttributesAcceptedByRM(false);
    }
  }

  // Log a single summary line describing the successful registration.
  StringBuilder message = new StringBuilder();
  message.append("NodeManager from node ").append(host).append("(cmPort: ")
      .append(cmPort).append(" httpPort: ");
  message.append(httpPort).append(") ")
      .append("registered with capability: ").append(capability);
  message.append(", assigned nodeId ").append(nodeId);
  if (response.getAreNodeLabelsAcceptedByRM()) {
    message.append(", node labels { ").append(
        StringUtils.join(",", nodeLabels) + " } ");
  }
  if (response.getAreNodeAttributesAcceptedByRM()) {
    message.append(", node attributes { ")
        .append(request.getNodeAttributes() + " } ");
  }

  LOG.info(message.toString());
  response.setNodeAction(NodeAction.NORMAL);
  response.setRMIdentifier(ResourceManager.getClusterTimeStamp());
  response.setRMVersion(YarnVersionInfo.getVersion());
  return response;
}
@Test
public void testNodeRegistrationVersionLessThanRM() throws Exception {
    writeToHostsFile("host2");
    Configuration conf = new Configuration();
    conf.set(YarnConfiguration.RM_NODES_INCLUDE_FILE_PATH, hostFile
        .getAbsolutePath());
    // "EqualToRM" forces the minimum NM version to equal the RM's own version.
    conf.set(YarnConfiguration.RM_NODEMANAGER_MINIMUM_VERSION,"EqualToRM" );
    rm = new MockRM(conf);
    rm.start();
    String nmVersion = "1.9.9";

    ResourceTrackerService resourceTrackerService = rm.getResourceTrackerService();
    RegisterNodeManagerRequest req = Records.newRecord(
        RegisterNodeManagerRequest.class);
    NodeId nodeId = NodeId.newInstance("host2", 1234);
    Resource capability = Resources.createResource(1024);
    req.setResource(capability);
    req.setNodeId(nodeId);
    req.setHttpPort(1234);
    req.setNMVersion(nmVersion);
    // trying to register a invalid node.
    RegisterNodeManagerResponse response =
        resourceTrackerService.registerNodeManager(req);
    // The outdated NM must be told to shut down with an explanatory diagnostic.
    Assert.assertEquals(NodeAction.SHUTDOWN,response.getNodeAction());
    Assert.assertTrue("Diagnostic message did not contain: 'Disallowed NodeManager " +
        "Version "+ nmVersion + ", is less than the minimum version'",
        response.getDiagnosticsMessage().contains("Disallowed NodeManager Version " +
            nmVersion + ", is less than the minimum version "));
}
/**
 * Builds a map from partition id to its current storage medium for all OLAP
 * tables, demoting SSD partitions whose cool-down time has expired to HDD.
 *
 * <p>Two-phase locking: a first pass under per-database READ locks collects the
 * partitions that need demotion, then a second pass acquires WRITE locks (via
 * tryLock, to avoid stalling the backend report RPC) to apply the change and
 * write the edit log.
 *
 * @return partition id -> storage medium (after any demotions applied here)
 */
public HashMap<Long, TStorageMedium> getPartitionIdToStorageMediumMap() {
    HashMap<Long, TStorageMedium> storageMediumMap = new HashMap<>();

    // record partition which need to change storage medium
    // dbId -> (tableId -> partitionId)
    HashMap<Long, Multimap<Long, Long>> changedPartitionsMap = new HashMap<>();
    long currentTimeMs = System.currentTimeMillis();
    List<Long> dbIds = getDbIds();

    // Phase 1: scan under READ locks, only recording what needs to change.
    for (long dbId : dbIds) {
        Database db = getDb(dbId);
        if (db == null) {
            LOG.warn("db {} does not exist while doing backend report", dbId);
            continue;
        }

        Locker locker = new Locker();
        locker.lockDatabase(db, LockType.READ);
        try {
            for (Table table : db.getTables()) {
                if (!table.isOlapTableOrMaterializedView()) {
                    continue;
                }

                long tableId = table.getId();
                OlapTable olapTable = (OlapTable) table;
                PartitionInfo partitionInfo = olapTable.getPartitionInfo();
                for (Partition partition : olapTable.getAllPartitions()) {
                    long partitionId = partition.getId();
                    DataProperty dataProperty = partitionInfo.getDataProperty(partition.getId());
                    Preconditions.checkNotNull(dataProperty,
                            partition.getName() + ", pId:" + partitionId
                                    + ", db: " + dbId + ", tbl: " + tableId);
                    // only normal state table can migrate.
                    // PRIMARY_KEYS table does not support local migration.
                    if (dataProperty.getStorageMedium() == TStorageMedium.SSD
                            && dataProperty.getCooldownTimeMs() < currentTimeMs
                            && olapTable.getState() == OlapTable.OlapTableState.NORMAL) {
                        // expire. change to HDD.
                        // record and change when holding write lock
                        Multimap<Long, Long> multimap = changedPartitionsMap.get(dbId);
                        if (multimap == null) {
                            multimap = HashMultimap.create();
                            changedPartitionsMap.put(dbId, multimap);
                        }
                        multimap.put(tableId, partitionId);
                    } else {
                        storageMediumMap.put(partitionId, dataProperty.getStorageMedium());
                    }
                } // end for partitions
            } // end for tables
        } finally {
            locker.unLockDatabase(db, LockType.READ);
        }
    } // end for dbs

    // handle data property changed
    // Phase 2: apply the recorded demotions under WRITE locks.
    for (Long dbId : changedPartitionsMap.keySet()) {
        Database db = getDb(dbId);
        if (db == null) {
            LOG.warn("db {} does not exist while checking backend storage medium", dbId);
            continue;
        }
        Multimap<Long, Long> tableIdToPartitionIds = changedPartitionsMap.get(dbId);

        // use try lock to avoid blocking a long time.
        // if block too long, backend report rpc will timeout.
        Locker locker = new Locker();
        if (!locker.tryLockDatabase(db, LockType.WRITE, Database.TRY_LOCK_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
            LOG.warn("try get db {}-{} write lock but failed when checking backend storage medium",
                    db.getFullName(), dbId);
            continue;
        }
        Preconditions.checkState(locker.isDbWriteLockHeldByCurrentThread(db));
        try {
            for (Long tableId : tableIdToPartitionIds.keySet()) {
                Table table = db.getTable(tableId);
                if (table == null) {
                    continue;
                }
                OlapTable olapTable = (OlapTable) table;
                PartitionInfo partitionInfo = olapTable.getPartitionInfo();

                Collection<Long> partitionIds = tableIdToPartitionIds.get(tableId);
                for (Long partitionId : partitionIds) {
                    Partition partition = olapTable.getPartition(partitionId);
                    if (partition == null) {
                        continue;
                    }
                    DataProperty dataProperty = partitionInfo.getDataProperty(partition.getId());
                    // Re-check under the write lock: state may have changed since phase 1.
                    if (dataProperty.getStorageMedium() == TStorageMedium.SSD
                            && dataProperty.getCooldownTimeMs() < currentTimeMs) {
                        // expire. change to HDD.
                        DataProperty hdd = new DataProperty(TStorageMedium.HDD);
                        partitionInfo.setDataProperty(partition.getId(), hdd);
                        storageMediumMap.put(partitionId, TStorageMedium.HDD);
                        LOG.debug("partition[{}-{}-{}] storage medium changed from SSD to HDD",
                                dbId, tableId, partitionId);

                        // log
                        ModifyPartitionInfo info = new ModifyPartitionInfo(db.getId(),
                                olapTable.getId(),
                                partition.getId(),
                                hdd,
                                (short) -1,
                                partitionInfo.getIsInMemory(partition.getId()));
                        GlobalStateMgr.getCurrentState().getEditLog().logModifyPartition(info);
                    }
                } // end for partitions
            } // end for tables
        } finally {
            locker.unLockDatabase(db, LockType.WRITE);
        }
    } // end for dbs
    return storageMediumMap;
}
@Test
public void testGetPartitionIdToStorageMediumMap() throws Exception {
    starRocksAssert.withMaterializedView(
            "CREATE MATERIALIZED VIEW test.mv1\n" +
                    "distributed by hash(k1) buckets 3\n" +
                    "refresh async\n" +
                    "properties(\n" +
                    "'replication_num' = '1'\n" +
                    ")\n" +
                    "as\n" +
                    "select k1,k2 from test.t1;");
    Database db = GlobalStateMgr.getCurrentState().getDb("test");

    // Make every partition look like an SSD partition whose cool-down (time 0)
    // has already expired, so the HDD demotion path is triggered.
    new MockUp<PartitionInfo>() {
        @Mock
        public DataProperty getDataProperty(long partitionId) {
            return new DataProperty(TStorageMedium.SSD, 0);
        }
    };
    // Intercept the edit log to assert that the persisted change is the HDD demotion.
    new MockUp<EditLog>() {
        @Mock
        public void logModifyPartition(ModifyPartitionInfo info) {
            Assert.assertNotNull(info);
            Assert.assertTrue(db.getTable(info.getTableId()).isOlapTableOrMaterializedView());
            Assert.assertEquals(TStorageMedium.HDD, info.getDataProperty().getStorageMedium());
            Assert.assertEquals(DataProperty.MAX_COOLDOWN_TIME_MS,
                    info.getDataProperty().getCooldownTimeMs());
        }
    };
    LocalMetastore localMetastore = connectContext.getGlobalStateMgr().getLocalMetastore();
    localMetastore.getPartitionIdToStorageMediumMap();
    // Clean test.mv1, avoid its refreshment affecting other cases in this testsuite.
    starRocksAssert.dropMaterializedView("test.mv1");
}
/**
 * Resolves the {@code product} model attribute for every handler in this
 * controller from the {@code productId} path variable.
 *
 * @throws NoSuchElementException (carrying a localizable message key) when no
 *     product with the given id exists
 */
@ModelAttribute("product")
public Product getProduct(@PathVariable("productId") int productId) {
    var maybeProduct = this.productService.findProduct(productId);
    return maybeProduct.orElseThrow(
            () -> new NoSuchElementException("catalogue.errors.product.not_found"));
}
@Test void getProduct_ProductDoesNotExist_ThrowsNoSuchElementException() { // given // when var exception = assertThrows(NoSuchElementException.class, () -> this.controller.getProduct(1)); // then assertEquals("catalogue.errors.product.not_found", exception.getMessage()); }
/** Returns the consumer properties assembled from the command line and config file. */
Properties consumerProps() {
    return consumerProps;
}
@Test
public void shouldParseConfigsFromFile() throws IOException {
    // Write two consumer settings into a temporary properties file.
    Map<String, String> fileConfigs = new HashMap<>();
    fileConfigs.put("request.timeout.ms", "1000");
    fileConfigs.put("group.id", "group1");
    File configFile = ToolsTestUtils.tempPropertiesFile(fileConfigs);

    String[] cliArgs = new String[]{
        "--bootstrap-server", "localhost:9092",
        "--topic", "test",
        "--consumer.config", configFile.getAbsolutePath()
    };
    ConsoleConsumerOptions options = new ConsoleConsumerOptions(cliArgs);

    // Both file-provided settings must surface in the parsed consumer props.
    assertEquals("1000", options.consumerProps().get("request.timeout.ms"));
    assertEquals("group1", options.consumerProps().get("group.id"));
}
/** Forwards the long-press-finished notification for {@code key} to the wrapped listener. */
@Override
public void onLongPressDone(@NonNull Keyboard.Key key) {
    mParentListener.listener().onLongPressDone(key);
}
@Test
public void testOnLongPressDone() {
    final AnyKeyboard.AnyKey pressedKey = Mockito.mock(AnyKeyboard.AnyKey.class);
    mUnderTest.onLongPressDone(pressedKey);

    // The exact same key instance must be forwarded to the parent listener,
    // with no other interactions anywhere.
    Mockito.verify(mMockParentListener).onLongPressDone(Mockito.same(pressedKey));
    Mockito.verifyNoMoreInteractions(mMockParentListener);
    Mockito.verifyZeroInteractions(mMockKeyboardDismissAction);
}
/**
 * Applies an assignment operator to this day-period value. Only plain
 * {@code ASSIGN} is supported; any other operator is rejected.
 */
@Override
public SelJodaDateTimeDays assignOps(SelOp op, SelType rhs) {
    // Guard clause: everything except plain assignment is unsupported.
    if (op != SelOp.ASSIGN) {
        throw new UnsupportedOperationException(type() + " DO NOT support assignment operation " + op);
    }
    SelTypeUtil.checkTypeMatch(this.type(), rhs.type());
    this.val = ((SelJodaDateTimeDays) rhs).val;
    return this;
}
@Test
public void testAssignOps() {
    // Assigning `another` into `one` copies its value; the string form shows the
    // SEL type tag followed by the Joda period (5 days).
    one.assignOps(SelOp.ASSIGN, another);
    assertEquals("DATETIME_DAYS: P5D", one.type() + ": " + one);
}
/**
 * Registers the stat-change behaviour (healing, skill boosts/drains, run
 * energy, prayer, etc.) for every consumable item id, keyed by item constant.
 * Grouped roughly as: plain food by heal amount, alcoholic drinks, combat and
 * recovery potions (plus barbarian mixes), raid-specific potions (CoX/ToA),
 * skill potions, pies, and miscellaneous items. Effects are built from the
 * food()/boost()/heal()/dec()/perc()/combo()/range() helpers declared
 * elsewhere in this class. Finishes by logging how many item ids and distinct
 * behaviours were loaded.
 */
private void init() { add(food(-5), POISON_KARAMBWAN); add(food(1), POTATO, ONION, CABBAGE, POT_OF_CREAM, CHOPPED_ONION, ANCHOVIES, NETTLEWATER, EQUA_LEAVES, FRESH_MONKFISH_7943 /* Cooked */, COOKED_CHICKEN_4291 /* Undead */, COOKED_MEAT_4293 /* Undead */); add(food(2), TOMATO, CHOPPED_TOMATO, BANANA, SLICED_BANANA, ORANGE, ORANGE_SLICES, ORANGE_CHUNKS, PINEAPPLE_RING, PINEAPPLE_CHUNKS, SPICY_SAUCE, CHEESE, SPINACH_ROLL, LEMON, LEMON_CHUNKS, LEMON_SLICES, LIME, LIME_CHUNKS, LIME_SLICES, DWELLBERRIES, KING_WORM, MINCED_MEAT, SPICY_TOMATO, WHITE_PEARL); add(food(3), SHRIMPS, COOKED_MEAT, COOKED_CHICKEN, ROE, CHOCOLATE_BAR, UGTHANKI_MEAT, TOADS_LEGS, ONION__TOMATO, SPICY_MINCED_MEAT, SLICE_OF_BIRTHDAY_CAKE, LOCUST_MEAT, RELICYMS_MIX1, RELICYMS_MIX2, ANTIPOISON_MIX1, ANTIPOISON_MIX2); add(food(4), SARDINE, CAKE, _23_CAKE, SLICE_OF_CAKE, CHOCOLATEY_MILK, BAKED_POTATO, EDIBLE_SEAWEED, MOONLIGHT_MEAD, MOONLIGHT_MEAD4, MOONLIGHT_MEAD3, MOONLIGHT_MEAD2, MOONLIGHT_MEAD1, MONKEY_NUTS); add(food(5), BREAD, HERRING, CHOCOLATE_CAKE, _23_CHOCOLATE_CAKE, CHOCOLATE_SLICE, COOKED_RABBIT, CHILLI_CON_CARNE, FRIED_MUSHROOMS, FRIED_ONIONS, REDBERRY_PIE, HALF_A_REDBERRY_PIE, CAVIAR, PYSK_FISH_0, COOKED_MYSTERY_MEAT, SCRAMBLED_EGG, MONKEY_BAR, TCHIKI_MONKEY_NUTS, TCHIKI_NUT_PASTE, RED_BANANA, SLICED_RED_BANANA); add(food(6), MACKEREL, MEAT_PIE, HALF_A_MEAT_PIE, GUANIC_BAT_0, ROAST_BIRD_MEAT, SQUARE_SANDWICH, ROLL, BAGUETTE, TRIANGLE_SANDWICH, GIANT_CARP, MOONLIGHT_MEADM, MOONLIGHT_MEADM4, MOONLIGHT_MEADM3, MOONLIGHT_MEADM2, MOONLIGHT_MEADM1, STEAK_SANDWICH, GIANT_FROG_LEGS, ANTIFIRE_MIX1, ANTIFIRE_MIX2, EXTENDED_ANTIFIRE_MIX1, EXTENDED_ANTIFIRE_MIX2, SUPER_ANTIFIRE_MIX1, SUPER_ANTIFIRE_MIX2, EXTENDED_SUPER_ANTIFIRE_MIX1, EXTENDED_SUPER_ANTIFIRE_MIX2, ANTIPOISON_SUPERMIX1, ANTIPOISON_SUPERMIX2, ANTIDOTE_MIX1, ANTIDOTE_MIX2); add(food(7), CHOCICE, TROUT, COD, PLAIN_PIZZA, _12_PLAIN_PIZZA, APPLE_PIE, HALF_AN_APPLE_PIE, ROAST_RABBIT, PREMADE_CH_CRUNCH, CHOCCHIP_CRUNCHIES, PREMADE_SY_CRUNCH, 
SPICY_CRUNCHIES); add(food(8), PIKE, ROAST_BEAST_MEAT, MEAT_PIZZA, _12_MEAT_PIZZA, PREMADE_WM_CRUN, WORM_CRUNCHIES, PREMADE_TD_CRUNCH, TOAD_CRUNCHIES, EGG_AND_TOMATO, PRAEL_BAT_1, PEACH, SUPHI_FISH_1); add(food(9), PREMADE_P_PUNCH, PINEAPPLE_PUNCH, PREMADE_FR_BLAST, FRUIT_BLAST, SALMON, ANCHOVY_PIZZA, _12_ANCHOVY_PIZZA); add(food(10), TUNA, COOKED_CRAB_MEAT, CHOPPED_TUNA, COOKED_CHOMPY, FIELD_RATION, DRAGONFRUIT, TUNA_26149 /* Deadman Starter Pack */); add(food(11), RAINBOW_FISH, STEW, PINEAPPLE_PIZZA, _12_PINEAPPLE_PIZZA, COOKED_FISHCAKE, PREMADE_VEG_BATTA, VEGETABLE_BATTA, PREMADE_WM_BATTA, WORM_BATTA, PREMADE_TD_BATTA, TOAD_BATTA, PREMADE_CT_BATTA, CHEESETOM_BATTA, PREMADE_FRT_BATTA, FRUIT_BATTA, MUSHROOM__ONION, GIRAL_BAT_2, LAVA_EEL, LECKISH_FISH_2, BANANA_STEW); add(food(12), LOBSTER, PREMADE_WORM_HOLE, WORM_HOLE, PREMADE_VEG_BALL, VEG_BALL); add(food(13), BASS, TUNA_AND_CORN); add(food(14), POTATO_WITH_BUTTER, CHILLI_POTATO, SWORDFISH, PHLUXIA_BAT_3, PUMPKIN, EASTER_EGG, BRAWK_FISH_3, COOKED_OOMLIE_WRAP); add(food(15), PREMADE_TTL, TANGLED_TOADS_LEGS, PREMADE_CHOC_BOMB, CHOCOLATE_BOMB, COOKED_JUBBLY); add(food(16), MONKFISH, POTATO_WITH_CHEESE, EGG_POTATO, CRYSTAL_PADDLEFISH, CORRUPTED_PADDLEFISH); add(food(17), MYCIL_FISH_4, KRYKET_BAT_4); add(food(18), COOKED_KARAMBWAN, BLIGHTED_KARAMBWAN, COOKED_KARAMBWAN_23533 /* LMS */); add(food(19), CURRY, UGTHANKI_KEBAB, UGTHANKI_KEBAB_1885); add(food(20), MUSHROOM_POTATO, SHARK, ROQED_FISH_5, MURNG_BAT_5, STUFFED_SNAKE, SHARK_20390 /* LMS */, PADDLEFISH); add(food(21), SEA_TURTLE); add(food(22), MANTA_RAY, BLIGHTED_MANTA_RAY, DARK_CRAB, TUNA_POTATO); add(food(23), KYREN_FISH_6, PSYKK_BAT_6); add(new Anglerfish(), ANGLERFISH, BLIGHTED_ANGLERFISH); add(food(maxHP -> (int) Math.ceil(maxHP * .06)), STRAWBERRY); add(food(maxHP -> (int) Math.ceil(maxHP * .05)), WATERMELON_SLICE); add(food(perc(.1, 1)), COOKED_SWEETCORN, SWEETCORN_7088 /* Bowl of cooked sweetcorn */); add(combo(food(1), boost(DEFENCE, perc(.02, 1))), 
CABBAGE_1967 /* Draynor Manor */); add(combo(food(8), heal(RUN_ENERGY, 5)), PAPAYA_FRUIT); add(combo(food(3), boost(ATTACK, perc(.02, 2))), CUP_OF_TEA_1978 /* Standard tea */); add(combo(food(3), new NettleTeaRunEnergy()), NETTLE_TEA, NETTLE_TEA_4240 /* Milky */, CUP_OF_TEA_4242 /* Nettle */, CUP_OF_TEA_4243 /* Milky nettle */); add(range(food(5), food(7)), THIN_SNAIL_MEAT); add(range(food(5), food(8)), LEAN_SNAIL_MEAT); add(range(food(7), food(9)), FAT_SNAIL_MEAT); add(range(food(7), food(10)), SPIDER_ON_STICK_6297, SPIDER_ON_SHAFT_6299); add(combo(food(8), food(6)), COOKED_GRAAHK); add(combo(food(9), food(8)), COOKED_KYATT); add(combo(food(11), food(8)), COOKED_PYRE_FOX); add(combo(food(13), food(10), heal(RUN_ENERGY, 10)), COOKED_DASHING_KEBBIT); add(combo(food(12), food(9)), COOKED_SUNLIGHT_ANTELOPE); add(combo(food(14), food(12)), COOKED_MOONLIGHT_ANTELOPE); // Dorgeshuun Cuisine
add(food(2), BAT_SHISH, COATED_FROGS_LEGS, FILLETS, FINGERS, FROGBURGER, FROGSPAWN_GUMBO, GREEN_GLOOP_SOUP, GRUBS__LA_MODE, MUSHROOMS, ROAST_FROG); add(food(3), LOACH); add(range(food(3), food(6)), FROG_SPAWN); add(range(food(6), food(10)), COOKED_SLIMY_EEL); add(range(food(8), food(12)), CAVE_EEL); add(food(10), EEL_SUSHI); // Alcoholic Beverages
// Many of these retrieved from https://oldschool.runescape.wiki/w/Temporary_skill_drain
add(combo(food(11), dec(ATTACK, 2)), JUG_OF_WINE); add(combo(food(14), dec(ATTACK, 3)), BOTTLE_OF_WINE); add(combo(food(7), dec(ATTACK, 2)), HALF_FULL_WINE_JUG); add(dec(ATTACK, 3), JUG_OF_BAD_WINE); add(combo(food(5), new SimpleStatBoost(STRENGTH, true, perc(.05, 1)), new BoostedStatBoost(ATTACK, false, perc(.02, -3))), PREMADE_SGG, SHORT_GREEN_GUY, BRANDY, GIN, VODKA, WHISKY); add(combo(food(7), new SimpleStatBoost(STRENGTH, true, perc(.05, 2)), new BoostedStatBoost(ATTACK, false, perc(.02, -3))), PREMADE_BLURB_SP, BLURBERRY_SPECIAL); add(combo(food(5), new SimpleStatBoost(STRENGTH, true, perc(.05, 2)), new BoostedStatBoost(ATTACK, false, perc(.02, 
-3))), PREMADE_DR_DRAGON, DRUNK_DRAGON, PREMADE_CHOC_SDY, CHOC_SATURDAY); add(combo(food(5), new SimpleStatBoost(STRENGTH, true, perc(.06, 1)), new BoostedStatBoost(ATTACK, false, perc(.02, -3))), PREMADE_WIZ_BLZD, WIZARD_BLIZZARD); add(combo(food(3), new SimpleStatBoost(STRENGTH, true, perc(.04, 1)), new BoostedStatBoost(ATTACK, false, perc(.05, -3))), GROG); add(combo(food(1), boost(STRENGTH, perc(.02, 1)), new BoostedStatBoost(ATTACK, false, perc(.06, -1))), BEER, BEER_7740); add(combo(food(4), boost(STRENGTH, perc(.04, 2)), new BoostedStatBoost(ATTACK, false, perc(.1, -2))), BEER_TANKARD); add(combo(food(15), boost(STRENGTH, perc(.1, 2)), new BoostedStatBoost(ATTACK, false, perc(.5, -4))), KEG_OF_BEER_3801 /* Non-quest version */); add(combo(boost(ATTACK, 5), boost(STRENGTH, 5), heal(MAGIC, -5), heal(PRAYER, -5)), BLOOD_PINT); add(combo(food(1), boost(STRENGTH, 2), new BoostedStatBoost(ATTACK, false, perc(.05, -2))), ASGARNIAN_ALE, ASGARNIAN_ALE1, ASGARNIAN_ALE2, ASGARNIAN_ALE3, ASGARNIAN_ALE4, ASGARNIAN_ALE_7744); add(combo(food(1), boost(STRENGTH, 3), new BoostedStatBoost(ATTACK, false, perc(.05, -3))), ASGARNIAN_ALEM, ASGARNIAN_ALEM1, ASGARNIAN_ALEM2, ASGARNIAN_ALEM3, ASGARNIAN_ALEM4); add(combo(food(1), boost(WOODCUTTING, 1), new BoostedStatBoost(ATTACK, false, perc(.02, -2)), new BoostedStatBoost(STRENGTH, false, perc(.02, -2))), AXEMANS_FOLLY, AXEMANS_FOLLY1, AXEMANS_FOLLY2, AXEMANS_FOLLY3, AXEMANS_FOLLY4); add(combo(food(2), boost(WOODCUTTING, 2), new BoostedStatBoost(ATTACK, false, perc(.02, -3)), new BoostedStatBoost(STRENGTH, false, perc(.02, -3))), AXEMANS_FOLLYM, AXEMANS_FOLLYM1, AXEMANS_FOLLYM2, AXEMANS_FOLLYM3, AXEMANS_FOLLYM4); add(combo(food(1), boost(THIEVING, 1), boost(ATTACK, 1), new BoostedStatBoost(DEFENCE, false, perc(.06, -3)), new BoostedStatBoost(STRENGTH, false, perc(.06, -3))), BANDITS_BREW); add(combo(food(1), new SimpleStatBoost(COOKING, true, perc(.05, 1)), new BoostedStatBoost(ATTACK, false, perc(.05, -2)), new 
BoostedStatBoost(STRENGTH, false, perc(.05, -2))), CHEFS_DELIGHT, CHEFS_DELIGHT1, CHEFS_DELIGHT2, CHEFS_DELIGHT3, CHEFS_DELIGHT4, CHEFS_DELIGHT_7754); add(combo(food(2), new SimpleStatBoost(COOKING, true, perc(.05, 2)), new BoostedStatBoost(ATTACK, false, perc(.05, -3)), new BoostedStatBoost(STRENGTH, false, perc(.05, -3))), CHEFS_DELIGHTM, CHEFS_DELIGHTM1, CHEFS_DELIGHTM2, CHEFS_DELIGHTM3, CHEFS_DELIGHTM4); add(combo(food(1), boost(FARMING, 1), new BoostedStatBoost(ATTACK, false, perc(.02, -2)), new BoostedStatBoost(STRENGTH, false, perc(.02, -2))), CIDER, CIDER1, CIDER2, CIDER3, CIDER4, CIDER_7752); add(combo(food(2), boost(FARMING, 2), new BoostedStatBoost(ATTACK, false, perc(.02, -3)), new BoostedStatBoost(STRENGTH, false, perc(.02, -3))), MATURE_CIDER, CIDERM1, CIDERM2, CIDERM3, CIDERM4); add(combo(food(1), boost(STRENGTH, 2), new BoostedStatBoost(ATTACK, false, perc(.05, -2))), DRAGON_BITTER, DRAGON_BITTER1, DRAGON_BITTER2, DRAGON_BITTER3, DRAGON_BITTER4, DRAGON_BITTER_7748); add(combo(food(2), boost(STRENGTH, 3), new BoostedStatBoost(ATTACK, false, perc(.05, -2))), DRAGON_BITTERM, DRAGON_BITTERM1, DRAGON_BITTERM2, DRAGON_BITTERM3, DRAGON_BITTERM4); add(combo(food(1), boost(MINING, 1), boost(SMITHING, 1), new BoostedStatBoost(ATTACK, false, perc(.04, -2)), new BoostedStatBoost(DEFENCE, false, perc(.04, -2)), new BoostedStatBoost(STRENGTH, false, perc(.04, -2))), DWARVEN_STOUT, DWARVEN_STOUT1, DWARVEN_STOUT2, DWARVEN_STOUT3, DWARVEN_STOUT4); add(combo(food(2), boost(MINING, 2), boost(SMITHING, 2), new BoostedStatBoost(ATTACK, false, perc(.04, -3)), new BoostedStatBoost(DEFENCE, false, perc(.04, -3)), new BoostedStatBoost(STRENGTH, false, perc(.04, -3))), DWARVEN_STOUTM, DWARVEN_STOUTM1, DWARVEN_STOUTM2, DWARVEN_STOUTM3, DWARVEN_STOUTM4); add(combo(food(1), boost(SMITHING, 4), dec(ATTACK, 2), dec(RANGED, 2), dec(MAGIC, 2)), KOVACS_GROG); add(combo(food(1), boost(HERBLORE, 1), new BoostedStatBoost(ATTACK, false, perc(.04, -2)), new BoostedStatBoost(DEFENCE, 
false, perc(.04, -2)), new BoostedStatBoost(STRENGTH, false, perc(.04, -2))), GREENMANS_ALE, GREENMANS_ALE1, GREENMANS_ALE2, GREENMANS_ALE3, GREENMANS_ALE4, GREENMANS_ALE_7746); add(combo(food(2), boost(HERBLORE, 2), new BoostedStatBoost(ATTACK, false, perc(.04, -3)), new BoostedStatBoost(DEFENCE, false, perc(.04, -3)), new BoostedStatBoost(STRENGTH, false, perc(.04, -3))), GREENMANS_ALEM, GREENMANS_ALEM1, GREENMANS_ALEM2, GREENMANS_ALEM3, GREENMANS_ALEM4); add(combo(food(1), boost(SLAYER, 2), new BoostedStatBoost(ATTACK, false, perc(.02, -2)), new BoostedStatBoost(DEFENCE, false, perc(.02, -2)), new BoostedStatBoost(STRENGTH, false, perc(.02, -2))), SLAYERS_RESPITE, SLAYERS_RESPITE1, SLAYERS_RESPITE2, SLAYERS_RESPITE3, SLAYERS_RESPITE4); add(combo(food(2), boost(SLAYER, 4), new BoostedStatBoost(ATTACK, false, perc(.02, -3)), new BoostedStatBoost(DEFENCE, false, perc(.02, -3)), new BoostedStatBoost(STRENGTH, false, perc(.02, -3))), SLAYERS_RESPITEM, SLAYERS_RESPITEM1, SLAYERS_RESPITEM2, SLAYERS_RESPITEM3, SLAYERS_RESPITEM4); add(combo(food(1), new SimpleStatBoost(MAGIC, true, perc(.02, 2)), new BoostedStatBoost(ATTACK, false, perc(.05, -1)), new BoostedStatBoost(DEFENCE, false, perc(.05, -1)), new BoostedStatBoost(STRENGTH, false, perc(.05, -1))), WIZARDS_MIND_BOMB, MIND_BOMB1, MIND_BOMB2, MIND_BOMB3, MIND_BOMB4); add(combo(food(2), new SimpleStatBoost(MAGIC, true, perc(.02, 3)), new BoostedStatBoost(ATTACK, false, perc(.05, -2)), new BoostedStatBoost(DEFENCE, false, perc(.05, -2)), new BoostedStatBoost(STRENGTH, false, perc(.05, -2))), MATURE_WMB, MIND_BOMBM1, MIND_BOMBM2, MIND_BOMBM3, MIND_BOMBM4); add(combo(food(14), boost(STRENGTH, 3), boost(MINING, 1), heal(PRAYER, perc(.06, -1)), new BoostedStatBoost(AGILITY, false, perc(.09, -3)), new BoostedStatBoost(ATTACK, false, perc(.06, -1)), new BoostedStatBoost(DEFENCE, false, perc(.08, -2)), new BoostedStatBoost(HERBLORE, false, perc(.06, -1)), new BoostedStatBoost(MAGIC, false, perc(.05, -1)), new 
BoostedStatBoost(RANGED, false, perc(.06, -1))), BRAINDEATH_RUM); add(combo(food(2), heal(PRAYER, perc(.04, -2))), BLOODY_BRACER); add(combo(food(1), boost(AGILITY, 1), heal(STRENGTH, -1)), ELVEN_DAWN); add(combo(boost(RANGED, 4), new BoostedStatBoost(STRENGTH, false, perc(.04, -2)), new BoostedStatBoost(MAGIC, false, perc(.04, -2))), LIZARDKICKER); // Sq'irk Juice
add(heal(RUN_ENERGY, 5), WINTER_SQIRKJUICE); add(combo(heal(RUN_ENERGY, 10), boost(THIEVING, 1)), SPRING_SQIRKJUICE); add(combo(heal(RUN_ENERGY, 15), boost(THIEVING, 2)), AUTUMN_SQIRKJUICE); add(combo(heal(RUN_ENERGY, 20), boost(THIEVING, 3)), SUMMER_SQIRKJUICE); // Combat potions
final SingleEffect attackPot = boost(ATTACK, perc(.10, 3)); final SingleEffect strengthPot = boost(STRENGTH, perc(.10, 3)); final SingleEffect defencePot = boost(DEFENCE, perc(.10, 3)); final Effect combatPot = combo(attackPot, strengthPot); final Effect magicEssence = boost(MAGIC, 3); final SingleEffect magicPot = boost(MAGIC, 4); final SingleEffect imbuedHeart = boost(MAGIC, perc(.10, 1)); final SingleEffect rangingPot = boost(RANGED, perc(.10, 4)); final SingleEffect superAttackPot = boost(ATTACK, perc(.15, 5)); final SingleEffect superStrengthPot = boost(STRENGTH, perc(.15, 5)); final SingleEffect superDefencePot = boost(DEFENCE, perc(.15, 5)); final SingleEffect superMagicPot = boost(MAGIC, perc(.15, 5)); final SingleEffect superRangingPot = boost(RANGED, perc(.15, 5)); final SingleEffect divinePot = heal(HITPOINTS, -10); final Effect zamorakBrew = combo(boost(ATTACK, perc(.20, 2)), boost(STRENGTH, perc(.12, 2)), heal(PRAYER, perc(.10, 0)), new BoostedStatBoost(DEFENCE, false, perc(.10, -2)), new BoostedStatBoost(HITPOINTS, false, perc(-.12, 0))); final Effect ancientBrew = new AncientBrew(.05, 2); add(attackPot, ATTACK_POTION1, ATTACK_POTION2, ATTACK_POTION3, ATTACK_POTION4); add(strengthPot, STRENGTH_POTION1, STRENGTH_POTION2, STRENGTH_POTION3, STRENGTH_POTION4); add(defencePot, DEFENCE_POTION1, DEFENCE_POTION2, 
DEFENCE_POTION3, DEFENCE_POTION4); add(magicPot, MAGIC_POTION1, MAGIC_POTION2, MAGIC_POTION3, MAGIC_POTION4); add(rangingPot, RANGING_POTION1, RANGING_POTION2, RANGING_POTION3, RANGING_POTION4, RANGING_POTION4_23551, RANGING_POTION3_23553, RANGING_POTION2_23555, RANGING_POTION1_23557 /* LMS */); add(combatPot, COMBAT_POTION1, COMBAT_POTION2, COMBAT_POTION3, COMBAT_POTION4, COMBAT_POTION4_26150, COMBAT_POTION3_26151, COMBAT_POTION2_26152, COMBAT_POTION1_26153 /* Deadman starter pack */); add(superAttackPot, SUPER_ATTACK1, SUPER_ATTACK2, SUPER_ATTACK3, SUPER_ATTACK4); add(superStrengthPot, SUPER_STRENGTH1, SUPER_STRENGTH2, SUPER_STRENGTH3, SUPER_STRENGTH4); add(superDefencePot, SUPER_DEFENCE1, SUPER_DEFENCE2, SUPER_DEFENCE3, SUPER_DEFENCE4); add(magicEssence, MAGIC_ESSENCE1, MAGIC_ESSENCE2, MAGIC_ESSENCE3, MAGIC_ESSENCE4); add(combo(superAttackPot, superStrengthPot, superDefencePot), SUPER_COMBAT_POTION1, SUPER_COMBAT_POTION2, SUPER_COMBAT_POTION3, SUPER_COMBAT_POTION4); add(zamorakBrew, ZAMORAK_BREW1, ZAMORAK_BREW2, ZAMORAK_BREW3, ZAMORAK_BREW4); add(new SaradominBrew(0.15, 0.2, 0.1, 2, 2), SARADOMIN_BREW1, SARADOMIN_BREW2, SARADOMIN_BREW3, SARADOMIN_BREW4, SARADOMIN_BREW4_23575, SARADOMIN_BREW3_23577, SARADOMIN_BREW2_23579, SARADOMIN_BREW1_23581 /* LMS */); add(superRangingPot, SUPER_RANGING_1, SUPER_RANGING_2, SUPER_RANGING_3, SUPER_RANGING_4); add(superMagicPot, SUPER_MAGIC_POTION_1, SUPER_MAGIC_POTION_2, SUPER_MAGIC_POTION_3, SUPER_MAGIC_POTION_4); add(combo(rangingPot, superDefencePot), BASTION_POTION1, BASTION_POTION2, BASTION_POTION3, BASTION_POTION4); add(combo(magicPot, superDefencePot), BATTLEMAGE_POTION1, BATTLEMAGE_POTION2, BATTLEMAGE_POTION3, BATTLEMAGE_POTION4); add(combo(magicPot, divinePot), DIVINE_MAGIC_POTION1, DIVINE_MAGIC_POTION2, DIVINE_MAGIC_POTION3, DIVINE_MAGIC_POTION4); add(combo(rangingPot, divinePot), DIVINE_RANGING_POTION1, DIVINE_RANGING_POTION2, DIVINE_RANGING_POTION3, DIVINE_RANGING_POTION4); add(combo(superAttackPot, divinePot), 
DIVINE_SUPER_ATTACK_POTION1, DIVINE_SUPER_ATTACK_POTION2, DIVINE_SUPER_ATTACK_POTION3, DIVINE_SUPER_ATTACK_POTION4); add(combo(superStrengthPot, divinePot), DIVINE_SUPER_STRENGTH_POTION1, DIVINE_SUPER_STRENGTH_POTION2, DIVINE_SUPER_STRENGTH_POTION3, DIVINE_SUPER_STRENGTH_POTION4); add(combo(superDefencePot, divinePot), DIVINE_SUPER_DEFENCE_POTION1, DIVINE_SUPER_DEFENCE_POTION2, DIVINE_SUPER_DEFENCE_POTION3, DIVINE_SUPER_DEFENCE_POTION4); add(combo(superAttackPot, superStrengthPot, superDefencePot, divinePot), DIVINE_SUPER_COMBAT_POTION1, DIVINE_SUPER_COMBAT_POTION2, DIVINE_SUPER_COMBAT_POTION3, DIVINE_SUPER_COMBAT_POTION4); add(combo(rangingPot, superDefencePot, divinePot), DIVINE_BASTION_POTION1, DIVINE_BASTION_POTION2, DIVINE_BASTION_POTION3, DIVINE_BASTION_POTION4); add(combo(magicPot, superDefencePot, divinePot), DIVINE_BATTLEMAGE_POTION1, DIVINE_BATTLEMAGE_POTION2, DIVINE_BATTLEMAGE_POTION3, DIVINE_BATTLEMAGE_POTION4); add(combo(superAttackPot, superStrengthPot, superDefencePot, rangingPot, imbuedHeart), CASTLEWARS_BREW4, CASTLEWARS_BREW3, CASTLEWARS_BREW2, CASTLEWARS_BREW1); add(combo(superAttackPot, superStrengthPot), SUPER_COMBAT_POTION4_23543, SUPER_COMBAT_POTION3_23545, SUPER_COMBAT_POTION2_23547, SUPER_COMBAT_POTION1_23549 /* LMS */); add(ancientBrew, ANCIENT_BREW1, ANCIENT_BREW2, ANCIENT_BREW3, ANCIENT_BREW4); add(new AncientBrew(.08, 3), FORGOTTEN_BREW1, FORGOTTEN_BREW2, FORGOTTEN_BREW3, FORGOTTEN_BREW4); // Mixed combat potions
add(new MixedPotion(3, attackPot), ATTACK_MIX1, ATTACK_MIX2); add(new MixedPotion(3, strengthPot), STRENGTH_MIX1, STRENGTH_MIX2); add(new MixedPotion(3, combatPot), COMBAT_MIX1, COMBAT_MIX2); add(new MixedPotion(6, defencePot), DEFENCE_MIX1, DEFENCE_MIX2); add(new MixedPotion(6, magicPot), MAGIC_MIX1, MAGIC_MIX2); add(new MixedPotion(6, rangingPot), RANGING_MIX1, RANGING_MIX2); add(new MixedPotion(6, superAttackPot), SUPERATTACK_MIX1, SUPERATTACK_MIX2); add(new MixedPotion(6, superStrengthPot), SUPER_STR_MIX1, SUPER_STR_MIX2); 
add(new MixedPotion(6, superDefencePot), SUPER_DEF_MIX1, SUPER_DEF_MIX2); add(new MixedPotion(6, magicEssence), MAGIC_ESSENCE_MIX1, MAGIC_ESSENCE_MIX2); add(new MixedPotion(6, zamorakBrew), ZAMORAK_MIX1, ZAMORAK_MIX2); add(new MixedPotion(6, ancientBrew), ANCIENT_MIX1, ANCIENT_MIX2); // Regular overload (NMZ)
add(combo(superAttackPot, superStrengthPot, superDefencePot, superRangingPot, superMagicPot, heal(HITPOINTS, -50)), OVERLOAD_1, OVERLOAD_2, OVERLOAD_3, OVERLOAD_4); // Bandages (Castle Wars)
add(new CastleWarsBandage(), BANDAGES); // Bandages (Theatre of Blood entry mode)
add(combo(food(20), heal(PRAYER, perc(0.25, 5)), heal(RUN_ENERGY, 20), boost(ATTACK, perc(0.15, 4)), boost(STRENGTH, perc(0.15, 4)), boost(DEFENCE, perc(0.15, 4)), rangingPot, magicPot), BANDAGES_25730); // Recovery potions
final Effect restorePot = combo(heal(ATTACK, perc(.30, 10)), heal(STRENGTH, perc(.30, 10)), heal(DEFENCE, perc(.30, 10)), heal(RANGED, perc(.30, 10)), heal(MAGIC, perc(.30, 10))); final Effect energyPot = heal(RUN_ENERGY, 10); final SingleEffect prayerPot = new PrayerPotion(7); final Effect superEnergyPot = heal(RUN_ENERGY, 20); final Effect superRestorePot = new SuperRestore(.25, 8); final Effect staminaPot = new StaminaPotion(); final DeltaPercentage remedyHeal = perc(0.16, 6); add(restorePot, RESTORE_POTION1, RESTORE_POTION2, RESTORE_POTION3, RESTORE_POTION4); add(energyPot, ENERGY_POTION1, ENERGY_POTION2, ENERGY_POTION3, ENERGY_POTION4); add(prayerPot, PRAYER_POTION1, PRAYER_POTION2, PRAYER_POTION3, PRAYER_POTION4); add(superEnergyPot, SUPER_ENERGY1, SUPER_ENERGY2, SUPER_ENERGY3, SUPER_ENERGY4); add(superRestorePot, SUPER_RESTORE1, SUPER_RESTORE2, SUPER_RESTORE3, SUPER_RESTORE4, BLIGHTED_SUPER_RESTORE1, BLIGHTED_SUPER_RESTORE2, BLIGHTED_SUPER_RESTORE3, BLIGHTED_SUPER_RESTORE4, SUPER_RESTORE4_23567, SUPER_RESTORE3_23569, SUPER_RESTORE2_23571, SUPER_RESTORE1_23573 /* LMS */); add(new SuperRestore(.30, 4), SANFEW_SERUM1, SANFEW_SERUM2, SANFEW_SERUM3, SANFEW_SERUM4, 
SANFEW_SERUM4_23559, SANFEW_SERUM3_23561, SANFEW_SERUM2_23563, SANFEW_SERUM1_23565 /* LMS */); add(combo(heal(ATTACK, remedyHeal), heal(STRENGTH, remedyHeal), heal(DEFENCE, remedyHeal), heal(RANGED, remedyHeal), heal(MAGIC, remedyHeal)), MENAPHITE_REMEDY1, MENAPHITE_REMEDY2, MENAPHITE_REMEDY3, MENAPHITE_REMEDY4); add(staminaPot, STAMINA_POTION1, STAMINA_POTION2, STAMINA_POTION3, STAMINA_POTION4); // Mixed recovery potions
add(new MixedPotion(3, restorePot), RESTORE_MIX1, RESTORE_MIX2); add(new MixedPotion(3, energyPot), ENERGY_MIX1, ENERGY_MIX2); add(new MixedPotion(6, prayerPot), PRAYER_MIX1, PRAYER_MIX2); add(new MixedPotion(6, superEnergyPot), SUPER_ENERGY_MIX1, SUPER_ENERGY_MIX2); add(new MixedPotion(6, superRestorePot), SUPER_RESTORE_MIX1, SUPER_RESTORE_MIX2); add(new MixedPotion(6, staminaPot), STAMINA_MIX1, STAMINA_MIX2); // Chambers of Xeric potions (+)
final DeltaPercentage coxPlusPotionBoost = perc(.16, 6); add(combo(boost(ATTACK, coxPlusPotionBoost), boost(STRENGTH, coxPlusPotionBoost), boost(DEFENCE, coxPlusPotionBoost), boost(RANGED, coxPlusPotionBoost), boost(MAGIC, coxPlusPotionBoost), heal(HITPOINTS, -50)), OVERLOAD_1_20993, OVERLOAD_2_20994, OVERLOAD_3_20995, OVERLOAD_4_20996); add(combo(boost(ATTACK, coxPlusPotionBoost), boost(STRENGTH, coxPlusPotionBoost), boost(DEFENCE, coxPlusPotionBoost)), ELDER_1_20921, ELDER_2_20922, ELDER_3_20923, ELDER_4_20924); add(combo(boost(RANGED, coxPlusPotionBoost), boost(DEFENCE, coxPlusPotionBoost)), TWISTED_1_20933, TWISTED_2_20934, TWISTED_3_20935, TWISTED_4_20936); add(combo(boost(MAGIC, coxPlusPotionBoost), boost(DEFENCE, coxPlusPotionBoost)), KODAI_1_20945, KODAI_2_20946, KODAI_3_20947, KODAI_4_20948); add(new SuperRestore(.30, 11), REVITALISATION_1_20957, REVITALISATION_2_20958, REVITALISATION_3_20959, REVITALISATION_4_20960); add(new SaradominBrew(0.15, 0.2, 0.1, 5, 4), XERICS_AID_1_20981, XERICS_AID_2_20982, XERICS_AID_3_20983, XERICS_AID_4_20984); // Chambers of Xeric potions
final DeltaPercentage 
coxPotionBoost = perc(.13, 5); add(combo(boost(ATTACK, coxPotionBoost), boost(STRENGTH, coxPotionBoost), boost(DEFENCE, coxPotionBoost), boost(RANGED, coxPotionBoost), boost(MAGIC, coxPotionBoost), heal(HITPOINTS, -50)), OVERLOAD_1_20989, OVERLOAD_2_20990, OVERLOAD_3_20991, OVERLOAD_4_20992); add(combo(boost(ATTACK, coxPotionBoost), boost(STRENGTH, coxPotionBoost), boost(DEFENCE, coxPotionBoost)), ELDER_POTION_1, ELDER_POTION_2, ELDER_POTION_3, ELDER_POTION_4); add(combo(boost(RANGED, coxPotionBoost), boost(DEFENCE, coxPotionBoost)), TWISTED_POTION_1, TWISTED_POTION_2, TWISTED_POTION_3, TWISTED_POTION_4); add(combo(boost(MAGIC, coxPotionBoost), boost(DEFENCE, coxPotionBoost)), KODAI_POTION_1, KODAI_POTION_2, KODAI_POTION_3, KODAI_POTION_4); // Chambers of Xeric potions (-)
final DeltaPercentage coxMinusPotionBoost = perc(.10, 4); add(combo(boost(ATTACK, coxMinusPotionBoost), boost(STRENGTH, coxMinusPotionBoost), boost(DEFENCE, coxMinusPotionBoost), boost(RANGED, coxMinusPotionBoost), boost(MAGIC, coxMinusPotionBoost), heal(HITPOINTS, -50)), OVERLOAD_1_20985, OVERLOAD_2_20986, OVERLOAD_3_20987, OVERLOAD_4_20988); add(combo(boost(ATTACK, coxMinusPotionBoost), boost(STRENGTH, coxMinusPotionBoost), boost(DEFENCE, coxMinusPotionBoost)), ELDER_1, ELDER_2, ELDER_3, ELDER_4); add(combo(boost(RANGED, coxMinusPotionBoost), boost(DEFENCE, coxMinusPotionBoost)), TWISTED_1, TWISTED_2, TWISTED_3, TWISTED_4); add(combo(boost(MAGIC, coxMinusPotionBoost), boost(DEFENCE, coxMinusPotionBoost)), KODAI_1, KODAI_2, KODAI_3, KODAI_4); // Tombs of Amascut consumables
final DeltaPercentage nectarDrain = perc(0.2, -5); final DeltaPercentage saltBoost = perc(.16, 11); final DeltaPercentage tearsRestore = perc(.25, 3); add(combo(boost(HITPOINTS, 20), prayerPot), HONEY_LOCUST); add(new Ambrosia(), AMBROSIA_1, AMBROSIA_2); add(combo(heal(PRAYER, 8), heal(PRAYER, (8 * 9))), BLESSED_CRYSTAL_SCARAB_1, BLESSED_CRYSTAL_SCARAB_2); add(combo(new SimpleStatBoost(HITPOINTS, true, perc(.15, 3)), new 
BoostedStatBoost(ATTACK, false, nectarDrain), new BoostedStatBoost(STRENGTH, false, nectarDrain), new BoostedStatBoost(DEFENCE, false, nectarDrain), new BoostedStatBoost(RANGED, false, nectarDrain), new BoostedStatBoost(MAGIC, false, nectarDrain)), NECTAR_1, NECTAR_2, NECTAR_3, NECTAR_4); add(combo(heal(HITPOINTS, 5), heal(HITPOINTS, 5 * 20)), SILK_DRESSING_1, SILK_DRESSING_2); add(combo(new SimpleStatBoost(ATTACK, true, saltBoost), new SimpleStatBoost(STRENGTH, true, saltBoost), new SimpleStatBoost(DEFENCE, true, saltBoost), new SimpleStatBoost(RANGED, true, saltBoost), new SimpleStatBoost(MAGIC, true, saltBoost)), SMELLING_SALTS_1, SMELLING_SALTS_2); add(combo(heal(PRAYER, perc(.25, 10)), heal(ATTACK, tearsRestore), heal(STRENGTH, tearsRestore), heal(DEFENCE, tearsRestore), heal(RANGED, tearsRestore), heal(MAGIC, tearsRestore)), TEARS_OF_ELIDINIS_1, TEARS_OF_ELIDINIS_2, TEARS_OF_ELIDINIS_3, TEARS_OF_ELIDINIS_4); // Skill potions
final Effect agilityPot = boost(AGILITY, 3); final Effect fishingPot = boost(FISHING, 3); final Effect hunterPot = boost(HUNTER, 3); add(agilityPot, AGILITY_POTION1, AGILITY_POTION2, AGILITY_POTION3, AGILITY_POTION4); add(fishingPot, FISHING_POTION1, FISHING_POTION2, FISHING_POTION3, FISHING_POTION4); add(hunterPot, HUNTER_POTION1, HUNTER_POTION2, HUNTER_POTION3, HUNTER_POTION4); add(combo(boost(HITPOINTS, 5), heal(RUN_ENERGY, 5)), GUTHIX_REST1, GUTHIX_REST2, GUTHIX_REST3, GUTHIX_REST4); // Mixed skill potions
add(new MixedPotion(6, agilityPot), AGILITY_MIX1, AGILITY_MIX2); add(new MixedPotion(6, fishingPot), FISHING_MIX1, FISHING_MIX2); add(new MixedPotion(6, hunterPot), HUNTING_MIX1, HUNTING_MIX2); // Misc/run energy
add(combo(food(3), range(heal(RUN_ENERGY, 5), heal(RUN_ENERGY, 10))), WHITE_TREE_FRUIT); add(heal(RUN_ENERGY, 30), STRANGE_FRUIT); add(heal(RUN_ENERGY, 50), MINT_CAKE); add(combo(food(12), heal(RUN_ENERGY, 50)), GOUT_TUBER); // Pies
add(combo(heal(HITPOINTS, 6), boost(FARMING, 3)), GARDEN_PIE, HALF_A_GARDEN_PIE); 
add(combo(heal(HITPOINTS, 6), boost(FISHING, 3)), FISH_PIE, HALF_A_FISH_PIE); add(combo(heal(HITPOINTS, 7), boost(HERBLORE, 4)), BOTANICAL_PIE, HALF_A_BOTANICAL_PIE); add(combo(heal(HITPOINTS, 8), boost(CRAFTING, 4)), MUSHROOM_PIE, HALF_A_MUSHROOM_PIE); add(combo(heal(HITPOINTS, 8), boost(FISHING, 5)), ADMIRAL_PIE, HALF_AN_ADMIRAL_PIE); add(combo(heal(HITPOINTS, 11), boost(SLAYER, 5), boost(RANGED, 4)), WILD_PIE, HALF_A_WILD_PIE); add(combo(heal(HITPOINTS, 11), boost(AGILITY, 5), heal(RUN_ENERGY, 10)), SUMMER_PIE, HALF_A_SUMMER_PIE); add(combo(heal(HITPOINTS, 10), boost(FLETCHING, 4)), DRAGONFRUIT_PIE, HALF_A_DRAGONFRUIT_PIE); // Other
add(combo(range(food(1), food(3)), heal(RUN_ENERGY, 10)), PURPLE_SWEETS_10476); add(new SpicyStew(), SPICY_STEW); add(imbuedHeart, IMBUED_HEART); add(boost(MAGIC, perc(.10, 4)), SATURATED_HEART); add(combo(boost(ATTACK, 2), boost(STRENGTH, 1), heal(PRAYER, 1), heal(DEFENCE, -1)), JANGERBERRIES); add(new CaveNightshade(), CAVE_NIGHTSHADE); // Gauntlet
add(combo(heal(PRAYER, perc(.25, 7)), heal(RUN_ENERGY, 40)), EGNIOL_POTION_1, EGNIOL_POTION_2, EGNIOL_POTION_3, EGNIOL_POTION_4); // Soul Wars
add(combo(heal(HITPOINTS, perc(.15, 1)), heal(RUN_ENERGY, 100)), BANDAGES_25202); add(combo(boost(ATTACK, perc(.15, 5)), boost(STRENGTH, perc(.15, 5)), boost(DEFENCE, perc(.15, 5)), boost(RANGED, perc(.15, 5)), boost(MAGIC, perc(.15, 5)), heal(PRAYER, perc(.25, 8))), POTION_OF_POWER1, POTION_OF_POWER2, POTION_OF_POWER3, POTION_OF_POWER4); log.debug("{} items; {} behaviours loaded", effects.size(), new HashSet<>(effects.values()).size()); }
// Smoke test: constructing ItemStatChanges runs the full init() registration
// table, so a typo'd item constant or broken effect wiring would throw here.
@Test
public void testInit() {
    new ItemStatChanges();
}
/**
 * Asserts that a state expression holds, throwing a caller-supplied exception
 * when it does not.
 *
 * @param expectedExpression state expression expected to be {@code true}
 * @param exceptionSupplierIfUnexpected supplies the exception thrown on violation
 * @param <T> type of the thrown exception
 * @throws T when {@code expectedExpression} is {@code false}
 */
public static <T extends Throwable> void checkState(final boolean expectedExpression, final Supplier<T> exceptionSupplierIfUnexpected) throws T {
    if (expectedExpression) {
        return;
    }
    throw exceptionSupplierIfUnexpected.get();
}
// checkState with a true expression must be a no-op: the SQLException supplier
// is never invoked and nothing is thrown.
@Test
void assertCheckStateToNotThrowException() {
    assertDoesNotThrow(() -> ShardingSpherePreconditions.checkState(true, SQLException::new));
}
/**
 * Runs {@code runnable} asynchronously, preferring an {@link ExecutorService}
 * registered in the {@link ParseContext}. When none is present, the work is
 * run on a dedicated thread named "Tika Thread" instead.
 *
 * Raw {@code Future} return type is kept for source compatibility with
 * existing callers; the completed future's result is {@code null} since a
 * Runnable produces no value.
 *
 * @param context  parse context that may carry an ExecutorService
 * @param runnable work to execute
 * @return a future that completes when the runnable finishes
 */
public static Future execute(ParseContext context, Runnable runnable) {
    ExecutorService executorService = context.get(ExecutorService.class);
    if (executorService != null) {
        return executorService.submit(runnable);
    }
    // No pool configured: wrap the work in a FutureTask and run it on its own
    // thread so callers can still wait on / cancel the returned future.
    FutureTask<Object> task = new FutureTask<>(runnable, null);
    Thread thread = new Thread(task, "Tika Thread");
    thread.start();
    return task;
}
// With no ExecutorService registered in the ParseContext, execute() falls back
// to a dedicated thread; the returned future must still complete, and its
// result is null because the Runnable produces no value.
@Test
public void testExecuteThread() throws Exception {
    ParseContext context = new ParseContext();
    Future result = ConcurrentUtils.execute(context, () -> {
        //Do nothing
    });
    assertNull(result.get());
}
/**
 * Sets the limit of {@code buffer} via the base {@link Buffer} type.
 *
 * NOTE(review): routing the call through Buffer rather than a subtype such as
 * ByteBuffer looks deliberate — presumably to bind to the Java 8 era
 * {@code Buffer.limit(int)} method rather than the covariant override added
 * to the subtypes in Java 9, so bytecode compiled on newer JDKs still links
 * on Java 8. Confirm before inlining or removing this helper.
 */
public static void limit(Buffer buffer, int newLimit) {
    buffer.limit(newLimit);
}
// Setting a limit equal to the capacity through the helper must succeed
// without throwing (it is a plain delegate to Buffer.limit).
@Test
public void testLimit() {
    ByteBuffer byteBuffer = ByteBuffer.allocate(4);
    byteBuffer.putInt(1);
    Assertions.assertDoesNotThrow(() -> BufferUtils.limit(byteBuffer, 4));
}
/**
 * Dispatches an incoming GBA "verstrekking aan afnemer" message to the handler
 * matching its event-type name (Null, Ag01, Ag31, Af01, Af11, Gv01, Ng01,
 * Wa11). The message is first correlated with the originally sent
 * afnemersbericht via its reference id; on a mismatch it is only logged
 * remotely and then dropped. Each handled case logs remotely after processing,
 * except Wa11, which is processed without a remote log entry — NOTE(review):
 * confirm that is intentional. Event names outside the switch fall through
 * silently.
 */
public void processVerstrekkingAanAfnemer(VerstrekkingAanAfnemer verstrekkingAanAfnemer){ if (logger.isDebugEnabled()) logger.debug("Processing verstrekkingAanAfnemer: {}", marshallElement(verstrekkingAanAfnemer)); Afnemersbericht afnemersbericht = afnemersberichtRepository.findByOnzeReferentie(verstrekkingAanAfnemer.getReferentieId()); if(mismatch(verstrekkingAanAfnemer, afnemersbericht)){ digidXClient.remoteLogBericht(Log.NO_RELATION_TO_SENT_MESSAGE, verstrekkingAanAfnemer, afnemersbericht); return; } switch (verstrekkingAanAfnemer.getGebeurtenissoort().getNaam()) { case "Null" -> { logger.info("Start processing Null message"); dglResponseService.processNullMessage(verstrekkingAanAfnemer.getGebeurtenisinhoud().getNull(), afnemersbericht); digidXClient.remoteLogWithoutRelatingToAccount(Log.MESSAGE_PROCESSED, "Null"); } case "Ag01" -> { logger.info("Start processing Ag01 message"); dglResponseService.processAg01(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAg01(), afnemersbericht); digidXClient.remoteLogBericht(Log.MESSAGE_PROCESSED, verstrekkingAanAfnemer, afnemersbericht); } case "Ag31" -> { logger.info("Start processing Ag31 message"); dglResponseService.processAg31(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAg31(), afnemersbericht); digidXClient.remoteLogBericht(Log.MESSAGE_PROCESSED, verstrekkingAanAfnemer, afnemersbericht); } case "Af01" -> { logger.info("Start processing Af01 message"); dglResponseService.processAf01(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAf01(), afnemersbericht); digidXClient.remoteLogBericht(Log.MESSAGE_PROCESSED, verstrekkingAanAfnemer, afnemersbericht); } case "Af11" -> { logger.info("Start processing Af11 message"); dglResponseService.processAf11(verstrekkingAanAfnemer.getGebeurtenisinhoud().getAf11(), afnemersbericht); digidXClient.remoteLogWithoutRelatingToAccount(Log.MESSAGE_PROCESSED, "Af11"); } case "Gv01" -> { logger.info("Start processing Gv01 message"); Gv01 gv01 = 
verstrekkingAanAfnemer.getGebeurtenisinhoud().getGv01(); dglResponseService.processGv01(gv01); String bsn = CategorieUtil.findBsnOudeWaarde(gv01.getCategorie()); if (bsn == null) { bsn = CategorieUtil.findBsn(gv01.getCategorie()); } digidXClient.remoteLogSpontaneVerstrekking(Log.MESSAGE_PROCESSED, "Gv01", gv01.getANummer(), bsn); } case "Ng01" -> { logger.info("Start processing Ng01 message"); Ng01 ng01 = verstrekkingAanAfnemer.getGebeurtenisinhoud().getNg01(); dglResponseService.processNg01(ng01); digidXClient.remoteLogSpontaneVerstrekking(Log.MESSAGE_PROCESSED, "Ng01", CategorieUtil.findANummer(ng01.getCategorie()), ""); } case "Wa11" -> { logger.info("Start processing Wa11 message"); dglResponseService.processWa11(verstrekkingAanAfnemer.getGebeurtenisinhoud().getWa11()); } } }
// Happy path for an Ag01 response: the message correlates with the stored
// afnemersbericht via the reference id, is handed to processAg01 exactly once,
// and its processing is logged remotely against the bericht.
@Test public void testProcessAg01(){ String testBsn = "SSSSSSSSS"; Ag01 testAg01 = TestDglMessagesUtil.createTestAg01(testBsn, "A", "SSSSSSSS"); VerstrekkingInhoudType inhoudType = new VerstrekkingInhoudType(); inhoudType.setAg01(testAg01); GeversioneerdType type = new GeversioneerdType(); type.setNaam("Ag01"); when(verstrekkingAanAfnemer.getReferentieId()).thenReturn("referentieId"); when(afnemersberichtRepository.findByOnzeReferentie("referentieId")).thenReturn(afnemersbericht); when(verstrekkingAanAfnemer.getGebeurtenissoort()).thenReturn(type); when(verstrekkingAanAfnemer.getGebeurtenisinhoud()).thenReturn(inhoudType); when(afnemersbericht.getBsn()).thenReturn(testBsn); classUnderTest.processVerstrekkingAanAfnemer(verstrekkingAanAfnemer); verify(dglResponseService, times(1)).processAg01(testAg01, afnemersbericht); verify(digidXClient, times(1)).remoteLogBericht(Log.MESSAGE_PROCESSED, verstrekkingAanAfnemer, afnemersbericht); }
/**
 * Wraps a checked supplier so that any Throwable it throws is routed to the
 * given handler, whose result is returned instead.
 *
 * @param function         supplier to protect
 * @param exceptionHandler fallback invoked with the caught Throwable; may itself
 *                         throw, in which case its exception propagates
 * @return a supplier that never propagates the original function's Throwable
 */
public static <T> CheckedFunction0<T> recover(CheckedFunction0<T> function, CheckedFunction1<Throwable, T> exceptionHandler) {
    return () -> {
        try {
            return function.apply();
        } catch (Throwable throwable) {
            // Delegate every failure (including Errors) to the handler.
            return exceptionHandler.apply(throwable);
        }
    };
}
/**
 * recover(...) must invoke the fallback when the wrapped supplier throws one
 * of the listed exception types (here IOException).
 */
@Test
public void shouldRecoverFromSpecificExceptions() throws Throwable {
    // Supplier that always fails with an IOException.
    CheckedFunction0<String> callable = () -> {
        throw new IOException("BAM!");
    };
    // IOException is in the recoverable list, so the handler's value is returned.
    CheckedFunction0<String> callableWithRecovery = VavrCheckedFunctionUtils.recover(callable, asList(IllegalArgumentException.class, IOException.class), (ex) -> "Bla");
    String result = callableWithRecovery.apply();
    assertThat(result).isEqualTo("Bla");
}
/**
 * Sets the sent flag and returns this builder for chaining.
 *
 * @param sent whether the item has been sent; boxed, so may be null
 * @return this builder (via getThis() for subtype-safe chaining)
 */
public B sent(Boolean sent) {
    this.sent = sent;
    return getThis();
}
/**
 * The sent(true) value set on the builder must survive into the built object.
 */
@Test
void sent() {
    MethodBuilder builder = new MethodBuilder();
    builder.sent(true);
    Assertions.assertTrue(builder.build().getSent());
}
/**
 * Looks up the PackageModel for the given package descriptor.
 * Thin delegation to the packageModels registry.
 *
 * @param packageDescr descriptor of the package being built
 * @param pkgRegistry  registry entry for the package
 * @param pkgName      package name used as the lookup key
 * @return the package model
 */
protected PackageModel getPackageModel(PackageDescr packageDescr, PackageRegistry pkgRegistry, String pkgName) {
    return packageModels.getPackageModel(packageDescr, pkgRegistry, pkgName);
}
/**
 * With no pkgUUID in the descriptor, the returned model's UUID must be the
 * one derived from configuration, release id and package name.
 */
@Test
public void getPackageModelWithoutPkgUUID() {
    PackageDescr packageDescr = getPackageDescr(null);
    PackageModel retrieved = modelBuilder.getPackageModel(packageDescr, packageRegistry, internalKnowledgePackage.getName());
    assertThat(retrieved).isNotNull();
    // The UUID must be reproducible from configuration + release id + name.
    String expected = getPkgUUID(retrieved.getConfiguration(), RELEASE_ID, internalKnowledgePackage.getName());
    assertThat(retrieved.getPackageUUID()).isEqualTo(expected);
}
/**
 * Returns one page of connections from the repository.
 *
 * @param pageIndex zero-based page index
 * @param pageSize  number of elements per page
 * @return the requested page
 */
public Page<Connection> findAll(int pageIndex, int pageSize) {
    return connectionRepository.findAll(PageRequest.of(pageIndex, pageSize));
}
/**
 * findAll must forward the paging parameters to the repository unchanged and
 * return the repository's page as-is.
 */
@Test
void pageWithAllConnections() {
    when(connectionRepositoryMock.findAll(PageRequest.of(1, 10))).thenReturn(getPageConnections());
    Page<Connection> result = connectionServiceMock.findAll(1, 10);
    verify(connectionRepositoryMock, times(1)).findAll(PageRequest.of(1, 10));
    assertNotNull(result);
    // Values below match the fixture returned by getPageConnections().
    assertEquals(1, result.getTotalPages());
    assertEquals(2, result.getContent().size());
}
/**
 * Sends a record through the wrapped producer, starting a transaction first
 * when needed.
 *
 * @param record   record to send
 * @param callback completion callback passed straight through to the producer
 * @return future for the record metadata
 * @throws TaskMigratedException if the producer was fenced (recoverable at task level)
 * @throws StreamsException      for any other send failure
 */
Future<RecordMetadata> send(final ProducerRecord<byte[], byte[]> record, final Callback callback) {
    maybeBeginTransaction();
    try {
        return producer.send(record, callback);
    } catch (final KafkaException uncaughtException) {
        if (isRecoverable(uncaughtException)) {
            // producer.send() call may throw a KafkaException which wraps a FencedException,
            // in this case we should throw its wrapped inner cause so that it can be
            // captured and re-wrapped as TaskMigratedException
            throw new TaskMigratedException(
                formatException("Producer got fenced trying to send a record"),
                uncaughtException.getCause()
            );
        } else {
            throw new StreamsException(
                formatException(String.format("Error encountered trying to send record to topic %s", record.topic())),
                uncaughtException
            );
        }
    }
}
/**
 * In non-EOS mode, send() must forward the record to the underlying producer
 * exactly once and unchanged.
 */
@Test
public void shouldForwardRecordOnSend() {
    nonEosStreamsProducer.send(record, null);
    assertThat(nonEosMockProducer.history().size(), is(1));
    assertThat(nonEosMockProducer.history().get(0), is(record));
}
/**
 * Handles a RestRequest unless it is multipart/related.
 *
 * NOTE(review): when isMultipart returns true the request is not forwarded;
 * presumably isMultipart completes the callback with an error itself — confirm.
 */
@Override
public void handleRequest(RestRequest request, RequestContext requestContext, Callback<RestResponse> callback) {
    //This code path cannot accept content types or accept types that contain
    //multipart/related. This is because these types of requests will usually have very large payloads and therefore
    //would degrade server performance since RestRequest reads everything into memory.
    if (!isMultipart(request, requestContext, callback)) {
        _restRestLiServer.handleRequest(request, requestContext, callback);
    }
}
/**
 * Unstructured-data GET where the resource writes nothing: the server must
 * surface an error through the R2 callback rather than returning success.
 */
@Test(dataProvider = "validClientProtocolVersionDataStreamOnly")
public void testValidUnstructuredDataRequestMissingHeader(RestLiServer server, ProtocolVersion clientProtocolVersion, String headerConstant)
        throws URISyntaxException, IOException {
    StreamRequest streamRequest = new StreamRequestBuilder(new URI("/feedDownloads/1")).setHeader(headerConstant, clientProtocolVersion.toString())
        .build(EntityStreams.emptyStream());
    final FeedDownloadResource resource = getMockResource(FeedDownloadResource.class);
    resource.get(eq(1L), anyObject(UnstructuredDataWriter.class));
    EasyMock.expectLastCall().andDelegateTo(new FeedDownloadResource() {
        @Override
        public void get(Long key, UnstructuredDataWriter writer) {
            // do nothing here, this should cause error
        }
    }).once();
    replay(resource);
    @SuppressWarnings("unchecked")
    Callback<StreamResponse> r2Callback = createMock(Callback.class);
    // Missing payload must be reported as an error exactly once.
    r2Callback.onError(anyObject());
    expectLastCall().once();
    replay(r2Callback);
    RequestContext requestContext = new RequestContext();
    server.handleRequest(streamRequest, requestContext, r2Callback);
    verify(resource);
    verify(r2Callback);
}
/**
 * Creates a logging context for offset-related work of the given task,
 * installing the connector MDC prefix with Scope.OFFSETS.
 *
 * NOTE(review): presumably the LoggingContext constructor snapshots the
 * current MDC so close() can restore it — confirm.
 *
 * @param id the connector task id; must not be null
 * @return the context to close when the offset work is done
 */
public static LoggingContext forOffsets(ConnectorTaskId id) {
    Objects.requireNonNull(id);
    LoggingContext context = new LoggingContext();
    MDC.put(CONNECTOR_CONTEXT, prefixFor(id.connector(), Scope.OFFSETS, id.task()));
    return context;
}
/**
 * forOffsets must set the connector MDC entry for the scope of the
 * try-with-resources block and restore a clean MDC afterwards.
 */
@Test
public void shouldCreateOffsetsLoggingContext() {
    assertMdcExtrasUntouched();
    try (LoggingContext loggingContext = LoggingContext.forOffsets(TASK_ID1)) {
        assertMdc(TASK_ID1.connector(), TASK_ID1.task(), Scope.OFFSETS);
        log.info("Running task");
    }
    assertMdcExtrasUntouched();
    // After close() the connector context must be gone again.
    assertMdc(null, null, null);
}
/**
 * Formats an expression with default options.
 *
 * @param expression the expression to format
 * @return the textual form of the expression
 */
public static String formatExpression(final Expression expression) {
    // The always-false predicate configures FormatOptions; presumably it means
    // "never escape/quote identifiers" — confirm against FormatOptions.of contract.
    return formatExpression(expression, FormatOptions.of(s -> false));
}
/**
 * A time literal of 10000 ms must render as the time-of-day string 00:00:10.
 */
@Test
public void shouldFormatTimeLiteral() {
    assertThat(ExpressionFormatter.formatExpression(new TimeLiteral(new Time(10000))), equalTo("00:00:10"));
}
/**
 * Deletes committed offsets of a consumer group for the given partitions.
 *
 * @param groupId    consumer group id
 * @param partitions partitions whose committed offsets should be deleted
 * @param options    request options (timeout)
 * @return result exposing the overall and per-partition outcomes
 */
@Override
public DeleteConsumerGroupOffsetsResult deleteConsumerGroupOffsets(
        String groupId,
        Set<TopicPartition> partitions,
        DeleteConsumerGroupOffsetsOptions options) {
    SimpleAdminApiFuture<CoordinatorKey, Map<TopicPartition, Errors>> future =
        DeleteConsumerGroupOffsetsHandler.newFuture(groupId);
    DeleteConsumerGroupOffsetsHandler handler = new DeleteConsumerGroupOffsetsHandler(groupId, partitions, logContext);
    invokeDriver(handler, future, options.timeoutMs);
    // The future is keyed by the group coordinator; partitions are echoed back
    // so callers can query per-partition results.
    return new DeleteConsumerGroupOffsetsResult(future.get(CoordinatorKey.byGroupId(groupId)), partitions);
}
/**
 * Retriable FindCoordinator errors (NOT_AVAILABLE, LOAD_IN_PROGRESS) must be
 * retried until the coordinator resolves, after which the offset delete succeeds.
 */
@Test
public void testDeleteConsumerGroupOffsetsFindCoordinatorRetriableErrors() throws Exception {
    // Retriable FindCoordinatorResponse errors should be retried
    final TopicPartition tp1 = new TopicPartition("foo", 0);
    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(mockCluster(1, 0))) {
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        // Two retriable failures, then a successful coordinator lookup.
        env.kafkaClient().prepareResponse(
            prepareFindCoordinatorResponse(Errors.COORDINATOR_NOT_AVAILABLE, Node.noNode()));
        env.kafkaClient().prepareResponse(
            prepareFindCoordinatorResponse(Errors.COORDINATOR_LOAD_IN_PROGRESS, Node.noNode()));
        env.kafkaClient().prepareResponse(
            prepareFindCoordinatorResponse(Errors.NONE, env.cluster().controller()));
        env.kafkaClient().prepareResponse(
            prepareOffsetDeleteResponse("foo", 0, Errors.NONE));
        final DeleteConsumerGroupOffsetsResult result = env.adminClient()
            .deleteConsumerGroupOffsets(GROUP_ID, Stream.of(tp1).collect(Collectors.toSet()));
        // get() returning null (no exception) means the operation succeeded.
        assertNull(result.all().get());
        assertNull(result.partitionResult(tp1).get());
    }
}
/**
 * Returns the smallest range containing both inputs.
 *
 * NOTE(review): min/max propagate NaN, so NaN bounds would yield NaN bounds —
 * confirm DoubleRange rejects NaN at construction.
 *
 * @param first  first range; must not be null
 * @param second second range; must not be null
 * @return range spanning [min of the mins, max of the maxes]
 */
public static DoubleRange union(DoubleRange first, DoubleRange second) {
    requireNonNull(first, "first is null");
    requireNonNull(second, "second is null");
    return new DoubleRange(min(first.min, second.min), max(first.max, second.max));
}
/**
 * union() must span the extremes of both ranges: overlapping, identical and
 * disjoint inputs, either argument order, and infinite bounds.
 */
@Test
public void testUnion() {
    assertEquals(union(new DoubleRange(1, 2), new DoubleRange(4, 5)), new DoubleRange(1, 5));
    assertEquals(union(new DoubleRange(1, 2), new DoubleRange(1, 2)), new DoubleRange(1, 2));
    // Argument order must not matter.
    assertEquals(union(new DoubleRange(4, 5), new DoubleRange(1, 2)), new DoubleRange(1, 5));
    assertEquals(union(new DoubleRange(Double.NEGATIVE_INFINITY, 0), new DoubleRange(0, Double.POSITIVE_INFINITY)), new DoubleRange(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY));
    assertEquals(union(new DoubleRange(0, Double.POSITIVE_INFINITY), new DoubleRange(Double.NEGATIVE_INFINITY, 0)), new DoubleRange(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY));
}
/**
 * Restores operator state: wraps the raw split list state in a version-aware
 * view using the configured split serializer.
 */
@Override
public void initializeState(StateInitializationContext context) throws Exception {
    super.initializeState(context);
    final ListState<byte[]> rawState = context.getOperatorStateStore().getListState(SPLITS_STATE_DESC);
    // SimpleVersionedListState handles versioned (de)serialization of splits.
    readerState = new SimpleVersionedListState<>(rawState, splitSerializer);
}
/**
 * initializeState must register the splits list state on the operator state store.
 */
@Test
void testInitializeState() throws Exception {
    StateInitializationContext stateContext = context.createStateContext();
    operator.initializeState(stateContext);
    assertThat(
            stateContext
                    .getOperatorStateStore()
                    .getListState(SourceOperator.SPLITS_STATE_DESC))
        .isNotNull();
}
/**
 * Returns the given item unchanged, or fails with an ItemNotFoundException
 * carrying the supplied message when the item is null.
 *
 * @param item    value to check
 * @param message message logged and used for the exception when item is null
 * @param <T>     item type
 * @return the non-null item
 * @throws ItemNotFoundException if item is null
 */
public static <T> T nullIsNotFound(T item, String message) {
    if (item != null) {
        return item;
    }
    // Log before throwing so the failure is visible even when the exception
    // is handled upstream.
    log.error(message);
    throw new ItemNotFoundException(message);
}
/**
 * nullIsNotFound(null, ...) must throw ItemNotFoundException.
 */
@Test(expected = ItemNotFoundException.class)
public void testNullIsNotFoundThrow() {
    Tools.nullIsNotFound(null, "Not found!");
    // Unreachable if the call throws as expected.
    fail("Should've thrown some thing");
}
/**
 * Seeks each input partition to the earliest offset at or after the given
 * timestamp; partitions with no such offset fall back to their latest offset.
 *
 * @param client               consumer used for offset lookup and seeking
 * @param inputTopicPartitions partitions to reset
 * @param timestamp            reset target in epoch milliseconds
 */
public void resetToDatetime(final Consumer<byte[], byte[]> client,
                            final Set<TopicPartition> inputTopicPartitions,
                            final Long timestamp) {
    final Map<TopicPartition, Long> topicPartitionsAndTimes = new HashMap<>(inputTopicPartitions.size());
    for (final TopicPartition topicPartition : inputTopicPartitions) {
        topicPartitionsAndTimes.put(topicPartition, timestamp);
    }
    final Map<TopicPartition, OffsetAndTimestamp> topicPartitionsAndOffset = client.offsetsForTimes(topicPartitionsAndTimes);
    for (final TopicPartition topicPartition : inputTopicPartitions) {
        // A missing entry or UNKNOWN_OFFSET means no record exists at/after the timestamp.
        final Optional<Long> partitionOffset = Optional.ofNullable(topicPartitionsAndOffset.get(topicPartition))
            .map(OffsetAndTimestamp::offset)
            .filter(offset -> offset != ListOffsetsResponse.UNKNOWN_OFFSET);
        if (partitionOffset.isPresent()) {
            client.seek(topicPartition, partitionOffset.get());
        } else {
            // Fall back to the end of the partition (only seeks; does not commit).
            client.seekToEnd(Collections.singletonList(topicPartition));
            System.out.println("Partition " + topicPartition.partition() + " from topic " + topicPartition.topic() + " is empty, without a committed record. Falling back to latest known offset.");
        }
    }
}
/**
 * For an empty partition (beginning offset == end offset), resetToDatetime
 * must fall back to the latest known offset instead of failing.
 */
@Test
public void testResetToDatetimeWhenPartitionIsEmptyResetsToLatestOffset() {
    final long beginningAndEndOffset = 5L; // Empty partition implies beginning offset == end offset
    final MockConsumer<byte[], byte[]> emptyConsumer = new EmptyPartitionConsumer<>(OffsetResetStrategy.EARLIEST);
    emptyConsumer.assign(Collections.singletonList(topicPartition));
    final Map<TopicPartition, Long> beginningOffsetsMap = new HashMap<>();
    beginningOffsetsMap.put(topicPartition, beginningAndEndOffset);
    emptyConsumer.updateBeginningOffsets(beginningOffsetsMap);
    final Map<TopicPartition, Long> endOffsetsMap = new HashMap<>();
    endOffsetsMap.put(topicPartition, beginningAndEndOffset);
    emptyConsumer.updateEndOffsets(endOffsetsMap);
    // A timestamp in the past guarantees offsetsForTimes finds no record.
    final long yesterdayTimestamp = Instant.now().minus(Duration.ofDays(1)).toEpochMilli();
    // resetToDatetime only seeks the offset, but does not commit.
    streamsResetter.resetToDatetime(emptyConsumer, inputTopicPartitions, yesterdayTimestamp);
    final long position = emptyConsumer.position(topicPartition);
    assertEquals(beginningAndEndOffset, position);
}
/**
 * Creates a Bernoulli distribution with success probability p.
 *
 * @param p probability of success, in [0, 1]
 * @throws IllegalArgumentException if p is outside [0, 1]
 */
public BernoulliDistribution(double p) {
    if (p < 0 || p > 1) {
        throw new IllegalArgumentException("Invalid p: " + p);
    }
    this.p = p;
    q = 1 - p;
    if (p == 0 || p == 1) {
        // Entropy of a degenerate distribution is exactly 0; the direct formula
        // would evaluate 0 * log2(0) and produce NaN.
        entropy = 0.0;
    } else {
        entropy = -p * MathEx.log2(p) - q * MathEx.log2(q);
    }
}
/**
 * Fitting 1000 samples drawn with p = 0.4 must recover p within 0.01
 * (seeded RNG keeps the draw deterministic).
 */
@Test
public void testBernoulliDistribution() {
    System.out.println("BernoulliDistribution");
    MathEx.setSeed(19650218); // to get repeatable results.
    BernoulliDistribution instance = new BernoulliDistribution(0.4);
    int[] data = instance.randi(1000);
    BernoulliDistribution est = BernoulliDistribution.fit(data);
    assertEquals(0.4, est.p, 1E-2);
}
/**
 * Fetches a configuration value from the backing config center.
 *
 * The lookup group is rewritten for Nacos clients, but the returned ConfigInfo
 * echoes the caller's original group and key.
 *
 * @param request config coordinates (namespace, group, key)
 * @return SUCCESS result carrying the content, or the connection-check failure
 */
public Result<ConfigInfo> getConfig(ConfigInfo request) {
    Result<Boolean> result = checkConnection(request);
    if (!result.isSuccess()) {
        // Propagate the connection failure's code/message without data.
        return new Result<>(result.getCode(), result.getMessage());
    }
    String group = request.getGroup();
    ConfigClient client = getConfigClient(request.getNamespace());
    if (client instanceof NacosClient) {
        // Nacos group names require normalisation before lookup.
        group = NacosUtils.rebuildGroup(request.getGroup());
    }
    String content = client.getConfig(request.getKey(), group);
    ConfigInfo configInfo = new ConfigInfo();
    // Intentionally echoes the original (un-rebuilt) group back to the caller.
    configInfo.setGroup(request.getGroup());
    configInfo.setKey(request.getKey());
    configInfo.setContent(content);
    return new Result<>(ResultCodeType.SUCCESS.getCode(), null, configInfo);
}
/**
 * getConfig must echo the requested group/key and return the stubbed content.
 */
@Test
public void getConfig() {
    ConfigInfo configInfo = new ConfigInfo();
    configInfo.setGroup(GROUP);
    configInfo.setKey(KEY);
    configInfo.setPluginType(PluginType.SPRINGBOOT_REGISTRY.getPluginName());
    Result<ConfigInfo> result = configService.getConfig(configInfo);
    ConfigInfo info = result.getData();
    Assert.assertEquals(info.getGroup(), GROUP);
    Assert.assertEquals(info.getKey(), KEY);
    Assert.assertEquals(info.getContent(), CONTENT);
}
/**
 * Returns a uniformly distributed random boolean.
 *
 * @return true or false with equal probability
 */
public static boolean randomBoolean() {
    // randomInt(2) yields 0 or 1 with equal probability; 0 maps to true.
    return randomInt(2) == 0;
}
/**
 * Smoke test that only prints a random boolean; disabled because the output
 * is non-deterministic and nothing is asserted.
 */
@Test
@Disabled
public void randomBooleanTest() {
    Console.log(RandomUtil.randomBoolean());
}
/**
 * Maps a SeaTunnel column definition back to a DB2 type definition.
 * Out-of-range precision/scale/length values are clamped to DB2 limits and
 * logged as warnings rather than rejected; unsupported SQL types raise a
 * connector-type conversion error.
 *
 * @param column the SeaTunnel column to convert
 * @return the DB2 type definition
 */
@Override
public BasicTypeDefine reconvert(Column column) {
    BasicTypeDefine.BasicTypeDefineBuilder builder =
            BasicTypeDefine.builder()
                    .name(column.getName())
                    .nullable(column.isNullable())
                    .comment(column.getComment())
                    .defaultValue(column.getDefaultValue());
    switch (column.getDataType().getSqlType()) {
        case BOOLEAN:
            builder.columnType(DB2_BOOLEAN);
            builder.dataType(DB2_BOOLEAN);
            break;
        case TINYINT:
        case SMALLINT:
            // DB2 has no TINYINT; both map to SMALLINT.
            builder.columnType(DB2_SMALLINT);
            builder.dataType(DB2_SMALLINT);
            break;
        case INT:
            builder.columnType(DB2_INT);
            builder.dataType(DB2_INT);
            break;
        case BIGINT:
            builder.columnType(DB2_BIGINT);
            builder.dataType(DB2_BIGINT);
            break;
        case FLOAT:
            builder.columnType(DB2_REAL);
            builder.dataType(DB2_REAL);
            break;
        case DOUBLE:
            builder.columnType(DB2_DOUBLE);
            builder.dataType(DB2_DOUBLE);
            break;
        case DECIMAL:
            DecimalType decimalType = (DecimalType) column.getDataType();
            long precision = decimalType.getPrecision();
            int scale = decimalType.getScale();
            // Clamp precision first (scale is reduced along with it), then scale.
            if (precision <= 0) {
                precision = DEFAULT_PRECISION;
                scale = DEFAULT_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is precision less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (precision > MAX_PRECISION) {
                // Shrink scale by the same amount precision is reduced, floor at 0.
                scale = (int) Math.max(0, scale - (precision - MAX_PRECISION));
                precision = MAX_PRECISION;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum precision of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_PRECISION,
                        precision,
                        scale);
            }
            if (scale < 0) {
                scale = 0;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is scale less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (scale > MAX_SCALE) {
                scale = MAX_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum scale of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_SCALE,
                        precision,
                        scale);
            }
            builder.columnType(String.format("%s(%s,%s)", DB2_DECIMAL, precision, scale));
            builder.dataType(DB2_DECIMAL);
            builder.precision(precision);
            builder.scale(scale);
            break;
        case BYTES:
            // Pick the narrowest binary type that fits: BINARY -> VARBINARY -> BLOB.
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                builder.columnType(
                        String.format("%s(%s)", DB2_VARBINARY, MAX_VARBINARY_LENGTH));
                builder.dataType(DB2_VARBINARY);
                // NOTE(review): length keeps the original null/non-positive value even
                // though the column type uses MAX_VARBINARY_LENGTH — confirm intended.
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_BINARY_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", DB2_BINARY, column.getColumnLength()));
                builder.dataType(DB2_BINARY);
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_VARBINARY_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", DB2_VARBINARY, column.getColumnLength()));
                builder.dataType(DB2_VARBINARY);
                builder.length(column.getColumnLength());
            } else {
                long length = column.getColumnLength();
                if (length > MAX_BLOB_LENGTH) {
                    length = MAX_BLOB_LENGTH;
                    log.warn(
                            "The length of blob type {} is out of range, "
                                    + "it will be converted to {}({})",
                            column.getName(),
                            DB2_BLOB,
                            length);
                }
                builder.columnType(String.format("%s(%s)", DB2_BLOB, length));
                builder.dataType(DB2_BLOB);
                builder.length(length);
            }
            break;
        case STRING:
            // Pick the narrowest character type that fits: CHAR -> VARCHAR -> CLOB.
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                builder.columnType(String.format("%s(%s)", DB2_VARCHAR, MAX_VARCHAR_LENGTH));
                builder.dataType(DB2_VARCHAR);
                // NOTE(review): same length-vs-columnType mismatch as the BYTES branch.
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_CHAR_LENGTH) {
                builder.columnType(String.format("%s(%s)", DB2_CHAR, column.getColumnLength()));
                builder.dataType(DB2_CHAR);
                builder.length(column.getColumnLength());
            } else if (column.getColumnLength() <= MAX_VARCHAR_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", DB2_VARCHAR, column.getColumnLength()));
                builder.dataType(DB2_VARCHAR);
                builder.length(column.getColumnLength());
            } else {
                long length = column.getColumnLength();
                if (length > MAX_CLOB_LENGTH) {
                    length = MAX_CLOB_LENGTH;
                    log.warn(
                            "The length of clob type {} is out of range, "
                                    + "it will be converted to {}({})",
                            column.getName(),
                            DB2_CLOB,
                            length);
                }
                builder.columnType(String.format("%s(%s)", DB2_CLOB, length));
                builder.dataType(DB2_CLOB);
                builder.length(length);
            }
            break;
        case DATE:
            builder.columnType(DB2_DATE);
            builder.dataType(DB2_DATE);
            break;
        case TIME:
            builder.columnType(DB2_TIME);
            builder.dataType(DB2_TIME);
            break;
        case TIMESTAMP:
            if (column.getScale() != null && column.getScale() > 0) {
                int timestampScale = column.getScale();
                if (column.getScale() > MAX_TIMESTAMP_SCALE) {
                    timestampScale = MAX_TIMESTAMP_SCALE;
                    log.warn(
                            "The timestamp column {} type timestamp({}) is out of range, "
                                    + "which exceeds the maximum scale of {}, "
                                    + "it will be converted to timestamp({})",
                            column.getName(),
                            column.getScale(),
                            MAX_TIMESTAMP_SCALE,
                            timestampScale);
                }
                builder.columnType(String.format("%s(%s)", DB2_TIMESTAMP, timestampScale));
                builder.scale(timestampScale);
            } else {
                // No (positive) scale requested: use DB2's default timestamp precision.
                builder.columnType(DB2_TIMESTAMP);
            }
            builder.dataType(DB2_TIMESTAMP);
            break;
        default:
            throw CommonError.convertToConnectorTypeError(
                    DatabaseIdentifier.DB_2,
                    column.getDataType().getSqlType().name(),
                    column.getName());
    }
    return builder.build();
}
/**
 * A SeaTunnel LOCAL_TIME column must reconvert to the DB2 TIME type.
 */
@Test
public void testReconvertTime() {
    Column column = PhysicalColumn.builder()
            .name("test")
            .dataType(LocalTimeType.LOCAL_TIME_TYPE)
            .build();
    BasicTypeDefine typeDefine = DB2TypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(DB2TypeConverter.DB2_TIME, typeDefine.getColumnType());
    Assertions.assertEquals(DB2TypeConverter.DB2_TIME, typeDefine.getDataType());
}
/**
 * Applies the filter chain to the issue: the head filter decides (or delegates
 * to the rest of the chain); an exhausted chain accepts.
 *
 * @param issue issue under evaluation
 * @return true if the issue is accepted by the whole chain
 */
@Override
public boolean accept(FilterableIssue issue) {
    if (filters.isEmpty()) {
        // End of the chain: no filter vetoed the issue.
        return true;
    } else {
        // The head filter receives the tail of the chain for delegation,
        // recursing via subList until the chain is exhausted.
        return filters.get(0).accept(issue, new DefaultIssueFilterChain(filters.subList(1, filters.size())));
    }
}
/**
 * Once a filter refuses, evaluation must stop: the FailingFilter placed after
 * the RefusingFilter would fail the test if it were ever reached.
 */
@Test
public void should_refuse_and_not_go_further_if_filter_refuses() {
    assertThat(new DefaultIssueFilterChain(List.of(
        new PassingFilter(),
        new RefusingFilter(),
        new FailingFilter())
    ).accept(issue)).isFalse();
}
/**
 * Returns the encrypt table configured for the given table name.
 *
 * @param tableName logical table name
 * @return the matching encrypt table
 * @throws EncryptTableNotFoundException if no encrypt table is configured for the name
 */
@HighFrequencyInvocation
public EncryptTable getEncryptTable(final String tableName) {
    return findEncryptTable(tableName).orElseThrow(() -> new EncryptTableNotFoundException(tableName));
}
/**
 * getEncryptTable must return the configured table for a known name.
 */
@Test
void assertGetEncryptTable() {
    assertThat(new EncryptRule("foo_db", createEncryptRuleConfiguration()).getEncryptTable("t_encrypt").getTable(), is("t_encrypt"));
}
/**
 * Returns the serializer registered under the given alias.
 *
 * @param alias extension alias of the serializer
 * @return the serializer instance
 */
public static Serializer getSerializer(String alias) {
    // Factory pattern: instance lookup/management is delegated to the ExtensionLoader.
    return EXTENSION_LOADER.getExtension(alias);
}
/**
 * Requesting a serializer for an unknown code must raise
 * SofaRpcRuntimeException instead of returning a value.
 */
@Test
public void getSerializerNotExist() {
    try {
        SerializerFactory.getSerializer((byte) 999);
        Assert.fail();
    } catch (SofaRpcRuntimeException e) {
        // expected: unknown serializer code
    }
}
/**
 * Value equality over all seven paging fields.
 *
 * NOTE(review): hashCode() must hash the same fields to stay consistent with
 * this method — verify.
 */
@Override
public boolean equals(final Object o) {
    if (this == o) {
        return true;
    }
    // instanceof also rejects null.
    if (!(o instanceof PageParameter)) {
        return false;
    }
    PageParameter that = (PageParameter) o;
    return currentPage == that.currentPage
            && prePage == that.prePage
            && nextPage == that.nextPage
            && pageSize == that.pageSize
            && offset == that.offset
            && totalPage == that.totalPage
            && totalCount == that.totalCount;
}
/**
 * Verifies the pre-built PageParameter exposes the expected paging values.
 *
 * NOTE(review): despite its name this test exercises the getters, not
 * equals(); pageParameterUnderTest is assumed to be initialised in setUp.
 */
@Test
public void testEquals() {
    // JUnit's assertEquals takes (expected, actual); the original had the
    // arguments swapped, producing misleading failure messages.
    assertEquals(1, pageParameterUnderTest.getCurrentPage());
    assertEquals(10, pageParameterUnderTest.getPageSize());
    assertEquals(2, pageParameterUnderTest.getNextPage());
}
/**
 * Verifies that the given internal topic's cleanup policy is exactly "compact",
 * which Connect requires for the durability of its internal data.
 *
 * @param topic             topic to check
 * @param workerTopicConfig worker config property that named this topic (for the error message)
 * @param topicPurpose      human-readable purpose of the topic (for the error message)
 * @return true if the policy is exactly "compact"; false if the policy could
 *         not be determined (older broker or missing describe permission)
 * @throws ConfigException if the topic has any other cleanup policy
 */
public boolean verifyTopicCleanupPolicyOnlyCompact(String topic, String workerTopicConfig, String topicPurpose) {
    Set<String> cleanupPolicies = topicCleanupPolicy(topic);
    if (cleanupPolicies.isEmpty()) {
        // Empty means "unknown", not "no policy": treat the topic as unverifiable.
        log.info("Unable to use admin client to verify the cleanup policy of '{}' "
                + "topic is '{}', either because the broker is an older "
                + "version or because the Kafka principal used for Connect "
                + "internal topics does not have the required permission to "
                + "describe topic configurations.", topic, TopicConfig.CLEANUP_POLICY_COMPACT);
        return false;
    }
    Set<String> expectedPolicies = Collections.singleton(TopicConfig.CLEANUP_POLICY_COMPACT);
    if (!cleanupPolicies.equals(expectedPolicies)) {
        String expectedPolicyStr = String.join(",", expectedPolicies);
        String cleanupPolicyStr = String.join(",", cleanupPolicies);
        // Any deviation (e.g. "delete" or "compact,delete") risks losing internal data.
        String msg = String.format("Topic '%s' supplied via the '%s' property is required "
                + "to have '%s=%s' to guarantee consistency and durability of "
                + "%s, but found the topic currently has '%s=%s'. Continuing would likely "
                + "result in eventually losing %s and problems restarting this Connect "
                + "cluster in the future. Change the '%s' property in the "
                + "Connect worker configurations to use a topic with '%s=%s'.",
                topic, workerTopicConfig, TopicConfig.CLEANUP_POLICY_CONFIG, expectedPolicyStr,
                topicPurpose, TopicConfig.CLEANUP_POLICY_CONFIG, cleanupPolicyStr, topicPurpose,
                workerTopicConfig, TopicConfig.CLEANUP_POLICY_CONFIG, expectedPolicyStr);
        throw new ConfigException(msg);
    }
    return true;
}
/**
 * A topic whose cleanup.policy is exactly "compact" must pass verification.
 */
@Test
public void verifyingTopicCleanupPolicyShouldReturnTrueWhenTopicHasCorrectPolicy() {
    String topicName = "myTopic";
    Map<String, String> topicConfigs = Collections.singletonMap("cleanup.policy", "compact");
    Cluster cluster = createCluster(1);
    try (MockAdminClient mockAdminClient = new MockAdminClient(cluster.nodes(), cluster.nodeById(0))) {
        TopicPartitionInfo topicPartitionInfo = new TopicPartitionInfo(0, cluster.nodeById(0), cluster.nodes(), Collections.emptyList());
        mockAdminClient.addTopic(false, topicName, Collections.singletonList(topicPartitionInfo), topicConfigs);
        TopicAdmin admin = new TopicAdmin(mockAdminClient);
        boolean result = admin.verifyTopicCleanupPolicyOnlyCompact("myTopic", "worker.topic", "purpose");
        assertTrue(result);
    }
}
/**
 * Resolves the given dependencies (plus their transitive compile/runtime
 * dependencies) against the configured remote repositories.
 *
 * @param dependencies dependencies to resolve; the first becomes the root of
 *                     the collect request, the rest its direct dependencies
 * @return artifact results for every resolved artifact; empty list for no input
 * @throws DependencyResolutionException if dependency collection/resolution fails
 * @throws ArtifactResolutionException   if an artifact cannot be fetched
 */
public List<ArtifactResult> resolve(List<Dependency> dependencies)
        throws DependencyResolutionException, ArtifactResolutionException {
    if (dependencies.isEmpty()) {
        // Typed empty list instead of the raw Collections.EMPTY_LIST constant,
        // which produces an unchecked-assignment warning.
        return Collections.emptyList();
    }
    CollectRequest collectRequest = new CollectRequest();
    // The first dependency acts as the root of the dependency graph.
    collectRequest.setRoot(dependencies.get(0));
    for (int idx = 1; idx < dependencies.size(); idx++) {
        collectRequest.addDependency(dependencies.get(idx));
    }
    for (RemoteRepository repository : remoteRepositories) {
        collectRequest.addRepository(repository);
    }
    // Restrict resolution to the compile and runtime scopes.
    DependencyFilter classpathFilter = DependencyFilterUtils
            .classpathFilter(JavaScopes.COMPILE, JavaScopes.RUNTIME);
    DependencyRequest dependencyRequest = new DependencyRequest(collectRequest, classpathFilter);
    return system.resolveDependencies(session, dependencyRequest).getArtifactResults();
}
/**
 * Resolves a small real artifact from Maven Central and checks that the
 * artifact itself plus a known transitive dependency are returned.
 * Requires network access.
 */
@Test
public void resolveValid() throws Exception {
    // please pick small artifact which has small transitive dependency
    // and let's mark as Ignore if we want to run test even without internet or maven central is often not stable
    Dependency dependency = new Dependency(new DefaultArtifact("org.apache.storm:flux-core:1.0.0"), JavaScopes.COMPILE);
    List<ArtifactResult> results = sut.resolve(Lists.newArrayList(dependency));
    assertTrue(results.size() > 0);
    // it should be org.apache.storm:flux-core:jar:1.0.0 and commons-cli:commons-cli:jar:1.2
    assertContains(results, "org.apache.storm", "flux-core", "1.0.0");
    assertContains(results, "commons-cli", "commons-cli", "1.2");
}
/**
 * Returns the indicator's simple class name followed by its bar count,
 * e.g. "AroonUpIndicator barCount: 5".
 */
@Override
public String toString() {
    return String.format("%s barCount: %s", getClass().getSimpleName(), barCount);
}
/**
 * With a series containing only NaN bars, the Aroon-Up indicator must return
 * NaN at every index instead of failing.
 */
@Test
public void onlyNaNValues() {
    BaseBarSeries series = new BaseBarSeries("NaN test");
    // 1001 bars, all fields NaN.
    for (long i = 0; i <= 1000; i++) {
        series.addBar(ZonedDateTime.now().plusDays(i), NaN, NaN, NaN, NaN, NaN);
    }
    AroonUpIndicator aroonUpIndicator = new AroonUpIndicator(series, 5);
    for (int i = series.getBeginIndex(); i <= series.getEndIndex(); i++) {
        assertEquals(NaN.toString(), aroonUpIndicator.getValue(i).toString());
    }
}
/**
 * Converts a Joda-Time instant to whole seconds since the Unix epoch.
 *
 * @param jodaInstant instant to convert
 * @return epoch seconds, rounded toward negative infinity
 */
public static long toSeconds(org.joda.time.Instant jodaInstant) {
    // floorDiv (not plain division) so pre-epoch instants with negative millis
    // round toward negative infinity, matching java.time.Instant.getEpochSecond
    // semantics; plain "/ 1000" truncates toward zero and is off by one second
    // for any instant with a fractional negative second.
    return Math.floorDiv(jodaInstant.getMillis(), 1000);
}
/**
 * Positive epoch-second instants must round-trip through toSeconds.
 */
@Test
public void testToSeconds() {
    assertEquals(1, TimestampConverter.toSeconds(org.joda.time.Instant.ofEpochSecond(1)));
    assertEquals(1000, TimestampConverter.toSeconds(org.joda.time.Instant.ofEpochSecond(1000)));
}
/**
 * After a Nacos health-check call, mirrors the Boolean result into the shared
 * RegisterContext availability flag; non-Boolean results are ignored.
 *
 * NOTE(review): the info log fires even when compareAndSet did not change the
 * state — confirm the repeated logging is acceptable.
 */
@Override
public ExecuteContext doAfter(ExecuteContext context) {
    final Object result = context.getResult();
    if (result instanceof Boolean) {
        boolean health = (boolean) result;
        if (health) {
            // compareAndSet flips the flag only on a genuine transition.
            RegisterContext.INSTANCE.compareAndSet(false, true);
            LOGGER.info("Nacos registry center recover healthy status!");
        } else {
            RegisterContext.INSTANCE.compareAndSet(true, false);
            LOGGER.info("Nacos registry center may be unhealthy!");
        }
    }
    return context;
}
/**
 * doAfter must leave the context un-skipped, ignore non-Boolean results, and
 * mirror Boolean results into the RegisterContext availability flag.
 */
@Test
public void doAfter() throws NoSuchMethodException {
    final NacosRpcClientHealthInterceptor interceptor = new NacosRpcClientHealthInterceptor();
    // A non-Boolean result must be ignored and not skip the context.
    final ExecuteContext context = buildContext(new Object());
    interceptor.doAfter(context);
    Assert.assertFalse(context.isSkip());
    // A FALSE health result marks the registry unavailable...
    final ExecuteContext context1 = buildContext(new Object());
    context1.changeResult(Boolean.FALSE);
    interceptor.doAfter(context1);
    Assert.assertFalse(RegisterContext.INSTANCE.isAvailable());
    // ...and a TRUE result restores availability.
    final ExecuteContext context2 = buildContext(new Object());
    context2.changeResult(Boolean.TRUE);
    interceptor.doAfter(context2);
    Assert.assertTrue(RegisterContext.INSTANCE.isAvailable());
}
/**
 * Sanitizes a statement, delegating to the three-argument overload with its
 * boolean flag set to true.
 *
 * NOTE(review): the flag's meaning is defined by the 3-arg overload (not
 * visible here) — confirm before relying on it.
 *
 * @param node      statement to sanitize
 * @param metaStore metastore used for resolution
 * @return the sanitized statement
 */
public static Statement sanitize(
    final Statement node,
    final MetaStore metaStore) {
    return sanitize(node, metaStore, true);
}
/**
 * Sanitizing must keep the source qualifier (TEST1) on a qualified column
 * reference while adding the derived column alias.
 */
@Test
public void shouldPreserveQualifierOnQualifiedColumnReference() {
    // Given:
    final Statement stmt = givenQuery("SELECT TEST1.COL0 FROM TEST1;");

    // When:
    final Query result = (Query) AstSanitizer.sanitize(stmt, META_STORE);

    // Then:
    assertThat(result.getSelect(), is(new Select(ImmutableList.of(
        new SingleColumn(
            column(TEST1_NAME, "COL0"),
            Optional.of(ColumnName.of("COL0")))
    ))));
}
/**
 * Keeps only the latest revision of each flow in the stream.
 * Thin delegation to keepLastVersionCollector.
 *
 * @param stream flows, possibly containing multiple revisions per flow
 * @return stream with one (latest) revision per flow
 */
public Stream<Flow> keepLastVersion(Stream<Flow> stream) {
    return keepLastVersionCollector(stream);
}
/**
 * keepLastVersion must collapse multiple revisions so that each surviving
 * flow carries its highest revision number.
 */
@Test
void multipleFlow() {
    Stream<Flow> stream = Stream.of(
        create("test", "test", 2),
        create("test", "test2", 1),
        create("test2", "test2", 1),
        create("test2", "test3", 3),
        create("test3", "test1", 2),
        create("test3", "test2", 3)
    );
    List<Flow> collect = flowService.keepLastVersion(stream).toList();
    assertThat(collect.size(), is(3));
    // Each surviving flow must expose its highest revision.
    assertThat(collect.stream().filter(flow -> flow.getId().equals("test")).findFirst().orElseThrow().getRevision(), is(2));
    assertThat(collect.stream().filter(flow -> flow.getId().equals("test2")).findFirst().orElseThrow().getRevision(), is(3));
    assertThat(collect.stream().filter(flow -> flow.getId().equals("test3")).findFirst().orElseThrow().getRevision(), is(3));
}
/**
 * Returns the plugin JAR, serving it from the local cache when present and
 * downloading it otherwise.
 *
 * @param plugin installed plugin descriptor; key + hash identify the cached JAR
 * @return the cached or freshly downloaded file, or empty if the download
 *         produced nothing
 */
public Optional<File> get(InstalledPlugin plugin) {
    // Does not fail if another process tries to create the directory at the same time.
    Path jarInCache = jarInCache(plugin.key, plugin.hash);
    if (Files.isRegularFile(jarInCache)) {
        return Optional.of(jarInCache.toFile());
    }
    return download(plugin).map(Path::toFile);
}
/**
 * When the server stalls beyond the HTTP timeout, get() must fail with an
 * IllegalStateException caused by a SocketTimeoutException.
 */
@Test
void getPlugin_whenTimeOutReached_thenDownloadFails() {
    // Server responds OK but only after a 5s delay, exceeding the client timeout.
    sonarqube.stubFor(get(anyUrl())
        .willReturn(ok()
            .withFixedDelay(5000)));
    InstalledPlugin plugin = newInstalledPlugin("foo", "abc");

    assertThatThrownBy(() -> underTest.get(plugin))
        .isInstanceOf(IllegalStateException.class)
        .hasMessageStartingWith("Fail to request url")
        .cause().isInstanceOf(SocketTimeoutException.class);
}
/**
 * Removes the MDC entry for the given key; null keys are ignored rather than
 * rejected.
 */
@Override
public void remove(String key) {
    if (key == null) {
        return;
    }
    Map<String, String> current = readWriteThreadLocalMap.get();
    if (current != null) {
        current.remove(key);
        // Drop the cached read-only snapshot so readers see the removal.
        nullifyReadOnlyThreadLocalMap();
    }
}
/**
 * remove(null) must be a silent no-op (no NullPointerException).
 */
@Test
public void removeForNullKeyTest() {
    mdcAdapter.remove(null);
}
/**
 * Obtains fresh HDFS delegation tokens on behalf of the given user and adds
 * them to the supplied credentials.
 *
 * @param user        user to proxy as
 * @param credentials credential store the new tokens are added to
 * @return the newly obtained tokens
 * @throws IOException          on filesystem or token failures
 * @throws InterruptedException if the doAs action is interrupted
 */
@VisibleForTesting
protected Token<?>[] obtainSystemTokensForUser(String user,
    final Credentials credentials) throws IOException, InterruptedException {
    // Get new hdfs tokens on behalf of this user
    UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(user,
        UserGroupInformation.getLoginUser());
    Token<?>[] newTokens = proxyUser.doAs(new PrivilegedExceptionAction<Token<?>[]>() {
        @Override
        public Token<?>[] run() throws Exception {
            FileSystem fs = FileSystem.get(getConfig());
            try {
                return fs.addDelegationTokens(
                    UserGroupInformation.getLoginUser().getUserName(), credentials);
            } finally {
                // Close the FileSystem created by the new proxy user,
                // So that we don't leave an entry in the FileSystem cache
                fs.close();
            }
        }
    });
    return newTokens;
}
/**
 * Regression test: obtaining tokens repeatedly must not leak FileSystem
 * instances into the FS cache (the instance counter stays constant).
 */
@Test
public void testFSLeakInObtainSystemTokensForUser() throws Exception{
    Credentials credentials = new Credentials();
    String user = "test";
    int oldCounter = MyFS.getInstanceCounter();
    delegationTokenRenewer.obtainSystemTokensForUser(user, credentials);
    delegationTokenRenewer.obtainSystemTokensForUser(user, credentials);
    delegationTokenRenewer.obtainSystemTokensForUser(user, credentials);
    // Counter unchanged => every FileSystem created inside was closed.
    Assert.assertEquals(oldCounter, MyFS.getInstanceCounter());
}