Dataset column summary:
- focal_method: string lengths 13 to 60.9k
- test_case: string lengths 25 to 109k
// Factory for a matcher that tests MessagingRequest.channelKind() for exact equality with the given value.
// NOTE(review): an empty channelKind throws NullPointerException rather than IllegalArgumentException;
// this mirrors the null case but the exception type is unusual — confirm callers/tests rely on NPE before changing.
public static <Req extends MessagingRequest> Matcher<Req> channelKindEquals(String channelKind) { if (channelKind == null) throw new NullPointerException("channelKind == null"); if (channelKind.isEmpty()) throw new NullPointerException("channelKind is empty"); return new MessagingChannelKindEquals<Req>(channelKind); }
// Verifies the matcher returns true when the request's channelKind equals the expected value.
@Test void channelKindEquals_matched() { when(request.channelKind()).thenReturn("queue"); assertThat(channelKindEquals("queue").matches(request)).isTrue(); }
// Fetches the source schema from the configured schema registry; any failure is wrapped in
// HoodieSchemaFetchException with a truncated registry URL (avoids leaking embedded credentials in logs).
// NOTE(review): the error message cites Config.SRC_SCHEMA_REGISTRY_URL_PROP while the lookup uses
// HoodieSchemaProviderConfig.SRC_SCHEMA_REGISTRY_URL — confirm both refer to the same config key.
@Override public Schema getSourceSchema() { String registryUrl = getStringWithAltKeys(config, HoodieSchemaProviderConfig.SRC_SCHEMA_REGISTRY_URL); try { return parseSchemaFromRegistry(registryUrl); } catch (Exception e) { throw new HoodieSchemaFetchException(String.format( "Error reading source schema from registry. Please check %s is configured correctly. Truncated URL: %s", Config.SRC_SCHEMA_REGISTRY_URL_PROP, StringUtils.truncate(registryUrl, 10, 10)), e); } }
// Verifies schema fetch succeeds without credentials and that no auth headers are sent to the registry.
@Test public void testGetSourceSchemaShouldRequestSchemaWithoutCreds() throws Exception { TypedProperties props = getProps(); props.put("hoodie.deltastreamer.schemaprovider.registry.url", "http://localhost/subjects/test/versions/latest"); SchemaRegistryProvider underTest = getUnderTest(props, -1, true); Schema actual = underTest.getSourceSchema(); assertNotNull(actual); assertEquals(getExpectedConvertedSchema(), actual); verify(mockRestService, never()).setHttpHeaders(any()); }
// Copies this snapshot's property fields into a fresh Properties object and returns it.
public Properties extractProperties() { Properties properties = new Properties(); properties.setOwner(owner); properties.setAccessControl(accessControl); properties.setRunStrategy(runStrategy); properties.setStepConcurrency(stepConcurrency); properties.setAlerting(alerting); properties.setAlertingDisabled(alertingDisabled); properties.setSignalTriggerDisabled(signalTriggerDisabled); properties.setTimeTriggerDisabled(timeTriggerDisabled); properties.setDescription(description); properties.setTags(tags); return properties; }
// Round-trips a fixture snapshot through extractProperties and checks every field was copied.
@Test public void testExtractProperties() throws Exception { PropertiesSnapshot ps = loadObject( "fixtures/workflows/definition/sample-properties.json", PropertiesSnapshot.class); Properties extracted = ps.extractProperties(); assertEquals(ps.getOwner(), extracted.getOwner()); assertEquals(ps.getAccessControl(), extracted.getAccessControl()); assertEquals(ps.getRunStrategy(), extracted.getRunStrategy()); assertEquals(ps.getStepConcurrency(), extracted.getStepConcurrency()); assertEquals(ps.getAlerting(), extracted.getAlerting()); assertEquals(ps.getAlertingDisabled(), extracted.getAlertingDisabled()); assertEquals(ps.getSignalTriggerDisabled(), extracted.getSignalTriggerDisabled()); assertEquals(ps.getTimeTriggerDisabled(), extracted.getTimeTriggerDisabled()); assertEquals(ps.getDescription(), extracted.getDescription()); assertEquals(ps.getTags(), extracted.getTags()); }
// Returns host info for the given host within the application instance; unknown hosts fall back
// to a no-remarks entry via getOrNoRemarks.
@Override public HostInfo getHostInfo(ApplicationInstanceReference reference, HostName hostName) { return hostInfosCache.getHostInfos(reference).getOrNoRemarks(hostName); }
// Verifies the fallback: an unknown host reports status NO_REMARKS.
@Test public void host_state_for_unknown_hosts_is_no_remarks() { assertEquals(HostStatus.NO_REMARKS, statusService.getHostInfo(TestIds.APPLICATION_INSTANCE_REFERENCE, TestIds.HOST_NAME1).status()); }
// Case-insensitive computeIfAbsent: lowercases the key before delegating to the Map default
// implementation so lookups and insertions share one canonical key form.
// NOTE(review): the mapping function receives the lowercased key, and a null key throws NPE — confirm both are intended.
@Override public V computeIfAbsent(String key, Function<? super String, ? extends V> mappingFunction) { return Map.super.computeIfAbsent(key.toLowerCase(), mappingFunction); }
// Existing key (any case) returns the stored value; absent key is computed from the lowercased key and stored.
@Test void computeIfAbsent() { Map<String, Object> map = new LowerCaseLinkHashMap<>(lowerCaseLinkHashMap); Object result = map.computeIfAbsent("key", String::toUpperCase); Assertions.assertEquals("Value", result); Assertions.assertEquals("Value", map.get("key")); result = map.computeIfAbsent("computeIfAbsent", String::toUpperCase); Assertions.assertEquals("COMPUTEIFABSENT", result); Assertions.assertEquals("COMPUTEIFABSENT", map.get("computeIfAbsent")); }
// Convenience overload: resolves the single main-input PCollection of an applied transform,
// excluding the transform's additional (side) inputs from consideration.
public static <T> PCollection<T> getSingletonMainInput( AppliedPTransform<? extends PCollection<? extends T>, ?, ?> application) { return getSingletonMainInput( application.getInputs(), application.getTransform().getAdditionalInputs().keySet()); }
// Expects IllegalArgumentException (naming both candidates) when more than one non-additional input is present.
@Test public void getMainInputExtraMainInputsThrows() { PCollection<Long> notInParDo = pipeline.apply("otherPCollection", Create.of(1L, 2L, 3L)); ImmutableMap<TupleTag<?>, PCollection<?>> inputs = ImmutableMap.<TupleTag<?>, PCollection<?>>builder() .putAll(PValues.expandInput(mainInput)) // Not represented as an input .put(new TupleTag<Long>(), notInParDo) .put(sideInput.getTagInternal(), sideInput.getPCollection()) .build(); AppliedPTransform<PCollection<Long>, ?, ?> application = AppliedPTransform.of( "application", inputs, Collections.singletonMap(new TupleTag<Long>(), output), ParDo.of(new TestDoFn()).withSideInputs(sideInput), ResourceHints.create(), pipeline); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("multiple inputs"); thrown.expectMessage("not additional inputs"); thrown.expectMessage(mainInput.toString()); thrown.expectMessage(notInParDo.toString()); PTransformReplacements.getSingletonMainInput(application); }
/**
 * FEEL not(): logical negation with three-valued semantics.
 * Null propagates as a null result; any non-Boolean argument is an invalid-parameter error.
 */
public FEELFnResult<Boolean> invoke(@ParameterName("negand") Object negand) {
    // Null input -> null output (FEEL three-valued logic), checked before the type guard.
    if (negand == null) {
        return FEELFnResult.ofResult(null);
    }
    // Anything other than a Boolean is a parameter error.
    if (!(negand instanceof Boolean)) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "negand", "must be a boolean value"));
    }
    return FEELFnResult.ofResult(!((Boolean) negand));
}
// not(true) must yield false.
@Test void invokeTrue() { FunctionTestUtil.assertResult(notFunction.invoke(true), false); }
public static String makeUnique(String className) { final Matcher m = UNIQUE_NAMING_PATTERN.matcher(className); if (m.matches()) { // get the current number final Integer number = Integer.parseInt(m.group(2)); // replace the current number in the string with the number +1 return m.group(1) + (number + 1); } else { return className + "__1"; } }
// Checks suffix creation and multi-digit increment behavior.
@Test public void testClassNameStrategy() { assertThat(MakeUniqueClassName.makeUnique("NodeMode"), equalTo("NodeMode__1")); assertThat(MakeUniqueClassName.makeUnique("NodeMode__5"), equalTo("NodeMode__6")); assertThat(MakeUniqueClassName.makeUnique("NodeMode__10"), equalTo("NodeMode__11")); assertThat(MakeUniqueClassName.makeUnique("NodeMode__100"), equalTo("NodeMode__101")); }
/**
 * Delegates {@code getImportedKeys} to the underlying database metadata, translating the logical
 * catalog/schema/table names to their actual (backing) counterparts and wrapping the result set.
 *
 * @throws SQLException if the underlying metadata call fails
 */
@Override
public ResultSet getImportedKeys(final String catalog, final String schema, final String table) throws SQLException {
    // Hoist the catalog translation: the original computed getActualCatalog(catalog) twice.
    final String actualCatalog = getActualCatalog(catalog);
    return createDatabaseMetaDataResultSet(
            getDatabaseMetaData().getImportedKeys(actualCatalog, getActualSchema(schema), getActualTable(actualCatalog, table)));
}
// Verifies delegation to the underlying metadata and wrapping in DatabaseMetaDataResultSet.
@Test void assertGetImportedKeys() throws SQLException { when(databaseMetaData.getImportedKeys("test", null, null)).thenReturn(resultSet); assertThat(shardingSphereDatabaseMetaData.getImportedKeys("test", null, null), instanceOf(DatabaseMetaDataResultSet.class)); }
/**
 * Releases the lock: clears the owner/position bookkeeping and releases the semaphore.
 *
 * <p>BUG FIX: the original reset {@code owner} and {@code position} only inside the
 * {@code isDebugEnabled()} guard, so lock state was cleared only when debug logging was on.
 * The resets now happen unconditionally; only the log statement stays guarded.
 */
public void unlock() {
    // Always clear bookkeeping, regardless of log level.
    owner = null;
    position = null;
    if (log.isDebugEnabled()) {
        // StackTrace[2] is the caller of unlock(); used only for debug diagnostics.
        log.debug(">>> Lock {} released at {}", this.hashCode(), Thread.currentThread().getStackTrace()[2]);
    }
    semaphore.release();
}
// Disabled stress test: spawns many threads through the protected code and checks all of them ran.
// NOTE(review): relies on a fixed 1s sleep for thread completion — inherently flaky, hence disabled.
@Test(enabled = false) public void unlock() { ProtectedCode pc = new ProtectedCode(); // Spawn many threads and start them all, all executing the same protected code // Not entirely sure if this models the main use case or idea. int i = 0; while (i < numberOfThreads) { (new Thread(pc)).start(); i++; } try { // To ensure that all threads have started Thread.sleep(1000); } catch (InterruptedException e) { e.printStackTrace(); } // All threads did start Assert.assertEquals(counter, numberOfThreads); }
// Exposes the tracker's set of changed (added/modified) line numbers.
Set<Integer> changedLines() { return tracker.changedLines(); }
// Feeds a unified diff with two hunks and checks only the added/changed new-file lines are reported.
@Test public void compute_from_multiple_hunks() throws IOException { String example = "diff --git a/lao b/lao\n" + "index 635ef2c..5af88a8 100644\n" + "--- a/lao\n" + "+++ b/lao\n" + "@@ -1,7 +1,6 @@\n" + "-The Way that can be told of is not the eternal Way;\n" + "-The name that can be named is not the eternal name.\n" + " The Nameless is the origin of Heaven and Earth;\n" + "-The Named is the mother of all things.\n" + "+The named is the mother of all things.\n" + "+\n" + " Therefore let there always be non-being,\n" + " so we may see their subtlety,\n" + " And let there always be being,\n" + "@@ -9,3 +8,6 @@ And let there always be being,\n" + " The two are the same,\n" + " But after they are produced,\n" + " they have different names.\n" + "+They both may be called deep and profound.\n" + "+Deeper and more profound,\n" + "+The door of all subtleties!\n"; printDiff(example); assertThat(underTest.changedLines()).containsExactly(2, 3, 11, 12, 13); }
// No-arg constructor: creates an empty row; fields are appended later via addField.
public Row() {}
// Exercises every supported field type, the length, the SQL-literal toString rendering, and the copy constructor.
@Test public void testRow() { Row r1 = new Row(); r1.addField(new Field(AccumuloRowSerializer.getBlockFromArray(VARCHAR, ImmutableList.of("a", "b", "c")), new ArrayType(VARCHAR))); r1.addField(true, BOOLEAN); r1.addField(new Field(new Date(new GregorianCalendar(1999, 0, 1).getTime().getTime()), DATE)); r1.addField(123.45678, DOUBLE); r1.addField(new Field(123.45678f, REAL)); r1.addField(12345678, INTEGER); r1.addField(new Field(12345678L, BIGINT)); r1.addField(new Field((short) 12345, SMALLINT)); r1.addField(new GregorianCalendar(1999, 0, 1, 12, 30, 0).getTime().getTime(), TIME); r1.addField(new Field(new Timestamp(new GregorianCalendar(1999, 0, 1, 12, 30, 0).getTime().getTime()), TIMESTAMP)); r1.addField((byte) 123, TINYINT); r1.addField(new Field("O'Leary".getBytes(UTF_8), VARBINARY)); r1.addField("O'Leary", VARCHAR); r1.addField(null, VARCHAR); assertEquals(r1.length(), 14); assertEquals(r1.toString(), "(ARRAY ['a','b','c'],true,DATE '1999-01-01',123.45678,123.45678,12345678,12345678,12345,TIME '12:30:00',TIMESTAMP '1999-01-01 12:30:00.0',123,CAST('O''Leary' AS VARBINARY),'O''Leary',null)"); Row r2 = new Row(r1); assertEquals(r2, r1); }
// Package-private accessor for the shared job counter (exposed for tests/metrics).
static Counter getJobCounter() { return JOB_COUNTER; }
// Smoke test: the shared counter is initialized and non-null.
@Test public void testGetJobCounter() { assertNotNull("getJobCounter", JobGlobalListener.getJobCounter()); }
/**
 * Equality based on exact runtime class plus the {@code type} and {@code newState} fields.
 * Consistent with the class's {@code hashCode} contract.
 */
@Override
public boolean equals(Object o) {
    // Reflexive fast path.
    if (this == o) {
        return true;
    }
    // Exact class match required — subclasses are never equal to instances of this class.
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    final ClusterStateChange<?> other = (ClusterStateChange<?>) o;
    return type.equals(other.type) && newState.equals(other.newState);
}
// Covers reflexivity, value equality, null/foreign-type inequality, and per-field inequality.
@Test public void testEquals() { assertEquals(clusterStateChange, clusterStateChange); assertEquals(clusterStateChange, clusterStateChangeSameAttributes); assertNotEquals(null, clusterStateChange); assertNotEquals(new Object(), clusterStateChange); assertNotEquals(clusterStateChange, clusterStateChangeOtherType); assertNotEquals(clusterStateChange, clusterStateChangeOtherNewState); }
// Schedules a delayed push notification: ZADDs the encoded (account, device) member into a Redis
// sorted set scored by the absolute send time (clock.now + minDelay, epoch millis), then increments
// a scheduling counter tagged by token type. ZADD updates the score of an existing member, so
// re-scheduling the same device moves its send time (see the scheduler tests).
public CompletableFuture<Void> scheduleDelayedNotification(final Account account, final Device device, final Duration minDelay) { return pushSchedulingCluster.withCluster(connection -> connection.async().zadd(getDelayedNotificationQueueKey(account, device), clock.instant().plus(minDelay).toEpochMilli(), encodeAciAndDeviceId(account, device))) .thenRun(() -> Metrics.counter(DELAYED_NOTIFICATION_SCHEDULED_COUNTER_NAME, TOKEN_TYPE_TAG, getTokenType(device)) .increment()) .toCompletableFuture(); }
// Verifies a schedule is stored at now+delay (millisecond precision) and that re-scheduling replaces the timestamp.
@Test void testScheduleDelayedNotification() { clock.pin(Instant.now()); assertEquals(Optional.empty(), pushNotificationScheduler.getNextScheduledDelayedNotificationTimestamp(account, device)); pushNotificationScheduler.scheduleDelayedNotification(account, device, Duration.ofMinutes(1)).join(); assertEquals(Optional.of(clock.instant().truncatedTo(ChronoUnit.MILLIS).plus(Duration.ofMinutes(1))), pushNotificationScheduler.getNextScheduledDelayedNotificationTimestamp(account, device)); pushNotificationScheduler.scheduleDelayedNotification(account, device, Duration.ofMinutes(2)).join(); assertEquals(Optional.of(clock.instant().truncatedTo(ChronoUnit.MILLIS).plus(Duration.ofMinutes(2))), pushNotificationScheduler.getNextScheduledDelayedNotificationTimestamp(account, device)); }
// Renders an AST back to SQL text via the Formatter visitor, then strips trailing newlines.
public static String formatSql(final AstNode root) { final StringBuilder builder = new StringBuilder(); new Formatter(builder).process(root, 0); return StringUtils.stripEnd(builder.toString(), "\n"); }
// Checks formatting of a join query with a one-sided SELECT alias.* projection.
@Test public void shouldFormatSelectStarCorrectlyWithJoinOneSidedStar() { final String statementString = "CREATE STREAM S AS SELECT address.*, itemid.ordertime " + "FROM address INNER JOIN itemid ON address.address = itemid.address->address;"; final Statement statement = parseSingle(statementString); assertThat(SqlFormatter.formatSql(statement), equalTo("CREATE STREAM S AS SELECT\n" + " ADDRESS.*,\n" + " ITEMID.ORDERTIME\n" + "FROM ADDRESS ADDRESS\n" + "INNER JOIN ITEMID ITEMID ON ((ADDRESS.ADDRESS = ITEMID.ADDRESS->ADDRESS))\n" + "EMIT CHANGES")); }
// Parses a yyyy-[m]m-[d]d date via java.sql.Date.valueOf. On failure, delegates to
// throwRuntimeParseException with a synthesized ParseException at offset 0 — valueOf only throws
// IllegalArgumentException, which carries no parse position, so a cause of the expected type is fabricated.
static java.sql.Date parseSqlDate(final String value) { try { // JDK format in Date.valueOf is compatible with DATE_FORMAT return java.sql.Date.valueOf(value); } catch (IllegalArgumentException e) { return throwRuntimeParseException(value, new ParseException(value, 0), SQL_DATE_FORMAT); } }
// Invalid input must surface as RuntimeException caused by ParseException.
@Test public void testSqlDateFailsForInvalidDate() { // Given String invalidDate = "Trust me, I am a date"; // When assertThatThrownBy(() -> DateHelper.parseSqlDate(invalidDate)) .isInstanceOf(RuntimeException.class) .hasCauseInstanceOf(ParseException.class); }
// Returns (creating on first use) the proxy implementing the requested PipelineOptions
// sub-interface, backed by this invocation handler. Uses double-checked locking over the
// computedProperties snapshot: the unsynchronized cache read is the fast path; the synchronized
// re-check prevents building the proxy twice under contention, and the cache is updated by
// swapping in a new computedProperties value rather than mutating in place.
<T extends PipelineOptions> T as(Class<T> iface) { checkNotNull(iface); checkArgument(iface.isInterface(), "Not an interface: %s", iface); T existingOption = computedProperties.interfaceToProxyCache.getInstance(iface); if (existingOption == null) { synchronized (this) { // double check existingOption = computedProperties.interfaceToProxyCache.getInstance(iface); if (existingOption == null) { Registration<T> registration = PipelineOptionsFactory.CACHE .get() .validateWellFormed(iface, computedProperties.knownInterfaces); List<PropertyDescriptor> propertyDescriptors = registration.getPropertyDescriptors(); Class<T> proxyClass = registration.getProxyClass(); existingOption = InstanceBuilder.ofType(proxyClass) .fromClass(proxyClass) .withArg(InvocationHandler.class, this) .build(); computedProperties = computedProperties.updated(iface, existingOption, propertyDescriptors); } } } return existingOption; }
// Checks that partially-bound getter/setter conflicts resolve to one shared value across interface views.
@Test public void testPartialMethodConflictProvidesSameValue() throws Exception { ProxyInvocationHandler handler = new ProxyInvocationHandler(Maps.newHashMap()); PartialMethodConflict partialMethodConflict = handler.as(PartialMethodConflict.class); // Tests overriding a getter property that is only partially bound partialMethodConflict.setString("conflictValue"); assertEquals("conflictValue", partialMethodConflict.getString()); assertEquals("conflictValue", partialMethodConflict.as(Simple.class).getString()); // Tests overriding a setter property that is only partially bound partialMethodConflict.setPrimitive(5); assertEquals(5, partialMethodConflict.getPrimitive()); assertEquals(5, partialMethodConflict.as(Simple.class).getPrimitive()); }
// Validates a URL string: syntactic URI parse first, then scheme, authority, path, query, and
// fragment checks. file: URIs are special-cased — an empty/absent authority is allowed (local
// file, nothing more to validate), while an authority containing ':' (a port-like file://host:port form) is rejected.
public boolean isValid(String value) { if (value == null) { return false; } URI uri; // ensure value is a valid URI try { uri = new URI(value); } catch (URISyntaxException e) { return false; } // OK, perform additional validation String scheme = uri.getScheme(); if (!isValidScheme(scheme)) { return false; } String authority = uri.getRawAuthority(); if ("file".equals(scheme) && (authority == null || "".equals(authority))) { // Special case - file: allows an empty authority return true; // this is a local file - nothing more to do here } else if ("file".equals(scheme) && authority != null && authority.contains(":")) { return false; } else { // Validate the authority if (!isValidAuthority(authority)) { return false; } } if (!isValidPath(uri.getRawPath())) { return false; } if (!isValidQuery(uri.getRawQuery())) { return false; } if (!isValidFragment(uri.getRawFragment())) { return false; } return true; }
// NOTE(review): the method name marks this as a documented failing case (VALIDATOR-391):
// a drive-letter file:/C:/ path — confirm whether this is expected to pass or to fail.
@Test public void testValidator391FAILS() { String[] schemes = {"file"}; UrlValidator urlValidator = new UrlValidator(schemes); assertTrue(urlValidator.isValid("file:/C:/path/to/dir/")); }
// Time-bounded delegate of PageStore.put: runs the write under mTimeLimter with an mTimeoutMs
// deadline. Exception mapping: interruption re-interrupts the current thread and escalates as
// RuntimeException so callers stop; timeout and executor rejection bump their metrics and surface
// as IOException; ExecutionException propagates known cause types, and any other throwable is wrapped in IOException.
@Override public void put(PageId pageId, ByteBuffer page, boolean isTemporary) throws IOException { Callable<Void> callable = () -> { mPageStore.put(pageId, page, isTemporary); return null; }; try { mTimeLimter.callWithTimeout(callable, mTimeoutMs, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { // Task got cancelled by others, interrupt the current thread // and then throw a runtime ex to make the higher level stop. Thread.currentThread().interrupt(); throw new RuntimeException(e); } catch (TimeoutException e) { Metrics.STORE_PUT_TIMEOUT.inc(); throw new IOException(e); } catch (RejectedExecutionException e) { Metrics.STORE_THREADS_REJECTED.inc(); throw new IOException(e); } catch (ExecutionException e) { Throwables.propagateIfPossible(e.getCause(), ResourceExhaustedException.class, IOException.class); throw new IOException(e); } catch (Throwable t) { Throwables.propagateIfPossible(t, IOException.class); throw new IOException(t); } }
// Happy path: a put within the time bound is readable back byte-for-byte.
@Test public void put() throws Exception { mTimeBoundPageStore.put(PAGE_ID, PAGE); assertEquals(PAGE.length, mPageStore.get(PAGE_ID, 0, PAGE.length, new ByteArrayTargetBuffer(mBuf, 0))); assertArrayEquals(PAGE, mBuf); }
// Looks up the user by identification number (404 GenericException when absent) and issues a token.
// FIXME(review): the boolean returned by passwordEncoder.matches(...) is IGNORED, so any password
// is accepted for an existing user — an authentication bypass. The result must be checked and a
// failure must raise an appropriate GenericException before a token is created.
public LoginResponse login(LoginRequest request) { User user = userRepository.findByIdentificationNumber(request.getIdentificationNumber()).orElseThrow(() ->GenericException.builder() .httpStatus(HttpStatus.NOT_FOUND) .logMessage(this.getClass().getName() + ".login user not found with identification number {0}", request.getIdentificationNumber() ) .message(ErrorCode.USER_NOT_FOUND) .build()); passwordEncoder.matches(request.getPassword(),user.getPassword()); return LoginResponse.builder().token(authService.createToken(user)).build(); }
// Unknown identification number must raise GenericException.
@Test void login_userNotFound() { // Arrange LoginRequest request = new LoginRequest("1234567890", "password"); when(userRepository.findByIdentificationNumber(request.getIdentificationNumber())).thenReturn(Optional.empty()); // Act & Assert assertThrows(GenericException.class, () -> userService.login(request)); }
// Convenience overload: reads the whole source, formats it, and writes the result to the sink.
// Fully materializes input and output in memory (no streaming yet — see TODO).
public void formatSource(CharSource input, CharSink output) throws FormatterException, IOException { // TODO(cushon): proper support for streaming input/output. Input may // not be feasible (parsing) but output should be easier. output.write(formatSource(input.read())); }
// A blank line inside an empty class body is collapsed; output gains a trailing newline.
@Test public void blankInClassBodyNoTrailing() throws FormatterException { String input = "package test;\nclass T {\n\n}"; String output = new Formatter().formatSource(input); String expect = "package test;\n\nclass T {}\n"; assertThat(output).isEqualTo(expect); }
// Blocking get that passes the completed value through resolve(...) before returning it
// (presumably deserializing serialized Data depending on the future's deserialize setting — confirm).
@Override public V get() throws InterruptedException, ExecutionException { return resolve(super.get()); }
// get() returns the deserialized value or the raw Data depending on the deserialize flag.
@Test public void test_get_Data() throws Exception { Object value = "value"; DeserializingCompletableFuture<Object> future = new DeserializingCompletableFuture<>(serializationService, deserialize); future.complete(serializationService.toData(value)); if (deserialize) { assertEquals(value, future.get()); } else { assertEquals(serializationService.toData(value), future.get()); } }
// Persists a new Kubernetes network policy after null/blank-UID precondition checks, then logs creation.
// NOTE(review): duplicate creation appears to be rejected by the store itself (test expects
// IllegalArgumentException on a second create) — confirm k8sNetworkPolicyStore enforces uniqueness.
@Override public void createNetworkPolicy(NetworkPolicy networkPolicy) { checkNotNull(networkPolicy, ERR_NULL_NETWORK_POLICY); checkArgument(!Strings.isNullOrEmpty(networkPolicy.getMetadata().getUid()), ERR_NULL_NETWORK_POLICY_UID); k8sNetworkPolicyStore.createNetworkPolicy(networkPolicy); log.info(String.format(MSG_NETWORK_POLICY, networkPolicy.getMetadata().getName(), MSG_CREATED)); }
// Creating the same policy twice must raise IllegalArgumentException.
@Test(expected = IllegalArgumentException.class) public void testCreateDuplicatedNetworkPolicy() { target.createNetworkPolicy(NETWORK_POLICY); target.createNetworkPolicy(NETWORK_POLICY); }
// Returns the raw IP address bytes of the wrapped socket address.
// NOTE(review): InetSocketAddress.getAddress() returns null for unresolved addresses, which would
// NPE here — confirm callers guarantee a resolved address.
public byte[] getAddressBytes() { return inetSocketAddress.getAddress().getAddress(); }
// Bytes returned must match those of the wrapped address (loopback fixture).
@Test public void testGetAddressBytes() throws Exception { final InetSocketAddress inetSocketAddress = new InetSocketAddress(Inet4Address.getLoopbackAddress(), 12345); final ResolvableInetSocketAddress address = new ResolvableInetSocketAddress(inetSocketAddress); assertThat(address.getAddressBytes()).isEqualTo(inetSocketAddress.getAddress().getAddress()); }
/**
 * Converts a platform classpath string into an array of URLs.
 *
 * <p>Elements ending in {@code <separator>*} are expanded to the jars in that directory (ignored
 * when the directory holds no jars); other elements are included only if the file/dir exists.
 * Non-existent plain entries are silently skipped.
 *
 * @param classpath classpath string, elements separated by {@link File#pathSeparator}
 * @return URLs for every resolvable entry, in classpath order
 * @throws MalformedURLException if an entry cannot be converted to a URL
 */
static URL[] constructUrlsFromClasspath(String classpath)
    throws MalformedURLException {
  List<URL> urls = new ArrayList<URL>();
  for (String element : classpath.split(File.pathSeparator)) {
    if (element.endsWith(File.separator + "*")) {
      // Wildcard entry: expand to the jar files contained in the directory.
      List<Path> jars = FileUtil.getJarsInDirectory(element);
      if (!jars.isEmpty()) {
        for (Path jar : jars) {
          urls.add(jar.toUri().toURL());
        }
      }
    } else {
      File file = new File(element);
      if (file.exists()) {
        // Reuse the File already constructed (original re-created it for the URL conversion).
        urls.add(file.toURI().toURL());
      }
    }
  }
  // new URL[0] idiom: toArray sizes the result itself.
  return urls.toArray(new URL[0]);
}
// Exercises existing file, dir, wildcard jar expansion (non-jars skipped), and missing entries (ignored).
@Test public void testConstructUrlsFromClasspath() throws Exception { File file = new File(testDir, "file"); assertTrue("Create file", file.createNewFile()); File dir = new File(testDir, "dir"); assertTrue("Make dir", dir.mkdir()); File jarsDir = new File(testDir, "jarsdir"); assertTrue("Make jarsDir", jarsDir.mkdir()); File nonJarFile = new File(jarsDir, "nonjar"); assertTrue("Create non-jar file", nonJarFile.createNewFile()); File jarFile = new File(jarsDir, "a.jar"); assertTrue("Create jar file", jarFile.createNewFile()); File nofile = new File(testDir, "nofile"); // don't create nofile StringBuilder cp = new StringBuilder(); cp.append(file.getAbsolutePath()).append(File.pathSeparator) .append(dir.getAbsolutePath()).append(File.pathSeparator) .append(jarsDir.getAbsolutePath() + "/*").append(File.pathSeparator) .append(nofile.getAbsolutePath()).append(File.pathSeparator) .append(nofile.getAbsolutePath() + "/*").append(File.pathSeparator); URL[] urls = constructUrlsFromClasspath(cp.toString()); assertEquals(3, urls.length); assertEquals(file.toURI().toURL(), urls[0]); assertEquals(dir.toURI().toURL(), urls[1]); assertEquals(jarFile.toURI().toURL(), urls[2]); // nofile should be ignored }
// Joins the given suffixes and appends them to the config key, producing key.suffix1.suffix2...
public static String addKeySuffixes(String key, String... suffixes) { String keySuffix = DFSUtilClient.concatSuffixes(suffixes); return DFSUtilClient.addSuffix(key, keySuffix); }
// Sets nameservice-suffixed keys, initializes generic keys, and checks the unsuffixed keys resolve to them.
@Test public void testConfModificationFederationOnly() { final HdfsConfiguration conf = new HdfsConfiguration(); String nsId = "ns1"; conf.set(DFS_NAMESERVICES, nsId); conf.set(DFS_NAMESERVICE_ID, nsId); // Set the nameservice specific keys with nameserviceId in the config key for (String key : NameNode.NAMENODE_SPECIFIC_KEYS) { // Note: value is same as the key conf.set(DFSUtil.addKeySuffixes(key, nsId), key); } // Initialize generic keys from specific keys NameNode.initializeGenericKeys(conf, nsId, null); // Retrieve the keys without nameserviceId and Ensure generic keys are set // to the correct value for (String key : NameNode.NAMENODE_SPECIFIC_KEYS) { assertEquals(key, conf.get(key)); } }
// Commits a recoverable GCS write: fails with IOException if the final blob already exists
// (commit must never overwrite — see the linked PR discussion), then composes/writes the final
// blob and finally removes the temporary blobs.
@Override public void commit() throws IOException { LOGGER.trace("Committing recoverable with options {}: {}", options, recoverable); // see discussion: https://github.com/apache/flink/pull/15599#discussion_r623127365 // first, make sure the final blob doesn't already exist Optional<GSBlobStorage.BlobMetadata> blobMetadata = storage.getMetadata(recoverable.finalBlobIdentifier); if (blobMetadata.isPresent()) { throw new IOException( String.format( "Blob %s already exists during attempted commit", recoverable.finalBlobIdentifier)); } // write the final blob writeFinalBlob(); // clean up after successful commit cleanupTemporaryBlobs(); }
// Pre-creating the target blob must make commit fail with IOException (no overwrite).
@Test(expected = IOException.class) public void commitOverwriteShouldFailTest() throws IOException { blobStorage.createBlob(blobIdentifier); GSRecoverableWriterCommitter committer = commitTestInternal(); committer.commit(); }
/**
 * Renders the event's markers via {@code List.toString()} (e.g. {@code [m1, m2]});
 * a null or empty marker list yields the {@code EMPTY} constant.
 */
public String convert(ILoggingEvent le) {
    final List<Marker> markers = le.getMarkers();
    return (markers == null || markers.isEmpty()) ? EMPTY : markers.toString();
}
// A marker with one child renders with the nested bracket form.
@Test public void testWithOneChildMarker() { Marker marker = markerFactory.getMarker("test"); marker.add(markerFactory.getMarker("child")); String result = converter.convert(createLoggingEvent(marker)); assertEquals("[test [ child ]]", result); }
// Builds the effective TaskConfig: each declared property is resolved through getExecProperty
// (configured value when present, otherwise the property's default).
protected TaskConfig buildTaskConfig(TaskConfig config) { TaskConfig taskExecConfig = new TaskConfig(); for (Property property : config.list()) { taskExecConfig.add(getExecProperty(config, property)); } return taskExecConfig; }
// A configured value must override the property's default in the built exec config.
@Test public void shouldReturnConfigValueInExecConfig() { TaskConfig defaultTaskConfig = new TaskConfig(); String propertyName = "URL"; String defaultValue = "ABC.TXT"; HashMap<String, String> configValue = new HashMap<>(); configValue.put("value", "XYZ.TXT"); Map<String, Map<String, String>> configMap = new HashMap<>(); configMap.put(propertyName, configValue); PluggableTask task = mock(PluggableTask.class); when(task.getPluginConfiguration()).thenReturn(new PluginConfiguration()); when(task.configAsMap()).thenReturn(configMap); PluggableTaskBuilder taskBuilder = new PluggableTaskBuilder(runIfConfigs, cancelBuilder, task, TEST_PLUGIN_ID, "test-directory"); defaultTaskConfig.addProperty(propertyName).withDefault(defaultValue); TaskConfig config = taskBuilder.buildTaskConfig(defaultTaskConfig); assertThat(config.getValue(propertyName)).isEqualTo(configValue.get("value")); }
public static String notEmpty(String str, String name) { if (str == null) { throw new IllegalArgumentException(name + " cannot be null"); } if (str.length() == 0) { throw new IllegalArgumentException(name + " cannot be empty"); } return str; }
// Null input must raise IllegalArgumentException.
@Test(expected = IllegalArgumentException.class) public void notEmptyNull() { Check.notEmpty(null, "name"); }
// Bridges the raw-bytes cache flush listener to the typed listener: re-keys flush records from
// binary window-store keys to Windowed<K> (via WindowKeySchema) and deserializes new/old values,
// preserving null for absent sides and propagating the isLatest flag. Returns false when the
// wrapped store is not a CachedStateStore (no caching layer to attach a listener to).
@SuppressWarnings("unchecked") @Override public boolean setFlushListener(final CacheFlushListener<Windowed<K>, V> listener, final boolean sendOldValues) { final WindowStore<Bytes, byte[]> wrapped = wrapped(); if (wrapped instanceof CachedStateStore) { return ((CachedStateStore<byte[], byte[]>) wrapped).setFlushListener( record -> listener.apply( record.withKey(WindowKeySchema.fromStoreKey(record.key(), windowSizeMs, serdes.keyDeserializer(), serdes.topic())) .withValue(new Change<>( record.value().newValue != null ? serdes.valueFrom(record.value().newValue) : null, record.value().oldValue != null ? serdes.valueFrom(record.value().oldValue) : null, record.value().isLatest )) ), sendOldValues); } return false; }
// When the wrapped store is caching, setFlushListener must delegate and return its result.
@SuppressWarnings("unchecked") @Test public void shouldSetFlushListenerOnWrappedCachingStore() { final CachedWindowStore cachedWindowStore = mock(CachedWindowStore.class); when(cachedWindowStore.setFlushListener(any(CacheFlushListener.class), eq(false))).thenReturn(true); final MeteredWindowStore<String, String> metered = new MeteredWindowStore<>( cachedWindowStore, 10L, // any size STORE_TYPE, new MockTime(), Serdes.String(), new SerdeThatDoesntHandleNull() ); assertTrue(metered.setFlushListener(null, false)); }
// Background maintenance: pages through succeeded jobs last updated before the configured
// retention cutoff (now - deleteSucceededJobsAfter), transitions each to DELETED with an audit
// reason, then reports the total via handleTotalAmountOfSucceededJobs.
@Override protected void runTask() { LOGGER.trace("Looking for succeeded jobs that can go to the deleted state... "); final Instant updatedBefore = now().minus(backgroundJobServerConfiguration().getDeleteSucceededJobsAfter()); processManyJobs(previousResults -> getSucceededJobs(updatedBefore, previousResults), job -> job.delete("JobRunr maintenance - deleting succeeded job"), this::handleTotalAmountOfSucceededJobs); }
// Deletion must succeed (no warnings) even when the job's method no longer exists on the interface.
@Test void taskMovesSucceededJobsToDeletedStateAlsoForInterfacesWithMethodsThatDontExistAnymore() { // GIVEN lenient().when(storageProvider.getJobList(eq(SUCCEEDED), any(Instant.class), any())) .thenReturn( asList(aSucceededJob() .withJobDetails(jobDetails() .withClassName(TestServiceInterface.class) .withMethodName("methodThatDoesNotExist") .build()) .build()), emptyJobList() ); // WHEN runTask(task); // THEN assertThat(logger).hasNoWarnLogMessages(); verify(storageProvider).save(anyList()); verify(storageProvider).publishTotalAmountOfSucceededJobs(1); }
// KSQL SUBSTRING(str, pos): returns the suffix of str starting at base-one position pos.
// Null-propagating on either argument. getStartIndex converts the one-based position to a
// zero-based index — presumably also handling negative positions (counted from the end, per the
// udf tests) and clamping out-of-range values; confirm against its implementation.
@Udf(description = "Returns the portion of str from pos to the end of str") public String substring( @UdfParameter(description = "The source string.") final String str, @UdfParameter(description = "The base-one position to start from.") final Integer pos ) { if (str == null || pos == null) { return null; } final int start = getStartIndex(str.length(), pos); return str.substring(start); }
// One-based indexing from both ends. NOTE(review): this exercises the THREE-arg overload
// (pos, length), not the two-arg focal method above.
@Test public void shouldUseOneBasedIndexingOnString() { assertThat(udf.substring("a test string", 1, 1), is("a")); assertThat(udf.substring("a test string", -1, 1), is("g")); }
// Shuts down the underlying transport agent (if any), logging begin/end for diagnostics.
@Override public void shutdown() throws NacosException { String className = this.getClass().getName(); LOGGER.info("{} do shutdown begin", className); if (agent != null) { agent.shutdown(); } LOGGER.info("{} do shutdown stop", className); }
// After shutdown the agent's executor must be terminated and the agent name cleared (checked via reflection).
@Test void testShutdown() throws NacosException, NoSuchFieldException, IllegalAccessException { Properties prop = new Properties(); ConfigFilterChainManager filter = new ConfigFilterChainManager(new Properties()); ServerListManager agent = Mockito.mock(ServerListManager.class); final NacosClientProperties nacosClientProperties = NacosClientProperties.PROTOTYPE.derive(prop); ClientWorker clientWorker = new ClientWorker(filter, agent, nacosClientProperties); clientWorker.shutdown(); Field agent1 = ClientWorker.class.getDeclaredField("agent"); agent1.setAccessible(true); ConfigTransportClient o = (ConfigTransportClient) agent1.get(clientWorker); assertTrue(o.executor.isShutdown()); agent1.setAccessible(false); assertNull(clientWorker.getAgentName()); }
/**
 * Reads the notify header as a boolean; a missing header defaults to {@code false}.
 */
public boolean isNotify() {
    final String raw = getHeader(Constants.Config.NOTIFY_HEADER, Boolean.FALSE.toString());
    return Boolean.parseBoolean(raw);
}
// Fixture request carries a truthy notify header.
@Test void testIsNotify() { assertTrue(configQueryRequest.isNotify()); }
// Decodes a Bigtable cell's ByteString into the Java value for the given Beam schema field type.
// Fixed-width numeric types validate the exact encoded byte length first and decode big-endian
// (Guava Shorts/Ints/Longs); FLOAT/DOUBLE go through their IEEE-754 bit patterns; BOOLEAN treats
// any non-zero byte as true; DATETIME/STRING decode UTF-8 text; logical and unlisted types are rejected.
Object getCellValue(Cell cell, Schema.FieldType type) { ByteString cellValue = cell.getValue(); int valueSize = cellValue.size(); switch (type.getTypeName()) { case BOOLEAN: checkArgument(valueSize == 1, message("Boolean", 1)); return cellValue.toByteArray()[0] != 0; case BYTE: checkArgument(valueSize == 1, message("Byte", 1)); return cellValue.toByteArray()[0]; case INT16: checkArgument(valueSize == 2, message("Int16", 2)); return Shorts.fromByteArray(cellValue.toByteArray()); case INT32: checkArgument(valueSize == 4, message("Int32", 4)); return Ints.fromByteArray(cellValue.toByteArray()); case INT64: checkArgument(valueSize == 8, message("Int64", 8)); return Longs.fromByteArray(cellValue.toByteArray()); case FLOAT: checkArgument(valueSize == 4, message("Float", 4)); return Float.intBitsToFloat(Ints.fromByteArray(cellValue.toByteArray())); case DOUBLE: checkArgument(valueSize == 8, message("Double", 8)); return Double.longBitsToDouble(Longs.fromByteArray(cellValue.toByteArray())); case DATETIME: return DateTime.parse(cellValue.toStringUtf8()); case STRING: return cellValue.toStringUtf8(); case BYTES: return cellValue.toByteArray(); case LOGICAL_TYPE: String identifier = checkArgumentNotNull(type.getLogicalType()).getIdentifier(); throw new IllegalStateException("Unsupported logical type: " + identifier); default: throw new IllegalArgumentException( String.format("Unsupported cell value type '%s'.", type.getTypeName())); } }
// NOTE(review): the name says "TrueNotOne" but the payload byte IS exactly 1 — fixture and name
// look out of sync (any non-zero byte decodes to true); confirm the intended fixture value.
@Test public void shouldParseBooleanTypeTrueNotOne() { byte[] value = new byte[] {1}; assertEquals(true, PARSER.getCellValue(cell(value), BOOLEAN)); }
// Adds the key if absent; returns true only when this call performed the insertion (putIfAbsent contract).
@Override @ManagedOperation(description = "Adds the key to the store") public boolean add(String key) { return cache.putIfAbsent(key, false) == null; }
// First insert returns true, duplicate returns false, distinct key inserts again.
@Test void testAdd() { // add first key assertTrue(repo.add(key01)); assertTrue(cache.containsKey(key01)); // try to add the same key again assertFalse(repo.add(key01)); // try to add an other one assertTrue(repo.add(key02)); assertTrue(cache.containsKey(key02)); }
// Convenience overload: builds a Connection using the transport chosen by getDefault(properties).
public static Connection fromProperties(Properties properties) { return fromProperties(properties, getDefault(properties)); }
// The default-transport path must yield a connection with a transport and client metrics attached.
@Test public void testConnectionTransport() { // Create properties Properties properties = new Properties(); properties.setProperty("brokerList", "127.0.0.1:1234,localhost:2345"); // Create the connection Connection connection = ConnectionFactory.fromProperties(properties); Assert.assertNotNull(connection.getTransport()); Assert.assertNotNull(connection.getTransport().getClientMetrics()); }
// Rolls back pending uninstalls: moves every jar queued in the uninstalled-plugins directory back
// to the installed external-plugins directory; any I/O failure aborts with IllegalStateException.
public void cancelUninstalls() { for (File file : listJarFiles(fs.getUninstalledPluginsDir())) { try { moveFileToDirectory(file, fs.getInstalledExternalPluginsDir(), false); } catch (IOException e) { throw new IllegalStateException("Fail to cancel plugin uninstalls", e); } } }
@Test
public void cancel() throws IOException {
    // Stage a plugin jar in the uninstall directory
    File file = copyTestPluginTo("test-base-plugin", uninstallDir);
    assertThat(Files.list(uninstallDir.toPath())).extracting(p -> p.getFileName().toString()).containsOnly(file.getName());
    // Cancelling should move it back out of the uninstall directory
    underTest.cancelUninstalls();
}
@VisibleForTesting static IndexRange computeConsumedSubpartitionRange( int consumerSubtaskIndex, int numConsumers, Supplier<Integer> numOfSubpartitionsSupplier, boolean isDynamicGraph, boolean isBroadcast) { int consumerIndex = consumerSubtaskIndex % numConsumers; if (!isDynamicGraph) { return new IndexRange(consumerIndex, consumerIndex); } else { int numSubpartitions = numOfSubpartitionsSupplier.get(); if (isBroadcast) { // broadcast results have only one subpartition, and be consumed multiple times. checkArgument(numSubpartitions == 1); return new IndexRange(0, 0); } else { checkArgument(consumerIndex < numConsumers); checkArgument(numConsumers <= numSubpartitions); int start = consumerIndex * numSubpartitions / numConsumers; int nextStart = (consumerIndex + 1) * numSubpartitions / numConsumers; return new IndexRange(start, nextStart - 1); } } }
@Test
void testComputeConsumedSubpartitionRange6to4() {
    // 6 subpartitions over 4 consumers: ranges are [0,0], [1,2], [3,3], [4,5]
    final IndexRange range1 = computeConsumedSubpartitionRange(0, 4, 6);
    assertThat(range1).isEqualTo(new IndexRange(0, 0));
    final IndexRange range2 = computeConsumedSubpartitionRange(1, 4, 6);
    assertThat(range2).isEqualTo(new IndexRange(1, 2));
    final IndexRange range3 = computeConsumedSubpartitionRange(2, 4, 6);
    assertThat(range3).isEqualTo(new IndexRange(3, 3));
    final IndexRange range4 = computeConsumedSubpartitionRange(3, 4, 6);
    assertThat(range4).isEqualTo(new IndexRange(4, 5));
}
@Override public Collection<EfestoOutput> evaluateInput(EfestoRuntimeContext context, EfestoInput... toEvaluate) { if (toEvaluate.length == 1) { // minor optimization for the (most typical) case with 1 input return getOptionalOutput(context, toEvaluate[0]).map(Collections::singletonList).orElse(Collections.emptyList()); } Collection<EfestoOutput> toReturn = new ArrayList<>(); for (EfestoInput efestoInput : toEvaluate) { getOptionalOutput(context, efestoInput).ifPresent(toReturn::add); } return toReturn; }
@Test
void evaluateInput() {
    // Every managed input type must produce exactly one output
    MANAGED_Efesto_INPUTS.forEach(managedInput -> {
        try {
            EfestoInput toProcess = managedInput.getDeclaredConstructor().newInstance();
            Collection<EfestoOutput> retrieved = runtimeManager.evaluateInput(context, toProcess);
            assertThat(retrieved).isNotNull().hasSize(1);
        } catch (Exception e) {
            fail("Failed assertion on evaluateInput", e);
        }
    });
    // An unmanaged input type must yield an empty (not null) result
    Collection<EfestoOutput> retrieved = runtimeManager.evaluateInput(context, new MockEfestoInputD());
    assertThat(retrieved).isNotNull().isEmpty();
}
/** Returns a snapshot of the cluster members, preserving the service's ordering. */
@Override
@Nonnull
public Set<Member> getMembers() {
    return new LinkedHashSet<>(clusterService.getMemberList());
}
@Test
public void getMembers() {
    // The single-node test cluster should report exactly one member.
    assertEquals(1, client().getCluster().getMembers().size());
}
/**
 * Returns a {@code Permutor} that reorders a list of feed items according to the
 * given sort order. Comparator-based orders are turned into an in-place
 * {@code Collections.sort}; RANDOM and the smart-shuffle orders supply their own
 * permutation logic.
 *
 * @throws IllegalArgumentException for sort orders with no implementation
 */
@NonNull
public static Permutor<FeedItem> getPermutor(@NonNull SortOrder sortOrder) {
    Comparator<FeedItem> comparator = null;
    Permutor<FeedItem> permutor = null;
    switch (sortOrder) {
        case EPISODE_TITLE_A_Z:
            comparator = (f1, f2) -> itemTitle(f1).compareTo(itemTitle(f2));
            break;
        case EPISODE_TITLE_Z_A:
            comparator = (f1, f2) -> itemTitle(f2).compareTo(itemTitle(f1));
            break;
        case DATE_OLD_NEW:
            comparator = (f1, f2) -> pubDate(f1).compareTo(pubDate(f2));
            break;
        case DATE_NEW_OLD:
            comparator = (f1, f2) -> pubDate(f2).compareTo(pubDate(f1));
            break;
        case DURATION_SHORT_LONG:
            comparator = (f1, f2) -> Integer.compare(duration(f1), duration(f2));
            break;
        case DURATION_LONG_SHORT:
            comparator = (f1, f2) -> Integer.compare(duration(f2), duration(f1));
            break;
        case EPISODE_FILENAME_A_Z:
            comparator = (f1, f2) -> itemLink(f1).compareTo(itemLink(f2));
            break;
        case EPISODE_FILENAME_Z_A:
            comparator = (f1, f2) -> itemLink(f2).compareTo(itemLink(f1));
            break;
        case FEED_TITLE_A_Z:
            comparator = (f1, f2) -> feedTitle(f1).compareTo(feedTitle(f2));
            break;
        case FEED_TITLE_Z_A:
            comparator = (f1, f2) -> feedTitle(f2).compareTo(feedTitle(f1));
            break;
        case RANDOM:
            permutor = Collections::shuffle;
            break;
        case SMART_SHUFFLE_OLD_NEW:
            permutor = (queue) -> smartShuffle(queue, true);
            break;
        case SMART_SHUFFLE_NEW_OLD:
            permutor = (queue) -> smartShuffle(queue, false);
            break;
        case SIZE_SMALL_LARGE:
            comparator = (f1, f2) -> Long.compare(size(f1), size(f2));
            break;
        case SIZE_LARGE_SMALL:
            comparator = (f1, f2) -> Long.compare(size(f2), size(f1));
            break;
        case COMPLETION_DATE_NEW_OLD:
            // NOTE(review): assumes getMedia() and its completion date are non-null
            // for every item sorted with this order -- TODO confirm callers guarantee this.
            comparator = (f1, f2) -> f2.getMedia().getPlaybackCompletionDate()
                    .compareTo(f1.getMedia().getPlaybackCompletionDate());
            break;
        default:
            throw new IllegalArgumentException("Permutor not implemented");
    }
    if (comparator != null) {
        // Effectively-final copy required for use inside the lambda.
        final Comparator<FeedItem> comparator2 = comparator;
        permutor = (queue) -> Collections.sort(queue, comparator2);
    }
    return permutor;
}
@Test
public void testPermutorForRule_FEED_TITLE_ASC() {
    Permutor<FeedItem> permutor = FeedItemPermutors.getPermutor(SortOrder.FEED_TITLE_A_Z);
    List<FeedItem> itemList = getTestList();
    assertTrue(checkIdOrder(itemList, 1, 3, 2)); // before sorting
    permutor.reorder(itemList);
    assertTrue(checkIdOrder(itemList, 1, 2, 3)); // after sorting
}
/** Matches when the given path's predicate representation equals this one. */
@Override
public boolean test(final Path test) {
    final DefaultPathPredicate candidate = new DefaultPathPredicate(test);
    return this.equals(candidate);
}
@Test
public void testPredicateTest() {
    final Path t = new Path("/f", EnumSet.of(Path.Type.file));
    // Same absolute path and type matches
    assertTrue(new DefaultPathPredicate(t).test(t));
    // Different path does not match
    assertFalse(new DefaultPathPredicate(t).test(new Path("/f/a", EnumSet.of(Path.Type.file))));
    // Same path but different type does not match
    assertFalse(new DefaultPathPredicate(t).test(new Path("/f", EnumSet.of(Path.Type.directory))));
}
/**
 * Lists end offsets for the given partitions, retrying on transient failures
 * until the timeout elapses.
 *
 * <p>Note the catch order matters: {@code UnsupportedVersionException} must be
 * rethrown unwrapped so callers can detect older brokers that lack this admin API.
 */
public Map<TopicPartition, Long> retryEndOffsets(Set<TopicPartition> partitions, Duration timeoutDuration, long retryBackoffMs) {
    try {
        return RetryUtil.retryUntilTimeout(
                () -> endOffsets(partitions),
                () -> "list offsets for topic partitions",
                timeoutDuration,
                retryBackoffMs);
    } catch (UnsupportedVersionException e) {
        // Older brokers don't support this admin method, so rethrow it without wrapping it
        throw e;
    } catch (Exception e) {
        throw ConnectUtils.maybeWrap(e, "Failed to list offsets for topic partitions");
    }
}
@Test
public void retryEndOffsetsShouldRetryWhenTopicNotFound() {
    String topicName = "myTopic";
    TopicPartition tp1 = new TopicPartition(topicName, 0);
    Set<TopicPartition> tps = Collections.singleton(tp1);
    Long offset = 1000L;
    Cluster cluster = createCluster(1, "myTopic", 1);
    try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(new MockTime(), cluster)) {
        Map<TopicPartition, Long> offsetMap = new HashMap<>();
        offsetMap.put(tp1, offset);
        env.kafkaClient().setNodeApiVersions(NodeApiVersions.create());
        // First metadata response fails with a retriable error, second succeeds:
        // retryEndOffsets must retry through the failure and return the offsets.
        env.kafkaClient().prepareResponse(prepareMetadataResponse(cluster, Errors.UNKNOWN_TOPIC_OR_PARTITION));
        env.kafkaClient().prepareResponse(prepareMetadataResponse(cluster, Errors.NONE));
        env.kafkaClient().prepareResponse(listOffsetsResult(tp1, offset));
        TopicAdmin admin = new TopicAdmin(env.adminClient());
        Map<TopicPartition, Long> endoffsets = admin.retryEndOffsets(tps, Duration.ofMillis(100), 1);
        assertEquals(Collections.singletonMap(tp1, offset), endoffsets);
    }
}
/**
 * Looks up the detailed answers of one review inside a review group.
 *
 * <p>Resolution order: review group by request code, access-code authorization,
 * then the review itself scoped to that group; finally the template sections are
 * assembled into section responses.
 *
 * @throws ReviewGroupNotFoundByReviewRequestCodeException if no group matches the code
 * @throws ReviewGroupUnauthorizedException if the access code does not match
 * @throws ReviewNotFoundByIdAndGroupException if the review is absent from the group
 */
public TemplateAnswerResponse getReviewDetail(long reviewId, String reviewRequestCode, String groupAccessCode) {
    ReviewGroup reviewGroup = reviewGroupRepository.findByReviewRequestCode(reviewRequestCode)
            .orElseThrow(() -> new ReviewGroupNotFoundByReviewRequestCodeException(reviewRequestCode));
    if (!reviewGroup.matchesGroupAccessCode(groupAccessCode)) {
        throw new ReviewGroupUnauthorizedException(reviewGroup.getId());
    }
    Review review = reviewRepository.findByIdAndReviewGroupId(reviewId, reviewGroup.getId())
            .orElseThrow(() -> new ReviewNotFoundByIdAndGroupException(reviewId, reviewGroup.getId()));
    long templateId = review.getTemplateId();
    List<Section> sections = sectionRepository.findAllByTemplateId(templateId);
    List<SectionAnswerResponse> sectionResponses = new ArrayList<>();
    for (Section section : sections) {
        // Mutates sectionResponses via the helper.
        addSectionResponse(review, reviewGroup, section, sectionResponses);
    }
    return new TemplateAnswerResponse(
            templateId,
            reviewGroup.getReviewee(),
            reviewGroup.getProjectName(),
            review.getCreatedDate(),
            sectionResponses
    );
}
// Looking up a review with a wrong group access code must throw
// ReviewGroupUnauthorizedException.
@Test
void 잘못된_그룹_액세스_코드로_리뷰를_조회할_경우_예외를_발생한다() {
    // given
    String reviewRequestCode = "reviewRequestCode";
    String groupAccessCode = "groupAccessCode";
    ReviewGroup reviewGroup = reviewGroupRepository.save(
            new ReviewGroup("테드", "리뷰미 프로젝트", reviewRequestCode, groupAccessCode));
    Review review = reviewRepository.save(new Review(0, reviewGroup.getId(), List.of(), List.of()));
    // when, then
    assertThatThrownBy(() -> reviewDetailLookupService.getReviewDetail(
            review.getId(), reviewRequestCode, "wrong" + groupAccessCode
    )).isInstanceOf(ReviewGroupUnauthorizedException.class);
}
/**
 * Notifies every registered SUBSCRIBE interceptor of the new subscription.
 * Handlers are invoked asynchronously on the interceptor executor.
 */
@Override
public void notifyTopicSubscribed(final Subscription sub, final String username) {
    for (final InterceptHandler handler : this.handlers.get(InterceptSubscribeMessage.class)) {
        LOG.debug("Notifying MQTT SUBSCRIBE message to interceptor. CId={}, topicFilter={}, interceptorId={}",
                  sub.getClientId(), sub.getTopicFilter(), handler.getID());
        executor.execute(() -> handler.onSubscribe(new InterceptSubscribeMessage(sub, username)));
    }
}
@Test
public void testNotifyTopicSubscribed() throws Exception {
    interceptor.notifyTopicSubscribed(new Subscription("cli1",
            new Topic("o2"), MqttSubscriptionOption.onlyFromQos(MqttQoS.AT_MOST_ONCE)), "cli1234");
    // Wait for the async interceptor executor to run the handler.
    interval();
    // 70 is presumably the counter value the mock handler sets on onSubscribe
    // -- TODO confirm against the handler fixture.
    assertEquals(70, n.get());
}
/**
 * Picks a compute node and returns its network address, recording the chosen
 * node id in {@code computeNodeIdRef}. Returns null when no node is available.
 */
@Nullable
public static TNetworkAddress getComputeNodeHost(ImmutableMap<Long, ComputeNode> computeNodes,
                                                 Reference<Long> computeNodeIdRef) {
    final ComputeNode chosen = getComputeNode(computeNodes);
    if (chosen == null) {
        return null;
    }
    computeNodeIdRef.setRef(chosen.getId());
    return new TNetworkAddress(chosen.getHost(), chosen.getBePort());
}
@Test
public void testChooseComputeNodeConcurrently() throws InterruptedException {
    // Build 6 nodes of which only node 0 is alive; all threads must always pick it.
    ImmutableMap.Builder<Long, ComputeNode> builder = ImmutableMap.builder();
    for (int i = 0; i < 6; i++) {
        ComputeNode backend = new ComputeNode(i, "address" + i, 0);
        backend.setAlive(i == 0);
        builder.put(backend.getId(), backend);
    }
    ImmutableMap<Long, ComputeNode> nodes = builder.build();
    List<Thread> threads = new ArrayList<>();
    for (int i = 0; i < 4; i++) {
        Thread t = new Thread(() -> {
            for (int i1 = 0; i1 < 50; i1++) {
                Reference<Long> idRef = new Reference<>();
                TNetworkAddress address = SimpleScheduler.getComputeNodeHost(nodes, idRef);
                Assert.assertNotNull(address);
                Assert.assertEquals("address0", address.hostname);
            }
        });
        threads.add(t);
    }
    for (Thread t : threads) {
        t.start();
    }
    for (Thread t : threads) {
        t.join();
    }
}
/**
 * Entry point for the CLI subcommand: validates the config file, loads the
 * migration config, and delegates to the testable overload with real
 * collaborators. Returns 0 on success, 1 on configuration failure.
 */
@Override
protected int command() {
    if (!validateConfigFilePresent()) {
        return 1;
    }
    final MigrationConfig config;
    try {
        config = MigrationConfig.load(getConfigFile());
    } catch (KsqlException | MigrationException e) {
        LOGGER.error(e.getMessage());
        return 1;
    }
    return command(
        config,
        MigrationsUtil::getKsqlClient,
        getMigrationsDir(getConfigFile(), config),
        Clock.systemDefaultZone()
    );
}
@Test
public void shouldNotFailIfFileDoesntFitFormat() throws Exception {
    // Given:
    command = PARSER.parse("-n");
    createMigrationFile(1, NAME, migrationsDir, COMMAND);
    when(versionQueryResult.get()).thenReturn(ImmutableList.of());
    // extra file that does not match expected format
    assertThat(new File(migrationsDir + "/foo.sql").createNewFile(), is(true));
    // When:
    final int result = command.command(config, (cfg, headers) -> ksqlClient, migrationsDir, Clock.fixed(
        Instant.ofEpochMilli(1000), ZoneId.systemDefault()));
    // Then: the non-conforming file is ignored and migration 1 still runs
    assertThat(result, is(0));
    final InOrder inOrder = inOrder(ksqlClient);
    verifyMigratedVersion(inOrder, 1, "<none>", MigrationState.MIGRATED);
    inOrder.verify(ksqlClient).close();
    inOrder.verifyNoMoreInteractions();
}
/**
 * Validates that all document types referenced by other documents exist and are
 * globally distributed.
 */
public void validate(Map<String, NewDocumentType> documentDefinitions,
                     Set<NewDocumentType> globallyDistributedDocuments) {
    verifyReferredDocumentsArePresent(documentDefinitions);
    verifyReferredDocumentsAreGlobal(documentDefinitions, globallyDistributedDocuments);
}
@Test
void throws_exception_if_referenced_document_not_global() {
    // "child" references "parent", but "parent" is not globally distributed
    NewDocumentType parent = createDocumentType("parent");
    Fixture fixture = new Fixture()
            .addNonGlobalDocument(parent)
            .addNonGlobalDocument(createDocumentType("child", parent));
    try {
        validate(fixture);
        fail();
    } catch (IllegalArgumentException e) {
        assertEquals("The following document types are referenced from other documents, but are not globally distributed: 'parent'", e.getMessage());
    }
}
/** Returns the pre-computed mean of this distribution. */
@Override
public double mean() {
    return this.mean;
}
@Test
public void testMean() {
    System.out.println("mean");
    // For LogNormal(mu=1, sigma=1) the mean is exp(mu + sigma^2/2) = e^1.5 ~= 4.481689
    LogNormalDistribution instance = new LogNormalDistribution(1.0, 1.0);
    instance.rand();
    assertEquals(4.481689, instance.mean(), 1E-7);
}
/**
 * Builds a root exception-history entry from a failure-handling snapshot.
 * Task name and location are only populated when the snapshot identifies a
 * root-cause execution; otherwise they remain null.
 */
public static RootExceptionHistoryEntry fromFailureHandlingResultSnapshot(
        FailureHandlingResultSnapshot snapshot) {
    String failingTaskName = null;
    TaskManagerLocation taskManagerLocation = null;
    if (snapshot.getRootCauseExecution().isPresent()) {
        final Execution rootCauseExecution = snapshot.getRootCauseExecution().get();
        failingTaskName = rootCauseExecution.getVertexWithAttempt();
        taskManagerLocation = rootCauseExecution.getAssignedResourceLocation();
    }
    return createRootExceptionHistoryEntry(
            snapshot.getRootCause(),
            snapshot.getTimestamp(),
            snapshot.getFailureLabels(),
            failingTaskName,
            taskManagerLocation,
            snapshot.getConcurrentlyFailedExecution());
}
@Test
void testFromFailureHandlingResultSnapshot() throws ExecutionException, InterruptedException {
    // Root failure plus one concurrent failure captured in the snapshot.
    final Throwable rootException = new RuntimeException("Expected root failure");
    final ExecutionVertex rootExecutionVertex = extractExecutionVertex(0);
    final long rootTimestamp = triggerFailure(rootExecutionVertex, rootException);
    final CompletableFuture<Map<String, String>> rootFailureLabels =
            CompletableFuture.completedFuture(Collections.singletonMap("key", "value"));
    final Throwable concurrentException1 = new IllegalStateException("Expected other failure1");
    final ExecutionVertex concurrentlyFailedExecutionVertex1 = extractExecutionVertex(1);
    Predicate<ExceptionHistoryEntry> exception1Predicate =
            triggerFailureAndCreateEntryMatcher(
                    concurrentException1, concurrentlyFailedExecutionVertex1);
    final FailureHandlingResultSnapshot snapshot =
            new FailureHandlingResultSnapshot(
                    rootExecutionVertex.getCurrentExecutionAttempt(),
                    rootException,
                    rootTimestamp,
                    rootFailureLabels,
                    Collections.singleton(
                            concurrentlyFailedExecutionVertex1.getCurrentExecutionAttempt()),
                    true);
    final RootExceptionHistoryEntry actualEntry =
            RootExceptionHistoryEntry.fromFailureHandlingResultSnapshot(snapshot);
    // The root entry reflects the root failure's exception, timestamp, labels and location.
    assertThat(actualEntry)
            .matches(
                    ExceptionHistoryEntryMatcher.matchesFailure(
                            rootException,
                            rootTimestamp,
                            rootFailureLabels.get(),
                            rootExecutionVertex.getTaskNameWithSubtaskIndex(),
                            rootExecutionVertex.getCurrentAssignedResourceLocation()));
    assertThat(actualEntry.getConcurrentExceptions()).hasSize(1).allMatch(exception1Predicate);
    // Test for addConcurrentExceptions: a later concurrent failure can be appended.
    final Throwable concurrentException2 = new IllegalStateException("Expected other failure2");
    final ExecutionVertex concurrentlyFailedExecutionVertex2 = extractExecutionVertex(2);
    Predicate<ExceptionHistoryEntry> exception2Predicate =
            triggerFailureAndCreateEntryMatcher(
                    concurrentException2, concurrentlyFailedExecutionVertex2);
    actualEntry.addConcurrentExceptions(
            concurrentlyFailedExecutionVertex2.getCurrentExecutions());
    // Both concurrent failures must now be present.
    assertThat(actualEntry.getConcurrentExceptions())
            .hasSize(2)
            .allMatch(
                    exceptionHistoryEntry ->
                            exception1Predicate.test(exceptionHistoryEntry)
                                    || exception2Predicate.test(exceptionHistoryEntry));
}
/** Exposes the wrapped transformer (package-private; exercised by tests). */
SchemaTransformer delegate() {
    return this.transformer;
}
@Test
void shouldAcceptNullAsTransformConfiguration() {
    // A null configuration must fall back to the identity transformer.
    final SchemaTransformerFactory schemaTransformerFactory = new SchemaTransformerFactory(null);
    assertSame(SchemaTransformer.IDENTITY_TRANSFORMER, schemaTransformerFactory.delegate());
}
/**
 * Returns the hit/miss ratio as a percentage. With no misses the ratio is
 * NaN when there are also no hits, and positive infinity otherwise.
 */
@Override
public double getRatio() {
    if (misses.get() != 0) {
        return hits.doubleValue() / misses.doubleValue() * PERCENTAGE;
    }
    return hits.get() == 0 ? Double.NaN : Double.POSITIVE_INFINITY;
}
@Test
public void testGetRatio_NaN() {
    // Fresh stats (no hits, no misses) yield an undefined ratio.
    NearCacheStatsImpl nearCacheStats = new NearCacheStatsImpl();
    assertEquals(Double.NaN, nearCacheStats.getRatio(), 0.0001);
}
/**
 * Resolves the type of a field across the given streams. Returns empty when the
 * field is unknown or its types cannot be reduced to a single one.
 */
public Optional<String> getType(Set<String> streamIds, String field) {
    final Map<String, Set<String>> allFieldTypes = this.get(streamIds);
    final Set<String> fieldTypes = allFieldTypes.get(field);
    // fieldTypes may be null for unknown fields; typeFromFieldType handles that.
    return typeFromFieldType(fieldTypes);
}
@Test
void returnsEmptyOptionalIfStreamsAreEmpty() {
    final Pair<IndexFieldTypesService, StreamService> services = mockServices();
    final FieldTypesLookup lookup = new FieldTypesLookup(services.getLeft(), services.getRight());
    // No streams means no field types can be resolved.
    final Optional<String> result = lookup.getType(Collections.emptySet(), "somefield");
    assertThat(result).isEmpty();
}
/**
 * Test helper: translates a stream graph into a job graph using the current
 * thread's context class loader and an inline (same-thread) executor.
 */
@VisibleForTesting
public static JobGraph createJobGraph(StreamGraph streamGraph) {
    return new StreamingJobGraphGenerator(
                    Thread.currentThread().getContextClassLoader(), streamGraph, null, Runnable::run)
            .createJobGraph();
}
@Test
void testExchangeModeUndefined() {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    // fromElements -> Map -> Print
    DataStream<Integer> sourceDataStream = env.fromData(1, 2, 3);
    DataStream<Integer> partitionAfterSourceDataStream =
            new DataStream<>(
                    env,
                    new PartitionTransformation<>(
                            sourceDataStream.getTransformation(),
                            new ForwardPartitioner<>(),
                            StreamExchangeMode.UNDEFINED));
    DataStream<Integer> mapDataStream =
            partitionAfterSourceDataStream.map(value -> value).setParallelism(1);
    DataStream<Integer> partitionAfterMapDataStream =
            new DataStream<>(
                    env,
                    new PartitionTransformation<>(
                            mapDataStream.getTransformation(),
                            new RescalePartitioner<>(),
                            StreamExchangeMode.UNDEFINED));
    partitionAfterMapDataStream.print().setParallelism(2);
    JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(env.getStreamGraph());
    List<JobVertex> verticesSorted = jobGraph.getVerticesSortedTopologicallyFromSources();
    assertThat(verticesSorted).hasSize(2);
    // it can be chained with UNDEFINED exchange mode
    JobVertex sourceAndMapVertex = verticesSorted.get(0);
    // UNDEFINED exchange mode is translated into PIPELINED_BOUNDED result partition by default
    assertThat(sourceAndMapVertex.getProducedDataSets().get(0).getResultType())
            .isEqualTo(ResultPartitionType.PIPELINED_BOUNDED);
}
/**
 * Returns the proxy path for the given application, without a trailing slash
 * (delegates to the two-argument overload with {@code redirected=false}).
 */
public static String getPath(ApplicationId id) {
    return getPath(id, false);
}
@Test
void testGetPathApplicationId() {
    // Path is "/proxy/" followed by the application id's canonical string form.
    assertEquals("/proxy/application_100_0001",
        ProxyUriUtils.getPath(BuilderUtils.newApplicationId(100l, 1)));
    assertEquals("/proxy/application_6384623_0005",
        ProxyUriUtils.getPath(BuilderUtils.newApplicationId(6384623l, 5)));
}
/**
 * Registers a listener under its rule suffix; null listeners are ignored and
 * duplicate registrations are de-duplicated by the backing set.
 *
 * <p>NOTE(review): the ConcurrentHashMapUtils helper is presumably used to work
 * around the JDK-8161372 computeIfAbsent contention issue -- confirm against the
 * utility's documentation.
 */
public synchronized void register(MeshRuleListener listener) {
    if (listener == null) {
        return;
    }
    ConcurrentHashMapUtils.computeIfAbsent(listenerMap, listener.ruleSuffix(), (k) -> new ConcurrentHashSet<>())
            .add(listener);
}
@Test
void register() {
    MeshRuleDispatcher meshRuleDispatcher = new MeshRuleDispatcher("TestApp");
    MeshRuleListener listener1 = new MeshRuleListener() {
        @Override
        public void onRuleChange(String appName, List<Map<String, Object>> rules) {}

        @Override
        public void clearRule(String appName) {}

        @Override
        public String ruleSuffix() {
            return "Type1";
        }
    };
    // Registering the same listener twice must not create a duplicate entry.
    meshRuleDispatcher.register(listener1);
    meshRuleDispatcher.register(listener1);
    Assertions.assertEquals(
            1, meshRuleDispatcher.getListenerMap().get("Type1").size());
    Assertions.assertTrue(meshRuleDispatcher.getListenerMap().get("Type1").contains(listener1));
}
/**
 * Loads an existing Logstash keystore described by the given config.
 *
 * <p>Fails with a {@code LoadException} when the file is missing or is not a
 * valid keystore, and with an {@code AccessException} when the keystore exists
 * but cannot be opened (wrong password / permissions). A keystore that opens
 * but lacks the Logstash marker secret is rejected as not a Logstash keystore.
 *
 * <p>The lock is taken after {@code init} and released in the finally block,
 * which also clears sensitive values from the config.
 */
@Override
public JavaKeyStore load(SecureConfig config) {
    if (!exists(config)) {
        throw new SecretStoreException.LoadException(
            String.format("Can not find Logstash keystore at %s. Please verify this file exists and is a valid Logstash keystore.",
                config.getPlainText("keystore.file") == null ? "<undefined>" : new String(config.getPlainText("keystore.file"))));
    }
    try {
        init(config);
        lock.lock();
        try (final InputStream is = Files.newInputStream(keyStorePath)) {
            try {
                keyStore.load(is, this.keyStorePass);
            } catch (IOException ioe) {
                // KeyStore.load wraps a bad password as an IOException caused by
                // UnrecoverableKeyException; distinguish that from a corrupt file.
                if (ioe.getCause() instanceof UnrecoverableKeyException) {
                    throw new SecretStoreException.AccessException(
                        String.format("Can not access Logstash keystore at %s. Please verify correct file permissions and keystore password.",
                            keyStorePath.toAbsolutePath()), ioe);
                } else {
                    throw new SecretStoreException.LoadException(String.format("Found a file at %s, but it is not a valid Logstash keystore.",
                        keyStorePath.toAbsolutePath().toString()), ioe);
                }
            }
            byte[] marker = retrieveSecret(LOGSTASH_MARKER);
            if (marker == null) {
                throw new SecretStoreException.LoadException(String.format("Found a keystore at %s, but it is not a Logstash keystore.",
                    keyStorePath.toAbsolutePath().toString()));
            }
            LOGGER.debug("Using existing keystore at {}", keyStorePath.toAbsolutePath());
            return this;
        }
    } catch (SecretStoreException sse) {
        throw sse;
    } catch (Exception e) {
        //should never happen
        throw new SecretStoreException.UnknownException("Error while trying to load the Logstash keystore", e);
    } finally {
        releaseLock(lock);
        config.clearValues();
    }
}
@Test
public void notLogstashKeystore() throws Exception {
    // A file of random bytes at the keystore path must be rejected on load.
    SecureConfig altConfig = new SecureConfig();
    Path altPath = folder.newFolder().toPath().resolve("alt.not.a.logstash.keystore");
    try (OutputStream out = Files.newOutputStream(altPath)) {
        byte[] randomBytes = new byte[300];
        new Random().nextBytes(randomBytes);
        out.write(randomBytes);
    }
    altConfig.add("keystore.file", altPath.toString().toCharArray());
    assertThrows(SecretStoreException.class, () -> {
        new JavaKeyStore().load(altConfig);
    });
}
/** Builds an {@code S3ResourceId}, normalizing the key to carry a leading slash. */
static S3ResourceId fromComponents(String scheme, String bucket, String key) {
    final String normalizedKey = key.startsWith("/") ? key : "/" + key;
    return new S3ResourceId(scheme, bucket, normalizedKey, null, null);
}
@Test
public void testInvalidBucket() {
    // A bucket name containing '/' is rejected (validation presumably happens
    // in the S3ResourceId constructor).
    thrown.expect(IllegalArgumentException.class);
    S3ResourceId.fromComponents("s3", "invalid/", "");
}
/**
 * Combines label selectors and field selectors into a single predicate:
 * an extension matches only when it satisfies both selector sets.
 */
public static <E extends Extension> Predicate<E> labelAndFieldSelectorToPredicate(
        List<String> labelSelectors, List<String> fieldSelectors) {
    return SelectorUtil.<E>labelSelectorsToPredicate(labelSelectors)
            .and(fieldSelectorToPredicate(fieldSelectors));
}
@Test
void shouldConvertCorrectlyIfSelectorsAreNull() {
    // Null selector lists must yield a match-everything predicate.
    var predicate = labelAndFieldSelectorToPredicate(null, null);
    assertTrue(predicate.test(mock(Extension.class)));
}
/**
 * Parses a worker identity from its protobuf representation.
 *
 * @throws ProtoParsingException if the proto cannot be parsed (e.g. version mismatch)
 */
public static WorkerIdentity fromProto(alluxio.grpc.WorkerIdentity proto)
    throws ProtoParsingException {
    return Parsers.fromProto(proto);
}
@Test
public void legacyVersionMismatch() {
    // The V0 parser must reject a proto declaring version 1.
    alluxio.grpc.WorkerIdentity identityProto = alluxio.grpc.WorkerIdentity.newBuilder()
        .setVersion(1)
        .setIdentifier(ByteString.copyFrom(Longs.toByteArray(2L)))
        .build();
    assertThrows(InvalidVersionParsingException.class,
        () -> WorkerIdentity.ParserV0.INSTANCE.fromProto(identityProto));
}
/**
 * Evaluates all consumer filter expressions for the request's topic against the
 * message properties and records the matching consumers in a bloom-filter bit
 * map on the request. No-op when bit-map calculation is disabled or the topic
 * has no filters. All errors are caught and logged; dispatch never throws.
 */
@Override
public void dispatch(DispatchRequest request) {
    if (!this.brokerConfig.isEnableCalcFilterBitMap()) {
        return;
    }
    try {
        Collection<ConsumerFilterData> filterDatas = consumerFilterManager.get(request.getTopic());
        if (filterDatas == null || filterDatas.isEmpty()) {
            return;
        }
        Iterator<ConsumerFilterData> iterator = filterDatas.iterator();
        BitsArray filterBitMap = BitsArray.create(
            this.consumerFilterManager.getBloomFilter().getM()
        );
        long startTime = System.currentTimeMillis();
        while (iterator.hasNext()) {
            ConsumerFilterData filterData = iterator.next();
            // Skip inconsistent filter entries rather than failing the whole dispatch.
            if (filterData.getCompiledExpression() == null) {
                log.error("[BUG] Consumer in filter manager has no compiled expression! {}", filterData);
                continue;
            }
            if (filterData.getBloomFilterData() == null) {
                log.error("[BUG] Consumer in filter manager has no bloom data! {}", filterData);
                continue;
            }
            Object ret = null;
            try {
                MessageEvaluationContext context = new MessageEvaluationContext(request.getPropertiesMap());
                ret = filterData.getCompiledExpression().evaluate(context);
            } catch (Throwable e) {
                log.error("Calc filter bit map error!commitLogOffset={}, consumer={}, {}",
                    request.getCommitLogOffset(), filterData, e);
            }
            log.debug("Result of Calc bit map:ret={}, data={}, props={}, offset={}",
                ret, filterData, request.getPropertiesMap(), request.getCommitLogOffset());
            // eval true
            if (ret != null && ret instanceof Boolean && (Boolean) ret) {
                consumerFilterManager.getBloomFilter().hashTo(
                    filterData.getBloomFilterData(),
                    filterBitMap
                );
            }
        }
        request.setBitMap(filterBitMap.bytes());
        long elapsedTime = UtilAll.computeElapsedTimeMilliseconds(startTime);
        // 1ms
        if (elapsedTime >= 1) {
            log.warn("Spend {} ms to calc bit map, consumerNum={}, topic={}",
                elapsedTime, filterDatas.size(), request.getTopic());
        }
    } catch (Throwable e) {
        log.error("Calc bit map error! topic={}, offset={}, queueId={}, {}",
            request.getTopic(), request.getCommitLogOffset(), request.getQueueId(), e);
    }
}
@Test
public void testDispatch_filterDataIllegal() {
    BrokerConfig brokerConfig = new BrokerConfig();
    brokerConfig.setEnableCalcFilterBitMap(true);
    ConsumerFilterManager filterManager = new ConsumerFilterManager();
    filterManager.register("topic0", "CID_0", "a is not null and a >= 5",
        ExpressionType.SQL92, System.currentTimeMillis());
    filterManager.register("topic0", "CID_1", "a is not null and a >= 15",
        ExpressionType.SQL92, System.currentTimeMillis());
    // Corrupt both filter entries: one without a compiled expression, one without bloom data.
    ConsumerFilterData nullExpression = filterManager.get("topic0", "CID_0");
    nullExpression.setExpression(null);
    nullExpression.setCompiledExpression(null);
    ConsumerFilterData nullBloomData = filterManager.get("topic0", "CID_1");
    nullBloomData.setBloomFilterData(null);
    CommitLogDispatcherCalcBitMap calcBitMap = new CommitLogDispatcherCalcBitMap(brokerConfig,
        filterManager);
    for (int i = 0; i < 1; i++) {
        Map<String, String> properties = new HashMap<>(4);
        properties.put("a", String.valueOf(i * 10 + 5));
        String topic = "topic" + i;
        DispatchRequest dispatchRequest = new DispatchRequest(
            topic,
            0,
            i * 100 + 123,
            100,
            (long) ("tags" + i).hashCode(),
            System.currentTimeMillis(),
            i,
            null,
            UUID.randomUUID().toString(),
            0,
            0,
            properties
        );
        calcBitMap.dispatch(dispatchRequest);
        // Dispatch still produces a bit map, but no bits are set because both
        // filter entries were skipped as inconsistent.
        assertThat(dispatchRequest.getBitMap()).isNotNull();
        BitsArray bitsArray = BitsArray.create(dispatchRequest.getBitMap(),
            filterManager.getBloomFilter().getM());
        for (int j = 0; j < bitsArray.bitLength(); j++) {
            assertThat(bitsArray.getBit(j)).isFalse();
        }
    }
}
/**
 * Returns the vertices in reverse topological order, or null when no
 * topological order exists (presumably a cyclic graph -- see topologicalSort).
 */
public List<V> reverseTopologicalSort() {
    final List<V> sorted = topologicalSort();
    if (sorted == null) {
        return null;
    }
    Collections.reverse(sorted);
    return sorted;
}
@Test
void reverseTopologicalSort() {
    // Result must be the exact reverse of the fixture graph's topological order.
    List<Character> result = graph.reverseTopologicalSort();
    List<Character> expected = Arrays.asList('C', 'G', 'F', 'B', 'A', 'E', 'D');
    assertEquals(expected, result);
}
/**
 * Resolves the bind address to use for a channel under wildcard-port management.
 *
 * <p>An explicit (non-zero) port is recorded in the managed port set and passed
 * through. A zero port is replaced with an allocated port from the managed
 * range unless the OS wildcard is in effect, or this is a sender publication
 * without explicit control (which should keep an ephemeral port).
 *
 * @throws BindException if no port can be allocated from the range
 */
public InetSocketAddress getManagedPort(
    final UdpChannel udpChannel,
    final InetSocketAddress bindAddress) throws BindException {
    InetSocketAddress address = bindAddress;
    if (bindAddress.getPort() != 0) {
        portSet.add(bindAddress.getPort());
    } else if (!isOsWildcard) {
        // do not map if not a subscription and does not have a control address. We want to use an ephemeral port
        // for the control channel on publications.
        if (!isSender || udpChannel.hasExplicitControl()) {
            address = new InetSocketAddress(bindAddress.getAddress(), allocateOpenPort());
        }
    }
    return address;
}
@Test
void shouldPassThroughWithExplicitBindAddressInsideRange() throws BindException {
    // An explicit non-zero port inside the managed range is returned unchanged.
    final InetSocketAddress bindAddress = new InetSocketAddress("localhost", 20003);
    final WildcardPortManager manager = new WildcardPortManager(portRange, true);
    assertThat(manager.getManagedPort(
        udpChannelPort0, bindAddress), is(new InetSocketAddress("localhost", 20003)));
}
/**
 * Queues the message for asynchronous processing, serialized per originator:
 * each originator gets its own semaphore-guarded queue so messages from the
 * same entity are processed one at a time.
 */
@Override
public void onMsg(TbContext ctx, TbMsg msg) {
    locks.computeIfAbsent(msg.getOriginator(), SemaphoreWithTbMsgQueue::new)
            .addToQueueAndTryProcess(msg, ctx, this::processMsgAsync);
}
@Test
public void test_2_plus_2_attr_and_ts() {
    // ADD of attribute a (2.0) and time-series b (2) must write result=4 to the body.
    var node = initNode(TbRuleNodeMathFunctionType.ADD,
            new TbMathResult(TbMathArgumentType.MESSAGE_BODY, "result", 2, false, false, null),
            new TbMathArgument(TbMathArgumentType.ATTRIBUTE, "a"),
            new TbMathArgument(TbMathArgumentType.TIME_SERIES, "b")
    );
    TbMsg msg = TbMsg.newMsg(TbMsgType.POST_TELEMETRY_REQUEST, originator, TbMsgMetaData.EMPTY, JacksonUtil.newObjectNode().toString());
    when(attributesService.find(tenantId, originator, AttributeScope.SERVER_SCOPE, "a"))
            .thenReturn(Futures.immediateFuture(Optional.of(new BaseAttributeKvEntry(System.currentTimeMillis(), new DoubleDataEntry("a", 2.0)))));
    when(tsService.findLatest(tenantId, originator, "b"))
            .thenReturn(Futures.immediateFuture(Optional.of(new BasicTsKvEntry(System.currentTimeMillis(), new LongDataEntry("b", 2L)))));
    node.onMsg(ctx, msg);
    ArgumentCaptor<TbMsg> msgCaptor = ArgumentCaptor.forClass(TbMsg.class);
    verify(ctx, timeout(TIMEOUT)).tellSuccess(msgCaptor.capture());
    TbMsg resultMsg = msgCaptor.getValue();
    assertNotNull(resultMsg);
    assertNotNull(resultMsg.getData());
    var resultJson = JacksonUtil.toJsonNode(resultMsg.getData());
    assertTrue(resultJson.has("result"));
    assertEquals(4, resultJson.get("result").asInt());
}
/**
 * Resolves the search by id for the given user and executes it synchronously.
 *
 * @throws NotFoundException if the user cannot access a search with that id
 */
public SearchJob executeSync(String searchId, SearchUser searchUser, ExecutionState executionState) {
    return searchDomain.getForUser(searchId, searchUser)
            .map(s -> executeSync(s, searchUser, executionState))
            .orElseThrow(() -> new NotFoundException("No search found with id <" + searchId + ">."));
}
@Test
public void addsStreamsToSearchWithoutStreams() {
    // A query without explicit streams should be widened to all streams the user may read.
    final Search search = Search.builder()
            .queries(ImmutableSet.of(Query.builder().build()))
            .build();
    final SearchUser searchUser = TestSearchUser.builder()
            .withUser(testUser -> testUser.withUsername("frank-drebin"))
            .allowStream("somestream")
            .build();
    when(searchDomain.getForUser(eq("search1"), eq(searchUser))).thenReturn(Optional.of(search));
    final SearchJob searchJob = this.searchExecutor.executeSync("search1", searchUser, ExecutionState.empty());
    assertThat(searchJob.getSearch().queries())
            .are(new Condition<>(query -> query.usedStreamIds().equals(Collections.singleton("somestream")),
                "All accessible streams have been added"));
}
/**
 * Builds a JWKS JSON document (public parts only) from the long-lived and
 * current public keys, optionally including a previous key when both the key
 * and its id are supplied.
 *
 * <p>NOTE(review): a {@code JoseException} is logged and swallowed, so the
 * returned JWKS may silently be missing keys (or be empty) on failure --
 * confirm whether callers can tolerate that.
 */
public static String generateJwk(PublicKey longKey, String longKeyId, PublicKey currKey, String currKeyId, PublicKey prevKey, String prevKeyId) {
    List<JsonWebKey> jwkList = new ArrayList<>();
    try {
        // Create a JWK object from the long live public key
        PublicJsonWebKey jwk = PublicJsonWebKey.Factory.newPublicJwk(longKey);
        jwk.setKeyId(longKeyId);
        jwkList.add(jwk);
        // Create a JWK object from the current public key
        jwk = PublicJsonWebKey.Factory.newPublicJwk(currKey);
        jwk.setKeyId(currKeyId);
        jwkList.add(jwk);
        // Create a JWK object from the previous public key
        if(prevKey != null && prevKeyId != null) {
            jwk = PublicJsonWebKey.Factory.newPublicJwk(prevKey);
            jwk.setKeyId(prevKeyId);
            jwkList.add(jwk);
        }
    } catch (JoseException e) {
        logger.error("Exception:", e);
    }
    // create a JsonWebKeySet object with the list of JWK objects
    JsonWebKeySet jwks = new JsonWebKeySet(jwkList);
    // and output the JSON of the JWKS
    return jwks.toJson(JsonWebKey.OutputControlLevel.PUBLIC_ONLY);
}
@Test
public void testGenerateJwk() throws Exception {
    // Generate two RSA key pairs (long-lived and current) and build a JWKS
    // without a previous key; output is printed for manual inspection.
    KeyPair longKeyPair = KeyUtil.generateKeyPair("RSA", 2048);
    String longKeyId = HashUtil.generateUUID();
    System.out.println("longKeyId = " + longKeyId);
    String publicKey = KeyUtil.serializePublicKey(longKeyPair.getPublic());
    System.out.println("long public key = " + publicKey);
    String privateKey = KeyUtil.serializePrivateKey(longKeyPair.getPrivate());
    System.out.println("long private key = " + privateKey);
    KeyPair currKeyPair = KeyUtil.generateKeyPair("RSA", 2048);
    String currKeyId = HashUtil.generateUUID();
    System.out.println("currKeyId = " + currKeyId);
    publicKey = KeyUtil.serializePublicKey(currKeyPair.getPublic());
    System.out.println("curr public key = " + publicKey);
    privateKey = KeyUtil.serializePrivateKey(currKeyPair.getPrivate());
    System.out.println("curr private key = " + privateKey);
    String jwk = KeyUtil.generateJwk(longKeyPair.getPublic(), longKeyId, currKeyPair.getPublic(), currKeyId, null, null);
    System.out.println("jwk = " + jwk);
}
/**
 * Resolves the {@link UserGroupInformation} for the given HTTP request.
 * Convenience overload that delegates with a null first argument
 * (presumably the servlet context — TODO confirm against the overload's
 * signature, which is outside this view).
 *
 * @param request the incoming HTTP request
 * @param conf    configuration used for authentication resolution
 * @return the resolved user information
 * @throws IOException if the user cannot be determined
 */
public static UserGroupInformation getUGI(HttpServletRequest request, Configuration conf) throws IOException {
    return getUGI(null, request, conf);
}
// Verifies how the service address is set on a delegation token when resolved
// from (1) the request URL parameter, (2) the servlet-context attribute, and
// (3) a service already embedded in the token.
@Test
public void testGetUgi() throws IOException {
    conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
    HttpServletRequest request = mock(HttpServletRequest.class);
    ServletContext context = mock(ServletContext.class);
    String user = "TheDoctor";
    Text userText = new Text(user);
    DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(userText, userText, null);
    Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>(
        dtId, new DummySecretManager(0, 0, 0, 0));
    String tokenString = token.encodeToUrlString();
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
        tokenString);
    when(request.getRemoteUser()).thenReturn(user);

    //Test attribute in the url to be used as service in the token.
    when(request.getParameter(JspHelper.NAMENODE_ADDRESS)).thenReturn(
        "1.1.1.1:1111");

    conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    verifyServiceInToken(context, request, "1.1.1.1:1111");

    //Test attribute name.node.address
    //Set the nnaddr url parameter to null.
    // Clear cached identifier state before re-resolving the service address.
    token.decodeIdentifier().clearCache();
    when(request.getParameter(JspHelper.NAMENODE_ADDRESS)).thenReturn(null);
    InetSocketAddress addr = new InetSocketAddress("localhost", 2222);
    when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
        .thenReturn(addr);
    verifyServiceInToken(context, request, addr.getAddress().getHostAddress()
        + ":2222");

    //Test service already set in the token and DN doesn't change service
    //when it doesn't know the NN service addr
    userText = new Text(user+"2");
    dtId = new DelegationTokenIdentifier(userText, userText, null);
    token = new Token<DelegationTokenIdentifier>(
        dtId, new DummySecretManager(0, 0, 0, 0));
    token.setService(new Text("3.3.3.3:3333"));
    tokenString = token.encodeToUrlString();
    //Set the name.node.address attribute in Servlet context to null
    when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
        .thenReturn(null);
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
        tokenString);
    verifyServiceInToken(context, request, "3.3.3.3:3333");
}
/**
 * Returns the JDBC connection bound to the current context under this
 * instance's connection key, or {@code null} when none is associated.
 */
@Nullable
@Override
public Connection currentConnection() {
    return Context.current().get(this.connectionKey);
}
// Verifies transaction semantics: an exception inside inTx rolls the insert
// back, while a successful inTx commits it.
// Fix: removed the unused local `id`, which duplicated `sql` and was never read.
@Test
void testTransaction(PostgresParams params) throws SQLException {
    var tableName = "test_table_" + PostgresTestContainer.randomName("test_table");
    params.execute("CREATE TABLE %s(id BIGSERIAL, value VARCHAR);".formatted(tableName));

    var sql = "INSERT INTO %s(value) VALUES ('test1');".formatted(tableName);
    // Collects the first column of every row into a list of strings.
    PostgresParams.ResultSetMapper<List<String>, RuntimeException> extractor = rs -> {
        var result = new ArrayList<String>();
        try {
            while (rs.next()) {
                result.add(rs.getString(1));
            }
        } catch (SQLException sqlException) {
            throw new RuntimeException(sqlException);
        }
        return result;
    };

    withDb(params, db -> {
        // The runnable inserts a row and then throws: the transaction must
        // roll back, leaving the table empty.
        Assertions.assertThatThrownBy(() -> db.inTx((JdbcHelper.SqlRunnable) () -> {
            try (var stmt = db.currentConnection().prepareStatement(sql)) {
                stmt.execute();
            }
            throw new RuntimeException();
        }));
        var values = params.query("SELECT value FROM %s".formatted(tableName), extractor);
        Assertions.assertThat(values).hasSize(0);

        // A successful transaction commits exactly one row.
        db.inTx(() -> {
            try (var stmt = db.currentConnection().prepareStatement(sql)) {
                stmt.execute();
            }
        });
        values = params.query("SELECT value FROM %s".formatted(tableName), extractor);
        Assertions.assertThat(values).hasSize(1);
    });
}
/**
 * Validates that {@code value} conforms to {@code schema}. Convenience
 * overload that delegates with a null first argument (presumably a field
 * name used in error messages — TODO confirm against the three-arg overload,
 * which is outside this view).
 */
public static void validateValue(Schema schema, Object value) {
    validateValue(null, schema, value);
}
// A float value must be rejected against a BOOLEAN schema with a DataException.
@Test
public void testValidateValueMismatchBoolean() {
    assertThrows(DataException.class, () -> ConnectSchema.validateValue(Schema.BOOLEAN_SCHEMA, 1.f));
}
/**
 * Returns an {@link IterableSubject} over the boolean array under test so
 * iterable assertions can be chained. Fails fast (via checkNotNull) when the
 * actual array is null.
 */
public IterableSubject asList() {
  return checkNoNeedToDisplayBothValues("asList()").that(Booleans.asList(checkNotNull(actual)));
}
// asList() should expose the array as an iterable supporting containsAtLeast.
@Test
public void asList() {
    assertThat(array(true, true, false)).asList().containsAtLeast(true, false);
}
public void isTrue() { if (actual == null) { isEqualTo(true); // fails } else if (!actual) { failWithoutActual(simpleFact("expected to be true")); } }
// isTrue() on a false subject must fail with exactly the "expected to be true" fact.
@Test
public void isTrueFailing() {
    expectFailureWhenTestingThat(false).isTrue();
    assertFailureKeys("expected to be true");
}
/**
 * Merges this estimator with the given ones and returns the combined
 * estimator. A null argument merges this estimator alone.
 *
 * <p>Note: the supplied estimators must all be CountThenEstimate instances;
 * Arrays.copyOf with the CountThenEstimate[] target type raises
 * ArrayStoreException for any other implementation.
 */
@Override
public ICardinality merge(ICardinality... estimators) throws CardinalityMergeException {
    if (estimators == null) {
        // Nothing supplied: the merge result is just this estimator.
        return mergeEstimators(this);
    }
    // Copy the inputs into a one-larger array and append `this` at the end.
    CountThenEstimate[] combined = Arrays.copyOf(estimators, estimators.length + 1, CountThenEstimate[].class);
    combined[combined.length - 1] = this;
    return mergeEstimators(combined);
}
// Exercises merging of CountThenEstimate instances through three phases:
// all counting, half tipped over to estimation, and all tipped over.
// Cardinality error after each merge must stay within ~1%.
@Test
public void testMerge() throws CardinalityMergeException {
    int numToMerge = 10;
    int tippingPoint = 100;   // count exactly up to this many, then estimate
    int cardinality = 1000;

    CountThenEstimate[] ctes = new CountThenEstimate[numToMerge];
    // Phase 1: keep every estimator just below its tipping point (pure counting).
    for (int i = 0; i < numToMerge; i++) {
        ctes[i] = new CountThenEstimate(tippingPoint, AdaptiveCounting.Builder.obyCount(100000));
        for (int j = 0; j < tippingPoint - 1; j++) {
            ctes[i].offer(Math.random());
        }
    }

    int expectedCardinality = numToMerge * (tippingPoint - 1);
    long mergedEstimate = CountThenEstimate.mergeEstimators(ctes).cardinality();
    double error = Math.abs(mergedEstimate - expectedCardinality) / (double) expectedCardinality;
    assertEquals(0.01, error, 0.01);

    // Phase 2: push half of the estimators past the tipping point.
    for (int i = 0; i < numToMerge / 2; i++) {
        for (int j = tippingPoint - 1; j < cardinality; j++) {
            ctes[i].offer(Math.random());
        }
    }

    expectedCardinality = (numToMerge / 2) * (cardinality + tippingPoint - 1);
    mergedEstimate = CountThenEstimate.mergeEstimators(ctes).cardinality();
    error = Math.abs(mergedEstimate - expectedCardinality) / (double) expectedCardinality;
    assertEquals(0.01, error, 0.01);

    // Phase 3: push the remaining estimators past the tipping point as well.
    for (int i = numToMerge / 2; i < numToMerge; i++) {
        for (int j = tippingPoint - 1; j < cardinality; j++) {
            ctes[i].offer(Math.random());
        }
    }

    expectedCardinality = numToMerge * cardinality;
    mergedEstimate = CountThenEstimate.mergeEstimators(ctes).cardinality();
    error = Math.abs(mergedEstimate - expectedCardinality) / (double) expectedCardinality;
    assertEquals(0.01, error, 0.01);
}
/**
 * Returns the index of the last file-separator character in the given path,
 * as determined by {@code CharUtil.isFileSeparator}.
 *
 * @param filePath path to scan; may be null or empty
 * @return index of the last separator, or -1 when the path is null, empty,
 *         or contains no separator
 */
public static int lastIndexOfSeparator(String filePath) {
    if (StrUtil.isNotEmpty(filePath)) {
        // Scan backwards so the first hit is the last separator.
        for (int i = filePath.length() - 1; i >= 0; i--) {
            if (CharUtil.isFileSeparator(filePath.charAt(i))) {
                return i;
            }
        }
    }
    return -1;
}
// The last backslash in "d:\aaa\bbb\cc\ddd" sits at index 13; a bare file
// name without separators yields -1.
@Test
public void lastIndexOfSeparatorTest() {
    final String dir = "d:\\aaa\\bbb\\cc\\ddd";
    final int index = FileUtil.lastIndexOfSeparator(dir);
    assertEquals(13, index);

    final String file = "ddd.jpg";
    final int index2 = FileUtil.lastIndexOfSeparator(file);
    assertEquals(-1, index2);
}
/**
 * Returns true when every character from the detected start position onward
 * is a decimal digit.
 *
 * <p>NOTE(review): findStartPosition is outside this view — presumably it
 * skips a leading sign and returns a negative value for inputs with no valid
 * numeric start (e.g. "-" or "."); verify that it never returns
 * text.length(), which would make this method return true for a digit-less
 * string.
 */
public static boolean isNumber(String text) {
    final int startPos = findStartPosition(text);
    if (startPos < 0) {
        // No valid numeric start (per findStartPosition's contract).
        return false;
    }

    for (int i = startPos; i < text.length(); i++) {
        char ch = text.charAt(i);
        if (!Character.isDigit(ch)) {
            return false;
        }
    }
    return true;
}
// Non-numeric strings, punctuation-only strings, a lone dot and a lone minus
// sign must all be rejected.
@Test
@DisplayName("Tests that isNumber returns false for non-numeric chars")
void isNumberNonNumeric() {
    assertFalse(ObjectHelper.isNumber("ABC"));
    assertFalse(ObjectHelper.isNumber("-ABC"));
    assertFalse(ObjectHelper.isNumber("ABC.0"));
    assertFalse(ObjectHelper.isNumber("-ABC.0"));
    assertFalse(ObjectHelper.isNumber("!@#$#$%@#$%"));
    assertFalse(ObjectHelper.isNumber("."));
    assertFalse(ObjectHelper.isNumber("-"));
}
/**
 * Builds the JMS destinations for this client from the configured destination
 * name. A name containing the separator either becomes one composite
 * destination (when composite mode is on and destCount == 1, typed after the
 * first name) or is split and each part expanded into destCount destinations.
 *
 * @param destCount number of destinations (or composite suffixes) per name
 * @return the resolved destinations
 * @throws JMSException if destination creation fails
 */
public Destination[] createDestinations(int destCount) throws JMSException {
    final String destName = getClient().getDestName();
    ArrayList<Destination> destinations = new ArrayList<>();
    if (destName.contains(DESTINATION_SEPARATOR)) {
        if (getClient().isDestComposite() && (destCount == 1)) {
            // user was explicit about which destinations to make composite
            String[] simpleNames = mapToSimpleNames(destName.split(DESTINATION_SEPARATOR));
            String joinedSimpleNames = join(simpleNames, DESTINATION_SEPARATOR);

            // use the type of the 1st destination for the Destination instance
            byte destinationType = getDestinationType(destName);
            destinations.add(createCompositeDestination(destinationType, joinedSimpleNames, 1));
        } else {
            LOG.info("User requested multiple destinations, splitting: {}", destName);
            // either composite with multiple destinations to be suffixed
            // or multiple non-composite destinations
            String[] destinationNames = destName.split(DESTINATION_SEPARATOR);
            for (String splitDestName : destinationNames) {
                addDestinations(destinations, splitDestName, destCount);
            }
        }
    } else {
        // Single plain destination name: expand it destCount times.
        addDestinations(destinations, destName, destCount);
    }
    return destinations.toArray(new Destination[] {});
}
// With composite mode on but destCount > 1, a comma-separated name is split
// into per-name composites, each suffixed 0..destCount-1 and keeping its own
// queue/topic type.
@Test
public void testCreateDestinations_compositeCommaSeparated() throws JMSException {
    clientProperties.setDestComposite(true);
    clientProperties.setDestName("queue://foo,topic://cheese");
    Destination[] destinations = jmsClient.createDestinations(2);
    assertEquals(2, destinations.length);
    assertDestinationNameType("foo.0,foo.1", QUEUE_TYPE, asAmqDest(destinations[0]));
    assertDestinationNameType("cheese.0,cheese.1", TOPIC_TYPE, asAmqDest(destinations[1]));
}
/**
 * Returns a page of job logs matching the request's filter criteria.
 * Thin delegation to the mapper, which builds the actual query.
 */
@Override
public PageResult<JobLogDO> getJobLogPage(JobLogPageReqVO pageReqVO) {
    return jobLogMapper.selectPage(pageReqVO);
}
@Test public void testGetJobPage() { // mock 数据 JobLogDO dbJobLog = randomPojo(JobLogDO.class, o -> { o.setExecuteIndex(1); o.setHandlerName("handlerName 单元测试"); o.setStatus(JobLogStatusEnum.SUCCESS.getStatus()); o.setBeginTime(buildTime(2021, 1, 8)); o.setEndTime(buildTime(2021, 1, 8)); }); jobLogMapper.insert(dbJobLog); // 测试 jobId 不匹配 jobLogMapper.insert(cloneIgnoreId(dbJobLog, o -> o.setJobId(randomLongId()))); // 测试 handlerName 不匹配 jobLogMapper.insert(cloneIgnoreId(dbJobLog, o -> o.setHandlerName(randomString()))); // 测试 beginTime 不匹配 jobLogMapper.insert(cloneIgnoreId(dbJobLog, o -> o.setBeginTime(buildTime(2021, 1, 7)))); // 测试 endTime 不匹配 jobLogMapper.insert(cloneIgnoreId(dbJobLog, o -> o.setEndTime(buildTime(2021, 1, 9)))); // 测试 status 不匹配 jobLogMapper.insert(cloneIgnoreId(dbJobLog, o -> o.setStatus(JobLogStatusEnum.FAILURE.getStatus()))); // 准备参数 JobLogPageReqVO reqVo = new JobLogPageReqVO(); reqVo.setJobId(dbJobLog.getJobId()); reqVo.setHandlerName("单元"); reqVo.setBeginTime(dbJobLog.getBeginTime()); reqVo.setEndTime(dbJobLog.getEndTime()); reqVo.setStatus(JobLogStatusEnum.SUCCESS.getStatus()); // 调用 PageResult<JobLogDO> pageResult = jobLogService.getJobLogPage(reqVo); // 断言 assertEquals(1, pageResult.getTotal()); assertEquals(1, pageResult.getList().size()); assertPojoEquals(dbJobLog, pageResult.getList().get(0)); }
/**
 * Deletes a user and its associated data (permissions, post links) in one
 * transaction, recording an operation-log entry for the deletion.
 *
 * @param id id of the user to delete; must exist
 */
@Override
@Transactional(rollbackFor = Exception.class)
@LogRecord(type = SYSTEM_USER_TYPE, subType = SYSTEM_USER_DELETE_SUB_TYPE, bizNo = "{{#id}}",
        success = SYSTEM_USER_DELETE_SUCCESS)
public void deleteUser(Long id) {
    // 1. Validate that the user exists (throws otherwise).
    AdminUserDO user = validateUserExists(id);

    // 2.1 Delete the user row.
    userMapper.deleteById(id);
    // 2.2 Delete the user's permission associations.
    permissionService.processUserDeleted(id);
    // 2.3 Delete the user's post links.
    userPostMapper.deleteByUserId(id);

    // 3. Expose the deleted user in the operation-log context.
    LogRecordContext.putVariable("user", user);
}
// Deleting an existing user removes the row and cascades the permission cleanup.
@Test
public void testDeleteUser_success(){
    // Seed a user row.
    AdminUserDO dbUser = randomAdminUserDO();
    userMapper.insert(dbUser);
    // Target id.
    Long userId = dbUser.getId();

    // Execute
    userService.deleteUser(userId);
    // Row is gone.
    assertNull(userMapper.selectById(userId));
    // Permission cleanup invoked exactly once.
    verify(permissionService, times(1)).processUserDeleted(eq(userId));
}
/**
 * Changes the process's current working directory to {@code path} via a JNI
 * binding. Presumably follows the POSIX {@code chdir(2)} convention of
 * returning 0 on success and a non-zero value on failure — TODO confirm
 * against the native implementation.
 */
public static native int chdir(String path);
// Smoke test: creates a directory and changes into it.
// NOTE(review): neither mkdirs() nor chdir()'s return value is asserted, so
// this only verifies the native call does not crash — consider asserting the
// chdir return code.
@Test
void testChdir() {
    File d = new File("target/tstDir");
    d.mkdirs();
    CLibrary.chdir(d.getAbsolutePath());
}
/**
 * Resolves member addresses from AWS, preferring EC2 instance discovery and
 * falling back to ECS for clients when EC2 yields nothing.
 *
 * <p>Decision flow visible in the code:
 * <ul>
 *   <li>no ECS-specific properties configured → query EC2 instances;</li>
 *   <li>any EC2-specific property configured → return the EC2 result as-is
 *       (possibly empty);</li>
 *   <li>EC2 result empty and running in client discovery mode → fall back to
 *       ECS addresses;</li>
 *   <li>otherwise → return the EC2 result.</li>
 * </ul>
 *
 * @return map of addresses (key/value semantics defined by the underlying
 *         APIs — presumably private → public IP; confirm against awsEc2Api)
 */
@Override
public Map<String, String> getAddresses() {
    AwsCredentials credentials = awsCredentialsProvider.credentials();

    Map<String, String> instances = Collections.emptyMap();
    // Only hit the EC2 API when the config is not ECS-specific.
    if (!awsConfig.anyOfEcsPropertiesConfigured()) {
        instances = awsEc2Api.describeInstances(credentials);
    }
    // Explicit EC2 configuration wins outright, even if empty.
    if (awsConfig.anyOfEc2PropertiesConfigured()) {
        return instances;
    }
    // Client mode with nothing from EC2: try ECS as a fallback.
    if (instances.isEmpty() && DiscoveryMode.Client == awsConfig.getDiscoveryMode()) {
        return getEcsAddresses(credentials);
    }
    return instances;
}
// With EC2 describeInstances stubbed, getAddresses must return its result untouched.
@Test
public void getAddresses() {
    // given
    AwsCredentials credentials = AwsCredentials.builder()
        .setAccessKey("access-key")
        .setSecretKey("secret-key")
        .setToken("token")
        .build();

    Map<String, String> expectedResult = singletonMap("123.12.1.0", "1.4.6.2");
    given(awsCredentialsProvider.credentials()).willReturn(credentials);
    given(awsEc2Api.describeInstances(credentials)).willReturn(expectedResult);

    // when
    Map<String, String> result = awsEc2Client.getAddresses();

    // then
    assertEquals(expectedResult, result);
}
/**
 * Returns a page of product SPUs matching the request's filter criteria.
 * Thin delegation to the mapper, which builds the actual query.
 */
@Override
public PageResult<ProductSpuDO> getSpuPage(ProductSpuPageReqVO pageReqVO) {
    return productSpuMapper.selectPage(pageReqVO);
}
// Seeds one matching SPU plus several mismatching rows (status, specType,
// brandId, categoryId) and verifies the default page query counts only one.
@Test
void testGetSpuPage() {
    // Seed a base SPU row with constrained random values.
    ProductSpuDO createReqVO = randomPojo(ProductSpuDO.class,o->{
        o.setCategoryId(generateId());
        o.setBrandId(generateId());
        o.setDeliveryTemplateId(generateId());
        o.setSort(RandomUtil.randomInt(1,100)); // keep sort in a sane range
        o.setGiveIntegral(generaInt()); // positive int
        o.setVirtualSalesCount(generaInt()); // positive int
        o.setPrice(generaInt()); // positive int
        o.setMarketPrice(generaInt()); // positive int
        o.setCostPrice(generaInt()); // positive int
        o.setStock(generaInt()); // positive int
        o.setGiveIntegral(generaInt()); // positive int (set twice in original)
        o.setSalesCount(generaInt()); // positive int
        o.setBrowseCount(generaInt()); // positive int
    });
    // Insert the matching row.
    productSpuMapper.insert(createReqVO);
    // status mismatches
    productSpuMapper.insert(cloneIgnoreId(createReqVO, o -> o.setStatus(ProductSpuStatusEnum.DISABLE.getStatus())));
    productSpuMapper.insert(cloneIgnoreId(createReqVO, o -> o.setStatus(ProductSpuStatusEnum.RECYCLE.getStatus())));
    // specType mismatch
    productSpuMapper.insert(cloneIgnoreId(createReqVO, o -> o.setSpecType(true)));
    // brandId mismatch
    productSpuMapper.insert(cloneIgnoreId(createReqVO, o -> o.setBrandId(generateId())));
    // categoryId mismatch
    productSpuMapper.insert(cloneIgnoreId(createReqVO, o -> o.setCategoryId(generateId())));
    // Execute with an empty (default) page request.
    ProductSpuPageReqVO productSpuPageReqVO = new ProductSpuPageReqVO();
    // Optional filters - enable as needed:
    //productSpuPageReqVO.setTabType(ProductSpuPageReqVO.ALERT_STOCK);
    //productSpuPageReqVO.setTabType(ProductSpuPageReqVO.RECYCLE_BIN);
    //productSpuPageReqVO.setTabType(ProductSpuPageReqVO.FOR_SALE);
    //productSpuPageReqVO.setTabType(ProductSpuPageReqVO.IN_WAREHOUSE);
    //productSpuPageReqVO.setTabType(ProductSpuPageReqVO.SOLD_OUT);
    //productSpuPageReqVO.setName(createReqVO.getName());
    //productSpuPageReqVO.setCategoryId(createReqVO.getCategoryId());
    PageResult<ProductSpuDO> spuPage = productSpuService.getSpuPage(productSpuPageReqVO);
    // NOTE(review): with no filters set, the total of 1 presumably comes from
    // a default tab filter excluding the mismatching rows - verify against
    // the mapper's selectPage implementation.
    assertEquals(1, spuPage.getTotal());
}
/**
 * Creates an empty {@code Read} transform builder; configuration is supplied
 * through its {@code with...} methods.
 */
public static <K, V> Read<K, V> read() {
    return new AutoValue_CdapIO_Read.Builder<K, V>().build();
}
// Passing a null start offset to the Read builder must be rejected eagerly.
@Test
public void testReadObjectCreationFailsIfStartOffsetIsNull() {
    assertThrows(
        IllegalArgumentException.class,
        () -> CdapIO.<String, String>read().withStartOffset(null));
}
/**
 * Reacts to a Dubbo plugin (re)configuration event. When the plugin is
 * enabled, parses the register config from the plugin's JSON config and
 * (re)initializes the config cache only when the config actually changed;
 * the parsed config is then stored as the singleton.
 *
 * @param pluginData plugin event payload; ignored when null or disabled
 */
@Override
public void handlerPlugin(final PluginData pluginData) {
    if (Objects.nonNull(pluginData) && Boolean.TRUE.equals(pluginData.getEnabled())) {
        DubboRegisterConfig dubboRegisterConfig = GsonUtils.getInstance().fromJson(pluginData.getConfig(), DubboRegisterConfig.class);
        DubboRegisterConfig exist = Singleton.INST.get(DubboRegisterConfig.class);
        // Unparseable/absent config: keep whatever is cached.
        if (Objects.isNull(dubboRegisterConfig)) {
            return;
        }
        if (Objects.isNull(exist) || !dubboRegisterConfig.equals(exist)) {
            // If it is null, initialize it
            this.initConfigCache(dubboRegisterConfig);
        }
        Singleton.INST.single(DubboRegisterConfig.class, dubboRegisterConfig);
    }
}
// Exercises both the null-config path (first call) and the empty-JSON path
// (second call). NOTE(review): no assertions - this only verifies that
// neither path throws.
@Test
public void handlerPluginTest() {
    PluginData pluginData = new PluginData();
    pluginData.setEnabled(Boolean.TRUE);
    handler.handlerPlugin(pluginData);
    pluginData.setConfig("{}");
    handler.handlerPlugin(pluginData);
}
/** Returns the configured read timeout in milliseconds. */
public int getReadTimeOutMillis() {
    return readTimeOutMillis;
}
// The builder-supplied read timeout must round-trip through the getter.
@Test
void testGetReadTimeOutMillis() {
    final int expectedTimeoutMillis = 2000;
    HttpClientConfig clientConfig = HttpClientConfig.builder()
            .setReadTimeOutMillis(expectedTimeoutMillis)
            .build();
    assertEquals(expectedTimeoutMillis, clientConfig.getReadTimeOutMillis());
}
/**
 * Lists the job's input files, applying the built-in hidden-file filter plus
 * any user-supplied path filter, optionally recursing into subdirectories.
 * Listing is single-threaded or parallel depending on
 * {@code LIST_STATUS_NUM_THREADS}.
 *
 * @param job context carrying input paths, credentials and configuration
 * @return the matching file statuses
 * @throws IOException if no input paths are configured or listing fails
 */
protected List<FileStatus> listStatus(JobContext job
                                      ) throws IOException {
    Path[] dirs = getInputPaths(job);
    if (dirs.length == 0) {
        throw new IOException("No input paths specified in job");
    }

    // get tokens for all the required FileSystems..
    TokenCache.obtainTokensForNamenodes(job.getCredentials(), dirs,
                                        job.getConfiguration());

    // Whether we need to recursive look into the directory structure
    boolean recursive = getInputDirRecursive(job);

    // creates a MultiPathFilter with the hiddenFileFilter and the
    // user provided one (if any).
    List<PathFilter> filters = new ArrayList<PathFilter>();
    filters.add(hiddenFileFilter);
    PathFilter jobFilter = getInputPathFilter(job);
    if (jobFilter != null) {
        filters.add(jobFilter);
    }
    PathFilter inputFilter = new MultiPathFilter(filters);

    List<FileStatus> result = null;

    int numThreads = job.getConfiguration().getInt(LIST_STATUS_NUM_THREADS,
        DEFAULT_LIST_STATUS_NUM_THREADS);
    StopWatch sw = new StopWatch().start();
    if (numThreads == 1) {
        // Simple sequential listing.
        result = singleThreadedListStatus(job, dirs, inputFilter, recursive);
    } else {
        // Parallel listing; interruption is surfaced as an IOException.
        Iterable<FileStatus> locatedFiles = null;
        try {
            LocatedFileStatusFetcher locatedFileStatusFetcher = new LocatedFileStatusFetcher(
                job.getConfiguration(), dirs, recursive, inputFilter, true);
            locatedFiles = locatedFileStatusFetcher.getFileStatuses();
        } catch (InterruptedException e) {
            throw (IOException)
                new InterruptedIOException(
                    "Interrupted while getting file statuses")
                    .initCause(e);
        }
        result = Lists.newArrayList(locatedFiles);
    }

    sw.stop();
    if (LOG.isDebugEnabled()) {
        LOG.debug("Time taken to get FileStatuses: "
            + sw.now(TimeUnit.MILLISECONDS));
    }
    LOG.info("Total input files to process : " + result.size());
    return result;
}
// Recursive listing over a nested directory tree must return exactly the
// expected files, regardless of the configured listing thread count.
@Test
public void testListStatusNestedRecursive() throws IOException {
    Configuration conf = new Configuration();
    conf.setInt(FileInputFormat.LIST_STATUS_NUM_THREADS, numThreads);

    List<Path> expectedPaths = configureTestNestedRecursive(conf, localFs);
    Job job = Job.getInstance(conf);
    FileInputFormat<?, ?> fif = new TextInputFormat();
    List<FileStatus> statuses = fif.listStatus(job);

    verifyFileStatuses(expectedPaths, statuses, localFs);
}
/**
 * Installs the requested plugin dependencies into the plugins directory:
 * validates/creates the target directory, registers any extra repositories
 * (extracting user:password from the URL as basic auth), resolves the
 * dependency coordinates and copies each resolved artifact into place.
 *
 * @return 0 on success
 * @throws Exception on resolution or copy failure
 */
@Override
public Integer call() throws Exception {
    super.call();

    if (this.pluginsPath == null) {
        throw new CommandLine.ParameterException(this.spec.commandLine(), "Missing required options '--plugins' " +
            "or environment variable 'KESTRA_PLUGINS_PATH"
        );
    }

    // Create the plugins directory on first use.
    if (!pluginsPath.toFile().exists()) {
        if (!pluginsPath.toFile().mkdir()) {
            throw new RuntimeException("Cannot create directory: " + pluginsPath.toFile().getAbsolutePath());
        }
    }

    if (repositories != null) {
        Arrays.stream(repositories)
            .forEach(throwConsumer(s -> {
                URIBuilder uriBuilder = new URIBuilder(s);

                RepositoryConfig.RepositoryConfigBuilder builder = RepositoryConfig.builder()
                    .id(IdUtils.create());

                // user:password embedded in the URL becomes basic auth and is
                // stripped from the stored repository URL.
                if (uriBuilder.getUserInfo() != null) {
                    int index = uriBuilder.getUserInfo().indexOf(":");

                    builder.basicAuth(new RepositoryConfig.BasicAuth(
                        uriBuilder.getUserInfo().substring(0, index),
                        uriBuilder.getUserInfo().substring(index + 1)
                    ));

                    uriBuilder.setUserInfo(null);
                }

                builder.url(uriBuilder.build().toString());

                pluginDownloader.addRepository(builder.build());
            }));
    }

    List<URL> resolveUrl = pluginDownloader.resolve(dependencies);
    stdOut("Resolved Plugin(s) with {0}", resolveUrl);

    // Copy every resolved artifact into the plugins directory, overwriting
    // any previous version of the same file name.
    for (URL url: resolveUrl) {
        Files.copy(
            Paths.get(url.toURI()),
            Paths.get(pluginsPath.toString(), FilenameUtils.getName(url.toString())),
            StandardCopyOption.REPLACE_EXISTING
        );
    }

    stdOut("Successfully installed plugins {0} into {1}", dependencies, pluginsPath);

    return 0;
}
// Installing an explicitly versioned plugin must place exactly one jar with
// the versioned file name into the plugins directory.
@Test
void fixedVersion() throws IOException {
    Path pluginsPath = Files.createTempDirectory(PluginInstallCommandTest.class.getSimpleName());
    pluginsPath.toFile().deleteOnExit();

    try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) {
        String[] args = {"--plugins", pluginsPath.toAbsolutePath().toString(), "io.kestra.plugin:plugin-notifications:0.6.0"};
        PicocliRunner.call(PluginInstallCommand.class, ctx, args);

        List<Path> files = Files.list(pluginsPath).toList();

        assertThat(files.size(), is(1));
        assertThat(files.getFirst().getFileName().toString(), is("plugin-notifications-0.6.0.jar"));
    }
}
/**
 * Sets the transaction timeout on the wrapped XA resource.
 * Pure delegation; the delegate's return value and exceptions pass through.
 */
@Override
public boolean setTransactionTimeout(final int timeout) throws XAException {
    return delegate.setTransactionTimeout(timeout);
}
// setTransactionTimeout must forward the timeout value to the delegate unchanged.
@Test
void assertSetTransactionTimeout() throws XAException {
    singleXAResource.setTransactionTimeout(1);
    verify(xaResource).setTransactionTimeout(1);
}
/**
 * Batch-deregisters instances from a service: validates the instances,
 * strips the group-name prefix where needed, then delegates to the proxy.
 *
 * @param serviceName service the instances belong to
 * @param groupName   group of the service
 * @param instances   instances to deregister
 * @throws NacosException if validation or the remote call fails
 */
@Override
public void batchDeregisterInstance(String serviceName, String groupName, List<Instance> instances)
        throws NacosException {
    NamingUtils.batchCheckInstanceIsLegal(instances);
    batchCheckAndStripGroupNamePrefix(instances, groupName);
    clientProxy.batchDeregisterService(serviceName, groupName, instances);
}
// Deregistering an instance that was never registered is expected to raise a
// NacosException mentioning "not found".
// NOTE(review): the assertions live only in the catch block - if no exception
// is thrown the test passes silently; consider assertThrows/fail() to make
// the expectation mandatory.
@Test
void testBatchDeRegisterInstance() throws NacosException {
    Instance instance = new Instance();
    String serviceName = "service1";
    String ip = "1.1.1.1";
    int port = 10000;
    instance.setServiceName(serviceName);
    instance.setEphemeral(true);
    instance.setPort(port);
    instance.setIp(ip);
    List<Instance> instanceList = new ArrayList<>();
    instanceList.add(instance);
    //when
    try {
        client.batchDeregisterInstance(serviceName, Constants.DEFAULT_GROUP, instanceList);
    } catch (Exception e) {
        assertTrue(e instanceof NacosException);
        assertTrue(e.getMessage().contains("not found"));
    }
}
/**
 * Builds the full argument array for invoking a rest.li resource method:
 * copies the positional arguments, then fills each remaining parameter slot
 * according to its {@code ParamType} (keys, headers, projections, paging,
 * contexts, validators, attachments, unstructured data, action/query params),
 * falling back to defaults for optional parameters and rejecting missing
 * required ones. Also rejects requests that carry attachments when no
 * parameter asked for them.
 *
 * @throws RoutingException       on invalid/missing parameters (HTTP 400)
 * @throws RestLiServiceException on bad default values (HTTP 500) or
 *                                unexpected attachments (HTTP 400)
 */
@SuppressWarnings("deprecation")
static Object[] buildArgs(final Object[] positionalArguments,
                          final ResourceMethodDescriptor resourceMethod,
                          final ServerResourceContext context,
                          final DynamicRecordTemplate template,
                          final ResourceMethodConfig resourceMethodConfig)
{
    List<Parameter<?>> parameters = resourceMethod.getParameters();
    Object[] arguments = Arrays.copyOf(positionalArguments, parameters.size());

    fixUpComplexKeySingletonArraysInArguments(arguments);

    boolean attachmentsDesired = false;
    // Fill every non-positional parameter slot based on its declared type.
    for (int i = positionalArguments.length; i < parameters.size(); ++i)
    {
        Parameter<?> param = parameters.get(i);
        try
        {
            if (param.getParamType() == Parameter.ParamType.KEY || param.getParamType() == Parameter.ParamType.ASSOC_KEY_PARAM)
            {
                Object value = context.getPathKeys().get(param.getName());
                if (value != null)
                {
                    arguments[i] = value;
                    continue;
                }
            }
            else if (param.getParamType() == Parameter.ParamType.CALLBACK)
            {
                continue;
            }
            else if (param.getParamType() == Parameter.ParamType.PARSEQ_CONTEXT_PARAM || param.getParamType() == Parameter.ParamType.PARSEQ_CONTEXT)
            {
                continue; // don't know what to fill in yet
            }
            else if (param.getParamType() == Parameter.ParamType.HEADER)
            {
                HeaderParam headerParam = param.getAnnotations().get(HeaderParam.class);
                String value = context.getRequestHeaders().get(headerParam.value());
                arguments[i] = value;
                continue;
            }
            //Since we have multiple different types of MaskTrees that can be passed into resource methods,
            //we must evaluate based on the param type (annotation used)
            else if (param.getParamType() == Parameter.ParamType.PROJECTION || param.getParamType() == Parameter.ParamType.PROJECTION_PARAM)
            {
                arguments[i] = context.getProjectionMask();
                continue;
            }
            else if (param.getParamType() == Parameter.ParamType.METADATA_PROJECTION_PARAM)
            {
                arguments[i] = context.getMetadataProjectionMask();
                continue;
            }
            else if (param.getParamType() == Parameter.ParamType.PAGING_PROJECTION_PARAM)
            {
                arguments[i] = context.getPagingProjectionMask();
                continue;
            }
            else if (param.getParamType() == Parameter.ParamType.CONTEXT || param.getParamType() == Parameter.ParamType.PAGING_CONTEXT_PARAM)
            {
                PagingContext ctx = RestUtils.getPagingContext(context, (PagingContext) param.getDefaultValue());
                arguments[i] = ctx;
                continue;
            }
            else if (param.getParamType() == Parameter.ParamType.PATH_KEYS || param.getParamType() == Parameter.ParamType.PATH_KEYS_PARAM)
            {
                arguments[i] = context.getPathKeys();
                continue;
            }
            else if (param.getParamType() == Parameter.ParamType.PATH_KEY_PARAM)
            {
                Object value = context.getPathKeys().get(param.getName());

                if (value != null)
                {
                    arguments[i] = value;
                    continue;
                }
            }
            else if (param.getParamType() == Parameter.ParamType.RESOURCE_CONTEXT || param.getParamType() == Parameter.ParamType.RESOURCE_CONTEXT_PARAM)
            {
                arguments[i] = context;
                continue;
            }
            else if (param.getParamType() == Parameter.ParamType.VALIDATOR_PARAM)
            {
                RestLiDataValidator validator = new RestLiDataValidator(resourceMethod.getResourceModel().getResourceClass().getAnnotations(),
                                                                        resourceMethod.getResourceModel().getValueClass(), resourceMethod.getMethodType());
                arguments[i] = validator;
                continue;
            }
            else if (param.getParamType() == Parameter.ParamType.RESTLI_ATTACHMENTS_PARAM)
            {
                arguments[i] = context.getRequestAttachmentReader();
                attachmentsDesired = true;
                continue;
            }
            else if (param.getParamType() == Parameter.ParamType.UNSTRUCTURED_DATA_WRITER_PARAM)
            {
                // The OutputStream is passed to the resource implementation in a synchronous call. Upon return of the
                // resource method, all the bytes would haven't written to the OutputStream. The EntityStream would have
                // contained all the bytes by the time data is requested. The ownership of the OutputStream is passed to
                // the ByteArrayOutputStreamWriter, which is responsible of closing the OutputStream if necessary.
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                context.setResponseEntityStream(EntityStreams.newEntityStream(new ByteArrayOutputStreamWriter(out)));
                arguments[i] = new UnstructuredDataWriter(out, context);
                continue;
            }
            else if (param.getParamType() == Parameter.ParamType.UNSTRUCTURED_DATA_REACTIVE_READER_PARAM)
            {
                arguments[i] = new UnstructuredDataReactiveReader(context.getRequestEntityStream(), context.getRawRequest().getHeader(RestConstants.HEADER_CONTENT_TYPE));
                continue;
            }
            else if (param.getParamType() == Parameter.ParamType.POST)
            {
                // handle action parameters
                if (template != null)
                {
                    DataMap data = template.data();
                    if (data.containsKey(param.getName()))
                    {
                        arguments[i] = template.getValue(param);
                        continue;
                    }
                }
            }
            else if (param.getParamType() == Parameter.ParamType.QUERY)
            {
                Object value;
                if (DataTemplate.class.isAssignableFrom(param.getType()))
                {
                    value = buildDataTemplateArgument(context.getStructuredParameter(param.getName()), param,
                        resourceMethodConfig.shouldValidateQueryParams());
                }
                else
                {
                    value = buildRegularArgument(context, param, resourceMethodConfig.shouldValidateQueryParams());
                }

                if (value != null)
                {
                    arguments[i] = value;
                    continue;
                }
            }
            else if (param.getParamType() == Parameter.ParamType.BATCH || param.getParamType() == Parameter.ParamType.RESOURCE_KEY)
            {
                // should not come to this routine since it should be handled by passing in positionalArguments
                throw new RoutingException("Parameter '" + param.getName() + "' should be passed in as a positional argument",
                                           HttpStatus.S_400_BAD_REQUEST.getCode());
            }
            else
            {
                // unknown param type
                throw new RoutingException(
                    "Parameter '" + param.getName() + "' has an unknown parameter type '" + param.getParamType().name() + "'",
                    HttpStatus.S_400_BAD_REQUEST.getCode());
            }
        }
        catch (TemplateRuntimeException e)
        {
            throw new RoutingException("Parameter '" + param.getName() + "' is invalid", HttpStatus.S_400_BAD_REQUEST.getCode());
        }

        try
        {
            // Handling null-valued parameters not provided in resource context or entity body
            // check if it is optional parameter
            if (param.isOptional() && param.hasDefaultValue())
            {
                arguments[i] = param.getDefaultValue();
            }
            else if (param.isOptional() && !param.getType().isPrimitive())
            {
                // optional primitive parameter must have default value or provided
                arguments[i] = null;
            }
            else
            {
                throw new RoutingException("Parameter '" + param.getName() + "' is required", HttpStatus.S_400_BAD_REQUEST.getCode());
            }
        }
        catch (ResourceConfigException e)
        {
            // Parameter default value format exception should result in server error code 500.
            throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
                                             "Parameter '" + param.getName() + "' default value is invalid", e);
        }
    }

    //Verify that if the resource method did not expect attachments, and attachments were present, that we drain all
    //incoming attachments and send back a bad request. We must take precaution here since simply ignoring the request
    //attachments is not correct behavior here. Ignoring other request level constructs such as headers or query parameters
    //that were not needed is safe, but not for request attachments.
    if (!attachmentsDesired && context.getRequestAttachmentReader() != null)
    {
        throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST,
                                         "Resource method endpoint invoked does not accept any request attachments.");
    }
    return arguments;
}
// For each projection-style ParamType, buildArgs must pull the matching mask
// from the resource context and place it in the argument slot.
@Test(dataProvider = "projectionParameterData")
@SuppressWarnings("deprecation")
public void testProjectionParamType(Parameter.ParamType paramType) {
    String testParamKey = "testParam";

    ServerResourceContext mockResourceContext = EasyMock.createMock(ServerResourceContext.class);
    MaskTree mockMask = EasyMock.createMock(MaskTree.class);
    // Each ParamType maps to a distinct mask accessor on the context.
    if (paramType == Parameter.ParamType.PROJECTION_PARAM || paramType == Parameter.ParamType.PROJECTION) {
        EasyMock.expect(mockResourceContext.getProjectionMask()).andReturn(mockMask);
    } else if (paramType == Parameter.ParamType.METADATA_PROJECTION_PARAM) {
        EasyMock.expect(mockResourceContext.getMetadataProjectionMask()).andReturn(mockMask);
    } else {
        EasyMock.expect(mockResourceContext.getPagingProjectionMask()).andReturn(mockMask);
    }
    // No attachments expected for this invocation.
    EasyMock.expect(mockResourceContext.getRequestAttachmentReader()).andReturn(null);
    EasyMock.replay(mockResourceContext);

    Parameter<MaskTree> param = new Parameter<>(testParamKey,
        MaskTree.class,
        null,
        false,
        null,
        paramType,
        false,
        AnnotationSet.EMPTY);
    List<Parameter<?>> parameters = Collections.singletonList(param);
    Object[] results = ArgumentBuilder.buildArgs(new Object[0],
        getMockResourceMethod(parameters), mockResourceContext, null, getMockResourceMethodConfig(false));
    Assert.assertEquals(results[0], mockMask);
}
@Override public List<DeptDO> getChildDeptList(Long id) { List<DeptDO> children = new LinkedList<>(); // 遍历每一层 Collection<Long> parentIds = Collections.singleton(id); for (int i = 0; i < Short.MAX_VALUE; i++) { // 使用 Short.MAX_VALUE 避免 bug 场景下,存在死循环 // 查询当前层,所有的子部门 List<DeptDO> depts = deptMapper.selectListByParentId(parentIds); // 1. 如果没有子部门,则结束遍历 if (CollUtil.isEmpty(depts)) { break; } // 2. 如果有子部门,继续遍历 children.addAll(depts); parentIds = convertSet(depts, DeptDO::getId); } return children; }
// Builds a two-level tree under two roots and verifies that only the
// requested root's descendants (direct child + grandchild) are returned.
@Test
public void testGetChildDeptList() {
    // Seed first-level nodes.
    DeptDO dept1 = randomPojo(DeptDO.class, o -> o.setName("1"));
    deptMapper.insert(dept1);
    DeptDO dept2 = randomPojo(DeptDO.class, o -> o.setName("2"));
    deptMapper.insert(dept2);
    // Seed second-level nodes.
    DeptDO dept1a = randomPojo(DeptDO.class, o -> o.setName("1-a").setParentId(dept1.getId()));
    deptMapper.insert(dept1a);
    DeptDO dept2a = randomPojo(DeptDO.class, o -> o.setName("2-a").setParentId(dept2.getId()));
    deptMapper.insert(dept2a);
    // Query from dept1's parent, so dept1 and dept1a are the descendants.
    Long id = dept1.getParentId();

    // Execute
    List<DeptDO> result = deptService.getChildDeptList(id);
    // Assert: breadth-first order - dept1 first, then its child dept1a.
    assertEquals(result.size(), 2);
    assertPojoEquals(dept1, result.get(0));
    assertPojoEquals(dept1a, result.get(1));
}
/**
 * Parses one line of the ACL file. A "topic" line yields an Authorization in
 * the current (global/user/pattern) section; a "user" line switches to that
 * user's section and returns null; a "pattern" line switches to the
 * pattern section and yields an Authorization. Any other keyword is invalid.
 *
 * Fix: a "user" line without a username previously threw
 * ArrayIndexOutOfBoundsException; it now throws the declared ParseException.
 *
 * @param line raw line from the ACL file
 * @return the parsed Authorization, or null for section-switching lines
 * @throws ParseException on malformed lines
 */
protected Authorization parseAuthLine(String line) throws ParseException {
    String[] tokens = line.split("\\s+");
    String keyword = tokens[0].toLowerCase();
    switch (keyword) {
        case "topic":
            return createAuthorization(line, tokens);
        case "user":
            // A user line needs the username as its second token.
            if (tokens.length < 2) {
                throw new ParseException(String.format("invalid line definition found %s", line), 1);
            }
            m_parsingUsersSpecificSection = true;
            m_currentUser = tokens[1];
            m_parsingPatternSpecificSection = false;
            return null;
        case "pattern":
            m_parsingUsersSpecificSection = false;
            m_currentUser = "";
            m_parsingPatternSpecificSection = true;
            return createAuthorization(line, tokens);
        default:
            throw new ParseException(String.format("invalid line definition found %s", line), 1);
    }
}
// A plain "topic <name>" line must parse into the default read/write authorization.
@Test
public void testParseAuthLineValid() throws ParseException {
    Authorization authorization = authorizator.parseAuthLine("topic /weather/italy/anemometer");

    // Verify
    assertEquals(RW_ANEMOMETER, authorization);
}
public List<RowMetaAndData> getFeatureSummary() { List<RowMetaAndData> list = new ArrayList<>(); RowMetaAndData r = null; final String par = "Parameter"; final String val = "Value"; ValueMetaInterface testValue = new ValueMetaString( "FIELD" ); testValue.setLength( 30 ); if ( databaseInterface != null ) { // Type of database r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Database type" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getPluginId() ); list.add( r ); // Type of access r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Access type" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getAccessTypeDesc() ); list.add( r ); // Name of database r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Database name" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getDatabaseName() ); list.add( r ); // server host name r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Server hostname" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getHostname() ); list.add( r ); // Port number r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Service port" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getDatabasePortNumberString() ); list.add( r ); // Username r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Username" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getUsername() ); list.add( r ); // Informix server r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Informix server name" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getServername() ); list.add( r ); // Other properties... 
Enumeration<Object> keys = getAttributes().keys(); while ( keys.hasMoreElements() ) { String key = (String) keys.nextElement(); String value = getAttributes().getProperty( key ); r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Extra attribute [" + key + "]" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, value ); list.add( r ); } // driver class r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Driver class" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getDriverClass() ); list.add( r ); // URL String pwd = getPassword(); setPassword( "password" ); // Don't give away the password in the URL! String url = ""; try { url = getURL(); } catch ( Exception e ) { url = ""; } // SAP etc. r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "URL" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, url ); list.add( r ); setPassword( pwd ); // SQL: Next sequence value r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "SQL: next sequence value" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getSeqNextvalSQL( "SEQUENCE" ) ); list.add( r ); // is set fetch size supported r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "supported: set fetch size" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, isFetchSizeSupported() ? "Y" : "N" ); list.add( r ); // needs place holder for auto increment r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "auto increment field needs placeholder" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, needsPlaceHolder() ? 
"Y" : "N" ); list.add( r ); // Sum function r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "SUM aggregate function" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getFunctionSum() ); list.add( r ); // Avg function r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "AVG aggregate function" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getFunctionAverage() ); list.add( r ); // Minimum function r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "MIN aggregate function" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getFunctionMinimum() ); list.add( r ); // Maximum function r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "MAX aggregate function" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getFunctionMaximum() ); list.add( r ); // Count function r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "COUNT aggregate function" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getFunctionCount() ); list.add( r ); // Schema-table combination r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Schema / Table combination" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getQuotedSchemaTableCombination( "SCHEMA", "TABLE" ) ); list.add( r ); // Limit clause r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "LIMIT clause for 100 rows" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getLimitClause( 100 ) ); list.add( r ); // add column statement r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Add column statement" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getAddColumnStatement( "TABLE", testValue, null, false, null, false ) ); list.add( r ); // drop column statement r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Drop column statement" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getDropColumnStatement( 
"TABLE", testValue, null, false, null, false ) ); list.add( r ); // Modify column statement r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Modify column statement" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getModifyColumnStatement( "TABLE", testValue, null, false, null, false ) ); list.add( r ); // List of reserved words String reserved = ""; if ( getReservedWords() != null ) { for ( int i = 0; i < getReservedWords().length; i++ ) { reserved += ( i > 0 ? ", " : "" ) + getReservedWords()[i]; } } r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "List of reserved words" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, reserved ); list.add( r ); // Quote reserved words? r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Quote reserved words?" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, quoteReservedWords() ? "Y" : "N" ); list.add( r ); // Start Quote r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "Start quote for reserved words" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getStartQuote() ); list.add( r ); // End Quote r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "End quote for reserved words" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getEndQuote() ); list.add( r ); // List of table types String types = ""; String[] slist = getTableTypes(); if ( slist != null ) { for ( int i = 0; i < slist.length; i++ ) { types += ( i > 0 ? ", " : "" ) + slist[i]; } } r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "List of JDBC table types" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, types ); list.add( r ); // List of view types types = ""; slist = getViewTypes(); if ( slist != null ) { for ( int i = 0; i < slist.length; i++ ) { types += ( i > 0 ? 
", " : "" ) + slist[i]; } } r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "List of JDBC view types" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, types ); list.add( r ); // List of synonym types types = ""; slist = getSynonymTypes(); if ( slist != null ) { for ( int i = 0; i < slist.length; i++ ) { types += ( i > 0 ? ", " : "" ) + slist[i]; } } r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "List of JDBC synonym types" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, types ); list.add( r ); // Use schema-name to get list of tables? r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "use schema name to get table list?" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, useSchemaNameForTableList() ? "Y" : "N" ); list.add( r ); // supports view? r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "supports views?" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, supportsViews() ? "Y" : "N" ); list.add( r ); // supports synonyms? r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "supports synonyms?" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, supportsSynonyms() ? "Y" : "N" ); list.add( r ); // SQL: get list of procedures? r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "SQL: list of procedures" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, getSQLListOfProcedures() ); list.add( r ); // SQL: get truncate table statement? r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "SQL: truncate table" ); String truncateStatement = getTruncateTableStatement( null, "TABLE" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, truncateStatement != null ? truncateStatement : "Not supported by this database type" ); list.add( r ); // supports float rounding on update? 
r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "supports floating point rounding on update/insert" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, supportsFloatRoundingOnUpdate() ? "Y" : "N" ); list.add( r ); // supports time stamp to date conversion r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "supports timestamp-date conversion" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, supportsTimeStampToDateConversion() ? "Y" : "N" ); list.add( r ); // supports batch updates r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "supports batch updates" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, supportsBatchUpdates() ? "Y" : "N" ); list.add( r ); // supports boolean values r = new RowMetaAndData(); r.addValue( par, ValueMetaInterface.TYPE_STRING, "supports boolean data type" ); r.addValue( val, ValueMetaInterface.TYPE_STRING, supportsBooleanDataType() ? "Y" : "N" ); list.add( r ); } return list; }
/**
 * Sanity-checks the feature summary produced against an Oracle database interface:
 * every returned row must be a two-column ( "Parameter", "Value" ) pair of
 * string-typed values.
 */
@Test
public void testGetFeatureSummary() throws Exception {
    DatabaseMeta databaseMeta = mock( DatabaseMeta.class );
    OracleDatabaseMeta odbm = new OracleDatabaseMeta();
    // Only the members under test execute for real; everything else stays mocked.
    doCallRealMethod().when( databaseMeta ).setDatabaseInterface( any( DatabaseInterface.class ) );
    doCallRealMethod().when( databaseMeta ).getFeatureSummary();
    doCallRealMethod().when( databaseMeta ).getAttributes();
    databaseMeta.setDatabaseInterface( odbm );
    List<RowMetaAndData> result = databaseMeta.getFeatureSummary();
    assertNotNull( result );
    // Every summary row carries exactly the (Parameter, Value) string pair.
    for ( RowMetaAndData rmd : result ) {
        assertEquals( 2, rmd.getRowMeta().size() );
        assertEquals( "Parameter", rmd.getRowMeta().getValueMeta( 0 ).getName() );
        assertEquals( ValueMetaInterface.TYPE_STRING, rmd.getRowMeta().getValueMeta( 0 ).getType() );
        assertEquals( "Value", rmd.getRowMeta().getValueMeta( 1 ).getName() );
        assertEquals( ValueMetaInterface.TYPE_STRING, rmd.getRowMeta().getValueMeta( 1 ).getType() );
    }
}
/**
 * Decorates a Callable so that any Exception thrown by the underlying call is
 * translated into a fallback value produced by the given handler.
 *
 * @param callable         the task to protect
 * @param exceptionHandler maps the thrown exception to the recovery result
 * @param <T>              the result type of the callable
 * @return a Callable that yields the handler's value instead of propagating
 *         an Exception from the wrapped call
 */
public static <T> Callable<T> recover(Callable<T> callable, Function<Throwable, T> exceptionHandler) {
    return () -> {
        T result;
        try {
            result = callable.call();
        } catch (Exception caught) {
            // Delegate recovery to the handler rather than rethrowing.
            result = exceptionHandler.apply(caught);
        }
        return result;
    };
}
/** A callable that throws must yield the handler's fallback value instead of the exception. */
@Test
public void shouldRecoverCallableFromException() throws Exception {
    // Underlying task always fails with a checked exception.
    Callable<String> callable = () -> {
        throw new IOException("BAM!");
    };
    Callable<String> callableWithRecovery = CallableUtils.recover(callable, (ex) -> "Bla");
    String result = callableWithRecovery.call();
    // The IOException is swallowed and replaced by the handler's result.
    assertThat(result).isEqualTo("Bla");
}
public static Integer[] generateBySet(int begin, int end, int size) { if (begin > end) { int temp = begin; begin = end; end = temp; } // 加入逻辑判断,确保begin<end并且size不能大于该表示范围 if ((end - begin) < size) { throw new UtilException("Size is larger than range between begin and end!"); } Set<Integer> set = new HashSet<>(size, 1); while (set.size() < size) { set.add(begin + RandomUtil.randomInt(end - begin)); } return set.toArray(new Integer[0]); }
/** Requesting 5 distinct values from [10, 100) must return exactly 5 entries. */
@Test
public void generateBySetTest(){
    final Integer[] integers = NumberUtil.generateBySet(10, 100, 5);
    // Exactly the requested number of (distinct) values is returned.
    assertEquals(5, integers.length);
}
/**
 * Analyzes the given statement with the default analysis mode.
 *
 * Delegates to the two-argument overload with the boolean flag set to
 * {@code false} — the flag's meaning is defined by that overload (not visible
 * here); confirm against its definition before relying on it.
 *
 * @param statement the parsed statement to analyze
 * @return the resulting Analysis
 */
public Analysis analyze(Statement statement) {
    return analyze(statement, false);
}
/**
 * EXPLAIN ANALYZE must reject option combinations that are invalid for it:
 * a "type" option (alone or with "format") and duplicate "format" options.
 */
@Test
public void testExplainAnalyzeIllegalArgs() {
    // ANALYZE does not accept an explain "type" option.
    assertThrows(IllegalStateException.class,
        () -> analyze("EXPLAIN ANALYZE (type LOGICAL) SELECT * FROM t1"));
    assertThrows(IllegalStateException.class,
        () -> analyze("EXPLAIN ANALYZE (format TEXT, type LOGICAL) SELECT * FROM t1"));
    // Duplicate "format" options are rejected as well.
    assertThrows(IllegalStateException.class,
        () -> analyze("EXPLAIN ANALYZE (format JSON, format TEXT) SELECT * FROM t1"));
}
@Override public List<SnowflakeIdentifier> listSchemas(SnowflakeIdentifier scope) { StringBuilder baseQuery = new StringBuilder("SHOW SCHEMAS"); String[] queryParams = null; switch (scope.type()) { case ROOT: // account-level listing baseQuery.append(" IN ACCOUNT"); break; case DATABASE: // database-level listing baseQuery.append(" IN DATABASE IDENTIFIER(?)"); queryParams = new String[] {scope.toIdentifierString()}; break; default: throw new IllegalArgumentException( String.format("Unsupported scope type for listSchemas: %s", scope)); } final String finalQuery = baseQuery.toString(); final String[] finalQueryParams = queryParams; List<SnowflakeIdentifier> schemas; try { schemas = connectionPool.run( conn -> queryHarness.query( conn, finalQuery, SCHEMA_RESULT_SET_HANDLER, finalQueryParams)); } catch (SQLException e) { throw snowflakeExceptionToIcebergException( scope, e, String.format("Failed to list schemas for scope '%s'", scope)); } catch (InterruptedException e) { throw new UncheckedInterruptedException( e, "Interrupted while listing schemas for scope '%s'", scope); } schemas.forEach( schema -> Preconditions.checkState( schema.type() == SnowflakeIdentifier.Type.SCHEMA, "Expected SCHEMA, got identifier '%s' for scope '%s'", schema, scope)); return schemas; }
/**
 * Each "database not found" SQL error code must surface as NoSuchNamespaceException,
 * with a message naming the missing database and the original SQLException kept
 * as the cause.
 */
@SuppressWarnings("unchecked")
@Test
public void testListSchemasSQLExceptionAtDatabaseLevel() throws SQLException, InterruptedException {
    for (Integer errorCode : DATABASE_NOT_FOUND_ERROR_CODES) {
        // Inject a driver-level failure carrying this specific error code.
        Exception injectedException =
            new SQLException(
                String.format("SQL exception with Error Code %d", errorCode), "2000", errorCode, null);
        when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
        // The client must translate it into the Iceberg namespace-not-found exception.
        assertThatExceptionOfType(NoSuchNamespaceException.class)
            .isThrownBy(() -> snowflakeClient.listSchemas(SnowflakeIdentifier.ofDatabase("DB_1")))
            .withMessageContaining(
                String.format(
                    "Identifier not found: 'DATABASE: 'DB_1''. Underlying exception: 'SQL exception with Error Code %d'",
                    errorCode))
            .withCause(injectedException);
    }
}
/**
 * Resolves which original configuration segment a global (concatenated) line
 * number belongs to, and remaps that line to a 1-based position relative to
 * the start of the matched segment.
 *
 * @param globalLineNumber line number in the concatenated configuration
 * @param sourceColumn     column number, passed through unchanged
 * @return a SourceWithMetadata for the matched segment with the remapped line
 * @throws IllegalArgumentException when no segment contains the given line
 */
public SourceWithMetadata lookupSource(int globalLineNumber, int sourceColumn) throws IncompleteSourceWithMetadataException {
    // Find the first segment whose line range contains the queried global line.
    LineToSource segment = this.sourceReferences().stream()
            .filter(candidate -> candidate.includeLine(globalLineNumber))
            .findFirst()
            .orElseThrow(() -> new IllegalArgumentException(
                    "can't find the config segment related to line " + globalLineNumber));
    // Remap the global line to a 1-based line number local to the matched source.
    int localLine = globalLineNumber + 1 - segment.startLine;
    return new SourceWithMetadata(segment.source.getProtocol(), segment.source.getId(),
            localLine, sourceColumn, segment.source.getText());
}
/** A single-line pipeline file: global line 1 maps back to local line 1 of that file. */
@Test
public void testSourceAndLineRemapping_pipelineDefinedInSingleFileOneLine() throws IncompleteSourceWithMetadataException {
    String oneLinerPipeline = "input { generator1 }";
    final SourceWithMetadata swm = new SourceWithMetadata("file", "/tmp/1", 0, 0, oneLinerPipeline);
    sut = new PipelineConfig(source, pipelineIdSym, toRubyArray(new SourceWithMetadata[]{swm}), SETTINGS);
    // With only one source, the queried global line equals the local line.
    assertEquals("return the same line of the queried", 1, (int) sut.lookupSource(1, 0).getLine());
}
/**
 * Parses a textual duration. Accepts ISO-8601 form (matched by the ISO8601
 * pattern and handed to {@link Duration#parse}) as well as the simple
 * "&lt;amount&gt;&lt;unit&gt;" shorthand (matched by the SIMPLE pattern).
 * Any failure is reported uniformly as an IllegalStateException naming the input.
 *
 * @param source the duration text to parse
 * @return the parsed Duration
 * @throws IllegalStateException when the text is not a valid duration
 */
@Override
public Duration convert(String source) {
    try {
        // ISO-8601 durations are handed straight to java.time.
        if (ISO8601.matcher(source).matches()) {
            return Duration.parse(source);
        }
        // Otherwise fall back to the simple "<amount><unit>" shorthand.
        Matcher simpleMatcher = SIMPLE.matcher(source);
        Assert.state(simpleMatcher.matches(), "'" + source + "' is not a valid duration");
        long amount = Long.parseLong(simpleMatcher.group(1));
        ChronoUnit unit = getUnit(simpleMatcher.group(2));
        return Duration.of(amount, unit);
    } catch (Exception ex) {
        // Normalize every failure (regex mismatch, number overflow, parse error)
        // into one consistent exception type with the offending input attached.
        throw new IllegalStateException("'" + source + "' is not a valid duration", ex);
    }
}
/** Minute suffixes in either case, with or without an explicit sign, parse to minutes. */
@Test
public void convertWhenSimpleMinutesShouldReturnDuration() {
    assertThat(convert("10m")).isEqualTo(Duration.ofMinutes(10));
    // The unit suffix is accepted in upper case too.
    assertThat(convert("10M")).isEqualTo(Duration.ofMinutes(10));
    // Explicit signs are honored.
    assertThat(convert("+10m")).isEqualTo(Duration.ofMinutes(10));
    assertThat(convert("-10m")).isEqualTo(Duration.ofMinutes(-10));
}