focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Validates a table ID: it must be non-empty, no longer than the configured
 * maximum, and contain only letters, numbers, hyphens and underscores.
 *
 * @param idToCheck the table ID to validate
 * @throws IllegalArgumentException if the ID is empty, too long, or contains
 *     an illegal character
 */
static void checkValidTableId(String idToCheck) {
  final int length = idToCheck.length();
  if (length < MIN_TABLE_ID_LENGTH) {
    throw new IllegalArgumentException("Table ID cannot be empty. ");
  }
  if (length > MAX_TABLE_ID_LENGTH) {
    throw new IllegalArgumentException(
        "Table ID " + idToCheck + " cannot be longer than " + MAX_TABLE_ID_LENGTH + " characters.");
  }
  // A single illegal character anywhere in the ID is enough to reject it.
  if (ILLEGAL_TABLE_CHARS.matcher(idToCheck).find()) {
    throw new IllegalArgumentException(
        "Table ID " + idToCheck
            + " is not a valid ID. Only letters, numbers, hyphens and underscores are allowed.");
  }
}
@Test
public void testCheckValidTableIdWhenIdContainsIllegalCharacter() {
  // Both a trailing and an embedded illegal character must be rejected.
  for (String invalidId : new String[] {"table-id%", "ta#ble-id"}) {
    assertThrows(IllegalArgumentException.class, () -> checkValidTableId(invalidId));
  }
}
/**
 * Materializes the internal resource counter into a freshly allocated
 * collection of {@link ResourceRequirement} instances.
 *
 * @return a new mutable collection; never shared with internal state
 */
@Override
public Collection<ResourceRequirement> getResourceRequirements() {
    final Collection<ResourceRequirement> requirements = new ArrayList<>();
    for (Map.Entry<ResourceProfile, Integer> entry
            : totalResourceRequirements.getResourcesWithCount()) {
        requirements.add(ResourceRequirement.create(entry.getKey(), entry.getValue()));
    }
    return requirements;
}
@TestTemplate
void testGetResourceRequirements() {
    final DefaultDeclarativeSlotPool slotPool = createDefaultDeclarativeSlotPool();
    // A fresh pool has no requirements.
    assertThat(slotPool.getResourceRequirements()).isEmpty();
    final ResourceCounter resourceRequirements = createResourceRequirements();
    slotPool.increaseResourceRequirementsBy(resourceRequirements);
    // Wait for the asynchronous slot request to settle before asserting.
    slotPool.tryWaitSlotRequestIsDone();
    assertThat(slotPool.getResourceRequirements())
        .isEqualTo(toResourceRequirements(resourceRequirements));
}
/**
 * Resolves the effective SSL client-authentication mode.
 *
 * <p>The deprecated boolean {@code SSL_CLIENT_AUTH_CONFIG} is honoured only
 * when the newer {@code SSL_CLIENT_AUTHENTICATION_CONFIG} is absent; when both
 * are supplied the newer setting wins and a deprecation warning is logged.
 */
public ClientAuth getClientAuth() {
    String clientAuth = getString(SSL_CLIENT_AUTHENTICATION_CONFIG);
    if (originals().containsKey(SSL_CLIENT_AUTH_CONFIG)) {
        if (originals().containsKey(SSL_CLIENT_AUTHENTICATION_CONFIG)) {
            // Both configured: keep the new setting's value, just warn.
            log.warn(
                "The {} configuration is deprecated. Since a value has been supplied for the {} "
                    + "configuration, that will be used instead",
                SSL_CLIENT_AUTH_CONFIG,
                SSL_CLIENT_AUTHENTICATION_CONFIG
            );
        } else {
            // Only the deprecated boolean configured: map true -> required, false -> none.
            log.warn(
                "The configuration {} is deprecated and should be replaced with {}",
                SSL_CLIENT_AUTH_CONFIG,
                SSL_CLIENT_AUTHENTICATION_CONFIG
            );
            clientAuth = getBoolean(SSL_CLIENT_AUTH_CONFIG)
                ? SSL_CLIENT_AUTHENTICATION_REQUIRED
                : SSL_CLIENT_AUTHENTICATION_NONE;
        }
    }
    return getClientAuth(clientAuth);
}
@Test
public void shouldUseClientAuthenticationIfClientAuthProvidedNone() {
    // Given: both the deprecated boolean (true) and the new setting (NONE) are
    // supplied — the new setting must take precedence.
    final KsqlRestConfig config = new KsqlRestConfig(ImmutableMap.<String, Object>builder()
        .put(KsqlRestConfig.SSL_CLIENT_AUTH_CONFIG, true)
        .put(KsqlRestConfig.SSL_CLIENT_AUTHENTICATION_CONFIG,
            KsqlRestConfig.SSL_CLIENT_AUTHENTICATION_NONE)
        .build());
    // When:
    final ClientAuth clientAuth = config.getClientAuth();
    // Then:
    assertThat(clientAuth, is(ClientAuth.NONE));
}
/**
 * Fits an RDA model with hyper-parameters read from {@code params}:
 * {@code smile.rda.alpha} (default 0.9), {@code smile.rda.priori}
 * (optional prior probabilities) and {@code smile.rda.tolerance}
 * (default 1E-4).
 *
 * @param x training samples
 * @param y class labels
 * @param params hyper-parameter properties
 * @return the fitted model
 */
public static RDA fit(double[][] x, int[] y, Properties params) {
    final double alpha = Double.parseDouble(params.getProperty("smile.rda.alpha", "0.9"));
    final double[] priori = Strings.parseDoubleArray(params.getProperty("smile.rda.priori"));
    final double tol = Double.parseDouble(params.getProperty("smile.rda.tolerance", "1E-4"));
    return fit(x, y, alpha, priori, tol);
}
@Test
public void testUSPS() throws Exception {
    System.out.println("USPS");
    // Fit on the USPS digit data with alpha = 0.7 and check the known error count.
    RDA model = RDA.fit(USPS.x, USPS.y, 0.7);
    int[] prediction = model.predict(USPS.testx);
    int error = Error.of(USPS.testy, prediction);
    System.out.println("Error = " + error);
    assertEquals(235, error);
    // Round-trip serialization must not throw.
    java.nio.file.Path temp = Write.object(model);
    Read.object(temp);
}
/**
 * Interceptor hook: caches the intercepted target's service name and attaches
 * router metadata to it before the original call proceeds.
 *
 * @param context the execution context of the intercepted call
 * @return the same context, unchanged
 */
@Override
public ExecuteContext before(ExecuteContext context) {
    Object[] arguments = context.getArguments();
    if (arguments != null && arguments.length > 0) {
        Object obj = arguments[0];
        // Reflectively read the "serviceName" field; null when absent.
        Object serviceName = ReflectUtils.getFieldValue(obj, "serviceName").orElse(null);
        if (serviceName instanceof String) {
            AppCache.INSTANCE.setAppName((String) serviceName);
        } else {
            LOGGER.warning("Service name is null or not instanceof string.");
        }
        // Metadata is attached regardless of whether the name was cached.
        SpringRouterUtils.putMetaData(SpringRouterUtils.getMetadata(obj), routerConfig);
    }
    return context;
}
@Test
public void testBefore() {
    interceptor.before(context);
    Assert.assertEquals("foo", AppCache.INSTANCE.getAppName());
    Map<String, String> metadata = ((TestObject) context.getArguments()[0]).getMetadata();
    Assert.assertEquals("bar1", metadata.get("bar"));
    Assert.assertEquals("foo1", metadata.get("foo"));
    Assert.assertEquals(routerConfig.getRouterVersion(), metadata.get("version"));
    // A null service name must not clobber the previously cached app name.
    context.getArguments()[0] = new TestObject(null);
    interceptor.before(context);
    Assert.assertEquals("foo", AppCache.INSTANCE.getAppName());
}
/**
 * Hash over every configuration field; must stay consistent with equals().
 * Nullable fields contribute 0 when absent.
 */
@SuppressWarnings("NPathComplexity")
@Override
public final int hashCode() {
    int result = (name != null ? name.hashCode() : 0);
    result = 31 * result + backupCount;
    result = 31 * result + asyncBackupCount;
    result = 31 * result + timeToLiveSeconds;
    result = 31 * result + maxIdleSeconds;
    result = 31 * result + evictionConfig.hashCode();
    result = 31 * result + (mapStoreConfig != null ? mapStoreConfig.hashCode() : 0);
    result = 31 * result + (nearCacheConfig != null ? nearCacheConfig.hashCode() : 0);
    result = 31 * result + (readBackupData ? 1 : 0);
    result = 31 * result + cacheDeserializedValues.hashCode();
    result = 31 * result + (mergePolicyConfig != null ? mergePolicyConfig.hashCode() : 0);
    result = 31 * result + inMemoryFormat.hashCode();
    result = 31 * result + metadataPolicy.hashCode();
    result = 31 * result + (wanReplicationRef != null ? wanReplicationRef.hashCode() : 0);
    result = 31 * result + getEntryListenerConfigs().hashCode();
    // Index configs are hashed order-insensitively via a Set snapshot.
    result = 31 * result + Set.copyOf(getIndexConfigs()).hashCode();
    result = 31 * result + getAttributeConfigs().hashCode();
    result = 31 * result + getQueryCacheConfigs().hashCode();
    result = 31 * result + getPartitionLostListenerConfigs().hashCode();
    result = 31 * result + (statisticsEnabled ? 1 : 0);
    result = 31 * result + (perEntryStatsEnabled ? 1 : 0);
    result = 31 * result + (partitioningStrategyConfig != null ? partitioningStrategyConfig.hashCode() : 0);
    result = 31 * result + (splitBrainProtectionName != null ? splitBrainProtectionName.hashCode() : 0);
    result = 31 * result + merkleTreeConfig.hashCode();
    result = 31 * result + eventJournalConfig.hashCode();
    result = 31 * result + hotRestartConfig.hashCode();
    result = 31 * result + dataPersistenceConfig.hashCode();
    result = 31 * result + tieredStoreConfig.hashCode();
    result = 31 * result + getPartitioningAttributeConfigs().hashCode();
    result = 31 * result + (userCodeNamespace != null ? userCodeNamespace.hashCode() : 0);
    return result;
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void testDefaultHashCode() {
    // Smoke test: hashCode on a default-constructed MapConfig must not throw
    // (guards against NPEs from uninitialized nested config objects).
    new MapConfig().hashCode();
}
/** Returns the number of cache entries affected by this cache-wide event. */
public int getNumberOfEntriesAffected() {
    return numberOfEntriesAffected;
}
@Test
public void testGetNumberOfEntriesAffected() {
    // The fixture is constructed with 42 affected entries; verify the getter.
    assertEquals(42, localCacheWideEventData.getNumberOfEntriesAffected());
}
/**
 * Response filter that appends a UTF-8 charset parameter to the Content-Type
 * header when the response declares a media type without an explicit charset.
 */
@Override
public void filter(final ContainerRequestContext request, final ContainerResponseContext response)
        throws IOException {
    final MediaType mediaType = response.getMediaType();
    if (mediaType == null) {
        return;
    }
    if (mediaType.getParameters().containsKey(MediaType.CHARSET_PARAMETER)) {
        // Charset already present — leave the header untouched.
        return;
    }
    response.getHeaders().putSingle(HttpHeaders.CONTENT_TYPE, mediaType.withCharset(UTF_8));
}
@Test
void testSetsCharsetEncoding() throws Exception {
    when(response.getMediaType()).thenReturn(MediaType.APPLICATION_JSON_TYPE);
    MultivaluedMap<String, Object> headers = new MultivaluedHashMap<>();
    headers.add(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_TYPE);
    when(response.getHeaders()).thenReturn(headers);
    charsetUtf8Filter.filter(request, response);
    // The filter must have rewritten the Content-Type to include charset=UTF-8.
    assertThat((MediaType) headers.getFirst(HttpHeaders.CONTENT_TYPE))
        .isEqualTo(MediaType.valueOf("application/json;charset=UTF-8"));
}
/**
 * Parses a sort-direction token: only the DESC constant maps to descending;
 * any other value (including null) is treated as ascending.
 */
public static SortDir sortDir(String s) {
    return DESC.equals(s) ? SortDir.DESC : SortDir.ASC;
}
@Test
public void sortDirAsc() {
    // "asc" is not the DESC token, so it must parse as ascending.
    assertEquals("asc sort dir", SortDir.ASC, TableModel.sortDir("asc"));
}
/**
 * Inserts a directory diff at the head of the skip list. A random level is
 * chosen for the new node; at each level above 0 the diffs that the new head
 * now "skips over" are combined into the node's skip diff.
 *
 * @param diff the directory diff to insert as the new first element
 */
@Override
public void addFirst(DirectoryDiff diff) {
    final int nodeLevel = DirectoryDiffListFactory.randomLevel();
    // The insertion path is trivially all-head since we insert at the front.
    final SkipListNode[] nodePath = new SkipListNode[nodeLevel + 1];
    Arrays.fill(nodePath, head);
    final SkipListNode newNode = new SkipListNode(diff, nodeLevel);
    for (int level = 0; level <= nodeLevel; level++) {
        if (level > 0) {
            // Case : S0 is added at the beginning and it has 3 levels
            // suppose the list is like:
            // level 1: head ------------------->s5------------->NULL
            // level 0:head-> s1->s2->s3->s4->s5->s6->s7->s8->s9
            // in this case:
            // level 2: head -> s0 -------------------------------->NULL
            // level 1: head -> s0'---------------->s5------------->NULL
            // level 0:head-> s0->s1->s2->s3->s4->s5->s6->s7->s8->s9
            // At level 1, we need to combine s0, s1, s2, s3, s4 and s5 and store
            // as s0'. At level 2, s0 of next is pointing to null;
            // Note: in this case, the diff of element being added is included
            // while combining the diffs.
            final SkipListNode nextNode = head.getSkipNode(level);
            if (nextNode != null) {
                ChildrenDiff combined = combineDiff(newNode, nextNode, level);
                if (combined != null) {
                    newNode.setSkipDiff(combined, level);
                }
            }
        }
        //insert to the linked list
        newNode.setSkipTo(nodePath[level].getSkipNode(level), level);
        nodePath[level].setSkipTo(newNode, level);
    }
    skipNodeList.add(0, newNode);
}
@Test
public void testAddFirst() throws Exception {
    // Delegates to the parameterized helper with the standard snapshot count.
    testAddFirst(NUM_SNAPSHOTS);
}
/**
 * Formats the Java source read from {@code input} and writes the result to
 * {@code output}. Note: the whole input is read into memory before formatting.
 *
 * @throws FormatterException if the source cannot be parsed/formatted
 * @throws IOException on read or write failure
 */
public void formatSource(CharSource input, CharSink output) throws FormatterException, IOException {
    // TODO(cushon): proper support for streaming input/output. Input may
    // not be feasible (parsing) but output should be easier.
    output.write(formatSource(input.read()));
}
@Test
public void voidMethod() throws FormatterException {
    String input = "class X { void Y() {} }";
    String output = new Formatter().formatSource(input);
    // Expect canonical formatting with a trailing newline.
    String expect = "class X {\n  void Y() {}\n}\n";
    assertThat(output).isEqualTo(expect);
}
/**
 * Initializes the token authentication provider from the service configuration:
 * resolves (optionally prefixed) setting names, loads the validation key and
 * claim/audience settings, and builds the JWT parser.
 *
 * @throws IOException if the key material cannot be read
 * @throws IllegalArgumentException if an audience claim is configured without an audience
 */
@Override
public void initialize(ServiceConfiguration config) throws IOException, IllegalArgumentException {
    String prefix = (String) config.getProperty(CONF_TOKEN_SETTING_PREFIX);
    if (null == prefix) {
        prefix = "";
    }
    this.confTokenSecretKeySettingName = prefix + CONF_TOKEN_SECRET_KEY;
    this.confTokenPublicKeySettingName = prefix + CONF_TOKEN_PUBLIC_KEY;
    this.confTokenAuthClaimSettingName = prefix + CONF_TOKEN_AUTH_CLAIM;
    this.confTokenPublicAlgSettingName = prefix + CONF_TOKEN_PUBLIC_ALG;
    this.confTokenAudienceClaimSettingName = prefix + CONF_TOKEN_AUDIENCE_CLAIM;
    this.confTokenAudienceSettingName = prefix + CONF_TOKEN_AUDIENCE;
    this.confTokenAllowedClockSkewSecondsSettingName = prefix + CONF_TOKEN_ALLOWED_CLOCK_SKEW_SECONDS;
    // we need to fetch the algorithm before we fetch the key
    this.publicKeyAlg = getPublicKeyAlgType(config);
    this.validationKey = getValidationKey(config);
    this.roleClaim = getTokenRoleClaim(config);
    this.audienceClaim = getTokenAudienceClaim(config);
    this.audience = getTokenAudience(config);
    long allowedSkew = getConfTokenAllowedClockSkewSeconds(config);
    this.parser = Jwts.parserBuilder()
            .setAllowedClockSkewSeconds(allowedSkew)
            .setSigningKey(this.validationKey)
            .build();
    // An audience claim without a configured audience is a misconfiguration.
    if (audienceClaim != null && audience == null) {
        throw new IllegalArgumentException("Token Audience Claim [" + audienceClaim
                + "] configured, but Audience stands for this broker not.");
    }
}
@Test(expectedExceptions = IllegalArgumentException.class)
public void testInitializeWhenSecretKeyFilePathIfNotExist() throws IOException {
    // Create-then-delete guarantees the path does not exist.
    File secretKeyFile = File.createTempFile("secret_key_file_not_exist", ".key");
    assertTrue(secretKeyFile.delete());
    assertFalse(secretKeyFile.exists());
    Properties properties = new Properties();
    properties.setProperty(AuthenticationProviderToken.CONF_TOKEN_SECRET_KEY, secretKeyFile.toString());
    ServiceConfiguration conf = new ServiceConfiguration();
    conf.setProperties(properties);
    // Initialization must fail on the missing key file.
    new AuthenticationProviderToken().initialize(conf);
}
/**
 * Deserializes a partial XML document (UTF-8) into an instance of the given class.
 *
 * @param partial the XML fragment
 * @param o the target class
 * @return the deserialized instance
 */
public <T> T fromXmlPartial(String partial, Class<T> o) throws Exception {
    return fromXmlPartial(toInputStream(partial, UTF_8), o);
}
@Test
void shouldLoadBuildPlanFromXmlPartial() throws Exception {
    String buildXmlPartial =
        """
        <job name="functional">
          <artifacts>
            <artifact type="build" src="artifact1.xml" dest="cruise-output" />
          </artifacts>
        </job>""";
    JobConfig build = xmlLoader.fromXmlPartial(buildXmlPartial, JobConfig.class);
    // Name and the single declared artifact must survive the round trip.
    assertThat(build.name()).isEqualTo(new CaseInsensitiveString("functional"));
    assertThat(build.artifactTypeConfigs().size()).isEqualTo(1);
}
/**
 * Always returns {@code null}: this implementation stores no double values,
 * so callers using the defaulting overload receive their supplied default.
 */
@Override
public Double getDouble(K name) {
    return null;
}
@Test
public void testGetDoubleDefault() {
    // getDouble(name) yields null, so the defaulting overload returns 1.
    assertEquals(1, HEADERS.getDouble("name1", 1), 0);
}
/**
 * Attempts one autoscaling pass for the given cluster: under the application
 * lock it refreshes completion state, computes a new target (unless scaling is
 * already in progress), persists any changes, and — outside the lock — triggers
 * a redeploy when the target differs from current resources.
 *
 * @return true when done (including "nothing to do"); false if the lock could
 *     not be taken and the caller should retry
 */
private boolean autoscale(ApplicationId applicationId, ClusterSpec.Id clusterId) {
    boolean redeploy = false;
    boolean enabled = enabledFlag.with(Dimension.INSTANCE_ID, applicationId.serializedForm()).value();
    boolean logDetails = enableDetailedLoggingFlag.with(Dimension.INSTANCE_ID, applicationId.serializedForm()).value();
    try (var lock = nodeRepository().applications().lock(applicationId)) {
        Optional<Application> application = nodeRepository().applications().get(applicationId);
        if (application.isEmpty()) return true;
        if (application.get().cluster(clusterId).isEmpty()) return true;
        Cluster cluster = application.get().cluster(clusterId).get();
        Cluster unchangedCluster = cluster;
        NodeList clusterNodes = nodeRepository().nodes().list(Node.State.active).owner(applicationId).cluster(clusterId);
        if (clusterNodes.isEmpty()) return true; // Cluster was removed since we started
        cluster = updateCompletion(cluster, clusterNodes);
        var current = new AllocatableResources(clusterNodes.not().retired(), nodeRepository()).advertisedResources();
        // Autoscale unless an autoscaling is already in progress
        Autoscaling autoscaling = null;
        if (cluster.target().resources().isEmpty() && !cluster.scalingInProgress()) {
            autoscaling = autoscaler.autoscale(application.get(), cluster, clusterNodes, enabled, logDetails);
            if (autoscaling.isPresent() || cluster.target().isEmpty()) // Ignore empty from recently started servers
                cluster = cluster.withTarget(autoscaling);
        }
        // Always store any updates
        if (cluster != unchangedCluster) applications().put(application.get().with(cluster), lock);
        // Attempt to perform the autoscaling immediately, and log it regardless
        if (autoscaling != null && autoscaling.resources().isPresent() && !current.equals(autoscaling.resources().get())) {
            redeploy = true;
            logAutoscaling(current, autoscaling.resources().get(), applicationId, clusterNodes.not().retired());
            if (logDetails) {
                log.info("autoscaling data for " + applicationId.toFullString() + ": " +
                         "\n\tmetrics().cpuCostPerQuery(): " + autoscaling.metrics().cpuCostPerQuery() +
                         "\n\tmetrics().queryRate(): " + autoscaling.metrics().queryRate() +
                         "\n\tmetrics().growthRateHeadroom(): " + autoscaling.metrics().growthRateHeadroom() +
                         "\n\tpeak(): " + autoscaling.peak().toString() +
                         "\n\tideal(): " + autoscaling.ideal().toString());
            }
        }
    } catch (ApplicationLockException e) {
        return false;
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException("Illegal arguments for " + applicationId + " cluster " + clusterId, e);
    }
    // Redeploy outside the application lock to avoid holding it during activation.
    if (redeploy) {
        try (MaintenanceDeployment deployment = new MaintenanceDeployment(applicationId, deployer, metric, nodeRepository())) {
            if (deployment.isValid()) deployment.activate();
        }
    }
    return true;
}
@Test
public void test_cd_autoscaling_test() {
    ApplicationId app1 = AutoscalingMaintainerTester.makeApplicationId("app1");
    ClusterSpec cluster1 = AutoscalingMaintainerTester.containerClusterSpec();
    NodeResources resources = new NodeResources(1, 4, 50, 1);
    ClusterResources min = new ClusterResources( 2, 1, resources);
    ClusterResources max = new ClusterResources(3, 1, resources);
    var capacity = Capacity.from(min, max);
    // CD system zone — autoscaling behaviour in CD is what's under test.
    var tester = new AutoscalingMaintainerTester(new Zone(SystemName.cd, Environment.prod, RegionName.from("us-east3")),
                                                 new MockDeployer.ApplicationContext(app1, cluster1, capacity));
    ManualClock clock = tester.clock();
    tester.deploy(app1, cluster1, capacity);
    // Deployment starts at the minimum of 2 nodes.
    assertEquals(2,
                 tester.nodeRepository().nodes().list(Node.State.active)
                       .owner(app1)
                       .cluster(cluster1.id())
                       .size());
    autoscale(false, Duration.ofMinutes( 1), Duration.ofMinutes( 5), clock, app1, cluster1, tester);
    // After autoscaling the cluster grows to the maximum of 3 nodes.
    assertEquals(3,
                 tester.nodeRepository().nodes().list(Node.State.active)
                       .owner(app1)
                       .cluster(cluster1.id())
                       .size());
}
@Nullable public Object get(PropertyKey key) { if (mUserProps.containsKey(key)) { return mUserProps.get(key).orElse(null); } // In case key is not the reference to the original key return PropertyKey.fromString(key.toString()).getDefaultValue(); }
@Test
public void get() {
    assertEquals("value", mProperties.get(mKeyWithValue));
    // A key with no user value and no default resolves to null.
    assertEquals(null, mProperties.get(mKeyWithoutValue));
    mProperties.put(mKeyWithoutValue, "newValue1", Source.RUNTIME);
    // After a runtime put, the user value takes precedence.
    assertEquals("newValue1", mProperties.get(mKeyWithoutValue));
}
/**
 * Static factory for a parameterized-type application (e.g. {@code List<String>}).
 * The type-argument list is defensively copied into an immutable list.
 */
public static UTypeApply create(UExpression type, List<UExpression> typeArguments) {
    return new AutoValue_UTypeApply(type, ImmutableList.copyOf(typeArguments));
}
@Test
public void serialization() {
    // A UTypeApply (here List<String>) must survive Java serialization round-trips.
    SerializableTester.reserializeAndAssert(
        UTypeApply.create(
            UClassIdent.create("java.util.List"), UClassIdent.create("java.lang.String")));
}
/**
 * Begins a tolerant comparison: {@code isWithin(t).of(expected)} passes when
 * the actual float is within {@code t} of {@code expected}.
 */
public TolerantFloatComparison isWithin(float tolerance) {
    return new TolerantFloatComparison() {
        @Override
        public void of(float expected) {
            Float actual = FloatSubject.this.actual;
            // A null actual is a usage error, not a comparison failure.
            checkNotNull(
                actual, "actual value cannot be null. tolerance=%s expected=%s", tolerance, expected);
            // Rejects NaN/negative/infinite tolerances up front.
            checkTolerance(tolerance);
            if (!equalWithinTolerance(actual, expected, tolerance)) {
                failWithoutActual(
                    fact("expected", floatToString(expected)),
                    butWas(),
                    fact("outside tolerance", floatToString(tolerance)));
            }
        }
    };
}
@Test
public void isWithinOf() {
    assertThat(2.0f).isWithin(0.0f).of(2.0f);
    assertThat(2.0f).isWithin(0.00001f).of(2.0f);
    assertThat(2.0f).isWithin(1000.0f).of(2.0f);
    assertThat(2.0f).isWithin(1.00001f).of(3.0f);
    // Values just outside tolerance, and non-finite expectations, must fail.
    assertThatIsWithinFails(2.0f, 0.99999f, 3.0f);
    assertThatIsWithinFails(2.0f, 1000.0f, 1003.0f);
    assertThatIsWithinFails(2.0f, 1000.0f, Float.POSITIVE_INFINITY);
    assertThatIsWithinFails(2.0f, 1000.0f, Float.NaN);
    assertThatIsWithinFails(Float.NEGATIVE_INFINITY, 1000.0f, 2.0f);
    assertThatIsWithinFails(Float.NaN, 1000.0f, 2.0f);
}
/**
 * Deprecated transform: adapts the supplied transformer and delegates to
 * {@code flatTransform} under a freshly generated processor name.
 *
 * @throws NullPointerException if {@code transformerSupplier} is null
 */
@Override
@Deprecated
public <KR, VR> KStream<KR, VR> transform(final org.apache.kafka.streams.kstream.TransformerSupplier<? super K, ? super V, KeyValue<KR, VR>> transformerSupplier,
                                          final String... stateStoreNames) {
    Objects.requireNonNull(transformerSupplier, "transformerSupplier can't be null");
    final String name = builder.newProcessorName(TRANSFORM_NAME);
    return flatTransform(new TransformerSupplierAdapter<>(transformerSupplier), Named.as(name), stateStoreNames);
}
@Test
@SuppressWarnings("deprecation")
public void shouldNotAllowNullNamedOnTransformWithStoreName() {
    // The Named overload must reject a null name with a descriptive NPE.
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.transform(transformerSupplier, (Named) null, "storeName"));
    assertThat(exception.getMessage(), equalTo("named can't be null"));
}
/**
 * Synchronously returns the remaining time-to-live of the given key by
 * blocking on the async variant.
 */
@Override
public long remainTimeToLive(K key) {
    return get(remainTimeToLiveAsync(key));
}
@Test
public void testRemainTimeToLive() {
    RMapCacheNative<String, String> map = redisson.getMapCacheNative("test");
    map.put("1", "2", Duration.ofSeconds(2));
    // TTL counts down from 2000 ms.
    assertThat(map.remainTimeToLive("1")).isBetween(1900L, 2000L);
    map.put("3", "4");
    // -1: key exists but has no TTL; -2: key does not exist.
    assertThat(map.remainTimeToLive("3")).isEqualTo(-1);
    assertThat(map.remainTimeToLive("0")).isEqualTo(-2);
    map.put("5", "6", Duration.ofSeconds(20));
    assertThat(map.remainTimeToLive("1")).isLessThan(9900);
    // Batch lookup mirrors the single-key semantics.
    Map<String, Long> r = map.remainTimeToLive(Set.of("0", "1", "3", "5", "6"));
    assertThat(r.get("0")).isEqualTo(-2);
    assertThat(r.get("1")).isGreaterThan(1);
    assertThat(r.get("3")).isEqualTo(-1);
    assertThat(r.get("5")).isGreaterThan(1);
    assertThat(r.get("6")).isEqualTo(-2);
}
/**
 * Registers a task under "/name" and wraps its executor with metric
 * decorators for any {@code @Timed}, {@code @Metered} or
 * {@code @ExceptionMetered} annotations found on its execute method.
 */
public void add(Task task) {
    tasks.put('/' + task.getName(), task);
    TaskExecutor taskExecutor = new TaskExecutor(task);
    try {
        final Method executeMethod = task.getClass().getMethod("execute", Map.class, PrintWriter.class);
        if (executeMethod.isAnnotationPresent(Timed.class)) {
            final Timed annotation = executeMethod.getAnnotation(Timed.class);
            final String name = chooseName(annotation.name(), annotation.absolute(), task);
            taskExecutor = new TimedTask(taskExecutor, metricRegistry.timer(name));
        }
        if (executeMethod.isAnnotationPresent(Metered.class)) {
            final Metered annotation = executeMethod.getAnnotation(Metered.class);
            final String name = chooseName(annotation.name(), annotation.absolute(), task);
            taskExecutor = new MeteredTask(taskExecutor, metricRegistry.meter(name));
        }
        if (executeMethod.isAnnotationPresent(ExceptionMetered.class)) {
            final ExceptionMetered annotation = executeMethod.getAnnotation(ExceptionMetered.class);
            final String name = chooseName(annotation.name(), annotation.absolute(), task,
                ExceptionMetered.DEFAULT_NAME_SUFFIX);
            taskExecutor = new ExceptionMeteredTask(taskExecutor, metricRegistry.meter(name), annotation.cause());
        }
    } catch (NoSuchMethodException ignored) {
        // No matching execute method: register the task without metric wrappers.
    }
    taskExecutors.put(task, taskExecutor);
}
@Test
void testRunsExceptionMeteredTask() throws Exception {
    final ServletInputStream bodyStream = new TestServletInputStream(
        new ByteArrayInputStream("".getBytes(StandardCharsets.UTF_8)));
    // A task annotated @ExceptionMetered that always throws.
    final Task exceptionMeteredTask = new Task("exception-metered-task") {
        @Override
        @ExceptionMetered(name = "vacuum-cleaning-exceptions")
        public void execute(Map<String, List<String>> parameters, PrintWriter output) {
            throw new RuntimeException("The engine has died");
        }
    };
    servlet.add(exceptionMeteredTask);
    when(request.getInputStream()).thenReturn(bodyStream);
    when(request.getParameterNames()).thenReturn(Collections.emptyEnumeration());
    when(request.getMethod()).thenReturn("POST");
    when(request.getPathInfo()).thenReturn("/exception-metered-task");
    when(response.getWriter()).thenReturn(mock(PrintWriter.class));
    servlet.service(request, response);
    // The thrown exception must have been recorded under the annotated meter name.
    assertThat(metricRegistry.getMeters()).containsKey(name(exceptionMeteredTask.getClass(),
        "vacuum-cleaning-exceptions"));
}
public static ClassLoader createNewClassLoader() throws KettleException { try { // Nothing really in URL, everything is in scope. URL[] urls = new URL[] {}; URLClassLoader ucl = new URLClassLoader( urls ); return ucl; } catch ( Exception e ) { throw new KettleException( "Unexpected error during classloader creation", e ); } }
@Test
public void testCreateNewClassLoader() throws KettleException {
    ClassLoader cl = Const.createNewClassLoader();
    // Must be a URLClassLoader with an empty search path.
    assertTrue( cl instanceof URLClassLoader && ( (URLClassLoader) cl ).getURLs().length == 0 );
}
/**
 * Expands via the delegate transform; if the result is a PCollection, tries to
 * set its coder from the delegate's default output coder.
 */
@Override
public OutputT expand(InputT input) {
    OutputT res = delegate().expand(input);
    if (res instanceof PCollection) {
        PCollection pc = (PCollection) res;
        try {
            pc.setCoder(delegate().getDefaultOutputCoder(input, pc));
        } catch (CannotProvideCoderException e) {
            // Let coder inference happen later.
        }
    }
    return res;
}
@Test
public void applyDelegates() {
    @SuppressWarnings("unchecked")
    PCollection<Integer> collection = mock(PCollection.class);
    @SuppressWarnings("unchecked")
    PCollection<String> output = mock(PCollection.class);
    when(delegate.expand(collection)).thenReturn(output);
    // The forwarding transform must return exactly what the delegate produced.
    PCollection<String> result = forwarding.expand(collection);
    assertThat(result, equalTo(output));
}
/** Static factory for a new, default-configured {@link AsyncBuilder}. */
public static <C> AsyncBuilder<C> builder() {
    return new AsyncBuilder<>();
}
@SuppressWarnings("resource")
@Test
void decodeLogicSupportsByteArray() throws Throwable {
    byte[] expectedResponse = {12, 34, 56};
    server.enqueue(new MockResponse().setBody(new Buffer().write(expectedResponse)));
    OtherTestInterfaceAsync api = AsyncFeign.builder().target(OtherTestInterfaceAsync.class,
        "http://localhost:" + server.getPort());
    // A binary body must decode to the exact raw bytes the server sent.
    assertThat(unwrap(api.binaryResponseBody())).containsExactly(expectedResponse);
}
/**
 * Extracts the raw data value of every cell in this row, preserving cell order.
 *
 * @return a new list of cell data values
 */
public List<Object> getData() {
    final List<Object> rowData = new ArrayList<>(cells.size());
    for (final QueryResponseCell cell : cells) {
        rowData.add(cell.getData());
    }
    return rowData;
}
@Test
void assertGetDataWhenQueryResponseCellsPresent() {
    QueryResponseCell queryResponseCell1 = new QueryResponseCell(Types.INTEGER, 1);
    QueryResponseCell queryResponseCell2 = new QueryResponseCell(Types.VARCHAR, "column");
    QueryResponseRow queryResponseRow = new QueryResponseRow(Arrays.asList(queryResponseCell1, queryResponseCell2));
    List<Object> actualData = queryResponseRow.getData();
    // Data values come back in cell order.
    assertThat(actualData.size(), is(2));
    assertThat(actualData, is(Arrays.asList(1, "column")));
}
/**
 * Deletes a role: validates it first, removes the row and its associated
 * permission data in one transaction, evicts the cache entry, and records an
 * operation log with the role exposed in the log context.
 */
@Override
@Transactional(rollbackFor = Exception.class)
@CacheEvict(value = RedisKeyConstants.ROLE, key = "#id")
@LogRecord(type = SYSTEM_ROLE_TYPE, subType = SYSTEM_ROLE_DELETE_SUB_TYPE, bizNo = "{{#id}}",
           success = SYSTEM_ROLE_DELETE_SUCCESS)
public void deleteRole(Long id) {
    // 1. Validate the role exists and may be modified.
    RoleDO role = validateRoleForUpdate(id);
    // 2.1 Mark the role as deleted.
    roleMapper.deleteById(id);
    // 2.2 Remove data associated with the role.
    permissionService.processRoleDeleted(id);
    // 3. Put the role into the operation-log template context.
    LogRecordContext.putVariable("role", role);
}
@Test
public void testDeleteRole() {
    // Mock data: insert a custom role.
    RoleDO roleDO = randomPojo(RoleDO.class, o -> o.setType(RoleTypeEnum.CUSTOM.getType()));
    roleMapper.insert(roleDO);
    // Prepare parameters.
    Long id = roleDO.getId();
    // Invoke.
    roleService.deleteRole(id);
    // Assert: the row is gone.
    assertNull(roleMapper.selectById(id));
    // Verify associated data was removed too.
    verify(permissionService).processRoleDeleted(id);
}
/**
 * Resolves a file reference to the first file in its backing directory.
 *
 * @return the first matching file, or empty if the directory is missing,
 *     not a directory, or contains no files
 */
public Optional<File> getFile(FileReference reference) {
    ensureRootExist();
    File dir = new File(getPath(reference));
    if (!dir.exists()) {
        // This is common when config server has not yet received the file from one the server the app was deployed on
        log.log(FINE, "File reference '" + reference.value() + "' ('" + dir.getAbsolutePath() + "') does not exist.");
        return Optional.empty();
    }
    if (!dir.isDirectory()) {
        log.log(INFO, "File reference '" + reference.value() + "' ('" + dir.getAbsolutePath() + ")' is not a directory.");
        return Optional.empty();
    }
    File[] files = dir.listFiles(new Filter());
    if (files == null || files.length == 0) {
        log.log(INFO, "File reference '" + reference.value() + "' ('" + dir.getAbsolutePath() + "') does not contain any files");
        return Optional.empty();
    }
    return Optional.of(files[0]);
}
@Test
public void requireThatFileReferenceWithFilesWorks() throws IOException {
    FileReference foo = createFile("foo");
    FileReference bar = createFile("bar");
    // Each reference resolves to an existing file with a content-derived hash value.
    assertTrue(fileDirectory.getFile(foo).get().exists());
    assertEquals("ea315b7acac56246", foo.value());
    assertTrue(fileDirectory.getFile(bar).get().exists());
    assertEquals("2b8e97f15c854e1d", bar.value());
}
/**
 * Splits a dotted id of the form {@code namespace.name} into (name, namespace).
 *
 * <p>The name is everything after the LAST dot, the namespace everything
 * before it; a string without a dot is all name with an empty namespace.
 *
 * @param nameDotNamespace the combined string, e.g. {@code "foo.baz.bar"}
 * @return a tuple of (name, namespace)
 */
public static Tuple2<String, String> getNameAndNamespaceFromString(String nameDotNamespace) {
    // Single scan replaces the original contains(".") check plus two
    // separate lastIndexOf(".") calls.
    final int lastDot = nameDotNamespace.lastIndexOf('.');
    if (lastDot < 0) {
        return new Tuple2<>(nameDotNamespace, "");
    }
    final String name = nameDotNamespace.substring(lastDot + 1);
    final String namespace = nameDotNamespace.substring(0, lastDot);
    return new Tuple2<>(name, namespace);
}
@Test
public void testNamespaceDotNames() {
    // Standard case: split on the last dot.
    String namespaceDotName = "foo.bar";
    Tuple2<String, String> tuple = ConfigUtils.getNameAndNamespaceFromString(namespaceDotName);
    assertEquals("bar", tuple.first);
    assertEquals("foo", tuple.second);
    namespaceDotName = "foo.baz.bar";
    tuple = ConfigUtils.getNameAndNamespaceFromString(namespaceDotName);
    assertEquals("bar", tuple.first);
    assertEquals("foo.baz", tuple.second);
    // no namespace
    namespaceDotName = "bar";
    tuple = ConfigUtils.getNameAndNamespaceFromString(namespaceDotName);
    assertEquals("bar", tuple.first);
    assertEquals("", tuple.second);
    // no name
    namespaceDotName = "foo.";
    tuple = ConfigUtils.getNameAndNamespaceFromString(namespaceDotName);
    assertEquals("", tuple.first);
    assertEquals("foo", tuple.second);
    // no namespace
    namespaceDotName = ".bar";
    tuple = ConfigUtils.getNameAndNamespaceFromString(namespaceDotName);
    assertEquals("bar", tuple.first);
    assertEquals("", tuple.second);
}
/** Sends the request asynchronously with no per-request options (null). */
@Override
public ListenableFuture<HttpResponse> sendAsync(HttpRequest httpRequest) {
    return sendAsync(httpRequest, null);
}
@Test
public void sendAsync_whenPostRequest_returnsExpectedHttpResponse()
    throws IOException, ExecutionException, InterruptedException {
    String responseBody = "{ \"test\": \"json\" }";
    mockWebServer.enqueue(
        new MockResponse()
            .setResponseCode(HttpStatus.OK.code())
            .setHeader(CONTENT_TYPE, MediaType.JSON_UTF_8.toString())
            .setBody(responseBody));
    mockWebServer.start();
    String requestUrl = mockWebServer.url("/test/post").toString();
    HttpResponse response =
        httpClient
            .sendAsync(
                post(requestUrl)
                    .setHeaders(
                        HttpHeaders.builder()
                            .addHeader(ACCEPT, MediaType.JSON_UTF_8.toString())
                            .build())
                    .build())
            .get();
    // Status, headers, body bytes and response URL must all round-trip.
    assertThat(response)
        .isEqualTo(
            HttpResponse.builder()
                .setStatus(HttpStatus.OK)
                .setHeaders(
                    HttpHeaders.builder()
                        .addHeader(CONTENT_TYPE, MediaType.JSON_UTF_8.toString())
                        // MockWebServer always adds this response header.
                        .addHeader(CONTENT_LENGTH, String.valueOf(responseBody.length()))
                        .build())
                .setBodyBytes(ByteString.copyFrom(responseBody, UTF_8))
                .setResponseUrl(HttpUrl.parse(requestUrl))
                .build());
}
@VisibleForTesting static final String hostKey(String host) { try { final Matcher m = HOST_PATTERN.matcher(host); // I know which type of host matched by the number of the group that is non-null // I use a different replacement string per host type to make the Epic stats more clear if (m.matches()) { if (m.group(1) != null) { host = host.replace(m.group(1), "EC2"); } else if (m.group(2) != null) { host = host.replace(m.group(2), "IP"); } else if (m.group(3) != null) { host = host.replace(m.group(3), "IP"); } else if (m.group(4) != null) { host = host.replace(m.group(4), "CDN"); } else if (m.group(5) != null) { host = host.replace(m.group(5), "CDN"); } else if (m.group(6) != null) { host = host.replace(m.group(6), "CDN"); } } } catch (Exception e) { LOG.error(e.getMessage(), e); } finally { return String.format("host_%s", host); } }
@Test
void createsNormalizedHostKey() {
    // Each known host shape collapses to its type token.
    assertEquals("host_EC2.amazonaws.com", StatsManager.hostKey("ec2-174-129-179-89.compute-1.amazonaws.com"));
    assertEquals("host_IP", StatsManager.hostKey("12.345.6.789"));
    assertEquals("host_IP", StatsManager.hostKey("ip-10-86-83-168"));
    assertEquals("host_CDN.nflxvideo.net", StatsManager.hostKey("002.ie.llnw.nflxvideo.net"));
    assertEquals("host_CDN.llnwd.net", StatsManager.hostKey("netflix-635.vo.llnwd.net"));
    assertEquals("host_CDN.nflximg.com", StatsManager.hostKey("cdn-0.nflximg.com"));
}
/**
 * Acquires a pooled gRPC channel for the given network group and address:
 * reuses a healthy existing connection (incrementing its ref-count), or shuts
 * down an unhealthy one and creates a replacement carrying the old ref-count.
 */
public GrpcChannel acquireChannel(GrpcNetworkGroup networkGroup, GrpcServerAddress serverAddress,
                                  AlluxioConfiguration conf, boolean alwaysEnableTLS) {
    GrpcChannelKey channelKey = getChannelKey(networkGroup, serverAddress, conf);
    // compute() makes the reuse-or-replace decision atomic per key.
    CountingReference<ManagedChannel> channelRef = mChannels.compute(channelKey, (key, ref) -> {
        boolean shutdownExistingConnection = false;
        int existingRefCount = 0;
        if (ref != null) {
            // Connection exists, wait for health check.
            if (waitForConnectionReady(ref.get(), conf)) {
                LOG.debug("Acquiring an existing connection. ConnectionKey: {}. Ref-count: {}", key,
                    ref.getRefCount());
                return ref.reference();
            } else {
                // Health check failed.
                shutdownExistingConnection = true;
            }
        }
        // Existing connection should be shutdown.
        if (shutdownExistingConnection) {
            existingRefCount = ref.getRefCount();
            LOG.debug("Shutting down an existing unhealthy connection. "
                + "ConnectionKey: {}. Ref-count: {}", key, existingRefCount);
            // Shutdown the channel forcefully as it's already unhealthy.
            shutdownManagedChannel(ref.get());
        }
        // Create a new managed channel.
        LOG.debug("Creating a new managed channel. ConnectionKey: {}. Ref-count:{},"
            + " alwaysEnableTLS:{} config TLS:{}", key, existingRefCount, alwaysEnableTLS,
            conf.getBoolean(alluxio.conf.PropertyKey.NETWORK_TLS_ENABLED));
        ManagedChannel managedChannel = createManagedChannel(channelKey, conf, alwaysEnableTLS);
        // Set map reference; the replacement inherits the old connection's ref-count.
        return new CountingReference<>(managedChannel, existingRefCount).reference();
    });
    return new GrpcChannel(channelKey, channelRef.get());
}
@Test
public void testEqualKeys() throws Exception {
    try (CloseableTestServer server = createServer()) {
        // Two acquisitions against the same address/network group should be pooled:
        // both GrpcChannel handles compare equal.
        GrpcChannel conn1 = GrpcChannelPool.INSTANCE.acquireChannel(
            GrpcNetworkGroup.RPC, server.getConnectAddress(), sConf, false);
        GrpcChannel conn2 = GrpcChannelPool.INSTANCE.acquireChannel(
            GrpcNetworkGroup.RPC, server.getConnectAddress(), sConf, false);
        assertEquals(conn1, conn2);
    }
}
/**
 * Returns the mandatory JVM options applicable to the given Java major version.
 * Version-gated option lines that do not apply yield an empty Optional from
 * {@code jvmOptionFromLine} and are dropped from the result.
 */
static Collection<String> getMandatoryJvmOptions(int javaMajorVersion){
    return Arrays.stream(MANDATORY_JVM_OPTIONS)
        .map(line -> jvmOptionFromLine(javaMajorVersion, line))
        .filter(Optional::isPresent)
        .map(Optional::get)
        .collect(Collectors.toUnmodifiableList());
}
@Test
public void testMandatoryJvmOptionNonApplicableJvmNotPresent() throws IOException{
    // The jdk.compiler add-exports option is gated to another Java version, so it must
    // not appear in the mandatory options computed for Java 11.
    assertFalse("Does not contains add-exports value for Java 11",
        JvmOptionsParser.getMandatoryJvmOptions(11).contains("--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED"));
}
/**
 * Parses a resource configuration qualifier string into {@code out}.
 * Delegates to the three-argument overload with its boolean flag set to true
 * (flag semantics are defined by that overload).
 */
public static boolean parse(final String str, ResTable_config out) {
    return parse(str, out, true);
}
@Test
public void parse_layoutDirection_any() {
    ResTable_config config = new ResTable_config();
    // "any" leaves the layout-direction bits at their wildcard value.
    ConfigDescription.parse("any", config);
    assertThat(config.screenLayout).isEqualTo(LAYOUTDIR_ANY);
}
/**
 * Deletes files via Swift bulk-delete, grouped per container. For each file the keys of
 * its large-object segments are collected (before the manifest is deleted, otherwise the
 * segment listing would be lost). Containers themselves are deleted last, after their
 * contents. Falls back to single-object deletes if the server does not support bulk
 * delete (InteroperabilityException).
 */
@Override
public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException {
    final Map<Path, List<String>> containers = new HashMap<>();
    for(Path file : files.keySet()) {
        // Containers are handled in the final pass below, not via object bulk-delete.
        if(containerService.isContainer(file)) {
            continue;
        }
        callback.delete(file);
        final Path container = containerService.getContainer(file);
        if(containers.containsKey(container)) {
            containers.get(container).add(containerService.getKey(file));
        }
        else {
            final List<String> keys = new ArrayList<>();
            keys.add(containerService.getKey(file));
            // Collect a list of existing segments. Must do this before deleting the manifest file.
            for(Path segment : segmentService.list(file)) {
                keys.add(containerService.getKey(segment));
            }
            containers.put(container, keys);
        }
    }
    try {
        for(Map.Entry<Path, List<String>> container : containers.entrySet()) {
            final Region region = regionService.lookup(container.getKey());
            final List<String> keys = container.getValue();
            // Bulk-delete requests are chunked to the configured partition size.
            for(List<String> partition : new Partition<>(keys, new HostPreferences(session.getHost()).getInteger("openstack.delete.multiple.partition"))) {
                session.getClient().deleteObjects(region, container.getKey().getName(), partition);
            }
        }
    }
    catch(GenericException e) {
        // Server without bulk-delete middleware: retry with the one-by-one delete feature.
        if(new SwiftExceptionMappingService().map(e) instanceof InteroperabilityException) {
            new SwiftDeleteFeature(session, regionService).delete(files, prompt, callback);
            return;
        }
        else {
            throw new SwiftExceptionMappingService().map("Cannot delete {0}", e, files.keySet().iterator().next());
        }
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map("Cannot delete {0}", e, files.keySet().iterator().next());
    }
    for(Path file : files.keySet()) {
        if(containerService.isContainer(file)) {
            callback.delete(file);
            // Finally delete bucket itself
            try {
                session.getClient().deleteContainer(regionService.lookup(file),
                    containerService.getContainer(file).getName());
            }
            catch(GenericException e) {
                throw new SwiftExceptionMappingService().map("Cannot delete {0}", e, file);
            }
            catch(IOException e) {
                throw new DefaultIOExceptionMappingService().map("Cannot delete {0}", e, file);
            }
        }
    }
}
// Deleting non-existing keys is expected to surface as NotfoundException.
// NOTE(review): currently @Ignore'd — presumably flaky or environment-dependent; confirm
// before re-enabling.
@Test(expected = NotfoundException.class)
@Ignore
public void testDeleteNotFoundKey() throws Exception {
    final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
    new SwiftMultipleDeleteFeature(session).delete(Arrays.asList(
        new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)),
        new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file))
    ), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
@Override void handle(Connection connection, DatabaseCharsetChecker.State state) throws SQLException { // PostgreSQL does not have concept of case-sensitive collation. Only charset ("encoding" in postgresql terminology) // must be verified. expectUtf8AsDefault(connection); if (state == DatabaseCharsetChecker.State.UPGRADE || state == DatabaseCharsetChecker.State.STARTUP) { // no need to check columns on fresh installs... as they are not supposed to exist! expectUtf8Columns(connection); } }
@Test
public void regular_startup_verifies_that_default_charset_and_columns_are_utf8() throws Exception {
    // Both the database default charset and every column charset report utf8,
    // so handle() must complete without error.
    answerDefaultCharset("utf8");
    answerColumns(asList(
        new String[] {TABLE_ISSUES, COLUMN_KEE, "utf8"},
        new String[] {TABLE_PROJECTS, COLUMN_NAME, "utf8"}));
    underTest.handle(connection, DatabaseCharsetChecker.State.STARTUP);
    // no errors, charsets have been verified
    verify(metadata).getDefaultCharset(same(connection));
}
/**
 * Converts an aggregation search result into events (one per key result that satisfies
 * the configured conditions). Each event carries replay info, its source streams, the
 * group-by fields, one field per series value and the concatenated aggregation key.
 */
@VisibleForTesting
ImmutableList<EventWithContext> eventsFromAggregationResult(EventFactory eventFactory, AggregationEventProcessorParameters parameters, AggregationResult result) throws EventProcessorException {
    final ImmutableList.Builder<EventWithContext> eventsWithContext = ImmutableList.builder();
    final Set<String> sourceStreams = eventStreamService.buildEventSourceStreams(getStreams(parameters), result.sourceStreams());
    for (final AggregationKeyResult keyResult : result.keyResults()) {
        // Key results failing the configured conditions produce no event.
        if (!satisfiesConditions(keyResult)) {
            LOG.debug("Skipping result <{}> because the conditions <{}> don't match", keyResult, config.conditions());
            continue;
        }
        final String keyString = String.join("|", keyResult.key());
        final String eventMessage = createEventMessageString(keyString, keyResult);
        // Extract event time and range from the key result or use query time range as fallback.
        // These can be different, e.g. during catch up processing.
        final DateTime eventTime = keyResult.timestamp().orElse(result.effectiveTimerange().to());
        final Event event = eventFactory.createEvent(eventDefinition, eventTime, eventMessage);
        // The keyResult timestamp is set to the end of the range
        event.setTimerangeStart(keyResult.timestamp().map(t -> t.minus(config.searchWithinMs())).orElse(parameters.timerange().getFrom()));
        event.setTimerangeEnd(keyResult.timestamp().orElse(parameters.timerange().getTo()));
        event.setReplayInfo(EventReplayInfo.builder()
            .timerangeStart(event.getTimerangeStart())
            .timerangeEnd(event.getTimerangeEnd())
            .query(config.query())
            .streams(sourceStreams)
            .filters(config.filters())
            .build());
        sourceStreams.forEach(event::addSourceStream);
        final Map<String, Object> fields = new HashMap<>();
        // Each group value will be a separate field in the message to make it usable as event fields.
        //
        // Example result:
        //   groupBy=["application_name", "username"]
        //   result-key=["sshd", "jane"]
        //
        // Message fields:
        //   application_name=sshd
        //   username=jane
        for (int i = 0; i < config.groupBy().size(); i++) {
            try {
                fields.put(config.groupBy().get(i), keyResult.key().get(i));
            } catch (IndexOutOfBoundsException e) {
                // Fewer key values than group-by fields: configured grouping field missing
                // from the search result.
                throw new EventProcessorException(
                    "Couldn't create events for: " + eventDefinition.title() +
                        " (possibly due to non-existing grouping fields)",
                    false, eventDefinition.id(), eventDefinition, e);
            }
        }
        // Group By fields need to be saved on the event so they are available to the subsequent notification events
        event.setGroupByFields(fields.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toString())));
        // The field name for the series value is composed of the series function and field. We don't take the
        // series ID into account because it would be very hard to use for the user. That means a series with
        // the same function and field but different ID would overwrite a previous one.
        // This shouldn't be a problem though, because the same function and field will always compute the same
        // value.
        //
        // Examples:
        //   aggregation_value_count_source=42
        //   aggregation_value_card_anonid=23
        for (AggregationSeriesValue seriesValue : keyResult.seriesValues()) {
            final String function = seriesValue.series().type().toLowerCase(Locale.ROOT);
            final Optional<String> field = fieldFromSeries(seriesValue.series());
            final String fieldName = field.map(f -> String.format(Locale.ROOT, "aggregation_value_%s_%s", function, f))
                .orElseGet(() -> String.format(Locale.ROOT, "aggregation_value_%s", function));
            fields.put(fieldName, seriesValue.value());
        }
        // This is the concatenated key value
        fields.put("aggregation_key", keyString);
        // TODO: Can we find a useful source value?
        final Message message = messageFactory.createMessage(eventMessage, "", result.effectiveTimerange().to());
        message.addFields(fields);
        // Ask any event query modifier for its state and collect it into the event modifier state
        final Map<String, Object> eventModifierState = eventQueryModifiers.stream()
            .flatMap(modifier -> modifier.eventModifierData(result.additionalResults()).entrySet().stream())
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
        LOG.debug("Creating event {}/{} - {} {} ({})", eventDefinition.title(), eventDefinition.id(), keyResult.key(), seriesString(keyResult), fields);
        eventsWithContext.add(EventWithContext.builder()
            .event(event)
            .messageContext(message)
            .eventModifierState(eventModifierState)
            .build());
    }
    return eventsWithContext.build();
}
@Test
public void testEventsFromAggregationResult() throws EventProcessorException {
    final DateTime now = DateTime.now(DateTimeZone.UTC);
    final AbsoluteRange timerange = AbsoluteRange.create(now.minusHours(1), now.minusHours(1).plusMillis(SEARCH_WINDOW_MS));
    // We expect to get the end of the aggregation timerange as event time
    final TestEvent event1 = new TestEvent(timerange.to());
    final TestEvent event2 = new TestEvent(timerange.to());
    when(eventFactory.createEvent(any(EventDefinition.class), any(DateTime.class), anyString()))
        .thenReturn(event1)  // first invocation return value
        .thenReturn(event2); // second invocation return value
    // The definition is restricted to "stream-2" even though the result spans two streams.
    final EventDefinitionDto eventDefinitionDto = buildEventDefinitionDto(ImmutableSet.of("stream-2"), ImmutableList.of(), null, emptyList());
    final AggregationEventProcessorParameters parameters = AggregationEventProcessorParameters.builder()
        .timerange(timerange)
        .build();
    final AggregationEventProcessor eventProcessor = new AggregationEventProcessor(
        eventDefinitionDto, searchFactory, eventProcessorDependencyCheck, stateService,
        moreSearch, eventStreamService, messages, notificationService, permittedStreams,
        Set.of(), messageFactory);
    // One key result ["one","two"] with three series: count(source)=42, count()=23, card(source)=1.
    final AggregationResult result = AggregationResult.builder()
        .effectiveTimerange(timerange)
        .totalAggregatedMessages(1)
        .sourceStreams(ImmutableSet.of("stream-1", "stream-2"))
        .keyResults(ImmutableList.of(
            AggregationKeyResult.builder()
                .key(ImmutableList.of("one", "two"))
                .timestamp(timerange.to())
                .seriesValues(ImmutableList.of(
                    AggregationSeriesValue.builder()
                        .key(ImmutableList.of("a"))
                        .value(42.0d)
                        .series(Count.builder()
                            .id("abc123")
                            .field("source")
                            .build())
                        .build(),
                    AggregationSeriesValue.builder()
                        .key(ImmutableList.of("a"))
                        .value(23.0d)
                        .series(Count.builder()
                            .id("abc123-no-field")
                            .build())
                        .build(),
                    AggregationSeriesValue.builder()
                        .key(ImmutableList.of("a"))
                        .value(1.0d)
                        .series(Cardinality.builder()
                            .id("xyz789")
                            .field("source")
                            .build())
                        .build()
                ))
                .build()
        ))
        .build();
    final ImmutableList<EventWithContext> eventsWithContext = eventProcessor.eventsFromAggregationResult(eventFactory, parameters, result);
    assertThat(eventsWithContext).hasSize(1);
    assertThat(eventsWithContext.get(0)).satisfies(eventWithContext -> {
        final Event event = eventWithContext.event();
        assertThat(event.getId()).isEqualTo(event1.getId());
        assertThat(event.getMessage()).isEqualTo(event1.getMessage());
        assertThat(event.getEventTimestamp()).isEqualTo(timerange.to());
        assertThat(event.getTimerangeStart()).isEqualTo(timerange.from());
        assertThat(event.getTimerangeEnd()).isEqualTo(timerange.to());
        // Should only contain the streams that have been configured in event definition
        assertThat(event.getSourceStreams()).containsOnly("stream-2");
        final Message message = eventWithContext.messageContext().orElse(null);
        assertThat(message).isNotNull();
        assertThat(message.getField("group_field_one")).isEqualTo("one");
        assertThat(message.getField("group_field_two")).isEqualTo("two");
        assertThat(message.getField("aggregation_key")).isEqualTo("one|two");
        assertThat(message.getField("aggregation_value_count_source")).isEqualTo(42.0d);
        // Make sure that the count with a "null" field doesn't include the field in the name
        assertThat(message.getField("aggregation_value_count")).isEqualTo(23.0d);
        assertThat(message.getField("aggregation_value_card_source")).isEqualTo(1.0d);
        assertThat(event.getGroupByFields().get("group_field_one")).isEqualTo("one");
        assertThat(event.getGroupByFields().get("group_field_two")).isEqualTo("two");
    });
}
/** Returns the sentinel item used to signal that the submitter has gone away. */
public final E submitterGoneItem() {
    return submitterGoneItem;
}
@Test
public void submitterGoneItem() {
    // NOTE(review): the expected value here is doneItem although the accessor under test
    // is submitterGoneItem(). Presumably the fixture wires both sentinels to the same
    // instance — confirm; otherwise the expected value should be submitterGoneItem.
    assertSame(doneItem, conveyor.submitterGoneItem());
}
/**
 * Computes the precision score for the given truth/prediction labels by delegating to
 * the static implementation with this instance's averaging strategy.
 */
@Override
public double score(int[] truth, int[] prediction) {
    return of(truth, prediction, strategy);
}
@Test
public void test() {
    System.out.println("Precision");
    // 30 positives followed by 70 negatives.
    int[] truth = {
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
    };
    // 25 true positives, 2 false positives (positions 31-32): precision = 25/27 ≈ 0.9259.
    int[] prediction = {
        1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
        1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
    };
    Precision instance = new Precision();
    double expResult = 0.9259;
    double result = instance.score(truth, prediction);
    assertEquals(expResult, result, 1E-4);
}
/**
 * Accepts a file only when both the superclass filter and the delegate (proxy) filter
 * accept it. Short-circuits on the superclass check, matching the original control flow.
 */
@Override
public boolean accept(final Path file) {
    return super.accept(file) && proxy.accept(file);
}
@Test
public void testAccept() {
    // A "*.css" glob matches only files whose name ends in .css.
    final DownloadGlobFilter cssFilter = new DownloadGlobFilter("*.css");
    assertTrue(cssFilter.accept(new Path("/dir/f.css", EnumSet.of(Path.Type.file))));
    assertFalse(cssFilter.accept(new Path("/dir/f.png", EnumSet.of(Path.Type.file))));
}
/**
 * Uploads a content pack. Rejects duplicates (same id + revision already stored) with
 * 400 Bad Request; on success answers 201 Created with a Location header pointing at
 * the stored pack.
 */
@POST
@Timed
@Consumes(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Upload a content pack")
@ApiResponses(value = {
        @ApiResponse(code = 400, message = "Missing or invalid content pack"),
        @ApiResponse(code = 500, message = "Error while saving content pack")
})
@AuditEvent(type = AuditEventTypes.CONTENT_PACK_CREATE)
@JsonView(ContentPackView.HttpView.class)
public Response createContentPack(
        @ApiParam(name = "Request body", value = "Content pack", required = true)
        @NotNull @Valid final ContentPack contentPack) {
    checkPermission(RestPermissions.CONTENT_PACK_CREATE);
    // Insertion yields an empty result when the same pack id and revision already exist.
    final ContentPack storedPack = contentPackPersistenceService.filterMissingResourcesAndInsert(contentPack)
            .orElseThrow(() -> new BadRequestException("Content pack " + contentPack.id() +
                    " with this revision " + contentPack.revision() + " already found!"));
    return Response.created(
            getUriBuilderToSelf().path(ContentPackResource.class)
                    .path("{contentPackId}")
                    .build(storedPack.id()))
            .build();
}
@Test
public void uploadContentPack() throws Exception {
    final ContentPack contentPack = objectMapper.readValue(CONTENT_PACK, ContentPack.class);
    // Persistence succeeds -> the resource must answer 201 Created.
    when(contentPackPersistenceService.filterMissingResourcesAndInsert(contentPack)).thenReturn(Optional.ofNullable(contentPack));
    final Response response = contentPackResource.createContentPack(contentPack);
    verify(contentPackPersistenceService, times(1)).filterMissingResourcesAndInsert(contentPack);
    assertThat(response.getStatusInfo()).isEqualTo(Response.Status.CREATED);
}
/**
 * Resolves a SpEL expression attached to an annotated method.
 * <ul>
 *   <li>null/empty expressions are returned unchanged;</li>
 *   <li>"${...}" placeholders are delegated to the embedded value resolver;</li>
 *   <li>method-based ("#{...}"-style) and bean-reference expressions are evaluated
 *       against a method-based evaluation context (bean expressions additionally get a
 *       BeanResolver);</li>
 *   <li>anything else is returned as a literal.</li>
 * </ul>
 */
@Override
public String resolve(Method method, Object[] arguments, String spelExpression) {
    if (StringUtils.isEmpty(spelExpression)) {
        return spelExpression;
    }
    if (spelExpression.matches(PLACEHOLDER_SPEL_REGEX) && stringValueResolver != null) {
        return stringValueResolver.resolveStringValue(spelExpression);
    }
    // Method and bean expressions share the same evaluation path; bean expressions only
    // differ in needing a BeanResolver for "@beanName" references.
    if (spelExpression.matches(METHOD_SPEL_REGEX)) {
        return evaluateExpression(method, arguments, spelExpression, false);
    }
    if (spelExpression.matches(BEAN_SPEL_REGEX)) {
        return evaluateExpression(method, arguments, spelExpression, true);
    }
    // Not a recognized SpEL form: treat as a literal value.
    return spelExpression;
}

/**
 * Evaluates {@code spelExpression} against a context rooted at the intercepted method
 * and its arguments. When {@code withBeanResolver} is true, bean references can be
 * resolved against the configured bean factory.
 */
private String evaluateExpression(Method method, Object[] arguments, String spelExpression,
        boolean withBeanResolver) {
    SpelRootObject rootObject = new SpelRootObject(method, arguments);
    MethodBasedEvaluationContext evaluationContext =
            new MethodBasedEvaluationContext(rootObject, method, arguments, parameterNameDiscoverer);
    if (withBeanResolver) {
        evaluationContext.setBeanResolver(new BeanFactoryResolver(this.beanFactory));
    }
    Object evaluated = expressionParser.parseExpression(spelExpression).getValue(evaluationContext);
    return (String) evaluated;
}
@Test
public void atTest() throws Exception {
    DefaultSpelResolverTest target = new DefaultSpelResolverTest();
    Method testMethod = target.getClass().getMethod("testMethod", String.class);
    // A lone "@" matches none of the SpEL patterns and must be returned verbatim.
    String result = sut.resolve(testMethod, new Object[]{}, "@");
    assertThat(result).isEqualTo("@");
}
/**
 * Creates the materialized-lambda enum declaration, additionally generating bit-mask
 * fields when any lambda parameter is a Drools parameter.
 */
@Override
protected EnumDeclaration create(CompilationUnit compilationUnit) {
    EnumDeclaration enumDeclaration = super.create(compilationUnit);
    if (lambdaParameters.stream().anyMatch(this::isDroolsParameter)) {
        // Bit-mask fields are only relevant for Drools-aware lambdas.
        bitMaskVariables.forEach(variable -> variable.generateBitMaskField(enumDeclaration));
    }
    return enumDeclaration;
}
@Test
public void createConsequence() {
    CreatedClass aClass = new MaterializedLambdaConsequence("org.drools.modelcompiler.util.lambdareplace",
                                                            "rulename",
                                                            new ArrayList<>())
            .create("(org.drools.model.codegen.execmodel.domain.Person p1, org.drools.model.codegen.execmodel.domain.Person p2) -> result.setValue( p1.getName() + \" is older than \" + p2.getName())",
                    new ArrayList<>(),
                    new ArrayList<>());
    String classNameWithPackage = aClass.getClassNameWithPackage();
    // There is no easy way to retrieve the originally created "hashcode" because it is calculated over a CompilationUnit that soon after is modified;
    // so current "CreatedClass" contains a CompilationUnit that is different from the one used to calculate the hashcode
    String expectedPackageName = classNameWithPackage.substring(0, classNameWithPackage.lastIndexOf('.'));
    String expectedClassName = classNameWithPackage.substring(classNameWithPackage.lastIndexOf('.')+1);
    //language=JAVA
    String expectedResult = "" +
            "package PACKAGE_TOREPLACE;\n" +
            "import static rulename.*; " +
            "import org.drools.modelcompiler.dsl.pattern.D; " +
            "" +
            " \n"+
            "@org.drools.compiler.kie.builder.MaterializedLambda() " +
            "public enum CLASS_TOREPLACE implements org.drools.model.functions.Block2<org.drools.model.codegen.execmodel.domain.Person, org.drools.model.codegen.execmodel.domain.Person>, org.drools.model.functions.HashedExpression {\n" +
            "INSTANCE;\n" +
            "public static final String EXPRESSION_HASH = \"92816350431E6B9D2AF473C2A98D8B37\";" +
            " public java.lang.String getExpressionHash() {\n" +
            "        return EXPRESSION_HASH;\n" +
            "    } " +
            " @Override()\n" +
            "    public void execute(org.drools.model.codegen.execmodel.domain.Person p1, org.drools.model.codegen.execmodel.domain.Person p2) throws java.lang.Exception {\n" +
            "        result.setValue(p1.getName() + \" is older than \" + p2.getName());\n" +
            "    }\n" +
            "    }\n";
    // Workaround to keep the "//language=JAVA" working
    expectedResult = expectedResult
            .replace("PACKAGE_TOREPLACE", expectedPackageName)
            .replace("CLASS_TOREPLACE", expectedClassName);
    verifyCreatedClass(aClass, expectedResult);
}
/**
 * Creates a data stream: installs/updates the index template with the given timestamp
 * field and mappings, creates the stream against it, applies the ISM policy and finally
 * configures the replica count.
 */
@Override
public void createDataStream(String dataStreamName, String timestampField,
        Map<String, Map<String, String>> mappings, Policy ismPolicy) {
    // The template must exist before the stream can be created against it.
    updateDataStreamTemplate(dataStreamName, timestampField, mappings);
    dataStreamAdapter.createDataStream(dataStreamName);
    dataStreamAdapter.applyIsmPolicy(dataStreamName, ismPolicy);
    // `replicas` is this service's configured replica count.
    dataStreamAdapter.setNumberOfReplicas(dataStreamName, replicas);
}
@Test
public void createDataStreamPerformsFunctions() {
    final String name = "teststream";
    final String ts = "ts";
    final Map<String, Map<String, String>> mappings = new HashMap<>();
    final Policy policy = mock(Policy.class);
    dataStreamService.createDataStream(name, ts, mappings, policy);
    // Template and field-type bookkeeping come first, then stream creation,
    // policy application and the replica setting (0 in this fixture).
    verify(dataStreamAdapter).ensureDataStreamTemplate(eq(name + "-template"), any(), eq(ts));
    verify(indexFieldTypesService).upsert(any());
    verify(dataStreamAdapter).createDataStream(name);
    verify(dataStreamAdapter).applyIsmPolicy(name, policy);
    verify(dataStreamAdapter).setNumberOfReplicas(name, 0);
}
/** Returns the topic-level settings configured under the offset-storage prefix. */
public Map<String, Object> offsetStorageTopicSettings() {
    return topicSettings(OFFSET_STORAGE_PREFIX);
}
@Test
public void shouldRemoveCompactionFromOffsetTopicSettings() {
    Map<String, String> expectedTopicSettings = new HashMap<>();
    expectedTopicSettings.put("foo", "foo value");
    expectedTopicSettings.put("bar", "bar value");
    expectedTopicSettings.put("baz.bim", "100");
    // A user-supplied cleanup.policy must be stripped from the offset topic settings.
    Map<String, String> topicSettings = new HashMap<>(expectedTopicSettings);
    topicSettings.put("cleanup.policy", "something-else");
    Map<String, String> settings = configs();
    topicSettings.forEach((k, v) -> settings.put(DistributedConfig.OFFSET_STORAGE_PREFIX + k, v));
    DistributedConfig config = new DistributedConfig(settings);
    Map<String, Object> actual = config.offsetStorageTopicSettings();
    assertEquals(expectedTopicSettings, actual);
    assertNotEquals(topicSettings, actual);
}
/**
 * Convenience overload of {@code substVars(String, PropertyContainer, PropertyContainer)}
 * that performs variable substitution using a single property container.
 */
public static String substVars(String val, PropertyContainer pc1) {
    return substVars(val, pc1, null);
}
@Test
public void doesNotThrowNullPointerExceptionForEmptyVariable() throws JoranException {
    // Regression guard: substituting a variable whose value is the empty string
    // must not NPE.
    context.putProperty("var", "");
    OptionHelper.substVars("${var}", context);
}
/**
 * Unwraps to the underlying cache when the requested type matches it, otherwise to this
 * adapter itself; any other type is unsupported. The underlying cache is preferred,
 * matching the original check order.
 */
@Override
public <T> T unwrap(Class<T> clazz) {
    for (Object candidate : new Object[] {cache, this}) {
        if (clazz.isInstance(candidate)) {
            return clazz.cast(candidate);
        }
    }
    throw new IllegalArgumentException("Unwrapping to " + clazz
        + " is not supported by this implementation");
}
@Test
public void unwrap_fail() {
    // Requesting a type that is neither the adapter nor the underlying cache must fail.
    assertThrows(IllegalArgumentException.class, () -> jcache.unwrap(CaffeineConfiguration.class));
}
/**
 * Closes the wrapped GZIP stream, which finishes the GZIP trailer and closes the
 * underlying stream. The previous explicit downcast to GZIPOutputStream was redundant:
 * close() dispatches virtually and no GZIP-specific API is used here.
 */
@Override
public void close() throws IOException {
    delegate.close();
}
@Test
public void testClose() throws IOException {
    CompressionProvider provider = outStream.getCompressionProvider();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    outStream = new GZIPCompressionOutputStream( out, provider ) {
    };
    outStream.close();
    // Writing after close must fail: the GZIP stream is already finished.
    try {
      outStream.write( "This will throw an Exception if the stream is already closed".getBytes() );
      fail();
    } catch ( IOException e ) {
      //Success, The Output Stream was already closed
    }
}
/**
 * Exchanges an authorization code for an access/ID token (OpenID Connect token endpoint).
 * Validates the PKCE code verifier and the session's authentication level against the
 * service metadata, builds the token response, emits a remote audit log and finally
 * discards the one-shot session.
 *
 * NOTE(review): "Acces" in the method name is a typo for "Access"; renaming would break
 * callers, so it is left as-is.
 */
public AccessTokenResponse createAccesToken(AccessTokenRequest request) throws NoSuchAlgorithmException, DienstencatalogusException {
    // The code is single-use: lookup fails if it was never issued or already consumed.
    var openIdSession = openIdRepository.findByCode(request.getCode()).orElseThrow(() -> new OpenIdSessionNotFoundException("OpenIdSession not found"));
    var metadata = dcClient.retrieveMetadataFromDc(request.getClientId());
    // PKCE: the verifier must correspond to the challenge stored at authorization time.
    validateCodeChallenge(request.getCodeVerifier(), openIdSession);
    validateMimumAuthenticationLevel(openIdSession, metadata);
    AccessTokenResponse response = new AccessTokenResponse();
    response.setTokenType("Bearer");
    response.setAccessToken(getAccessToken(openIdSession));
    response.setIdToken(getIdToken(openIdSession));
    response.setState(openIdSession.getState());
    // Audit log code depends on authentication level ("20" -> "743", "25" -> "1124").
    // Other levels map to null here — presumably validated upstream; TODO confirm.
    var logCode = Map.of("20", "743", "25", "1124").get(openIdSession.getAuthenticationLevel());
    adClient.remoteLog(logCode, Map.of("account_id", openIdSession.getAccountId(), "webservice_id", openIdSession.getLegacyWebserviceId(), "webservice_name", openIdSession.getServiceName()));
    // Session is one-shot: delete it after successful token issuance.
    openIdRepository.delete(openIdSession);
    return response;
}
@Test
void createValidAccesTokenTest() throws NoSuchAlgorithmException, DienstencatalogusException, InvalidSignatureException, IOException, ParseException, JOSEException {
    mockDcMetadataResponse();
    AccessTokenRequest accessTokenRequest = new AccessTokenRequest();
    accessTokenRequest.setCode("testCode");
    accessTokenRequest.setCodeVerifier(client.CHALLENGE_VERIFIER);
    accessTokenRequest.setClientId(client.CLIENT_ID);
    // A complete, valid session matching the request's code and PKCE challenge.
    OpenIdSession openIdSession = new OpenIdSession();
    openIdSession.setCodeChallenge(client.CHALLENGE);
    openIdSession.setAuthenticationLevel("20");
    openIdSession.setBsn("PPPPPPP");
    openIdSession.setState("RANDOM");
    openIdSession.setLegacyWebserviceId(1L);
    openIdSession.setAccountId(1L);
    openIdSession.setServiceName("serviceName");
    when(openIdRepository.findByCode(accessTokenRequest.getCode())).thenReturn(Optional.of(openIdSession));
    AccessTokenResponse response = openIdService.createAccesToken(accessTokenRequest);
    // The session's state must be echoed back in the token response.
    assertEquals("RANDOM", response.getState());
}
/**
 * Iteratively traverses a (possibly recursive) Avro schema graph, invoking the visitor
 * for each schema and honoring the visitor's flow-control actions. Uses an explicit
 * deque instead of recursion; already-visited non-terminal schemas are revisited as
 * terminals, which also breaks cycles.
 */
public static <T> T visit(final Schema start, final SchemaVisitor<T> visitor) {
    // Set of Visited Schemas
    IdentityHashMap<Schema, Schema> visited = new IdentityHashMap<>();
    // Stack that contains the Schams to process and afterVisitNonTerminal
    // functions.
    // Deque<Either<Schema, Supplier<SchemaVisitorAction>>>
    // Using either has a cost which we want to avoid...
    Deque<Object> dq = new ArrayDeque<>();
    dq.addLast(start);
    Object current;
    while ((current = dq.pollLast()) != null) {
        if (current instanceof Supplier) {
            // we are executing a non terminal post visit.
            SchemaVisitorAction action = ((Supplier<SchemaVisitorAction>) current).get();
            switch (action) {
            case CONTINUE:
                break;
            case SKIP_SUBTREE:
                // Skipping a subtree is meaningless after the subtree was already visited.
                throw new UnsupportedOperationException();
            case SKIP_SIBLINGS:
                // Drop all remaining sibling schemas up to the next post-visit marker.
                while (dq.getLast() instanceof Schema) {
                    dq.removeLast();
                }
                break;
            case TERMINATE:
                return visitor.get();
            default:
                throw new UnsupportedOperationException("Invalid action " + action);
            }
        } else {
            Schema schema = (Schema) current;
            boolean terminate;
            if (!visited.containsKey(schema)) {
                Schema.Type type = schema.getType();
                switch (type) {
                case ARRAY:
                    terminate = visitNonTerminal(visitor, schema, dq, Collections.singleton(schema.getElementType()));
                    visited.put(schema, schema);
                    break;
                case RECORD:
                    // Fields are pushed in reverse so they are popped in declaration order.
                    terminate = visitNonTerminal(visitor, schema, dq, () -> schema.getFields().stream().map(Field::schema)
                        .collect(Collectors.toCollection(ArrayDeque::new)).descendingIterator());
                    visited.put(schema, schema);
                    break;
                case UNION:
                    terminate = visitNonTerminal(visitor, schema, dq, schema.getTypes());
                    visited.put(schema, schema);
                    break;
                case MAP:
                    terminate = visitNonTerminal(visitor, schema, dq, Collections.singleton(schema.getValueType()));
                    visited.put(schema, schema);
                    break;
                case NULL:
                case BOOLEAN:
                case BYTES:
                case DOUBLE:
                case ENUM:
                case FIXED:
                case FLOAT:
                case INT:
                case LONG:
                case STRING:
                    terminate = visitTerminal(visitor, schema, dq);
                    break;
                default:
                    throw new UnsupportedOperationException("Invalid type " + type);
                }
            } else {
                // Already-seen schema (shared reference or recursion): treat as terminal.
                terminate = visitTerminal(visitor, schema, dq);
            }
            if (terminate) {
                return visitor.get();
            }
        }
    }
    return visitor.get();
}
@Test
void visit8() {
    // SKIP_SUBTREE during a non-terminal post-visit is unsupported and must throw.
    assertThrows(UnsupportedOperationException.class, () -> {
        String s8 = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
            + "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"cst2\", \"fields\": "
            + "[{\"name\": \"f11\", \"type\": \"int\"}]}},"
            + "{\"name\": \"f2\", \"type\": \"int\"}"
            + "]}";
        Schemas.visit(new Schema.Parser().parse(s8), new TestVisitor());
    });
}
/**
 * Configures a scalar value: expands secret/variable references first, then converts
 * the resolved string to the target type via Stapler's converter registry.
 */
@NonNull
@Override
public Object configure(CNode config, ConfigurationContext context) throws ConfiguratorException {
    final String resolved = context.getSecretSourceResolver()
            .resolve(config.asScalar().toString());
    return Stapler.lookupConverter(target).convert(target, resolved);
}
@Test
public void _string_env() throws Exception {
    // "${ENV_FOR_TEST}" must be resolved against the environment before conversion.
    environment.set("ENV_FOR_TEST", "abc");
    Configurator c = registry.lookupOrFail(String.class);
    final Object value = c.configure(new Scalar("${ENV_FOR_TEST}"), context);
    assertEquals("abc", value);
}
/**
 * Returns the distinct values of {@code left}, in first-occurrence order, excluding any
 * value present in {@code right}. SQL NULL-propagation: a null input yields null.
 */
@Udf
public <T> List<T> except(
    @UdfParameter(description = "Array of values") final List<T> left,
    @UdfParameter(description = "Array of exceptions") final List<T> right) {
  if (left == null || right == null) {
    return null;
  }
  final Set<T> exclusions = new HashSet<>(right);
  final Set<T> seen = new HashSet<>();
  final List<T> result = new ArrayList<>();
  for (final T value : left) {
    // seen.add() returns false for duplicates, giving de-duplication in encounter order.
    if (!exclusions.contains(value) && seen.add(value)) {
      result.add(value);
    }
  }
  return result;
}
@Test
public void shouldExceptFromArrayContainingNulls() {
    // null is a regular value: it survives the except and duplicates are collapsed.
    final List<String> input1 = Arrays.asList("foo", null, "foo", "bar");
    final List<String> input2 = Arrays.asList("foo");
    final List<String> result = udf.except(input1, input2);
    assertThat(result, contains(null, "bar"));
}
/**
 * Constant-folds seconds_add(datetime, int): adds {@code second} seconds to {@code date}.
 * Uses the *OrNull factory — presumably yielding a NULL operator for out-of-range
 * results; confirm against ConstantOperator.
 */
@ConstantFunction(name = "seconds_add", argTypes = {DATETIME, INT}, returnType = DATETIME, isMonotonic = true)
public static ConstantOperator secondsAdd(ConstantOperator date, ConstantOperator second) {
    return ConstantOperator.createDatetimeOrNull(date.getDatetime().plusSeconds(second.getInt()));
}
@Test
public void secondsAdd() {
    // 2015-03-23 09:23:55 + 10 seconds -> 09:24:05.
    assertEquals("2015-03-23T09:24:05",
        ScalarOperatorFunctions.secondsAdd(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}
@ExecuteOn(TaskExecutors.IO)
@Get(uri = "{namespace}/files/search")
@Operation(tags = {"Files"}, summary = "Find files which path contain the given string in their URI")
public List<String> search(
    @Parameter(description = "The namespace id") @PathVariable String namespace,
    @Parameter(description = "The string the file path should contain") @QueryValue String q
) throws IOException, URISyntaxException {
    // All namespace files live under this base URI; matches are reported relative to it,
    // prefixed with "/".
    URI base = NamespaceFile.of(namespace).uri();
    List<String> matches = new ArrayList<>();
    for (URI storageUri : storageInterface.allByPrefix(tenantService.resolveTenant(), base, false)) {
        String relativePath = "/" + base.relativize(storageUri).getPath();
        if (relativePath.contains(q)) {
            matches.add(relativePath);
        }
    }
    // Return an unmodifiable list, matching Stream.toList() semantics.
    return List.copyOf(matches);
}
@SuppressWarnings("unchecked")
@Test
void search() throws IOException {
    // Fixture: two root files, two files in /folder, one in /folder/sub.
    storageInterface.put(null, toNamespacedStorageUri(NAMESPACE, URI.create("/file.txt")), new ByteArrayInputStream(new byte[0]));
    storageInterface.put(null, toNamespacedStorageUri(NAMESPACE, URI.create("/another_file.json")), new ByteArrayInputStream(new byte[0]));
    storageInterface.put(null, toNamespacedStorageUri(NAMESPACE, URI.create("/folder/file.txt")), new ByteArrayInputStream(new byte[0]));
    storageInterface.put(null, toNamespacedStorageUri(NAMESPACE, URI.create("/folder/some.yaml")), new ByteArrayInputStream(new byte[0]));
    storageInterface.put(null, toNamespacedStorageUri(NAMESPACE, URI.create("/folder/sub/script.py")), new ByteArrayInputStream(new byte[0]));
    // Substring match anywhere in the relative path, not just the file name.
    String res = client.toBlocking().retrieve(HttpRequest.GET("/api/v1/namespaces/" + NAMESPACE + "/files/search?q=file"));
    assertThat((Iterable<String>) JacksonMapper.toObject(res), containsInAnyOrder("/file.txt", "/another_file.json", "/folder/file.txt"));
    res = client.toBlocking().retrieve(HttpRequest.GET("/api/v1/namespaces/" + NAMESPACE + "/files/search?q=file.txt"));
    assertThat((Iterable<String>) JacksonMapper.toObject(res), containsInAnyOrder("/file.txt", "/folder/file.txt"));
    res = client.toBlocking().retrieve(HttpRequest.GET("/api/v1/namespaces/" + NAMESPACE + "/files/search?q=folder"));
    assertThat((Iterable<String>) JacksonMapper.toObject(res), containsInAnyOrder("/folder/file.txt", "/folder/some.yaml", "/folder/sub/script.py"));
    res = client.toBlocking().retrieve(HttpRequest.GET("/api/v1/namespaces/" + NAMESPACE + "/files/search?q=.py"));
    assertThat((Iterable<String>) JacksonMapper.toObject(res), containsInAnyOrder("/folder/sub/script.py"));
}
/**
 * Looks up a single recording by id in the archive catalog and feeds its descriptor to
 * the given consumer. Returns whether an entry with that id was found. The catalog is
 * opened read-only for the duration of the call and closed before returning.
 */
public static boolean listRecording(
    final File archiveDir,
    final long recordingId,
    final RecordingDescriptorConsumer consumer)
{
    final RecordingDescriptorConsumerAdapter adapter = new RecordingDescriptorConsumerAdapter(consumer);
    try (Catalog catalog = new Catalog(archiveDir, System::currentTimeMillis))
    {
        return catalog.forEntry(recordingId, adapter);
    }
}
@Test
void shouldListRecordingByRecordingId() {
    final boolean found = CatalogView.listRecording(archiveDir, recordingTwoId, mockRecordingDescriptorConsumer);
    assertTrue(found);
    // Exactly one descriptor callback, carrying the second recording's metadata.
    verify(mockRecordingDescriptorConsumer).onRecordingDescriptor(
        Aeron.NULL_VALUE, Aeron.NULL_VALUE, recordingTwoId,
        5L, Aeron.NULL_VALUE, 11L, Aeron.NULL_VALUE, 0, SEGMENT_LENGTH, TERM_LENGTH, MTU_LENGTH, 8, 2,
        "channelH", "channelH?tag=f", "sourceV");
    verifyNoMoreInteractions(mockRecordingDescriptorConsumer);
}
/**
 * A node is a "final leaf" when it is an actual LeafNode, or when it has no
 * child nodes at all (null or empty child collection).
 *
 * @param node the tree-model node to inspect.
 * @return true when the node terminates the tree at this point.
 */
protected boolean isFinalLeaf(final Node node) {
    if (node instanceof LeafNode) {
        return true;
    }
    // A non-leaf node still counts as final when it has nothing beneath it.
    return node.getNodes() == null || node.getNodes().isEmpty();
}
// Verifies isFinalLeaf(): true for a LeafNode and for a childless ClassifierNode,
// false once the ClassifierNode gains a child. A fresh factory instance is built per call.
@Test void isFinalLeaf() { Node node = new LeafNode(); DATA_TYPE targetType = DATA_TYPE.STRING; KiePMMLTreeModelNodeASTFactory.factory(new HashMap<>(), Collections.emptyList(), TreeModel.NoTrueChildStrategy.RETURN_NULL_PREDICTION, targetType).isFinalLeaf(node); assertThat(KiePMMLTreeModelNodeASTFactory.factory(new HashMap<>(), Collections.emptyList(), TreeModel.NoTrueChildStrategy.RETURN_NULL_PREDICTION, targetType).isFinalLeaf(node)).isTrue(); node = new ClassifierNode(); assertThat(KiePMMLTreeModelNodeASTFactory.factory(new HashMap<>(), Collections.emptyList(), TreeModel.NoTrueChildStrategy.RETURN_NULL_PREDICTION, targetType).isFinalLeaf(node)).isTrue(); node.addNodes(new LeafNode()); assertThat(KiePMMLTreeModelNodeASTFactory.factory(new HashMap<>(), Collections.emptyList(), TreeModel.NoTrueChildStrategy.RETURN_NULL_PREDICTION, targetType).isFinalLeaf(node)).isFalse(); }
/**
 * Finds the first request cookie whose name equals the given one.
 *
 * @param cookieName name to match exactly (case-sensitive).
 * @param request    request whose cookies are scanned; may carry a null cookie array.
 * @return the first matching cookie, or empty when none match or no cookies exist.
 */
public static Optional<Cookie> findCookie(String cookieName, HttpRequest request) {
    Cookie[] cookies = request.getCookies();
    if (cookies == null) {
        return Optional.empty();
    }
    // Linear scan preserving array order; first match wins, same as findFirst().
    for (Cookie candidate : cookies) {
        if (cookieName.equals(candidate.getName())) {
            return Optional.of(candidate);
        }
    }
    return Optional.empty();
}
// Verifies findCookie() returns empty (rather than throwing) for a request without cookies.
@Test public void does_not_fail_to_find_cookie_when_no_cookie() { assertThat(findCookie("unknown", request)).isEmpty(); }
// Builds a single INSERT-INTO-statistics SQL statement for a batch of primitive-type columns:
// an INSERT prefix, a WITH base_cte_table CTE holding the sampled rows, then one
// per-column SELECT (aliased col_1..col_N) joined by UNION ALL. The UNION ALL separator is
// emitted between entries only (idx != size). Statement order matters to downstream parsing.
public String generatePrimitiveTypeColumnTask(long tableId, long dbId, String tableName, String dbName, List<ColumnStats> primitiveTypeStats, TabletSampleManager manager) { String prefix = "INSERT INTO " + STATISTICS_DB_NAME + "." + SAMPLE_STATISTICS_TABLE_NAME; StringBuilder builder = new StringBuilder(); builder.append(prefix).append(" "); builder.append("WITH base_cte_table as ("); String queryDataSql = generateQueryDataSql(tableName, dbName, primitiveTypeStats, manager); builder.append(queryDataSql).append(") "); int idx = 0; int size = primitiveTypeStats.size(); for (ColumnStats columnStats : primitiveTypeStats) { idx++; builder.append(generateQueryColumnSql(tableId, dbId, tableName, dbName, columnStats, "col_" + idx)); if (idx != size) { builder.append(" UNION ALL "); } } return builder.toString(); }
// Verifies the generated primitive-type statistics SQL parses as a single InsertStmt whose
// query is a UnionRelation with exactly 2 branches, for sub-field columns of a struct table.
@Test public void generateSubFieldTypeColumnTask() { SampleInfo sampleInfo = tabletSampleManager.generateSampleInfo("test", "t_struct"); List<String> columnNames = Lists.newArrayList("c1", "c4.b", "c6.c.b"); List<Type> columnTypes = Lists.newArrayList(Type.DATE, new ArrayType(Type.ANY_STRUCT), Type.INT); ColumnSampleManager columnSampleManager = ColumnSampleManager.init(columnNames, columnTypes, table, sampleInfo); List<List<ColumnStats>> columnStatsBatch = columnSampleManager.splitPrimitiveTypeStats(); String sql = sampleInfo.generatePrimitiveTypeColumnTask(table.getId(), db.getId(), table.getName(), db.getFullName(), columnStatsBatch.get(0), tabletSampleManager); List<StatementBase> stmt = SqlParser.parse(sql, connectContext.getSessionVariable()); Assert.assertTrue(stmt.get(0) instanceof InsertStmt); InsertStmt insertStmt = (InsertStmt) stmt.get(0); Assert.assertTrue(insertStmt.getQueryStatement().getQueryRelation() instanceof UnionRelation); UnionRelation unionRelation = (UnionRelation) insertStmt.getQueryStatement().getQueryRelation(); Assert.assertTrue(unionRelation.getRelations().size() == 2); }
// Load-balanced createKey: delegates to one of the underlying KMSClientProvider instances
// via doOp, which handles failover/retry. nextIdx() picks the starting provider;
// the trailing 'false' flag is passed through to doOp (semantics defined there).
@Override public KeyVersion createKey(final String name, final byte[] material, final Options options) throws IOException { return doOp(new ProviderCallable<KeyVersion>() { @Override public KeyVersion call(KMSClientProvider provider) throws IOException { return provider.createKey(name, material, options); } }, nextIdx(), false); }
// With failover max retries = 10 and both providers always timing out, the call must fail
// with ConnectTimeoutException after 11 total attempts alternating p1/p2 (6 on p1, 5 on p2).
@Test public void testClientRetriesSpecifiedNumberOfTimes() throws Exception { Configuration conf = new Configuration(); conf.setInt( CommonConfigurationKeysPublic.KMS_CLIENT_FAILOVER_MAX_RETRIES_KEY, 10); KMSClientProvider p1 = mock(KMSClientProvider.class); when(p1.createKey(Mockito.anyString(), Mockito.any(Options.class))) .thenThrow(new ConnectTimeoutException("p1")); KMSClientProvider p2 = mock(KMSClientProvider.class); when(p2.createKey(Mockito.anyString(), Mockito.any(Options.class))) .thenThrow(new ConnectTimeoutException("p2")); when(p1.getKMSUrl()).thenReturn("p1"); when(p2.getKMSUrl()).thenReturn("p2"); LoadBalancingKMSClientProvider kp = new LoadBalancingKMSClientProvider( new KMSClientProvider[] {p1, p2}, 0, conf); try { kp.createKey("test3", new Options(conf)); fail("Should fail"); } catch (Exception e) { assert (e instanceof ConnectTimeoutException); } verify(p1, Mockito.times(6)).createKey(Mockito.eq("test3"), Mockito.any(Options.class)); verify(p2, Mockito.times(5)).createKey(Mockito.eq("test3"), Mockito.any(Options.class)); }
/**
 * Builds the canonical configuration key for a method on a target type, e.g.
 * {@code List#iterator()} or {@code Foo#bar(String,int)}. Parameter types are resolved
 * against the target type so inherited generic methods report the child's concrete types.
 *
 * @param targetType the interface/class the method is invoked against.
 * @param method     the method to describe.
 * @return "SimpleName#methodName(ParamSimpleName,...)"
 */
public static String configKey(Class targetType, Method method) {
    StringBuilder key = new StringBuilder(targetType.getSimpleName());
    key.append('#').append(method.getName()).append('(');
    boolean first = true;
    for (Type param : method.getGenericParameterTypes()) {
        // Resolve type variables (e.g. Iterator<T> -> Iterator<String>) against targetType.
        Type resolved = Types.resolve(targetType, targetType, param);
        if (!first) {
            key.append(',');
        }
        key.append(Types.getRawType(resolved).getSimpleName());
        first = false;
    }
    return key.append(')').toString();
}
// Verifies configKey() reports the child type's simple name even when the method is
// declared on a parent interface (Iterable#iterator resolved against List).
@Test void configKeyUsesChildType() throws Exception { assertThat(Feign.configKey(List.class, Iterable.class.getDeclaredMethod("iterator"))) .isEqualTo("List#iterator()"); }
// Loads an existing Logstash keystore from the configured path.
// Flow: (1) fail fast with LoadException if the file does not exist; (2) init() resolves
// path/password from config; (3) under the lock, KeyStore.load is attempted — an IOException
// caused by UnrecoverableKeyException maps to AccessException (bad permissions/password),
// any other IOException to LoadException (file is not a keystore); (4) the LOGSTASH_MARKER
// secret must be present, otherwise the file is a generic keystore, not a Logstash one.
// SecretStoreExceptions are rethrown as-is; anything else is wrapped as UnknownException.
// The finally block always releases the lock and scrubs secrets from the config.
@Override public JavaKeyStore load(SecureConfig config) { if (!exists(config)) { throw new SecretStoreException.LoadException( String.format("Can not find Logstash keystore at %s. Please verify this file exists and is a valid Logstash keystore.", config.getPlainText("keystore.file") == null ? "<undefined>" : new String(config.getPlainText("keystore.file")))); } try { init(config); lock.lock(); try (final InputStream is = Files.newInputStream(keyStorePath)) { try { keyStore.load(is, this.keyStorePass); } catch (IOException ioe) { if (ioe.getCause() instanceof UnrecoverableKeyException) { throw new SecretStoreException.AccessException( String.format("Can not access Logstash keystore at %s. Please verify correct file permissions and keystore password.", keyStorePath.toAbsolutePath()), ioe); } else { throw new SecretStoreException.LoadException(String.format("Found a file at %s, but it is not a valid Logstash keystore.", keyStorePath.toAbsolutePath().toString()), ioe); } } byte[] marker = retrieveSecret(LOGSTASH_MARKER); if (marker == null) { throw new SecretStoreException.LoadException(String.format("Found a keystore at %s, but it is not a Logstash keystore.", keyStorePath.toAbsolutePath().toString())); } LOGGER.debug("Using existing keystore at {}", keyStorePath.toAbsolutePath()); return this; } } catch (SecretStoreException sse) { throw sse; } catch (Exception e) { //should never happen throw new SecretStoreException.UnknownException("Error while trying to load the Logstash keystore", e); } finally { releaseLock(lock); config.clearValues(); } }
// A valid JKS file that lacks the Logstash marker secret must be rejected with LoadException.
@Test public void notLogstashKeystoreNoMarker() throws Exception { withDefinedPassConfig.add("keystore.file", Paths.get(this.getClass().getClassLoader().getResource("not.a.logstash.keystore").toURI()).toString().toCharArray().clone()); assertThrows(SecretStoreException.LoadException.class, () -> { new JavaKeyStore().load(withDefinedPassConfig); }); }
// Closes the wrapped data source asynchronously: no-op unless the pool is AutoCloseable;
// otherwise graceDestroy() runs on a throwaway single-thread executor, which is shut down
// immediately so the thread exits once the graceful close completes.
public void asyncDestroy() { if (!(dataSource instanceof AutoCloseable)) { return; } ExecutorService executor = Executors.newSingleThreadExecutor(); executor.execute(this::graceDestroy); executor.shutdown(); }
// asyncDestroy() must be a silent no-op for a data source that is not AutoCloseable.
@Test void assertAsyncDestroyWithoutAutoCloseableDataSource() { assertDoesNotThrow(() -> new DataSourcePoolDestroyer(new MockedDataSource()).asyncDestroy()); }
// Describes consumer groups via the coordinator-aware admin driver: builds a per-group
// future keyed by CoordinatorKey, runs the DescribeConsumerGroupsHandler, then re-keys the
// result map by the plain group id string for the public DescribeConsumerGroupsResult.
@Override public DescribeConsumerGroupsResult describeConsumerGroups(final Collection<String> groupIds, final DescribeConsumerGroupsOptions options) { SimpleAdminApiFuture<CoordinatorKey, ConsumerGroupDescription> future = DescribeConsumerGroupsHandler.newFuture(groupIds); DescribeConsumerGroupsHandler handler = new DescribeConsumerGroupsHandler(options.includeAuthorizedOperations(), logContext); invokeDriver(handler, future, options.timeoutMs); return new DescribeConsumerGroupsResult(future.all().entrySet().stream() .collect(Collectors.toMap(entry -> entry.getKey().idValue, Map.Entry::getValue))); }
// Describes two groups (a consumer group and a connect group) after the new
// ConsumerGroupDescribe API is rejected, forcing fallback to classic DescribeGroups;
// asserts both groups appear in the result keyed by group id.
// NOTE(review): both groupMetadata entries are added to group0Data, while groupConnectData
// stays empty yet is still sent as the second prepared response — presumably intentional
// for this mock setup, but worth confirming against the driver's request batching.
@Test public void testDescribeMultipleConsumerGroups() { try (AdminClientUnitTestEnv env = new AdminClientUnitTestEnv(mockCluster(1, 0))) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create()); env.kafkaClient().prepareResponse(prepareFindCoordinatorResponse(Errors.NONE, env.cluster().controller())); // The first request sent will be a ConsumerGroupDescribe request. Let's // fail it in order to fail back to using the classic version. env.kafkaClient().prepareUnsupportedVersionResponse( request -> request instanceof ConsumerGroupDescribeRequest); TopicPartition myTopicPartition0 = new TopicPartition("my_topic", 0); TopicPartition myTopicPartition1 = new TopicPartition("my_topic", 1); TopicPartition myTopicPartition2 = new TopicPartition("my_topic", 2); final List<TopicPartition> topicPartitions = new ArrayList<>(); topicPartitions.add(0, myTopicPartition0); topicPartitions.add(1, myTopicPartition1); topicPartitions.add(2, myTopicPartition2); final ByteBuffer memberAssignment = ConsumerProtocol.serializeAssignment(new ConsumerPartitionAssignor.Assignment(topicPartitions)); byte[] memberAssignmentBytes = new byte[memberAssignment.remaining()]; memberAssignment.get(memberAssignmentBytes); DescribeGroupsResponseData group0Data = new DescribeGroupsResponseData(); group0Data.groups().add(DescribeGroupsResponse.groupMetadata( GROUP_ID, Errors.NONE, "", ConsumerProtocol.PROTOCOL_TYPE, "", asList( DescribeGroupsResponse.groupMember("0", null, "clientId0", "clientHost", memberAssignmentBytes, null), DescribeGroupsResponse.groupMember("1", null, "clientId1", "clientHost", memberAssignmentBytes, null) ), Collections.emptySet())); DescribeGroupsResponseData groupConnectData = new DescribeGroupsResponseData(); group0Data.groups().add(DescribeGroupsResponse.groupMetadata( "group-connect-0", Errors.NONE, "", "connect", "", asList( DescribeGroupsResponse.groupMember("0", null, "clientId0", "clientHost", memberAssignmentBytes, null), DescribeGroupsResponse.groupMember("1", 
null, "clientId1", "clientHost", memberAssignmentBytes, null) ), Collections.emptySet())); env.kafkaClient().prepareResponse(new DescribeGroupsResponse(group0Data)); env.kafkaClient().prepareResponse(new DescribeGroupsResponse(groupConnectData)); Collection<String> groups = new HashSet<>(); groups.add(GROUP_ID); groups.add("group-connect-0"); final DescribeConsumerGroupsResult result = env.adminClient().describeConsumerGroups(groups); assertEquals(2, result.describedGroups().size()); assertEquals(groups, result.describedGroups().keySet()); } }
/**
 * Plans the next batch of splits for continuous enumeration. The table is refreshed
 * first so the latest snapshot is visible; a null position means this is the very
 * first discovery pass.
 */
@Override
public ContinuousEnumerationResult planSplits(IcebergEnumeratorPosition lastPosition) {
    table.refresh();
    return lastPosition == null
        ? discoverInitialSplits()
        : discoverIncrementalSplits(lastPosition);
}
// INCREMENTAL_FROM_SNAPSHOT_TIMESTAMP with the timestamp of snapshot2: the initial plan
// points at snapshot1 (snapshot2's parent, for inclusive semantics) with no splits; the
// second plan advances snapshot1 -> snapshot2 and discovers exactly dataFile2; then three
// further empty cycles are verified.
@Test public void testIncrementalFromSnapshotTimestamp() throws Exception { appendTwoSnapshots(); ScanContext scanContext = ScanContext.builder() .startingStrategy(StreamingStartingStrategy.INCREMENTAL_FROM_SNAPSHOT_TIMESTAMP) .startSnapshotTimestamp(snapshot2.timestampMillis()) .build(); ContinuousSplitPlannerImpl splitPlanner = new ContinuousSplitPlannerImpl(TABLE_RESOURCE.tableLoader().clone(), scanContext, null); ContinuousEnumerationResult initialResult = splitPlanner.planSplits(null); assertThat(initialResult.fromPosition()).isNull(); // For inclusive behavior, the initial result should point to snapshot1 (as snapshot2's parent). assertThat(initialResult.toPosition().snapshotId().longValue()) .isEqualTo(snapshot1.snapshotId()); assertThat(initialResult.toPosition().snapshotTimestampMs().longValue()) .isEqualTo(snapshot1.timestampMillis()); assertThat(initialResult.splits()).isEmpty(); ContinuousEnumerationResult secondResult = splitPlanner.planSplits(initialResult.toPosition()); assertThat(secondResult.fromPosition().snapshotId().longValue()) .isEqualTo(snapshot1.snapshotId()); assertThat(secondResult.fromPosition().snapshotTimestampMs().longValue()) .isEqualTo(snapshot1.timestampMillis()); assertThat(secondResult.toPosition().snapshotId().longValue()) .isEqualTo(snapshot2.snapshotId()); assertThat(secondResult.toPosition().snapshotTimestampMs().longValue()) .isEqualTo(snapshot2.timestampMillis()); IcebergSourceSplit split = Iterables.getOnlyElement(secondResult.splits()); assertThat(split.task().files()).hasSize(1); Set<String> discoveredFiles = split.task().files().stream() .map(fileScanTask -> fileScanTask.file().path().toString()) .collect(Collectors.toSet()); // should discover dataFile2 appended in snapshot2 Set<String> expectedFiles = ImmutableSet.of(dataFile2.path().toString()); assertThat(discoveredFiles).containsExactlyElementsOf(expectedFiles); IcebergEnumeratorPosition lastPosition = secondResult.toPosition(); for (int i = 0; i < 3; ++i) { 
lastPosition = verifyOneCycle(splitPlanner, lastPosition).lastPosition; } }
// Updates a tenant package; @DSTransactional covers the local transaction across the
// multiple data sources involved. If the package's menu set changed, every tenant using
// the package has its role menus re-synchronized.
@Override @DSTransactional // multiple data sources: @DSTransactional guarantees the local transaction and data-source switching public void updateTenantPackage(TenantPackageSaveReqVO updateReqVO) { // verify the package exists TenantPackageDO tenantPackage = validateTenantPackageExists(updateReqVO.getId()); // perform the update TenantPackageDO updateObj = BeanUtils.toBean(updateReqVO, TenantPackageDO.class); tenantPackageMapper.updateById(updateObj); // if the menus changed, update the menus of every tenant on this package if (!CollUtil.isEqualList(tenantPackage.getMenuIds(), updateReqVO.getMenuIds())) { List<TenantDO> tenants = tenantService.getTenantListByPackageId(tenantPackage.getId()); tenants.forEach(tenant -> tenantService.updateTenantRoleMenu(tenant.getId(), updateReqVO.getMenuIds())); } }
// Happy-path update: persists a package, updates it, asserts the stored row matches the
// request, and verifies each tenant on the package had its role menus re-synced.
@Test public void testUpdateTenantPackage_success() { // mock data TenantPackageDO dbTenantPackage = randomPojo(TenantPackageDO.class, o -> o.setStatus(randomCommonStatus())); tenantPackageMapper.insert(dbTenantPackage);// @Sql: insert an existing row first // prepare the request TenantPackageSaveReqVO reqVO = randomPojo(TenantPackageSaveReqVO.class, o -> { o.setId(dbTenantPackage.getId()); // set the ID to update o.setStatus(randomCommonStatus()); }); // mock collaborators Long tenantId01 = randomLongId(); Long tenantId02 = randomLongId(); when(tenantService.getTenantListByPackageId(eq(reqVO.getId()))).thenReturn( asList(randomPojo(TenantDO.class, o -> o.setId(tenantId01)), randomPojo(TenantDO.class, o -> o.setId(tenantId02)))); // invoke tenantPackageService.updateTenantPackage(reqVO); // verify the update was applied correctly TenantPackageDO tenantPackage = tenantPackageMapper.selectById(reqVO.getId()); // fetch the latest row assertPojoEquals(reqVO, tenantPackage); // verify the tenants' menus were updated verify(tenantService).updateTenantRoleMenu(eq(tenantId01), eq(reqVO.getMenuIds())); verify(tenantService).updateTenantRoleMenu(eq(tenantId02), eq(reqVO.getMenuIds())); }
/**
 * Wraps the given varargs into a fixed-size list backed by the array
 * (see {@link java.util.Arrays#asList}).
 *
 * @param values elements to wrap; an empty array is valid and yields an empty list.
 * @return the backing list view of {@code values}.
 * @throws IllegalArgumentException if {@code values} is null (via {@code Assert.notNull}).
 */
@SafeVarargs
public static <T> List<T> newLists(T... values) {
    // Fix: the original only ran the null assertion inside
    // `values == null || values.length == 0`, so for an empty (non-null) array it
    // performed a pointless null-check. Asserting unconditionally is behaviorally
    // identical (throws on null, passes otherwise) and removes the dead logic.
    Assert.notNull(values, "values not is null.");
    return Arrays.asList(values);
}
// An empty (non-null) varargs array must produce an empty list, not throw.
@Test public void newLists() { final Object[] values = {}; Assert.assertEquals(new ArrayList(), CollectionKit.newLists(values)); }
/**
 * Creates a MappedCondition that applies {@code mapping} before evaluating
 * {@code condition}, described by the formatted {@code mappingDescription}.
 *
 * @param mapping            transformation applied to the tested value.
 * @param condition          condition evaluated on the mapped value; null-checked by the
 *                           MappedCondition constructor.
 * @param mappingDescription description template; must not be null.
 * @param args               arguments substituted into the description template.
 * @throws NullPointerException if {@code mappingDescription} is null.
 */
public static <FROM, TO> MappedCondition<FROM, TO> mappedCondition(Function<FROM, TO> mapping, Condition<TO> condition,
                                                                   String mappingDescription, Object... args) {
    requireNonNull(mappingDescription, "The given mappingDescription should not be null");
    String description = format(mappingDescription, args);
    return new MappedCondition<>(mapping, condition, description);
}
// A null condition (no-description overload) must fail fast with an NPE carrying the
// documented message.
@Test void mappedCondition_without_description_and_null_condition_should_throw_NPE() { // GIVEN Condition<String> nullCondition = null; // WHEN/THEN thenNullPointerException().isThrownBy(() -> mappedCondition(StringBuilder::toString, nullCondition)) .withMessage("The given condition should not be null"); }
// Athenz authorization filter. Maps the request to a resource/action pair; an unmapped
// request is accepted as-is (empty result). A mapped request is checked against ZPE:
// ALLOW populates the request attributes and passes; any other result returns 403 with
// the ZPE result description. IllegalArgumentException from credential parsing maps to
// 401. Accepted/rejected outcomes are counted in metrics along every path.
@Override public Optional<ErrorResponse> filter(DiscFilterRequest request) { try { Optional<ResourceNameAndAction> resourceMapping = requestResourceMapper.getResourceNameAndAction(request); log.log(Level.FINE, () -> String.format("Resource mapping for '%s': %s", request, resourceMapping)); if (resourceMapping.isEmpty()) { incrementAcceptedMetrics(request, false, Optional.empty()); return Optional.empty(); } Result result = checkAccessAllowed(request, resourceMapping.get()); AuthorizationResult.Type resultType = result.zpeResult.type(); setAttribute(request, RESULT_ATTRIBUTE, resultType.name()); if (resultType == AuthorizationResult.Type.ALLOW) { populateRequestWithResult(request, result); incrementAcceptedMetrics(request, true, Optional.of(result)); return Optional.empty(); } log.log(Level.FINE, () -> String.format("Forbidden (403) for '%s': %s", request, resultType.name())); incrementRejectedMetrics(request, FORBIDDEN, resultType.name(), Optional.of(result)); return Optional.of(new ErrorResponse(FORBIDDEN, "Access forbidden: " + resultType.getDescription())); } catch (IllegalArgumentException e) { log.log(Level.FINE, () -> String.format("Unauthorized (401) for '%s': %s", request, e.getMessage())); incrementRejectedMetrics(request, UNAUTHORIZED, "Unauthorized", Optional.empty()); return Optional.of(new ErrorResponse(UNAUTHORIZED, e.getMessage())); } }
// With an allowing ZPE, a role-token request must pass unfiltered and record the ALLOW
// result, matched credential type and matched role on the request.
@Test void accepts_request_with_role_token() { AthenzAuthorizationFilter filter = createFilter(new AllowingZpe(), List.of()); MockResponseHandler responseHandler = new MockResponseHandler(); DiscFilterRequest request = createRequest(ROLE_TOKEN, null, null); filter.filter(request, responseHandler); assertAuthorizationResult(request, Type.ALLOW); assertRequestNotFiltered(responseHandler); assertMatchedCredentialType(request, EnabledCredentials.ROLE_TOKEN); assertMatchedRole(request, ROLE); }
@Override public int run(String[] args) throws Exception { if (args.length != 2) { return usage(args); } String action = args[0]; String name = args[1]; int result; if (A_LOAD.equals(action)) { result = loadClass(name); } else if (A_CREATE.equals(action)) { //first load to separate load errors from create result = loadClass(name); if (result == SUCCESS) { //class loads, so instantiate it result = createClassInstance(name); } } else if (A_RESOURCE.equals(action)) { result = loadResource(name); } else if (A_PRINTRESOURCE.equals(action)) { result = dumpResource(name); } else { result = usage(args); } return result; }
// The "resource" action must succeed for a resource known to be on the classpath.
@Test public void testFindsResource() throws Throwable { run(FindClass.SUCCESS, FindClass.A_RESOURCE, "org/apache/hadoop/util/TestFindClass.class"); }
/**
 * Structural equality: same runtime class and all fields equal. Primitive fields are
 * compared first (cheap), then object fields via Objects.equals in the same order as
 * the original early-return chain, so short-circuit behavior is preserved.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    DefaultQueryCacheEventData other = (DefaultQueryCacheEventData) o;
    return sequence == other.sequence
            && eventType == other.eventType
            && partitionId == other.partitionId
            && Objects.equals(key, other.key)
            && Objects.equals(value, other.value)
            && Objects.equals(dataKey, other.dataKey)
            && Objects.equals(dataNewValue, other.dataNewValue)
            && Objects.equals(dataOldValue, other.dataOldValue)
            && Objects.equals(mapName, other.mapName)
            && Objects.equals(serializationService, other.serializationService);
}
// equals(): reflexive, equal for same-attribute instances, and unequal when any single
// field (sequence, type, partition, key/value, data fields, serialization service, map name)
// differs, or when compared to null / a foreign type.
@Test public void testEquals() { assertEquals(queryCacheEventData, queryCacheEventData); assertEquals(queryCacheEventData, queryCacheEventDataSameAttributes); assertNotEquals(null, queryCacheEventData); assertNotEquals(new Object(), queryCacheEventData); assertNotEquals(queryCacheEventData, queryCacheEventDataOtherSequence); assertNotEquals(queryCacheEventData, queryCacheEventDataOtherEventType); assertNotEquals(queryCacheEventData, queryCacheEventDataOtherPartitionId); assertNotEquals(queryCacheEventData, queryCacheEventDataOtherKey); assertNotEquals(queryCacheEventData, queryCacheEventDataOtherValue); assertNotEquals(queryCacheEventData, queryCacheEventDataOtherDataKey); assertNotEquals(queryCacheEventData, queryCacheEventDataOtherDataNewValue); assertNotEquals(queryCacheEventData, queryCacheEventDataOtherDataOldValue); assertNotEquals(queryCacheEventData, queryCacheEventDataOtherSerializationService); assertNotEquals(queryCacheEventData, queryCacheEventDataOtherMapName); }
// Coerces the left/right typed expressions of a comparison to compatible types.
// The branch cascade is order-sensitive: same-class or unification expressions pass
// through untouched; incompatible pairs throw; int/long vs double casts the LEFT side to
// double; otherwise only the RIGHT side is coerced — literal-number parsing into the left's
// primitive type, String conversions, numeric narrowing, int->long cast, String->Date/
// LocalDate/LocalDateTime (flagged rightAsStaticField so the result is hoisted), map-key
// casts, and boolean coercion. Finally a Character-vs-String left side is stringified.
public CoercedExpressionResult coerce() { final Class<?> leftClass = left.getRawClass(); final Class<?> nonPrimitiveLeftClass = toNonPrimitiveType(leftClass); final Class<?> rightClass = right.getRawClass(); final Class<?> nonPrimitiveRightClass = toNonPrimitiveType(rightClass); boolean sameClass = leftClass == rightClass; boolean isUnificationExpression = left instanceof UnificationTypedExpression || right instanceof UnificationTypedExpression; if (sameClass || isUnificationExpression) { return new CoercedExpressionResult(left, right); } if (!canCoerce()) { throw new CoercedExpressionException(new InvalidExpressionErrorResult("Comparison operation requires compatible types. Found " + leftClass + " and " + rightClass)); } if ((nonPrimitiveLeftClass == Integer.class || nonPrimitiveLeftClass == Long.class) && nonPrimitiveRightClass == Double.class) { CastExpr castExpression = new CastExpr(PrimitiveType.doubleType(), this.left.getExpression()); return new CoercedExpressionResult( new TypedExpression(castExpression, double.class, left.getType()), right, false); } final boolean leftIsPrimitive = leftClass.isPrimitive() || Number.class.isAssignableFrom( leftClass ); final boolean canCoerceLiteralNumberExpr = canCoerceLiteralNumberExpr(leftClass); boolean rightAsStaticField = false; final Expression rightExpression = right.getExpression(); final TypedExpression coercedRight; if (leftIsPrimitive && canCoerceLiteralNumberExpr && rightExpression instanceof LiteralStringValueExpr) { final Expression coercedLiteralNumberExprToType = coerceLiteralNumberExprToType((LiteralStringValueExpr) right.getExpression(), leftClass); coercedRight = right.cloneWithNewExpression(coercedLiteralNumberExprToType); coercedRight.setType( leftClass ); } else if (shouldCoerceBToString(left, right)) { coercedRight = coerceToString(right); } else if (isNotBinaryExpression(right) && canBeNarrowed(leftClass, rightClass) && right.isNumberLiteral()) { coercedRight = castToClass(leftClass); } else if 
(leftClass == long.class && rightClass == int.class) { coercedRight = right.cloneWithNewExpression(new CastExpr(PrimitiveType.longType(), right.getExpression())); } else if (leftClass == Date.class && rightClass == String.class) { coercedRight = coerceToDate(right); rightAsStaticField = true; } else if (leftClass == LocalDate.class && rightClass == String.class) { coercedRight = coerceToLocalDate(right); rightAsStaticField = true; } else if (leftClass == LocalDateTime.class && rightClass == String.class) { coercedRight = coerceToLocalDateTime(right); rightAsStaticField = true; } else if (shouldCoerceBToMap()) { coercedRight = castToClass(toNonPrimitiveType(leftClass)); } else if (isBoolean(leftClass) && !isBoolean(rightClass)) { coercedRight = coerceBoolean(right); } else { coercedRight = right; } final TypedExpression coercedLeft; if (nonPrimitiveLeftClass == Character.class && shouldCoerceBToString(right, left)) { coercedLeft = coerceToString(left); } else { coercedLeft = left; } return new CoercedExpressionResult(coercedLeft, coercedRight, rightAsStaticField); }
// Two int-typed operands must pass through coerce() unchanged (same-class fast path).
@Test public void avoidCoercing2() { final TypedExpression left = expr("$pr.compareTo(new BigDecimal(\"0.0\"))", int.class); final TypedExpression right = expr("0", int.class); final CoercedExpression.CoercedExpressionResult coerce = new CoercedExpression(left, right, false).coerce(); assertThat(coerce.getCoercedRight()).isEqualTo(expr("0", int.class)); }
// Cancels the in-flight execution future (interrupting its thread) if one was started;
// safe to call when execution never began (_executionFuture still null).
@Override public void close() { if (_executionFuture != null) { _executionFuture.cancel(true); } }
// The leaf-stage operator must first emit the data rows from the selection block, then an
// end-of-stream block derived from the metadata block, and close cleanly.
@Test public void shouldReturnDataBlockThenMetadataBlock() { // Given: QueryContext queryContext = QueryContextConverterUtils.getQueryContext("SELECT strCol, intCol FROM tbl"); DataSchema schema = new DataSchema(new String[]{"strCol", "intCol"}, new DataSchema.ColumnDataType[]{DataSchema.ColumnDataType.STRING, DataSchema.ColumnDataType.INT}); List<BaseResultsBlock> dataBlocks = Collections.singletonList( new SelectionResultsBlock(schema, Arrays.asList(new Object[]{"foo", 1}, new Object[]{"", 2}), queryContext)); InstanceResponseBlock metadataBlock = new InstanceResponseBlock(new MetadataResultsBlock()); QueryExecutor queryExecutor = mockQueryExecutor(dataBlocks, metadataBlock); LeafStageTransferableBlockOperator operator = new LeafStageTransferableBlockOperator(OperatorTestUtil.getTracingContext(), mockQueryRequests(1), schema, queryExecutor, _executorService); _operatorRef.set(operator); // When: TransferableBlock resultBlock = operator.nextBlock(); // Then: Assert.assertEquals(resultBlock.getContainer().get(0), new Object[]{"foo", 1}); Assert.assertEquals(resultBlock.getContainer().get(1), new Object[]{"", 2}); Assert.assertTrue(operator.nextBlock().isEndOfStreamBlock(), "Expected EOS after reading 2 blocks"); operator.close(); }
@SafeVarargs public static <T> Set<T> intersectionDistinct(Collection<T> coll1, Collection<T> coll2, Collection<T>... otherColls) { final Set<T> result; if (isEmpty(coll1) || isEmpty(coll2)) { // 有一个空集合就直接返回空 return new LinkedHashSet<>(); } else { result = new LinkedHashSet<>(coll1); } if (ArrayUtil.isNotEmpty(otherColls)) { for (Collection<T> otherColl : otherColls) { if (isNotEmpty(otherColl)) { result.retainAll(otherColl); } else { // 有一个空集合就直接返回空 return new LinkedHashSet<>(); } } } result.retainAll(coll2); return result; }
// Null participants must yield a non-null (empty) result rather than an NPE.
@Test public void intersectionDistinctNullTest() { final List<String> list1 = new ArrayList<>(); list1.add("aa"); final List<String> list2 = null; // list2.add("aa"); final List<String> list3 = null; final Collection<String> collection = CollUtil.intersectionDistinct(list1, list2, list3); assertNotNull(collection); }
// Azure existence check. Root always exists. A container path is checked via
// CloudBlobContainer.exists. A file/placeholder path is checked as a blob; a 404 on a
// placeholder falls through (instead of failing) to the common-prefix probe, which lists
// the path — either a normal listing or a ListCanceledException counts as "found".
// StorageExceptions are mapped through AzureExceptionMappingService; NotfoundException and
// URISyntaxException both report "not found" (false).
@Override public boolean find(Path file, final ListProgressListener listener) throws BackgroundException { if(file.isRoot()) { return true; } try { try { final boolean found; if(containerService.isContainer(file)) { final CloudBlobContainer container = session.getClient().getContainerReference(containerService.getContainer(file).getName()); return container.exists(null, null, context); } if(file.isFile() || file.isPlaceholder()) { try { final CloudBlob blob = session.getClient().getContainerReference(containerService.getContainer(file).getName()) .getBlobReferenceFromServer(containerService.getKey(file)); return blob.exists(null, null, context); } catch(StorageException e) { switch(e.getHttpStatusCode()) { case HttpStatus.SC_NOT_FOUND: if(file.isPlaceholder()) { // Ignore failure and look for common prefix break; } default: throw e; } } } if(log.isDebugEnabled()) { log.debug(String.format("Search for common prefix %s", file)); } // Check for common prefix try { new AzureObjectListService(session, context).list(file, new CancellingListProgressListener()); return true; } catch(ListCanceledException l) { // Found common prefix return true; } } catch(StorageException e) { throw new AzureExceptionMappingService().map("Failure to read attributes of {0}", e, file); } catch(URISyntaxException e) { return false; } } catch(NotfoundException e) { return false; } }
// A freshly-touched blob must be found as a file but not as a directory of the same name;
// the blob is deleted at the end for cleanup.
@Test public void testFindFile() throws Exception { final Path container = new Path("cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path file = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); new AzureTouchFeature(session, null).touch(file, new TransferStatus()); assertTrue(new AzureFindFeature(session, null).find(file)); assertFalse(new AzureFindFeature(session, null).find(new Path(file.getAbsolute(), EnumSet.of(Path.Type.directory)))); new AzureDeleteFeature(session, null).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
// Auto-track hook for Service.onStartCommand: only the intent matters here — the service
// instance and startId parameters are accepted for API symmetry but intentionally unused.
public static void onServiceStart(Service service, Intent intent, int startId) { onBroadcastServiceIntent(intent); }
// onServiceStart must tolerate a null intent (and ignore the service/startId arguments).
@Test public void onServiceStart() { Service service = new Service() { @Nullable @Override public IBinder onBind(Intent intent) { return null; } }; PushAutoTrackHelper.onServiceStart(service, null, 100); }
// Wraps constructTypeInner, converting any failure into InvalidDataTableTypeException
// so callers see a single, typed error for unresolvable data table types.
static JavaType constructType(Type type) { try { return constructTypeInner(type); } catch (Exception e) { throw new InvalidDataTableTypeException(type, e); } }
// A wildcard element type (Supplier<? extends Number>) must be resolved to its upper
// bound (Number) in the constructed parameterized type.
@Test void upper_bound_of_wild_card_parameterized_type_replaces_wild_card_type() { JavaType javaType = TypeFactory.constructType(SUPPLIER_WILD_CARD_NUMBER); TypeFactory.Parameterized parameterized = (TypeFactory.Parameterized) javaType; JavaType elementType = parameterized.getElementTypes()[0]; assertThat(elementType.getOriginal(), equalTo(Number.class)); }
// Reads a boolean portable field. A missing field yields the default value false (rather
// than throwing); a present field is first type-checked against BOOLEAN before delegating
// to the parent reader.
@Override public boolean readBoolean(@Nonnull String fieldName) throws IOException { FieldDefinition fd = cd.getField(fieldName); if (fd == null) { return false; } validateTypeCompatibility(fd, BOOLEAN); return super.readBoolean(fieldName); }
// Existing boolean field reads true; an unknown field falls back to the default false.
@Test public void testReadBoolean() throws Exception { boolean aBoolean = reader.readBoolean("boolean"); assertTrue(aBoolean); assertFalse(reader.readBoolean("NO SUCH FIELD")); }
/**
 * Filters the given qualified tables down to those registered as single tables.
 * Lookup is by lower-cased table name; a candidate matches only when its data-node
 * list is non-empty AND containsDataNode confirms the schema-qualified match.
 *
 * @param qualifiedTables candidates to filter; input order is preserved.
 * @return a new mutable list of matching tables (possibly empty, never null).
 */
public Collection<QualifiedTable> getSingleTables(final Collection<QualifiedTable> qualifiedTables) {
    Collection<QualifiedTable> matched = new LinkedList<>();
    for (QualifiedTable candidate : qualifiedTables) {
        String lookupKey = candidate.getTableName().toLowerCase();
        Collection<DataNode> dataNodes = singleTableDataNodes.getOrDefault(lookupKey, new LinkedList<>());
        boolean isSingleTable = !dataNodes.isEmpty() && containsDataNode(candidate, dataNodes);
        if (isSingleTable) {
            matched.add(candidate);
        }
    }
    return matched;
}
// After removing "employee" via the mutable-data-node attribute, getSingleTables must no
// longer report it, while the remaining logical tables are still mapped.
@Test void assertRemove() { SingleRule singleRule = new SingleRule(ruleConfig, DefaultDatabase.LOGIC_NAME, new H2DatabaseType(), dataSourceMap, Collections.singleton(mock(ShardingSphereRule.class, RETURNS_DEEP_STUBS))); String tableName = "employee"; singleRule.getAttributes().getAttribute(MutableDataNodeRuleAttribute.class).remove(DefaultDatabase.LOGIC_NAME, tableName); Collection<QualifiedTable> tableNames = new LinkedList<>(); tableNames.add(new QualifiedTable(DefaultDatabase.LOGIC_NAME, "employee")); assertTrue(singleRule.getSingleTables(tableNames).isEmpty()); assertTrue(singleRule.getAttributes().getAttribute(TableMapperRuleAttribute.class).getLogicTableNames().contains("student")); assertTrue(singleRule.getAttributes().getAttribute(TableMapperRuleAttribute.class).getLogicTableNames().contains("t_order_0")); assertTrue(singleRule.getAttributes().getAttribute(TableMapperRuleAttribute.class).getLogicTableNames().contains("t_order_1")); }
@Cacheable(value = "metadata-response", key = "#samlMetadataRequest.cacheableKey()") public SamlMetadataResponse resolveSamlMetadata(SamlMetadataRequest samlMetadataRequest) { LOGGER.info("Cache not found for saml-metadata {}", samlMetadataRequest.hashCode()); Connection connection = connectionService.getConnectionByEntityId(samlMetadataRequest.getConnectionEntityId()); MetadataResponseStatus metadataResponseStatus = null; nl.logius.digid.dc.domain.service.Service service = null; if (connection == null) { metadataResponseStatus = CONNECTION_NOT_FOUND; } else if (!connection.getStatus().isAllowed()) { metadataResponseStatus = CONNECTION_INACTIVE; } else if (!connection.getOrganization().getStatus().isAllowed()) { metadataResponseStatus = ORGANIZATION_INACTIVE; } else if (Boolean.FALSE.equals(connection.getOrganizationRole().getStatus().isAllowed())) { metadataResponseStatus = ORGANIZATION_ROLE_INACTIVE; } else { String serviceUUID = samlMetadataRequest.getServiceUuid() == null ? getServiceUUID(connection, samlMetadataRequest.getServiceEntityId(), samlMetadataRequest.getServiceIdx()) : samlMetadataRequest.getServiceUuid(); samlMetadataRequest.setServiceUuid(serviceUUID); service = serviceService.serviceExists(connection, samlMetadataRequest.getServiceEntityId(), serviceUUID); if (service == null) { metadataResponseStatus = SERVICE_NOT_FOUND; } else if (!service.getStatus().isAllowed()) { metadataResponseStatus = SERVICE_INACTIVE; } } if (metadataResponseStatus != null) { return metadataResponseMapper.mapErrorResponse(metadataResponseStatus.name(), metadataResponseStatus.label); } else { String samlMetadata = generateReducedMetadataString(connection, service.getEntityId()); return metadataResponseMapper.mapSuccessResponse(samlMetadata, connection, service, STATUS_OK.name()); } }
// An inactive connection must yield a CONNECTION_INACTIVE error response.
// The boolean arguments to newConnection presumably toggle connection / organization /
// organization-role status in that order — confirm against the fixture helper.
@Test
void connectionInactiveTest() {
    Connection connection = newConnection(SAML_COMBICONNECT, false, true, true);
    when(connectionServiceMock.getConnectionByEntityId(anyString())).thenReturn(connection);
    SamlMetadataResponse response = metadataRetrieverServiceMock.resolveSamlMetadata(newMetadataRequest());
    assertEquals(CONNECTION_INACTIVE.name(), response.getRequestStatus());
    assertEquals(CONNECTION_INACTIVE.label, response.getErrorDescription());
}
/**
 * Deprovisions hosts affected by scheduled cloud maintenance events.
 * Only runs in dynamically provisioned zones; considers parent nodes that are not
 * already being deprovisioned, fetches host events for all their (specified) cloud
 * accounts, and marks every host with at least one event for deprovisioning.
 *
 * @return 1.0 (this maintainer always reports full success)
 */
@Override
protected double maintain() {
    if (!nodeRepository().zone().cloud().dynamicProvisioning()) return 1.0;
    NodeList candidates = nodeRepository().nodes().list()
            .parents()
            .not().deprovisioning();
    // Distinct, non-unspecified cloud accounts of the candidate hosts.
    List<CloudAccount> cloudAccounts = candidates.stream()
            .map(Node::cloudAccount)
            .filter(cloudAccount -> !cloudAccount.isUnspecified())
            .distinct()
            .toList();
    Map<String, List<HostEvent>> eventsByHostId = hostProvisioner.hostEventsIn(cloudAccounts).stream()
            .collect(Collectors.groupingBy(HostEvent::hostId));
    Instant now = nodeRepository().clock().instant();
    for (var host : candidates) {
        List<HostEvent> events = eventsByHostId.get(host.id());
        if (events == null || events.isEmpty()) continue;
        LOG.info("Deprovisioning " + host + " affected by maintenance event" + (events.size() > 1 ? "s" : "") + ": " + events);
        nodeRepository().nodes().deprovision(host.hostname(), Agent.system, now);
    }
    return 1.0;
}
// A host with a scheduled maintenance event is deprovisioned by HostRetirer;
// events for unknown host ids are ignored.
@Test
public void retire_hosts() {
    NodeFlavors flavors = FlavorConfigBuilder.createDummies("default");
    MockHostProvisioner hostProvisioner = new MockHostProvisioner(flavors.getFlavors());
    ProvisioningTester tester = new ProvisioningTester.Builder().hostProvisioner(hostProvisioner)
            .flavors(flavors.getFlavors())
            .dynamicProvisioning()
            .build();
    HostRetirer retirer = new HostRetirer(tester.nodeRepository(), Duration.ofDays(1), new MockMetric(), hostProvisioner);
    tester.makeReadyHosts(3, new NodeResources(24, 48, 1000, 10))
            .activateTenantHosts();
    List<String> hostIds = tester.nodeRepository().nodes().list(Node.State.active).mapToList(Node::id);

    // No events scheduled
    retirer.maintain();
    NodeList hosts = tester.nodeRepository().nodes().list();
    assertEquals(0, hosts.deprovisioning().size());

    // Event is scheduled for one known host
    hostProvisioner.addEvent(new HostEvent("event0", hostIds.get(1), getClass().getSimpleName()))
            .addEvent(new HostEvent("event1", "unknown-host-id", getClass().getSimpleName()));

    // Next run retires host
    retirer.maintain();
    hosts = tester.nodeRepository().nodes().list();
    assertEquals(1, hosts.deprovisioning().size());
}
/**
 * Builds the complete command-line option set for the terminal client.
 * Exactly one action (upload, download, copy, move, list, …) is required per
 * invocation — enforced via a required OptionGroup; the remaining options
 * configure credentials, transfer behavior and output verbosity.
 * Argument names such as "url> <file" abuse the angle-bracket rendering so
 * usage output shows two placeholders for two-argument actions.
 *
 * @return Options for parsing with commons-cli
 */
public static Options options() {
    final Options options = new Options();
    // Mutually exclusive actions; the parser requires exactly one of them.
    final OptionGroup actionGroup = new OptionGroup();
    actionGroup.addOption(Option.builder()
            .longOpt(TerminalAction.upload.name())
            .desc("Upload file or folder recursively")
            .hasArgs().numberOfArgs(2).argName("url> <file").build());
    actionGroup.addOption(Option.builder("d")
            .longOpt(TerminalAction.download.name())
            .desc("Download file or folder. Denote a folder with a trailing '/'")
            .hasArgs().numberOfArgs(2).argName("url> <file").build());
    actionGroup.addOption(Option.builder()
            .longOpt(TerminalAction.copy.name())
            .desc("Copy from origin server to target server")
            .hasArgs().numberOfArgs(2).argName("url> <url").build());
    actionGroup.addOption(Option.builder()
            .longOpt(TerminalAction.move.name())
            .desc("Move or rename file on server")
            .hasArgs().numberOfArgs(2).argName("url> <file").build());
    actionGroup.addOption(Option.builder("l")
            .longOpt(TerminalAction.list.name())
            .desc("List files in remote folder")
            .hasArg().argName("url").build());
    actionGroup.addOption(Option.builder("L")
            .longOpt(TerminalAction.longlist.name())
            .desc("Long list format with modification date and permission mask")
            .hasArg().argName("url").build());
    actionGroup.addOption(Option.builder("D")
            .longOpt(TerminalAction.delete.name())
            .desc("Delete")
            .hasArg().argName("url").build());
    actionGroup.addOption(Option.builder("c")
            .longOpt(TerminalAction.mkdir.name())
            .desc("Make directory")
            .hasArg().argName("url").build());
    actionGroup.addOption(Option.builder()
            .longOpt(TerminalAction.synchronize.name())
            .desc("Synchronize folders")
            .hasArgs().numberOfArgs(2).argName("url> <directory").build());
    actionGroup.addOption(Option.builder()
            .longOpt(TerminalAction.edit.name())
            .desc("Edit file in external editor")
            .hasArg().argName("url").build());
    actionGroup.addOption(Option.builder()
            .longOpt(TerminalAction.purge.name())
            .desc("Invalidate file in CDN")
            .hasArg().argName("url").build());
    actionGroup.addOption(Option.builder("V")
            .longOpt(TerminalAction.version.name())
            .desc("Show version number and quit.").build());
    actionGroup.addOption(Option.builder("h")
            .longOpt(TerminalAction.help.name())
            .desc("Print this help").build());
    actionGroup.setRequired(true);
    options.addOptionGroup(actionGroup);
    // Authentication and connection configuration.
    options.addOption(Option.builder("u")
            .longOpt(Params.username.name())
            .desc("Username")
            .hasArg().argName("username or access key").build());
    options.addOption(Option.builder("p")
            .longOpt(Params.password.name())
            .desc("Password")
            .hasArg().argName("password or secret key").build());
    options.addOption(Option.builder()
            .longOpt(Params.anonymous.name())
            .desc("No login").build());
    options.addOption(Option.builder()
            .longOpt(Params.profile.name())
            .desc("Use connection profile")
            .hasArg().argName("profile").build());
    options.addOption(Option.builder("i")
            .longOpt(Params.identity.name())
            .desc("Selects a file from which the identity (private key) for public key authentication is read.")
            .hasArg().argName("private key file").build());
    options.addOption(Option.builder()
            .longOpt(Params.chmod.name())
            .desc("Set explicit permission from octal mode value for uploaded file")
            .hasArg().argName("mode").build());
    options.addOption(Option.builder()
            .longOpt(Params.application.name())
            .desc("External editor application")
            .hasArg().argName("path").build());
    options.addOption(Option.builder()
            .longOpt(Params.region.name())
            .desc("Location of bucket or container")
            .hasArg().argName("location").build());
    options.addOption(Option.builder("P")
            .longOpt(Params.preserve.name())
            .desc("Preserve permission, ACL, metadata and modification date").build());
    options.addOption(Option.builder("r")
            .longOpt(Params.retry.name())
            .desc("Retry failed connection attempts")
            .optionalArg(true).argName("count").build());
    options.addOption(Option.builder()
            .longOpt(Params.udt.name())
            .desc("Use UDT protocol if applicable").build());
    options.addOption(Option.builder()
            .longOpt(Params.parallel.name())
            // NOTE(review): this description literal was split across lines by extraction;
            // re-joined here with a single space — verify against upstream source.
            .desc("Number of concurrent connections to use for transfers")
            .hasArg().optionalArg(true).argName("connections").build());
    options.addOption(Option.builder()
            .longOpt(Params.throttle.name())
            .desc("Throttle bandwidth")
            .hasArg().argName("bytes per second").build());
    options.addOption(Option.builder()
            .longOpt(Params.nochecksum.name())
            .desc("Skip verifying checksum").build());
    options.addOption(Option.builder()
            .longOpt(Params.nokeychain.name())
            .desc("Do not save passwords in keychain").build());
    options.addOption(Option.builder()
            .longOpt(Params.vault.name())
            .desc("Unlock vault")
            .hasArg().argName("path").build());
    // Build the multi-line help text listing valid --existing actions per transfer type.
    final StringBuilder actions = new StringBuilder("Transfer actions for existing files").append(StringUtils.LF);
    actions.append("Downloads and uploads:").append(StringUtils.LF);
    for(TransferAction a : TransferAction.forTransfer(Transfer.Type.download)) {
        append(actions, a);
    }
    for(TransferAction a : Collections.singletonList(TransferAction.cancel)) {
        append(actions, a);
    }
    actions.append("Synchronize:").append(StringUtils.LF);
    for(TransferAction a : TransferAction.forTransfer(Transfer.Type.sync)) {
        append(actions, a);
    }
    for(TransferAction a : Collections.singletonList(TransferAction.cancel)) {
        append(actions, a);
    }
    options.addOption(Option.builder("e")
            .longOpt(Params.existing.name())
            .desc(actions.toString())
            .hasArg().argName("action").build());
    // Output verbosity and prompting behavior.
    options.addOption(Option.builder("v")
            .longOpt(Params.verbose.name())
            .desc("Print transcript").build());
    options.addOption(Option.builder()
            .longOpt(Params.debug.name())
            .desc("Print debug output").build());
    options.addOption(Option.builder("q")
            .longOpt(Params.quiet.name())
            .desc("Suppress progress messages").build());
    options.addOption(Option.builder("y")
            .longOpt(Params.assumeyes.name())
            .desc("Assume yes for all prompts").build());
    return options;
}
// Smoke test: building the option set must not fail and must yield a non-null result.
@Test
public void testOptions() {
    final Options options = TerminalOptionsBuilder.options();
    assertNotNull(options);
}
/**
 * Sends this request synchronously via the configured service and deserializes
 * the reply into this request's declared response type.
 *
 * @return the deserialized response
 * @throws IOException if the underlying transport fails
 */
public T send() throws IOException {
    return web3jService.send(this, responseType);
}
// Verifies the exact JSON-RPC payload serialized for eth_getBalance at the latest block.
@Test
public void testEthGetBalance() throws Exception {
    web3j.ethGetBalance(
                    "0x407d73d8a49eeb85d32cf465507dd71d507100c1", DefaultBlockParameterName.LATEST)
            .send();

    verifyResult(
            "{\"jsonrpc\":\"2.0\",\"method\":\"eth_getBalance\","
                    + "\"params\":[\"0x407d73d8a49eeb85d32cf465507dd71d507100c1\",\"latest\"],"
                    + "\"id\":1}");
}
/**
 * Lists statuses under {@code path}, serving non-recursive results from the
 * metadata cache when available and populating the cache on a miss.
 * Recursive listings always bypass the cache (see inline rationale).
 *
 * @param path the directory to list
 * @param options list options; {@code recursive} disables caching
 * @return the listed statuses
 */
@Override
public List<URIStatus> listStatus(AlluxioURI path, ListStatusPOptions options)
        throws FileDoesNotExistException, IOException, AlluxioException {
    if (options.getRecursive()) {
        // Do not cache results of recursive list status,
        // because some results might be cached multiple times.
        // Otherwise, needs more complicated logic inside the cache,
        // that might not worth the effort of caching.
        return mDelegatedFileSystem.listStatus(path, options);
    }

    List<URIStatus> statuses = mMetadataCache.listStatus(path);
    if (statuses == null) {
        // Cache miss: fetch from the delegate and remember the result for this path.
        statuses = mDelegatedFileSystem.listStatus(path, options);
        mMetadataCache.put(path, statuses);
    }
    return statuses;
}
// Recursive listings must bypass the metadata cache: every call issues a fresh RPC.
@Test
public void listStatusRecursive() throws Exception {
    mFs.listStatus(DIR, LIST_STATUS_OPTIONS.toBuilder().setRecursive(true).build());
    assertEquals(1, mRpcCountingFs.listStatusRpcCount(DIR));
    // Second identical call is not served from cache.
    mFs.listStatus(DIR, LIST_STATUS_OPTIONS.toBuilder().setRecursive(true).build());
    assertEquals(2, mRpcCountingFs.listStatusRpcCount(DIR));
}
public String getDropTableIfExistsStatement( String tableName ) { if ( databaseInterface instanceof DatabaseInterfaceExtended ) { return ( (DatabaseInterfaceExtended) databaseInterface ).getDropTableIfExistsStatement( tableName ); } // A fallback statement in case somehow databaseInterface is of an old version. // This is the previous, and in fact, buggy implementation. See BISERVER-13024. return DROP_TABLE_STATEMENT + tableName; }
// When the database interface does not implement the extended contract, the legacy
// plain DROP statement is returned.
@Test
public void shouldFallBackWhenDatabaseInterfaceIsOfAnOldType() {
    String statement = databaseMeta.getDropTableIfExistsStatement( TABLE_NAME );
    assertEquals( DROP_STATEMENT_FALLBACK, statement );
}
/**
 * Returns all config data at {@code path} by delegating to the two-argument
 * overload with no key filter (all keys).
 *
 * @param path the path to look up
 * @return the configuration data found at the path
 */
@Override
public ConfigData get(String path) {
    return get(path, null);
}
// With an empty path the provider returns every environment variable set up by the
// fixture; spot-check a few known key/value pairs.
@Test
void testGetMultipleKeysAndCompare() {
    ConfigData properties = envVarConfigProvider.get("");
    assertNotEquals(0, properties.data().size());
    assertEquals("value1", properties.data().get("test_var1"));
    assertEquals("value2", properties.data().get("secret_var2"));
    assertEquals("value3", properties.data().get("new_var3"));
    assertEquals("value4", properties.data().get("not_so_secret_var4"));
}
/**
 * Returns this analyzer's display name (the {@code ANALYZER_NAME} constant).
 *
 * @return the name of this analyzer
 */
@Override
public String getName() {
    return ANALYZER_NAME;
}
// The analyzer reports its expected display name.
@Test
public void testGetAnalyzerName() {
    assertEquals("Nuspec Analyzer", instance.getName());
}
/**
 * Stores {@code value} under {@code key} only when the value is non-null;
 * a null value leaves any existing setting untouched (no-op).
 *
 * @param key   the settings key, never null
 * @param value the value to store, may be null
 */
public void setStringIfNotNull(@NotNull final String key, @Nullable final String value) {
    if (value == null) {
        return;
    }
    setString(key, value);
}
// Passing null to setStringIfNotNull must not overwrite a previously stored value.
@Test
public void testSetStringIfNotNull() {
    String key = "nullableProperty";
    String value = "someValue";
    getSettings().setString(key, value);
    getSettings().setStringIfNotNull(key, null); // NO-OP
    String expResults = getSettings().getString(key);
    Assert.assertEquals(expResults, value);
}
/**
 * Decodes as many complete characters as possible from the buffer's readable bytes
 * and returns them; a trailing partial multi-byte sequence is left unread in the
 * buffer (endOfInput=false keeps the decoder's partial state).
 * NOTE(review): the field is named {@code encoder} but is used as a charset decoder
 * here — presumably a CharsetDecoder with a misleading name; confirm at declaration.
 *
 * @param in buffer to read from; its reader index is advanced past consumed bytes
 * @return the decoded characters (possibly empty)
 */
char[] decode(final ByteBuf in) {
    // A char buffer sized to the byte capacity is always large enough (>= 1 byte per char).
    final CharBuffer charBuffer = CharBuffer.allocate(in.capacity());
    encoder.reset();
    final ByteBuffer nioBuffer = in.nioBuffer();
    encoder.decode(nioBuffer, charBuffer, false);
    // Copy only the chars actually produced.
    final char[] buf = new char[charBuffer.position()];
    charBuffer.flip();
    charBuffer.get(buf);
    // Netty won't update the reader-index of the original buffer when its nio-buffer representation is read from. Adjust the position of the original buffer.
    in.readerIndex(nioBuffer.position());
    return buf;
}
// Feeding only the first byte of a 3-byte UTF-8 character must produce no output
// and must not advance the reader index (the partial byte stays buffered).
@Test
public void testDecodeFirstByteOfMultibyteChar() throws Exception {
    // Setup test fixture.
    final byte[] multibyteCharacter = "\u3053".getBytes(StandardCharsets.UTF_8); // 3-byte character.
    final XMLLightweightParser parser = new XMLLightweightParser();
    final ByteBuf in = ByteBufAllocator.DEFAULT.buffer(3);
    in.writeBytes(Arrays.copyOfRange(multibyteCharacter, 0, 1));

    // Execute system under test.
    final char[] result = parser.decode(in);

    // Verify results.
    assertEquals(0, result.length);
    assertEquals(0, in.readerIndex());
}
/**
 * Lazily computes and caches the value for the first argument passed in.
 * Thread-safe via method synchronization; once initialized, subsequent calls
 * return the cached value and IGNORE their argument.
 *
 * @param e input passed to the factory function on first invocation only
 * @return the cached (or freshly computed) value
 */
synchronized O init(final E e) {
    if (o != null) {
        // Already initialized — the argument is intentionally ignored.
        return o;
    }
    O res = function.apply(e);
    o = res;
    return res;
}
// The second init() call returns the value cached from the first call,
// ignoring the new argument entirely.
@Test
public void testInit() {
    Function<String, String> function = Function.identity();
    FreshBeanHolder<String, String> freshBeanHolder = new FreshBeanHolder<>(function);
    freshBeanHolder.init("hello world");
    assertEquals("hello world", freshBeanHolder.init("hello"));
}
/**
 * Smokes the injected tobacco, passing this wizard as the smoker
 * (double-dispatch so the tobacco can log who is smoking it).
 */
public void smoke() {
    tobacco.smoke(this);
}
// Smoking logs exactly one message identifying the wizard and its tobacco.
@Test
void testSmoke() {
    final var simpleWizard = new SimpleWizard();
    simpleWizard.smoke();
    assertEquals("SimpleWizard smoking OldTobyTobacco", appender.getLastMessage());
    assertEquals(1, appender.getLogSize());
}
/**
 * Builds the registry node path for a specific properties version by appending
 * the version segment to the versions base node.
 *
 * @param version version identifier
 * @return path of the form {@code <versions-node>/<version>}
 */
public static String getPropsVersionNode(final String version) {
    return getPropsVersionsNode() + "/" + version;
}
// Version "0" resolves to the expected props version path.
@Test
void assertGetPropsVersionNode() {
    assertThat(GlobalNode.getPropsVersionNode("0"), is("/props/versions/0"));
}
/**
 * Returns the path representation. Kept equal to {@code path()} because this
 * string is embedded in XML fragments emitted on the job console.
 */
@Override
public String toString() {
    return path();
}
// toString() must render the full ancestor path unchanged.
@Test
public void shouldReturnPath_asToString() {//this is important because its used in xml fragment that is spit out on job-console
    assertThat(new PathFromAncestor(new CaseInsensitiveString("grand-parent/parent/child")).toString(), is("grand-parent/parent/child"));
}
/**
 * Computes the current topology of this virtual network from its devices and links.
 * The stream-collect round trips produce defensive snapshots of the manager's
 * device/link collections before handing them to the graph description.
 *
 * @return a freshly computed topology
 */
@Override
public Topology currentTopology() {
    Iterable<Device> devices = manager.getVirtualDevices(networkId())
            .stream()
            .collect(Collectors.toSet());
    Iterable<Link> links = manager.getVirtualLinks(networkId())
            .stream()
            .collect(Collectors.toSet());

    DefaultGraphDescription graph =
            new DefaultGraphDescription(System.nanoTime(), System.currentTimeMillis(), devices, links);
    return new DefaultTopology(PID, graph);
}
// The virtual network's topology service yields a non-null topology for the fixture network.
@Test
public void testCurrentTopology() {
    VirtualNetwork virtualNetwork = setupVirtualNetworkTopology();
    TopologyService topologyService = manager.get(virtualNetwork.id(), TopologyService.class);
    Topology topology = topologyService.currentTopology();
    assertNotNull("The topology should not be null.", topology);
}
/**
 * Matches raw (new analysis) issues against base (previous analysis) issues using
 * progressively looser keys. The ORDER of the passes is the contract: earlier,
 * stricter keys win, and already-matched issues are excluded from later passes.
 *
 * @param rawInput  issues from the current analysis
 * @param baseInput issues from the reference analysis
 * @return tracking of raw-to-base matches for non-closed issues
 */
public NonClosedTracking<RAW, BASE> trackNonClosed(Input<RAW> rawInput, Input<BASE> baseInput) {
    NonClosedTracking<RAW, BASE> tracking = NonClosedTracking.of(rawInput, baseInput);

    // 1. match by rule, line, line hash and message
    match(tracking, LineAndLineHashAndMessage::new);

    // 2. match issues with same rule, same line and same line hash, but not necessarily with same message
    match(tracking, LineAndLineHashKey::new);

    // 3. detect code moves by comparing blocks of codes
    detectCodeMoves(rawInput, baseInput, tracking);

    // 4. match issues with same rule, same message and same line hash
    match(tracking, LineHashAndMessageKey::new);

    // 5. match issues with same rule, same line and same message
    match(tracking, LineAndMessageKey::new);

    // 6. match issues with same rule and same line hash but different line and different message.
    // See SONAR-2812
    match(tracking, LineHashKey::new);
    return tracking;
}
// When the code shifts down by two lines, issues must match on line-hash
// (content of the line) rather than on the raw line number.
@Test
public void line_hash_has_greater_priority_than_line() {
    FakeInput baseInput = new FakeInput("H1", "H2", "H3");
    Issue base1 = baseInput.createIssueOnLine(1, RULE_SYSTEM_PRINT, "msg");
    Issue base2 = baseInput.createIssueOnLine(3, RULE_SYSTEM_PRINT, "msg");

    // Same three lines, shifted down by two inserted lines.
    FakeInput rawInput = new FakeInput("a", "b", "H1", "H2", "H3");
    Issue raw1 = rawInput.createIssueOnLine(3, RULE_SYSTEM_PRINT, "msg");
    Issue raw2 = rawInput.createIssueOnLine(5, RULE_SYSTEM_PRINT, "msg");

    Tracking<Issue, Issue> tracking = tracker.trackNonClosed(rawInput, baseInput);
    assertThat(tracking.baseFor(raw1)).isSameAs(base1);
    assertThat(tracking.baseFor(raw2)).isSameAs(base2);
}
/**
 * Returns the configured id-mapping refresh timeout in milliseconds.
 * Exposed for tests only.
 */
@VisibleForTesting
public long getTimeout() {
    return timeout;
}
// The update interval falls back to the default when unset, is clamped to the
// minimum when configured too low, and is honored when above the default.
@Test
public void testUserUpdateSetting() throws IOException {
    ShellBasedIdMapping iug = new ShellBasedIdMapping(new Configuration());
    assertThat(iug.getTimeout()).isEqualTo(
            IdMappingConstant.USERGROUPID_UPDATE_MILLIS_DEFAULT);

    // Zero is below the minimum and must be clamped up.
    Configuration conf = new Configuration();
    conf.setLong(IdMappingConstant.USERGROUPID_UPDATE_MILLIS_KEY, 0);
    iug = new ShellBasedIdMapping(conf);
    assertThat(iug.getTimeout()).isEqualTo(
            IdMappingConstant.USERGROUPID_UPDATE_MILLIS_MIN);

    conf.setLong(IdMappingConstant.USERGROUPID_UPDATE_MILLIS_KEY,
            IdMappingConstant.USERGROUPID_UPDATE_MILLIS_DEFAULT * 2);
    iug = new ShellBasedIdMapping(conf);
    assertThat(iug.getTimeout()).isEqualTo(
            IdMappingConstant.USERGROUPID_UPDATE_MILLIS_DEFAULT * 2);
}
/**
 * Fetches the branch list of a Bitbucket Server repository via the REST 1.0 API.
 * NOTE(review): project/repo slugs are interpolated into the URL path without
 * encoding — presumably safe because slugs are restricted; confirm.
 *
 * @param serverUrl      base URL of the Bitbucket server
 * @param token          personal access token used for authentication
 * @param projectSlug    project key
 * @param repositorySlug repository slug
 * @return the parsed branch list
 */
public BranchesList getBranches(String serverUrl, String token, String projectSlug, String repositorySlug) {
    HttpUrl url = buildUrl(serverUrl, format("/rest/api/1.0/projects/%s/repos/%s/branches", projectSlug, repositorySlug));
    return doGet(token, url, body -> buildGson().fromJson(body, BranchesList.class));
}
// A canned Bitbucket response containing two branch entries deserializes
// into a BranchesList of size 2.
@Test
public void getBranches_given2Branches_returnListWithTwoBranches() {
    String bodyWith2Branches = "{\n" +
            "  \"size\": 2,\n" +
            "  \"limit\": 25,\n" +
            "  \"isLastPage\": true,\n" +
            "  \"values\": [{\n" +
            "    \"id\": \"refs/heads/demo\",\n" +
            "    \"displayId\": \"demo\",\n" +
            "    \"type\": \"BRANCH\",\n" +
            "    \"latestCommit\": \"3e30a6701af6f29f976e9a6609a6076b32a69ac3\",\n" +
            "    \"latestChangeset\": \"3e30a6701af6f29f976e9a6609a6076b32a69ac3\",\n" +
            "    \"isDefault\": false\n" +
            "  }, {\n" +
            "    \"id\": \"refs/heads/master\",\n" +
            "    \"displayId\": \"master\",\n" +
            "    \"type\": \"BRANCH\",\n" +
            "    \"latestCommit\": \"66633864d27c531ff43892f6dfea6d91632682fa\",\n" +
            "    \"latestChangeset\": \"66633864d27c531ff43892f6dfea6d91632682fa\",\n" +
            "    \"isDefault\": true\n" +
            "  }],\n" +
            "  \"start\": 0\n" +
            "}";
    server.enqueue(new MockResponse()
            .setHeader("Content-Type", "application/json;charset=UTF-8")
            .setBody(bodyWith2Branches));

    BranchesList branches = underTest.getBranches(server.url("/").toString(), "token", "projectSlug", "repoSlug");

    assertThat(branches.getBranches()).hasSize(2);
}
/**
 * Sets the configuration data id (e.g. a config file name).
 *
 * @param dataId the data id to set
 */
public void setDataId(String dataId) {
    this.dataId = dataId;
}
// setDataId replaces the data id supplied at construction time.
@Test
void setDataId() {
    ConfigFuture configFuture = new ConfigFuture("file.conf", "defaultValue", ConfigFuture.ConfigOperation.GET);
    Assertions.assertEquals("file.conf", configFuture.getDataId());
    configFuture.setDataId("file-test.conf");
    Assertions.assertEquals("file-test.conf", configFuture.getDataId());
}
/**
 * Asks a broker to unlock a batch of message queues.
 * In oneway mode the request is fired without awaiting a reply; otherwise the
 * call blocks and any non-SUCCESS response code raises {@link MQBrokerException}.
 * NOTE(review): the VIP-channel address rewrite is applied only on the sync path,
 * not the oneway path — presumably intentional, but confirm against other callers.
 *
 * @param addr          broker address
 * @param requestBody   queues to unlock
 * @param timeoutMillis remoting timeout
 * @param oneway        true to fire-and-forget
 */
public void unlockBatchMQ(
        final String addr,
        final UnlockBatchRequestBody requestBody,
        final long timeoutMillis,
        final boolean oneway
) throws RemotingException, MQBrokerException, InterruptedException {
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.UNLOCK_BATCH_MQ, new UnlockBatchMqRequestHeader());

    request.setBody(requestBody.encode());

    if (oneway) {
        this.remotingClient.invokeOneway(addr, request, timeoutMillis);
    } else {
        RemotingCommand response = this.remotingClient
                .invokeSync(MixAll.brokerVIPChannel(this.clientConfig.isVipChannelEnabled(), addr), request, timeoutMillis);
        switch (response.getCode()) {
            case ResponseCode.SUCCESS: {
                return;
            }
            default:
                break;
        }

        throw new MQBrokerException(response.getCode(), response.getRemark(), addr);
    }
}
// Smoke test: a sync unlock-batch request against a mocked successful broker
// response completes without throwing.
@Test
public void testUnlockBatchMQ() throws RemotingException, InterruptedException, MQBrokerException {
    mockInvokeSync();
    UnlockBatchRequestBody unlockBatchRequestBody = new UnlockBatchRequestBody();
    mqClientAPI.unlockBatchMQ(defaultBrokerAddr, unlockBatchRequestBody, defaultTimeout, false);
}
/**
 * Two profiles are equal when they are of the exact same class and share the
 * same key; other fields are ignored. Strict getClass comparison (not
 * instanceof) keeps equality symmetric across subclasses.
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (o == null || o.getClass() != getClass()) {
        return false;
    }
    QProfile other = (QProfile) o;
    return key.equals(other.key);
}
// Equality and hash code are driven by the key only: same key with different
// names is equal; different keys are not; hash codes follow the same rule.
@Test
public void testEquals() {
    QProfile q1 = new QProfile("k1", "name1", null, null);
    QProfile q2 = new QProfile("k1", "name2", null, null);
    QProfile q3 = new QProfile("k3", "name3", null, null);

    assertThat(q1)
            .isEqualTo(q2)
            .isNotEqualTo(q3)
            .isNotNull()
            .isNotEqualTo("str");
    assertThat(q2).isNotEqualTo(q3);

    assertThat(q1).hasSameHashCodeAs(q2);
    assertThat(q1.hashCode()).isNotEqualTo(q3.hashCode());
    assertThat(q2.hashCode()).isNotEqualTo(q3.hashCode());
}
/**
 * Expands a numeric term into one edge feature plus a series of range features
 * of geometrically growing size (multiples of {@code arity}), invoking the
 * handlers for each. Values outside [lowerBound, upperBound] expand to nothing.
 * Two overflow guards stop the level loop before long arithmetic wraps.
 *
 * @param key          feature key, prefixed to every generated feature string
 * @param value        the value to expand
 * @param rangeHandler receives a hash per covering range
 * @param edgeHandler  receives the edge-interval hash and the offset within it
 */
public void expand(String key, long value, RangeHandler rangeHandler, EdgeHandler edgeHandler) {
    if (value < lowerBound || value > upperBound) {
        // Value outside bounds -> expand to nothing.
        return;
    }
    int maxLevels = value > 0 ? maxPositiveLevels : maxNegativeLevels;
    int sign = value > 0 ? 1 : -1;
    // Append key to feature string builder
    StringBuilder builder = new StringBuilder(128);
    builder.append(key).append('=');
    long levelSize = arity;
    // Edge interval: value rounded toward zero to a multiple of arity.
    long edgeInterval = (value / arity) * arity;
    edgeHandler.handleEdge(createEdgeFeatureHash(builder, edgeInterval), (int) Math.abs(value - edgeInterval));
    for (int i = 0; i < maxLevels; ++i) {
        // Start of the level-sized interval containing value.
        long start = (value / levelSize) * levelSize;
        if (Math.abs(start) + levelSize - 1 < 0) {  // overflow
            break;
        }
        rangeHandler.handleRange(createRangeFeatureHash(builder, start, start + sign * (levelSize - 1)));
        levelSize *= arity;
        if (levelSize <= 0 && levelSize != Long.MIN_VALUE) {  // overflow
            break;
        }
    }
}
// Long.MAX_VALUE must expand to only the edge feature (range expansion would
// overflow, so no range handler invocation is permitted); the edge offset is
// the distance from the last multiple of 10.
@Test
void requireThatMaxRangeIsExpanded() {
    PredicateRangeTermExpander expander = new PredicateRangeTermExpander(10);
    expander.expand("key", 9223372036854775807L,
            range -> fail(),
            (edge, value) -> {
                assertEquals(PredicateHash.hash64("key=9223372036854775800"), edge);
                assertEquals(7, value);
            });
}
/**
 * Parses a unified diff from the given stream.
 * The stream is read via a buffered reader; the caller owns closing the stream.
 *
 * @param stream source of unified-diff text
 * @return the parsed diff
 * @throws IOException                on read failure
 * @throws UnifiedDiffParserException on malformed diff content
 */
public static UnifiedDiff parseUnifiedDiff(InputStream stream) throws IOException, UnifiedDiffParserException {
    UnifiedDiffReader parser = new UnifiedDiffReader(new BufferedReader(new InputStreamReader(stream)));
    return parser.parse();
}
// Regression test for issue #33: a single-file diff with one delta parses with
// no header and no tail.
@Test
public void testParseIssue33() throws IOException {
    UnifiedDiff diff = UnifiedDiffReader.parseUnifiedDiff(
            UnifiedDiffReaderTest.class.getResourceAsStream("problem_diff_issue33.diff"));

    assertThat(diff.getFiles().size()).isEqualTo(1);

    UnifiedDiffFile file1 = diff.getFiles().get(0);
    assertThat(file1.getFromFile()).isEqualTo("Main.java");
    assertThat(file1.getPatch().getDeltas().size()).isEqualTo(1);

    assertThat(diff.getTail()).isNull();
    assertThat(diff.getHeader()).isNull();
}