focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Resolves the lock-at-most-for duration for the given annotation.
 * NOTE(review): precedence (numeric value, then string value, then configured
 * default) is assumed from the argument order of getValue — confirm there.
 */
Duration getLockAtMostFor(AnnotationData annotation) {
    return getValue(
            annotation.getLockAtMostFor(),
            annotation.getLockAtMostForString(),
            this.defaultLockAtMostFor,
            "lockAtMostForString");
}
@Test
public void shouldLockTimeFromAnnotationWithDurationString() throws NoSuchMethodException {
    // Use a resolver that performs no placeholder resolution.
    noopResolver();
    SpringLockConfigurationExtractor.AnnotationData data =
            getAnnotation("annotatedMethodWithDurationString");
    // The duration string on the annotation should be parsed as one second.
    assertThat(extractor.getLockAtMostFor(data)).isEqualTo(Duration.of(1, SECONDS));
}
@SuppressWarnings({"checkstyle:npathcomplexity", "checkstyle:cyclomaticcomplexity", "checkstyle:methodlength"}) void planMigrations(int partitionId, PartitionReplica[] oldReplicas, PartitionReplica[] newReplicas, MigrationDecisionCallback callback) { assert oldReplicas.length == newReplicas.length : "Replica addresses with different lengths! Old: " + Arrays.toString(oldReplicas) + ", New: " + Arrays.toString(newReplicas); if (logger.isFinestEnabled()) { logger.finest("partitionId=%d, Initial state: %s", partitionId, Arrays.toString(oldReplicas)); logger.finest("partitionId=%d, Final state: %s", partitionId, Arrays.toString(newReplicas)); } initState(oldReplicas); assertNoDuplicate(partitionId, oldReplicas, newReplicas); // fix cyclic partition replica movements if (fixCycle(oldReplicas, newReplicas)) { if (logger.isFinestEnabled()) { logger.finest("partitionId=%d, Final state (after cycle fix): %s", partitionId, Arrays.toString(newReplicas)); } } int currentIndex = 0; while (currentIndex < oldReplicas.length) { if (logger.isFinestEnabled()) { logger.finest("partitionId=%d, Current index: %d, state: %s", partitionId, currentIndex, Arrays.toString(state)); } assertNoDuplicate(partitionId, oldReplicas, newReplicas); if (newReplicas[currentIndex] == null) { if (state[currentIndex] != null) { // replica owner is removed and no one will own this replica logger.finest("partitionId=%d, New address is null at index: %d", partitionId, currentIndex); callback.migrate(state[currentIndex], currentIndex, -1, null, -1, -1); state[currentIndex] = null; } currentIndex++; continue; } if (state[currentIndex] == null) { int i = getReplicaIndex(state, newReplicas[currentIndex]); if (i == -1) { // fresh replica copy is needed, so COPY replica to newReplicas[currentIndex] from partition owner logger.finest("partitionId=%d, COPY %s to index: %d", partitionId, newReplicas[currentIndex], currentIndex); callback.migrate(null, -1, -1, newReplicas[currentIndex], -1, currentIndex); 
state[currentIndex] = newReplicas[currentIndex]; currentIndex++; continue; } if (i > currentIndex) { // SHIFT UP replica from i to currentIndex, copy data from partition owner logger.finest("partitionId=%d, SHIFT UP-2 %s from old addresses index: %d to index: %d", partitionId, state[i], i, currentIndex); callback.migrate(null, -1, -1, state[i], i, currentIndex); state[currentIndex] = state[i]; state[i] = null; continue; } throw new AssertionError("partitionId=" + partitionId + "Migration decision algorithm failed during SHIFT UP! INITIAL: " + Arrays.toString(oldReplicas) + ", CURRENT: " + Arrays.toString(state) + ", FINAL: " + Arrays.toString(newReplicas)); } if (newReplicas[currentIndex].equals(state[currentIndex])) { // no change, no action needed currentIndex++; continue; } if (getReplicaIndex(newReplicas, state[currentIndex]) == -1 && getReplicaIndex(state, newReplicas[currentIndex]) == -1) { // MOVE partition replica from its old owner to new owner logger.finest("partitionId=%d, MOVE %s to index: %d", partitionId, newReplicas[currentIndex], currentIndex); callback.migrate(state[currentIndex], currentIndex, -1, newReplicas[currentIndex], -1, currentIndex); state[currentIndex] = newReplicas[currentIndex]; currentIndex++; continue; } if (getReplicaIndex(state, newReplicas[currentIndex]) == -1) { int newIndex = getReplicaIndex(newReplicas, state[currentIndex]); assert newIndex > currentIndex : "partitionId=" + partitionId + ", Migration decision algorithm failed during SHIFT DOWN! 
INITIAL: " + Arrays.toString(oldReplicas) + ", CURRENT: " + Arrays.toString(state) + ", FINAL: " + Arrays.toString(newReplicas); if (state[newIndex] == null) { // it is a SHIFT DOWN logger.finest("partitionId=%d, SHIFT DOWN %s to index: %d, COPY %s to index: %d", partitionId, state[currentIndex], newIndex, newReplicas[currentIndex], currentIndex); callback.migrate(state[currentIndex], currentIndex, newIndex, newReplicas[currentIndex], -1, currentIndex); state[newIndex] = state[currentIndex]; } else { logger.finest("partitionId=%d, MOVE-3 %s to index: %d", partitionId, newReplicas[currentIndex], currentIndex); callback.migrate(state[currentIndex], currentIndex, -1, newReplicas[currentIndex], -1, currentIndex); } state[currentIndex] = newReplicas[currentIndex]; currentIndex++; continue; } planMigrations(partitionId, oldReplicas, newReplicas, callback, currentIndex); } assert Arrays.equals(state, newReplicas) : "partitionId=" + partitionId + ", Migration decisions failed! INITIAL: " + Arrays.toString(oldReplicas) + " CURRENT: " + Arrays.toString(state) + ", FINAL: " + Arrays.toString(newReplicas); }
@Test
public void test_SHIFT_UP_multipleTimes() throws UnknownHostException {
    // Old layout has a hole at index 1; members at indexes 2 and 3 should each
    // be shifted up by one position.
    final PartitionReplica[] oldReplicas = {
            new PartitionReplica(new Address("localhost", 5702), uuids[1]),
            null,
            new PartitionReplica(new Address("localhost", 5703), uuids[2]),
            new PartitionReplica(new Address("localhost", 5704), uuids[3]),
            null, null, null, };
    final PartitionReplica[] newReplicas = {
            new PartitionReplica(new Address("localhost", 5702), uuids[1]),
            new PartitionReplica(new Address("localhost", 5703), uuids[2]),
            new PartitionReplica(new Address("localhost", 5704), uuids[3]),
            null, null, null, null, };
    migrationPlanner.planMigrations(0, oldReplicas, newReplicas, callback);
    // Expect two SHIFT UP decisions: index 2 -> 1 and index 3 -> 2.
    verify(callback).migrate(null, -1, -1, new PartitionReplica(new Address("localhost", 5703), uuids[2]), 2, 1);
    verify(callback).migrate(null, -1, -1, new PartitionReplica(new Address("localhost", 5704), uuids[3]), 3, 2);
}
// Re-resolves which segment file is currently the oldest one that has content,
// and caches the timestamp of the last event stored in it.
void updateOldestSegmentReference() throws IOException {
    final Optional<Path> previousOldestSegmentPath = oldestSegmentPath;
    oldestSegmentPath = listSegmentPathsSortedBySegmentId(this.queuePath)
            .filter(p -> p.toFile().length() > 1) // take the files that have content to process
            .findFirst();
    if (!oldestSegmentPath.isPresent()) {
        // no segment with content left: nothing to track
        oldestSegmentTimestamp = Optional.empty();
        return;
    }
    boolean previousPathEqualsToCurrent = previousOldestSegmentPath.isPresent() && // contains a value
            previousOldestSegmentPath.get().equals(oldestSegmentPath.get()); // and the value is the same as the current
    if (!previousPathEqualsToCurrent) {
        // oldest segment path has changed
        logger.debug("Oldest segment is {}", oldestSegmentPath.get());
    }
    // extract the newest timestamp from the oldest segment
    Optional<Timestamp> foundTimestamp = readTimestampOfLastEventInSegment(oldestSegmentPath.get());
    if (!foundTimestamp.isPresent()) {
        // clean also the last segment, because doesn't contain a timestamp (corrupted maybe)
        // or is not present anymore
        oldestSegmentPath = Optional.empty();
    }
    oldestSegmentTimestamp = foundTimestamp;
}
@Test
public void testUpdateOldestSegmentReference() throws IOException {
    try (DeadLetterQueueWriter sut = DeadLetterQueueWriter
            .newBuilderWithoutFlusher(dir, 10 * MB, 20 * MB)
            .build()) {
        // Create three non-empty segment files; the lowest-numbered one
        // should be picked as the oldest.
        final byte[] eventBytes = new DLQEntry(new Event(), "", "", "").serialize();
        try (RecordIOWriter writer = new RecordIOWriter(dir.resolve("1.log"))) {
            writer.writeEvent(eventBytes);
        }
        try (RecordIOWriter writer = new RecordIOWriter(dir.resolve("2.log"))) {
            writer.writeEvent(eventBytes);
        }
        try (RecordIOWriter writer = new RecordIOWriter(dir.resolve("3.log"))) {
            writer.writeEvent(eventBytes);
        }
        // Exercise
        sut.updateOldestSegmentReference();
        // Verify
        final Optional<Path> oldestSegmentPath = sut.getOldestSegmentPath();
        assertTrue(oldestSegmentPath.isPresent());
        assertEquals("1.log", oldestSegmentPath.get().getFileName().toString());
    }
}
/**
 * Callback invoked when a class loader is added to the module; refreshes the
 * serialize-security state derived from that loader.
 */
@Override
public void onAddClassLoader(ModuleModel scopeModel, ClassLoader classLoader) {
    refreshClassLoader(classLoader);
}
@Test
void test() {
    FrameworkModel frameworkModel = new FrameworkModel();
    ApplicationModel applicationModel = frameworkModel.newApplication();
    ModuleModel moduleModel = applicationModel.newModule();
    SerializeSecurityManager ssm = frameworkModel.getBeanFactory().getBean(SerializeSecurityManager.class);
    SerializeSecurityConfigurator serializeSecurityConfigurator = new SerializeSecurityConfigurator(moduleModel);
    // Feeding the context class loader should register allow/deny prefixes
    // from the loader's serialize-security resources.
    serializeSecurityConfigurator.onAddClassLoader(
            moduleModel, Thread.currentThread().getContextClassLoader());
    Assertions.assertTrue(ssm.getAllowedPrefix().contains("java.util.HashMap"));
    Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.example.DemoInterface"));
    Assertions.assertTrue(ssm.getAllowedPrefix().contains("com.sun.Interface1"));
    Assertions.assertTrue(ssm.getDisAllowedPrefix().contains("com.exampletest.DemoInterface"));
    Assertions.assertFalse(ssm.getAllowedPrefix().contains("com.sun.Interface2"));
    Assertions.assertEquals(AllowClassNotifyListener.DEFAULT_STATUS, ssm.getCheckStatus());
    frameworkModel.destroy();
}
/**
 * Resolves each "objectName.attribute" expression against the available
 * MBeanServers and returns the converted attribute values, in order.
 * Throws IllegalArgumentException for malformed expressions, password
 * attributes, JMX errors, or names found in no server.
 */
private static List<Object> getConvertedAttributes(List<String> mbeanAttributes) {
    initJRockitMBeansIfNeeded();
    final List<Object> result = new ArrayList<>();
    final List<MBeanServer> mBeanServers = getMBeanServers();
    for (final String mbeansAttribute : mbeanAttributes) {
        // split "name.attribute" on the LAST dot (ObjectNames contain dots)
        final int lastIndexOfPoint = mbeansAttribute.lastIndexOf('.');
        if (lastIndexOfPoint <= 0) {
            throw new IllegalArgumentException(mbeansAttribute);
        }
        final String name = mbeansAttribute.substring(0, lastIndexOfPoint);
        final String attribute = mbeansAttribute.substring(lastIndexOfPoint + 1);
        // never expose the password attribute
        // (notably in tomcat users or tomcat datasources)
        if ("password".equalsIgnoreCase(attribute)) {
            throw new IllegalArgumentException(name + '.' + attribute);
        }
        InstanceNotFoundException instanceNotFoundException = null;
        for (final MBeanServer mbeanServer : mBeanServers) {
            try {
                final MBeans mbeans = new MBeans(mbeanServer);
                final Object jmxValue = mbeans.convertValueIfNeeded(
                        mbeans.getAttribute(new ObjectName(name), attribute));
                result.add(jmxValue);
                instanceNotFoundException = null;
                // ObjectName found in this MBeanServer; no need to search the
                // following ones (where it is not present anyway)
                break;
            } catch (final InstanceNotFoundException e) {
                // ObjectName not found in this MBeanServer, so try the next one
                // (needed for JBoss 5.0.x)
                instanceNotFoundException = e;
                continue;
            } catch (final JMException e) {
                throw new IllegalArgumentException(name + '.' + attribute, e);
            }
        }
        if (instanceNotFoundException != null) {
            // not found in any server
            throw new IllegalArgumentException(name + '.' + attribute, instanceNotFoundException);
        }
    }
    return result;
}
/**
 * Happy-path lookups must return values; malformed or forbidden expressions
 * must raise IllegalArgumentException. Each expected-exception block now fails
 * explicitly when no exception is thrown (previously it passed silently).
 */
@Test
public void testGetConvertedAttribute() {
    final String firstMBean = mbeansList.get(0).toString();
    final String message = "getConvertedAttributes";
    assertNotNull(message, MBeans.getConvertedAttributes(firstMBean + ".maxThreads"));
    assertNotNull(message, MBeans
            .getConvertedAttributes(firstMBean + ".maxThreads|" + firstMBean + ".maxThreads"));
    assertNotNull(message, MBeans.getConvertedAttributes(firstMBean + ".intArrayAsInJRockit"));
    assertNotNull(message, MBeans.getConvertedAttributes(firstMBean + ".doubleArrayAsInJRockit"));
    try {
        MBeans.getConvertedAttributes("Catalina:type=instanceNotFound.maxThreads");
        // AssertionError is not caught below, so a missing exception now fails the test
        throw new AssertionError("expected IllegalArgumentException");
    } catch (final IllegalArgumentException e) {
        assertNotNull("e", e);
    }
    try {
        MBeans.getConvertedAttributes("n'importe quoi.maxThreads");
        throw new AssertionError("expected IllegalArgumentException");
    } catch (final IllegalArgumentException e) {
        assertNotNull("e", e);
    }
    try {
        MBeans.getConvertedAttributes(firstMBean + ".Password");
        throw new AssertionError("expected IllegalArgumentException");
    } catch (final IllegalArgumentException e) {
        assertNotNull("e", e);
    }
    try {
        MBeans.getConvertedAttributes("noAttribute");
        throw new AssertionError("expected IllegalArgumentException");
    } catch (final IllegalArgumentException e) {
        assertNotNull("e", e);
    }
}
/**
 * Loads every key from the mapped table, or an empty list when key loading is
 * disabled via configuration. The returned Iterable produces an iterator that
 * closes the underlying SqlResult when iteration finishes.
 */
@Override
public Iterable<K> loadAllKeys() {
    // If loadAllKeys property is disabled, don't load anything
    if (!genericMapStoreProperties.loadAllKeys) {
        return Collections.emptyList();
    }
    awaitSuccessfulInit();
    String sql = queries.loadAllKeys();
    SqlResult keysResult = sqlService.execute(sql);
    // The contract for loadAllKeys says that if iterator implements Closable
    // then it will be closed when the iteration is over
    return () -> new MappingClosingIterator<>(
            keysResult.iterator(),
            (SqlRow row) -> row.getObject(genericMapStoreProperties.idColumn),
            keysResult::close
    );
}
@Test
public void givenRow_whenLoadAllKeysWithSingleColumn_thenReturnKeys() {
    // Insert one row (id 0) and verify its key comes back from loadAllKeys.
    ObjectSpec spec = objectProvider.createObject(mapName);
    mapLoaderSingleColumn = createMapLoaderSingleColumn();
    objectProvider.insertItems(spec, 1);
    List<Integer> ids = newArrayList(mapLoaderSingleColumn.loadAllKeys());
    assertThat(ids).contains(0);
}
/**
 * This pattern inspector subsumes another inspector only when the other is a
 * PatternInspector over the same fact type and this inspector's list subsumes
 * the other's list.
 */
@Override
public boolean subsumes(final Object other) {
    if (!(other instanceof PatternInspector)) {
        return false;
    }
    final PatternInspector candidate = (PatternInspector) other;
    final boolean sameFactType = pattern.getObjectType()
                                        .getType()
                                        .equals(candidate.getPattern()
                                                         .getObjectType()
                                                         .getType());
    // Short-circuit mirrors the original: the list check runs only for equal types.
    return sameFactType && inspectorList.subsumes(candidate.inspectorList);
}
@Test
void testSubsumpt02() throws Exception {
    // Inspectors over different fact types must not subsume each other in
    // either direction.
    final PatternInspector x = new PatternInspector(new Pattern("x",
                                                                new ObjectType("org.Address",
                                                                               configurationMock),
                                                                configurationMock),
                                                    mock(RuleInspectorUpdater.class),
                                                    mock(AnalyzerConfiguration.class));
    assertThat(x.subsumes(b)).isFalse();
    assertThat(b.subsumes(x)).isFalse();
}
/**
 * NUL-terminates the password, XORs it with the seed, then RSA-encrypts the
 * result with the given public key and transformation.
 * NOTE(review): password.getBytes() uses the platform default charset — TODO
 * confirm whether UTF-8 should be forced for cross-platform consistency.
 */
@SneakyThrows(GeneralSecurityException.class)
public static byte[] encryptWithRSAPublicKey(final String password, final byte[] seed,
                                             final String transformation, final String publicKey) {
    // a null password is encoded as a single NUL byte
    byte[] formattedPassword = null == password ? new byte[]{0} : Bytes.concat(password.getBytes(), new byte[]{0});
    return encryptWithRSAPublicKey(xor(formattedPassword, seed, formattedPassword.length),
            parseRSAPublicKey(publicKey), transformation);
}
@Test
void assertEncryptWithRSAPublicKey() {
    // Smoke test: encryption with OAEP padding must not throw.
    assertDoesNotThrow(() -> PasswordEncryption.encryptWithRSAPublicKey("123456", getRandomSeed(),
            "RSA/ECB/OAEPWithSHA-1AndMGF1Padding", mockPublicKey()));
}
/**
 * Builds a PlantUmlDiagram from raw diagram lines: strips comments, parses the
 * declared components, then resolves the dependencies between them.
 */
private PlantUmlDiagram createDiagram(List<String> rawDiagramLines) {
    List<String> lines = filterOutComments(rawDiagramLines);
    PlantUmlComponents plantUmlComponents = new PlantUmlComponents(parseComponents(lines));
    return new PlantUmlDiagram.Builder(plantUmlComponents)
            .withDependencies(parseDependencies(plantUmlComponents, lines))
            .build();
}
@Test
public void parses_component_diagram_with_multiple_stereotypes() {
    // A single component declaring three stereotypes must yield all three.
    PlantUmlDiagram diagram = createDiagram(TestDiagram.in(temporaryFolder)
            .component("someComponent")
            .withStereoTypes("..firstPackage..", "..secondPackage..", "..thirdPackage..")
            .write());
    PlantUmlComponent component = getOnlyElement(diagram.getAllComponents());
    assertThat(component.getStereotypes()).containsOnly(
            new Stereotype("..firstPackage.."),
            new Stereotype("..secondPackage.."),
            new Stereotype("..thirdPackage.."));
}
/**
 * Map equality assertion: on mismatch against another Map, fails with an
 * entry-level diff instead of the generic isEqualTo message.
 */
@Override
public final void isEqualTo(@Nullable Object other) {
    if (Objects.equal(actual, other)) {
        return;
    }
    // Fail but with a more descriptive message:
    if (actual == null || !(other instanceof Map)) {
        // no entry diff possible; fall back to the default failure
        super.isEqualTo(other);
        return;
    }
    containsEntriesInAnyOrder((Map<?, ?>) other, /* allowUnexpected= */ false);
}
@Test
public void isEqualToFailureExtraAndMissing() {
    // Maps differ in one key ("march" vs "mar"); the failure must report both
    // the missing and the unexpected entry.
    ImmutableMap<String, Integer> actual = ImmutableMap.of("jan", 1, "feb", 2, "march", 3);
    ImmutableMap<String, Integer> expectedMap = ImmutableMap.of("jan", 1, "feb", 2, "mar", 3);
    expectFailureWhenTestingThat(actual).isEqualTo(expectedMap);
    assertFailureKeys(
            "missing keys",
            "for key",
            "expected value",
            "unexpected keys",
            "for key",
            "unexpected value",
            "---",
            "expected",
            "but was");
    assertFailureValueIndexed("for key", 0, "mar");
    assertFailureValue("expected value", "3");
    assertFailureValueIndexed("for key", 1, "march");
    assertFailureValue("unexpected value", "3");
}
/**
 * Returns the SMPP command carried by this message; may be null when the
 * message was constructed without one.
 */
public Command getCommand() {
    return command;
}
@Test
public void emptyConstructorShouldReturnAnInstanceWithoutACommand() {
    // A message built with a null command exposes no command and no headers.
    message = new SmppMessage(camelContext, null, new SmppConfiguration());
    assertNull(message.getCommand());
    assertTrue(message.getHeaders().isEmpty());
}
/**
 * Requests removal of the given agent from the composite; only one removal
 * request may be pending at a time (CAS on the removal slot).
 */
public boolean tryRemove(final Agent agent) {
    Objects.requireNonNull(agent, "agent cannot be null");
    if (status != Status.ACTIVE) {
        throw new IllegalStateException("remove called when not active");
    }
    // succeeds only when no other removal is currently pending
    return removeAgent.compareAndSet(null, agent);
}
@Test
void shouldNotAllowRemoveAfterClose() {
    final DynamicCompositeAgent compositeAgent = new DynamicCompositeAgent(ROLE_NAME);
    final AgentInvoker invoker = new AgentInvoker(Throwable::printStackTrace, null, compositeAgent);
    invoker.close();
    // Once closed, tryRemove must reject with IllegalStateException.
    assertThrows(IllegalStateException.class, () -> compositeAgent.tryRemove(mock(Agent.class)));
}
/**
 * Analyzes the statement, delegating to the two-argument overload with the
 * boolean flag set to false (flag semantics are defined by that overload).
 */
public Analysis analyze(Statement statement) {
    return analyze(statement, false);
}
// Referencing a column not defined by the inline table alias must fail with a
// precise "cannot be resolved" message including the source position.
@Test(expectedExceptions = SemanticException.class, expectedExceptionsMessageRegExp = "line 1:8: 't.y' cannot be resolved")
public void testInvalidAttributeCorrectErrorMessage() {
    analyze("SELECT t.y FROM (VALUES 1) t(x)");
}
/**
 * Returns keystore contents for display: the datanode's configured keystore
 * (when loadable) plus all stores from the Opensearch keystore provider.
 */
@GET
public Map<OpensearchKeystoreProvider.Store, KeyStoreDto> getCertificates() {
    Map<OpensearchKeystoreProvider.Store, KeyStoreDto> certificates = new HashMap<>();
    try {
        KeyStore keystore = datanodeKeystore.loadKeystore();
        certificates.put(OpensearchKeystoreProvider.Store.CONFIGURED, KeyStoreDto.fromKeyStore(keystore));
    } catch (DatanodeKeystoreException | KeyStoreException e) {
        // best-effort: a broken/missing keystore is logged, not propagated
        log.error("Could not load datanode keystore", e);
    }
    certificates.putAll(opensearchKeystore);
    return certificates;
}
@Test
public void testOptionalSecurityConfiguration() throws Exception {
    when(datanodeKeystore.loadKeystore()).thenReturn(testKeyStore());
    Map<OpensearchKeystoreProvider.Store, KeyStoreDto> certificates = certificatesController.getCertificates();
    // one CONFIGURED entry plus the provider-supplied stores
    assertThat(certificates).hasSize(4);
    assertThat(certificates.get(OpensearchKeystoreProvider.Store.CONFIGURED).certificates()).hasSize(3);
    assertThat(certificates.get(OpensearchKeystoreProvider.Store.CONFIGURED).certificates().get("ca")).hasSize(1);
    assertThat(certificates.get(OpensearchKeystoreProvider.Store.CONFIGURED).certificates().get("host")).hasSize(2);
    assertThat(certificates.get(OpensearchKeystoreProvider.Store.CONFIGURED).certificates().get("cert")).hasSize(1);
    assertThat(certificates.get(OpensearchKeystoreProvider.Store.TRUSTSTORE).certificates()).hasSize(0);
}
/**
 * Looks up the endpoint registered under the given listener name and, when
 * present, exposes it as a Node (rack is null when not configured).
 */
public Optional<Node> node(String listenerName) {
    return Optional.ofNullable(listeners().get(listenerName))
            .map(endpoint -> new Node(id, endpoint.host(), endpoint.port(), rack.orElse(null)));
}
@Test
public void testToNode() {
    // Unknown listener name yields empty.
    assertEquals(Optional.empty(), REGISTRATIONS.get(0).node("NONEXISTENT"));
    assertEquals(Optional.of(new Node(0, "localhost", 9090, null)),
            REGISTRATIONS.get(0).node("INTERNAL"));
    assertEquals(Optional.of(new Node(1, "localhost", 9091, null)),
            REGISTRATIONS.get(1).node("INTERNAL"));
    // Registration 2 carries a rack, which must be propagated to the Node.
    assertEquals(Optional.of(new Node(2, "localhost", 9092, "myrack")),
            REGISTRATIONS.get(2).node("INTERNAL"));
    assertEquals(Optional.of(new Node(3, "localhost", 9093, null)),
            REGISTRATIONS.get(3).node("INTERNAL"));
}
/**
 * Compiles {@code source}, loads the class named {@code name}, and returns a
 * new instance created via its declared no-arg constructor.
 *
 * @throws CompileException if the source does not compile
 * @throws ReflectiveOperationException if no accessible no-arg constructor
 *         exists or instantiation fails
 */
public static Object getInstance(String name, String source) throws CompileException, ReflectiveOperationException {
    return getClass(name, source).getDeclaredConstructor().newInstance();
}
@Test
public void testGetInstance() throws Exception {
    // Compile and instantiate the Square class, then exercise it through the
    // stdlib Function interface (the unchecked cast is expected here).
    Function<Integer, Integer> square = (Function<Integer, Integer>) StringCompiler.getInstance("Square", SQUARE_SOURCE);
    assertEquals(4, (int) square.apply(2));
}
/**
 * Builds the scan query for a chunk split, delegating to the generic builder
 * with no limit (-1) and the last flag set.
 */
public static String buildSplitScanQuery(
        TableId tableId, SeaTunnelRowType rowType, boolean isFirstSplit, boolean isLastSplit) {
    return buildSplitQuery(tableId, rowType, isFirstSplit, isLastSplit, -1, true);
}
@Test
public void testSplitScanQuery() {
    // middle split: bounded on both sides
    String splitScanSQL =
            MySqlUtils.buildSplitScanQuery(
                    TableId.parse("db1.table1"),
                    new SeaTunnelRowType(
                            new String[] {"id"}, new SeaTunnelDataType[] {BasicType.LONG_TYPE}),
                    false,
                    false);
    Assertions.assertEquals(
            "SELECT * FROM `db1`.`table1` WHERE `id` >= ? AND NOT (`id` = ?) AND `id` <= ?",
            splitScanSQL);
    // only split: unbounded scan
    splitScanSQL =
            MySqlUtils.buildSplitScanQuery(
                    TableId.parse("db1.table1"),
                    new SeaTunnelRowType(
                            new String[] {"id"}, new SeaTunnelDataType[] {BasicType.LONG_TYPE}),
                    true,
                    true);
    Assertions.assertEquals("SELECT * FROM `db1`.`table1`", splitScanSQL);
    // first split: upper bound only
    splitScanSQL =
            MySqlUtils.buildSplitScanQuery(
                    TableId.parse("db1.table1"),
                    new SeaTunnelRowType(
                            new String[] {"id"}, new SeaTunnelDataType[] {BasicType.LONG_TYPE}),
                    true,
                    false);
    Assertions.assertEquals(
            "SELECT * FROM `db1`.`table1` WHERE `id` <= ? AND NOT (`id` = ?)", splitScanSQL);
    // last split: lower bound only
    splitScanSQL =
            MySqlUtils.buildSplitScanQuery(
                    TableId.parse("db1.table1"),
                    new SeaTunnelRowType(
                            new String[] {"id"}, new SeaTunnelDataType[] {BasicType.LONG_TYPE}),
                    false,
                    true);
    Assertions.assertEquals("SELECT * FROM `db1`.`table1` WHERE `id` >= ?", splitScanSQL);
}
/**
 * A cost is infinite when a tracked component is positive infinity.
 * NOTE(review): only {@code cpu} and {@code network} are checked here —
 * confirm whether other cost components should also participate.
 */
@Override
public boolean isInfinite() {
    return cpu == Double.POSITIVE_INFINITY || network == Double.POSITIVE_INFINITY;
}
@Test
public void testIsInfinite() {
    CostFactory factory = CostFactory.INSTANCE;
    // finite cost
    assertFalse(factory.makeCost(1.0, 2.0d, 3.0d).isInfinite());
    // infinity in any component makes the whole cost infinite
    assertTrue(factory.makeCost(1.0, Double.POSITIVE_INFINITY, 3.0d).isInfinite());
    assertTrue(factory.makeCost(1.0, 2.0d, Double.POSITIVE_INFINITY).isInfinite());
    assertTrue(factory.makeCost(1.0, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY).isInfinite());
    assertTrue(factory.makeInfiniteCost().isInfinite());
}
/**
 * ST_Intersection: point-set intersection of two serialized geometries.
 * Envelope/envelope pairs are intersected directly (degenerate results become
 * point or line); an envelope containing the other geometry's envelope short-
 * circuits to the other geometry; otherwise both are deserialized and Esri's
 * intersection is used.
 */
@Description("Returns the Geometry value that represents the point set intersection of two Geometries")
@ScalarFunction("ST_Intersection")
@SqlType(GEOMETRY_TYPE_NAME)
public static Slice stIntersection(@SqlType(GEOMETRY_TYPE_NAME) Slice left, @SqlType(GEOMETRY_TYPE_NAME) Slice right) {
    GeometrySerializationType leftType = deserializeType(left);
    GeometrySerializationType rightType = deserializeType(right);
    if (leftType == GeometrySerializationType.ENVELOPE && rightType == GeometrySerializationType.ENVELOPE) {
        Envelope leftEnvelope = deserializeEnvelope(left);
        Envelope rightEnvelope = deserializeEnvelope(right);
        // Envelope#intersect updates leftEnvelope to the intersection of the two envelopes
        if (!leftEnvelope.intersect(rightEnvelope)) {
            return EMPTY_POLYGON;
        }
        Envelope intersection = leftEnvelope;
        // zero width and/or zero height degenerates to a point or a line
        if (intersection.getXMin() == intersection.getXMax() || intersection.getYMin() == intersection.getYMax()) {
            if (intersection.getXMin() == intersection.getXMax() && intersection.getYMin() == intersection.getYMax()) {
                return EsriGeometrySerde.serialize(createFromEsriGeometry(new Point(intersection.getXMin(), intersection.getYMin()), null));
            }
            return EsriGeometrySerde.serialize(createFromEsriGeometry(new Polyline(new Point(intersection.getXMin(), intersection.getYMin()), new Point(intersection.getXMax(), intersection.getYMax())), null));
        }
        return EsriGeometrySerde.serialize(intersection);
    }
    // If one side is an envelope, then if it contains the other's envelope we can just return the other geometry.
    if (leftType == GeometrySerializationType.ENVELOPE
            && deserializeEnvelope(left).contains(deserializeEnvelope(right))) {
        return right;
    }
    if (rightType == GeometrySerializationType.ENVELOPE
            && deserializeEnvelope(right).contains(deserializeEnvelope(left))) {
        return left;
    }
    // general case: full deserialization and Esri intersection
    OGCGeometry leftGeometry = EsriGeometrySerde.deserialize(left);
    OGCGeometry rightGeometry = EsriGeometrySerde.deserialize(right);
    verifySameSpatialReference(leftGeometry, rightGeometry);
    return EsriGeometrySerde.serialize(leftGeometry.intersection(rightGeometry));
}
@Test public void testSTIntersection() { assertFunction("ST_AsText(ST_Intersection(ST_GeometryFromText('POINT (50 100)'), ST_GeometryFromText('POINT (150 150)')))", VARCHAR, "MULTIPOLYGON EMPTY"); assertFunction("ST_AsText(ST_Intersection(ST_GeometryFromText('MULTIPOINT (50 100, 50 200)'), ST_GeometryFromText('Point (50 100)')))", VARCHAR, "POINT (50 100)"); assertFunction("ST_AsText(ST_Intersection(ST_GeometryFromText('LINESTRING (50 100, 50 200)'), ST_GeometryFromText('LINESTRING (20 150, 100 150)')))", VARCHAR, "POINT (50 150)"); assertFunction("ST_AsText(ST_Intersection(ST_GeometryFromText('MULTILINESTRING ((1 1, 5 1), (2 4, 4 4))'), ST_GeometryFromText('MULTILINESTRING ((3 4, 6 4), (5 0, 5 4))')))", VARCHAR, "GEOMETRYCOLLECTION (POINT (5 1), LINESTRING (3 4, 4 4))"); assertFunction("ST_AsText(ST_Intersection(ST_GeometryFromText('POLYGON ((1 1, 1 3, 3 3, 3 1, 1 1))'), ST_GeometryFromText('POLYGON ((4 4, 4 5, 5 5, 5 4, 4 4))')))", VARCHAR, "MULTIPOLYGON EMPTY"); assertFunction("ST_AsText(ST_Intersection(ST_GeometryFromText('MULTIPOLYGON (((1 1, 1 3, 3 3, 3 1, 1 1)), ((0 0, 0 2, 2 2, 2 0, 0 0)))'), ST_GeometryFromText('POLYGON ((0 1, 3 1, 3 3, 0 3, 0 1))')))", VARCHAR, "GEOMETRYCOLLECTION (LINESTRING (1 1, 2 1), MULTIPOLYGON (((0 1, 0 2, 1 2, 1 1, 0 1)), ((2 1, 2 2, 1 2, 1 3, 3 3, 3 1, 2 1))))"); assertFunction("ST_AsText(ST_Intersection(ST_GeometryFromText('POLYGON ((1 1, 1 4, 4 4, 4 1, 1 1))'), ST_GeometryFromText('LINESTRING (2 0, 2 3)')))", VARCHAR, "LINESTRING (2 1, 2 3)"); assertFunction("ST_AsText(ST_Intersection(ST_GeometryFromText('POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))'), ST_GeometryFromText('LINESTRING (0 0, 1 -1, 1 2)')))", VARCHAR, "GEOMETRYCOLLECTION (POINT (0 0), LINESTRING (1 0, 1 1))"); // test intersection of envelopes assertEnvelopeIntersection("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))", "POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))", "POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))"); assertEnvelopeIntersection("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))", "POLYGON ((-1 4, 1 4, 
1 6, -1 6, -1 4))", "POLYGON ((0 4, 0 5, 1 5, 1 4, 0 4))"); assertEnvelopeIntersection("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))", "POLYGON ((1 4, 2 4, 2 6, 1 6, 1 4))", "POLYGON ((1 4, 1 5, 2 5, 2 4, 1 4))"); assertEnvelopeIntersection("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))", "POLYGON ((4 4, 6 4, 6 6, 4 6, 4 4))", "POLYGON ((4 4, 4 5, 5 5, 5 4, 4 4))"); assertEnvelopeIntersection("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))", "POLYGON ((10 10, 11 10, 11 11, 10 11, 10 10))", "POLYGON EMPTY"); assertEnvelopeIntersection("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))", "POLYGON ((-1 -1, 0 -1, 0 1, -1 1, -1 -1))", "LINESTRING (0 0, 0 1)"); assertEnvelopeIntersection("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))", "POLYGON ((1 -1, 2 -1, 2 0, 1 0, 1 -1))", "LINESTRING (1 0, 2 0)"); assertEnvelopeIntersection("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))", "POLYGON ((-1 -1, 0 -1, 0 0, -1 0, -1 -1))", "POINT (0 0)"); assertEnvelopeIntersection("POLYGON ((0 0, 0 5, 5 5, 5 0, 0 0))", "POLYGON ((5 -1, 5 0, 6 0, 6 -1, 5 -1))", "POINT (5 0)"); }
/**
 * Best-effort extraction of a JMS destination name. Returns null for a null
 * destination or when the provider throws while reading the name.
 */
@Nullable
static String channelName(@Nullable Destination destination) {
    if (destination == null) return null;
    boolean isQueue = isQueue(destination);
    try {
        if (isQueue) {
            return ((Queue) destination).getQueueName();
        } else {
            return ((Topic) destination).getTopicName();
        }
    } catch (Throwable t) {
        // rethrow fatal errors, otherwise swallow: tracing must never break messaging
        propagateIfFatal(t);
        log(t, "error getting destination name from {0}", destination, null);
    }
    return null;
}
@Test
void channelName_queueAndTopic_null() {
    // null destination is handled without throwing
    assertThat(MessageParser.channelName(null)).isNull();
}
/**
 * Returns the default logback configuration location used when the user
 * supplies none (constant defined elsewhere in this class).
 */
@Override
public String getDefaultConfigLocation() {
    return NACOS_LOGBACK_LOCATION;
}
@Test
void testGetDefaultConfigLocation() {
    // pins the bundled classpath location of the default logback config
    assertEquals("classpath:nacos-logback12.xml", logbackNacosLoggingAdapter.getDefaultConfigLocation());
}
/**
 * Returns a view over the values of the given column.
 * NOTE(review): bounds checking appears to be performed by ColumnView (the
 * tests expect IndexOutOfBoundsException for negative indices) — confirm there.
 */
public List<String> column(final int column) {
    return new ColumnView(column);
}
@Test
void column_should_throw_for_negative_column_value() {
    // negative index must be rejected, not silently clamped
    assertThrows(IndexOutOfBoundsException.class, () -> createSimpleTable().column(-1));
}
/**
 * Sends storage-medium migration tasks for the given tablets to one backend,
 * respecting the per-send task cap, skipping tablets whose task is already
 * queued, and skipping tablets that fail the migratability checks.
 */
protected static void handleMigration(ListMultimap<TStorageMedium, Long> tabletMetaMigrationMap,
        long backendId) {
    TabletInvertedIndex invertedIndex = GlobalStateMgr.getCurrentState().getTabletInvertedIndex();
    AgentBatchTask batchTask = new AgentBatchTask();
    OUTER:
    for (TStorageMedium storageMedium : tabletMetaMigrationMap.keySet()) {
        List<Long> tabletIds = tabletMetaMigrationMap.get(storageMedium);
        List<TabletMeta> tabletMetaList = invertedIndex.getTabletMetaList(tabletIds);
        for (int i = 0; i < tabletMetaList.size(); i++) {
            long tabletId = tabletIds.get(i);
            TabletMeta tabletMeta = tabletMetaList.get(i);
            // 1. If size of tabletMigrationMap exceeds (Config.tablet_sched_max_migration_task_sent_once - running_tasks_on_be),
            // dot not send more tasks. The number of tasks running on BE cannot exceed Config.tablet_sched_max_migration_task_sent_once
            if (batchTask.getTaskNum() >= Config.tablet_sched_max_migration_task_sent_once
                    - AgentTaskQueue.getTaskNum(backendId, TTaskType.STORAGE_MEDIUM_MIGRATE, false)) {
                LOG.debug("size of tabletMigrationMap + size of running tasks on BE is bigger than {}",
                        Config.tablet_sched_max_migration_task_sent_once);
                break OUTER;
            }
            // 2. If the task already running on BE, do not send again
            if (AgentTaskQueue.getTask(backendId, TTaskType.STORAGE_MEDIUM_MIGRATE, tabletId) != null) {
                LOG.debug("migrate of tablet:{} is already running on BE", tabletId);
                continue;
            }
            // 3. There are some limitations for primary table, details in migratableTablet()
            Database db = GlobalStateMgr.getCurrentState().getDb(tabletMeta.getDbId());
            if (db == null) {
                continue;
            }
            OlapTable table = (OlapTable) db.getTable(tabletMeta.getTableId());
            if (table == null) {
                continue;
            }
            if (!migratableTablet(db, table, tabletMeta.getPhysicalPartitionId(), tabletMeta.getIndexId(),
                    tabletId)) {
                continue;
            }
            // always get old schema hash(as effective one)
            int effectiveSchemaHash = tabletMeta.getOldSchemaHash();
            StorageMediaMigrationTask task = new StorageMediaMigrationTask(backendId, tabletId,
                    effectiveSchemaHash, storageMedium);
            batchTask.addTask(task);
        }
    }
    AgentTaskQueue.addBatchTask(batchTask);
    AgentTaskExecutor.submit(batchTask);
}
@Test
public void testHandleMigration() throws TException {
    List<Long> tabletIds =
            GlobalStateMgr.getCurrentState().getTabletInvertedIndex().getTabletIdsByBackendId(10001);
    ListMultimap<TStorageMedium, Long> tabletMetaMigrationMap = ArrayListMultimap.create();
    for (Long tabletId : tabletIds) {
        tabletMetaMigrationMap.put(TStorageMedium.SSD, tabletId);
    }
    // first run with the default backend state
    ReportHandler.handleMigration(tabletMetaMigrationMap, 10001);
    final SystemInfoService currentSystemInfo = GlobalStateMgr.getCurrentState().getNodeMgr().getClusterInfo();
    Backend reportBackend = currentSystemInfo.getBackend(10001);
    BackendStatus backendStatus = reportBackend.getBackendStatus();
    backendStatus.lastSuccessReportTabletsTime = TimeUtils.longToTimeString(Long.MAX_VALUE);
    // second run after marking the backend as freshly reported
    ReportHandler.handleMigration(tabletMetaMigrationMap, 10001);
    TabletInvertedIndex invertedIndex = GlobalStateMgr.getCurrentState().getTabletInvertedIndex();
    List<TabletMeta> tabletMetaList = invertedIndex.getTabletMetaList(tabletIds);
    for (int i = 0; i < tabletMetaList.size(); i++) {
        long tabletId = tabletIds.get(i);
        TabletMeta tabletMeta = tabletMetaList.get(i);
        Database db = GlobalStateMgr.getCurrentState().getDb("test");
        if (db == null) {
            continue;
        }
        OlapTable table = null;
        Locker locker = new Locker();
        locker.lockDatabase(db, LockType.READ);
        try {
            table = (OlapTable) db.getTable(tabletMeta.getTableId());
        } finally {
            locker.unLockDatabase(db, LockType.READ);
        }
        Partition partition = table.getPartition(tabletMeta.getPartitionId());
        MaterializedIndex idx = partition.getIndex(tabletMeta.getIndexId());
        LocalTablet tablet = (LocalTablet) idx.getTablet(tabletId);
        // mark all replicas as recently written so primary-key checks pass
        for (Replica replica : tablet.getImmutableReplicas()) {
            replica.setMaxRowsetCreationTime(System.currentTimeMillis() / 1000);
        }
    }
    // loosen config limits and run once more
    Config.tablet_sched_max_migration_task_sent_once = 1000000;
    Config.primary_key_disk_schedule_time = 0;
    ReportHandler.handleMigration(tabletMetaMigrationMap, 10001);
}
/**
 * Parses a resource configuration string with no missing-resource cap
 * (Long.MAX_VALUE), delegating to the two-argument overload.
 */
public static ConfigurableResource parseResourceConfigValue(String value)
        throws AllocationConfigurationException {
    return parseResourceConfigValue(value, Long.MAX_VALUE);
}
// A negative percentage for memory-mb in a new-style resource string must be
// rejected (expectNegativePercentageNewStyle arms the expected exception).
@Test
public void testParseNewStyleResourceWithPercentagesMemoryNegative() throws Exception {
  expectNegativePercentageNewStyle();
  parseResourceConfigValue("vcores=75%,memory-mb=-40%");
}
// Idempotently shuts down the exchange client: closes every page buffer
// client, drops buffered pages, releases accounted memory, appends the
// NO_MORE_PAGES marker so readers observe end-of-stream, and wakes any
// blocked callers.
@Override
public synchronized void close()
{
    // compareAndSet makes close() a no-op after the first successful call.
    if (!closed.compareAndSet(false, true)) {
        return;
    }
    for (PageBufferClient client : allClients.values()) {
        closeQuietly(client);
    }
    pageBuffer.clear();
    systemMemoryContext.setBytes(0);
    bufferRetainedSizeInBytes = 0;
    // After clear() the buffer cannot already end with the marker, so this
    // appends NO_MORE_PAGES to signal completion to page consumers.
    if (pageBuffer.peekLast() != NO_MORE_PAGES) {
        checkState(pageBuffer.add(NO_MORE_PAGES), "Could not add no more pages marker");
    }
    notifyBlockedCallers();
}
// Closing the client while pages are still buffered/available must finish the
// client, discard the remaining pages and stop issuing further requests.
@Test
public void testClose()
        throws Exception
{
    DataSize bufferCapacity = new DataSize(1, BYTE);
    DataSize maxResponseSize = new DataSize(1, BYTE);
    MockExchangeRequestProcessor processor = new MockExchangeRequestProcessor(maxResponseSize);
    URI location = URI.create("http://localhost:8080");
    processor.addPage(location, createPage(1));
    processor.addPage(location, createPage(2));
    processor.addPage(location, createPage(3));
    ExchangeClient exchangeClient = createExchangeClient(processor, bufferCapacity, maxResponseSize);
    exchangeClient.addLocation(location, TaskId.valueOf("taskid.0.0.0.0"));
    exchangeClient.noMoreLocations();
    // fetch a page
    assertFalse(exchangeClient.isClosed());
    assertPageEquals(getNextPage(exchangeClient), createPage(1));
    // close client while pages are still available
    exchangeClient.close();
    waitUntilEquals(exchangeClient::isFinished, true, new Duration(5, SECONDS));
    assertTrue(exchangeClient.isClosed());
    assertNull(exchangeClient.pollPage());
    assertEquals(exchangeClient.getStatus().getBufferedPages(), 0);
    assertEquals(exchangeClient.getStatus().getBufferedBytes(), 0);
    // client should have sent only 2 requests: one to get all pages and once to get the done signal
    Optional<PageBufferClientStatus> clientStatusOptional = exchangeClient.getStatus().getPageBufferClientStatuses().stream()
            .filter(pageBufferClientStatus -> pageBufferClientStatus.getUri().equals(location))
            .findFirst();
    assertTrue(clientStatusOptional.isPresent());
    assertStatus(clientStatusOptional.get(), "closed", "not scheduled");
}
/**
 * Analyzes a predicate tree, producing the minimum number of features a
 * document must match (minFeature), the total tree size, and per-subtree
 * sizes. A negation anywhere in the tree costs one extra feature.
 */
public static PredicateTreeAnalyzerResult analyzePredicateTree(Predicate predicate) {
    AnalyzerContext context = new AnalyzerContext();
    // First pass: count nodes and record the sub-tree sizes into the context.
    int treeSize = aggregatePredicateStatistics(predicate, false, context);
    // minFeature is the ceiling of the fractional lower bound, plus one extra
    // feature when the tree contains a negation predicate.
    int minFeature = ((int)Math.ceil(findMinFeature(predicate, false, context))) + (context.hasNegationPredicate ? 1 : 0);
    return new PredicateTreeAnalyzerResult(minFeature, treeSize, context.subTreeSizes);
}
// minFeature must consider every value in a feature set: overlapping sets can
// be satisfied by one feature, disjoint sets require two.
@Test
void require_that_minvalue_feature_set_considers_all_values() {
    {
        // "B" satisfies both conjuncts -> a single feature suffices.
        Predicate p = and(
                feature("foo").inSet("A", "B"),
                feature("foo").inSet("B"));
        PredicateTreeAnalyzerResult r = PredicateTreeAnalyzer.analyzePredicateTree(p);
        assertEquals(1, r.minFeature);
        assertEquals(2, r.treeSize);
    }
    {
        // Disjoint sets -> each conjunct needs its own feature.
        Predicate p = and(
                feature("foo").inSet("A", "B"),
                feature("foo").inSet("C"));
        PredicateTreeAnalyzerResult r = PredicateTreeAnalyzer.analyzePredicateTree(p);
        assertEquals(2, r.minFeature);
        assertEquals(2, r.treeSize);
    }
}
/** Returns the current position within the underlying buffer. */
@Override
public final int position() {
    return pos;
}
// position(int) must write through to the underlying pos field.
@Test
public void testPositionNewPos() {
    out.position(1);
    assertEquals(1, out.pos);
}
public static CompressionType preferredCompressionType(List<CompressionType> acceptedCompressionTypes) { if (acceptedCompressionTypes != null && !acceptedCompressionTypes.isEmpty()) { // Broker is providing the compression types in order of preference. Grab the // first one. return acceptedCompressionTypes.get(0); } return CompressionType.NONE; }
// The first entry of a non-empty preference list wins; null/empty lists fall
// back to NONE.
@Test
public void testPreferredCompressionType() {
    assertEquals(CompressionType.NONE, ClientTelemetryUtils.preferredCompressionType(Collections.emptyList()));
    assertEquals(CompressionType.NONE, ClientTelemetryUtils.preferredCompressionType(null));
    assertEquals(CompressionType.NONE, ClientTelemetryUtils.preferredCompressionType(Arrays.asList(CompressionType.NONE, CompressionType.GZIP)));
    assertEquals(CompressionType.GZIP, ClientTelemetryUtils.preferredCompressionType(Arrays.asList(CompressionType.GZIP, CompressionType.NONE)));
}
/**
 * Checks the MD5 of a fetched artifact against the recorded checksum file and
 * reports the outcome through the publisher: missing checksum file, missing
 * entry for the path, match, or mismatch.
 */
public void validate(String effectivePath, String artifactMD5, ChecksumValidationPublisher checksumValidationPublisher) {
    // No checksum file was recorded for this artifact set at all.
    if (artifactMd5Checksums == null) {
        checksumValidationPublisher.md5ChecksumFileNotFound();
        return;
    }
    final String recordedMd5 = artifactMd5Checksums.md5For(effectivePath);
    // The checksum file exists but carries no entry for this path.
    if (StringUtils.isBlank(recordedMd5)) {
        checksumValidationPublisher.md5NotFoundFor(effectivePath);
        return;
    }
    if (recordedMd5.equals(artifactMD5)) {
        checksumValidationPublisher.md5Match(effectivePath);
        return;
    }
    checksumValidationPublisher.md5Mismatch(effectivePath);
}
// A null checksum holder means the .md5 file was never produced; the
// publisher must be told it is missing.
@Test
public void shouldNotifyPublisherWhenArtifactChecksumFileIsMissing() throws IOException {
    new ChecksumValidator(null).validate(null, null, checksumValidationPublisher);
    verify(checksumValidationPublisher).md5ChecksumFileNotFound();
}
/**
 * Returns the configured {@code param} value as a {@link Number}.
 * NOTE(review): the exact semantics depend on the configured sampler/consumer
 * of this config — confirm against the callers.
 */
public Number getParam() {
    return param;
}
// The param configured in jaeger-tracing-true must deserialize as the
// integer 1000.
@Test
public void testParamInteger() {
    JaegerConfig jaegerConfig = (JaegerConfig) Config.getInstance().getJsonObjectConfig("jaeger-tracing-true", JaegerConfig.class);
    Assert.assertEquals(jaegerConfig.getParam(), 1000);
}
/**
 * Performs one duty cycle of conductor work: timers, client commands, queued
 * commands, stream-position tracking, name resolution and freeing
 * end-of-life resources.
 *
 * @return the number of work items processed in this cycle.
 */
public int doWork() {
    final long nowNs = nanoClock.nanoTime();
    trackTime(nowNs);
    int workCount = 0;
    workCount += processTimers(nowNs);
    // Skip polling the client command buffer while an async client command is
    // still in flight.
    if (!asyncClientCommandInFlight) {
        workCount += clientCommandAdapter.receive();
    }
    workCount += drainCommandQueue();
    workCount += trackStreamPositions(workCount, nowNs);
    // Name resolution runs on epoch (wall-clock) time rather than nanos.
    workCount += nameResolver.doWork(cachedEpochClock.time());
    workCount += freeEndOfLifeResources(ctx.resourceFreeLimit());
    return workCount;
}
// Subscriptions with conflicting reliable/unreliable settings on DIFFERENT
// session ids must not clash: both become ready.
@Test
void shouldNotErrorWhenConflictingUnreliableSessionSpecificSubscriptionAddedToDifferentSessions() {
    final long id1 = driverProxy.addSubscription(CHANNEL_4000 + "|session-id=1024|reliable=true", STREAM_ID_1);
    driverConductor.doWork();
    final long id2 = driverProxy.addSubscription(CHANNEL_4000 + "|session-id=1025|reliable=false", STREAM_ID_1);
    driverConductor.doWork();
    verify(mockClientProxy).onSubscriptionReady(eq(id1), anyInt());
    verify(mockClientProxy).onSubscriptionReady(eq(id2), anyInt());
}
/**
 * Joins an existing trace context so the caller reports against the same
 * span ID (shared span). When join is not supported, a child span is created
 * instead.
 *
 * @throws NullPointerException if context is null
 */
public final Span joinSpan(TraceContext context) {
    if (context == null) throw new NullPointerException("context == null");
    if (!supportsJoin) return newChild(context);
    // set shared flag if not already done
    int flags = InternalPropagation.instance.flags(context);
    if (!context.shared()) {
        flags |= FLAG_SHARED;
        // Pass (original, shared-variant) so toSpan sees both forms.
        return toSpan(context, InternalPropagation.instance.withFlags(context, flags));
    } else {
        flags &= ~FLAG_SHARED;
        // Already shared: pass (unshared-variant, original) — reversed order.
        return toSpan(InternalPropagation.instance.withFlags(context, flags), context);
    }
}
// joinSpan on two sampled contexts of the same trace: the shared localRootId
// helper asserts the expected local-root behavior.
@Test
void localRootId_joinSpan_sampled() {
    TraceContext context1 = TraceContext.newBuilder().traceId(1).spanId(2).sampled(true).build();
    TraceContext context2 = TraceContext.newBuilder().traceId(1).spanId(3).sampled(true).build();
    localRootId(context1, context2, ctx -> tracer.joinSpan(ctx.context()));
}
/**
 * Loads the overview (versions, properties, instance statistics) for a
 * workflow by id, with retry and metric/error logging.
 *
 * @throws MaestroNotFoundException if the workflow does not exist or has
 *     been deleted.
 */
public WorkflowOverviewResponse getWorkflowOverview(@NotNull String workflowId) {
    return withMetricLogError(
        () ->
            withRetryableQuery(
                GET_WORKFLOW_OVERVIEW_QUERY,
                stmt -> {
                    // The query references the workflow id twice.
                    stmt.setString(1, workflowId);
                    stmt.setString(2, workflowId);
                },
                result -> {
                    if (result.next()) {
                        return workflowOverviewFromResult(workflowId, result);
                    }
                    throw new MaestroNotFoundException(
                        "Cannot find workflow [%s], which is either not created or has been deleted.",
                        workflowId);
                }),
        "getWorkflowOverview",
        "Failed to get the workflow overview for [{}]",
        workflowId);
}
// The overview must reflect latest/active/default versions, run strategy,
// step concurrency and zeroed instance statistics; deactivation clears the
// active version, and a missing workflow raises MaestroNotFoundException.
@Test
public void testGetWorkflowOverview() throws Exception {
    WorkflowDefinition wfd = loadWorkflow(TEST_WORKFLOW_ID1);
    workflowDao.addWorkflowDefinition(wfd, wfd.getPropertiesSnapshot().extractProperties());
    assertNotNull(wfd.getInternalId());
    long internalId = wfd.getInternalId();
    // Adding a second (inactive) version keeps the same internal id.
    wfd.setIsActive(false);
    workflowDao.addWorkflowDefinition(wfd, null);
    assertEquals(internalId, (long) wfd.getInternalId());
    WorkflowOverviewResponse response = workflowDao.getWorkflowOverview(TEST_WORKFLOW_ID1);
    assertEquals(TEST_WORKFLOW_ID1, response.getWorkflowId());
    assertEquals(1L, response.getActiveVersionId().longValue());
    assertEquals(2L, response.getLatestVersionId().longValue());
    assertEquals(1L, response.getDefaultVersionId().longValue());
    assertEquals(RunStrategy.create(20), response.getPropertiesSnapshot().getRunStrategy());
    assertEquals(20L, response.getStepConcurrency().longValue());
    // Freshly created workflow: all nonterminal counters are zero.
    Map<WorkflowInstance.Status, Long> statusStats = new EnumMap<>(WorkflowInstance.Status.class);
    statusStats.put(WorkflowInstance.Status.CREATED, 0L);
    statusStats.put(WorkflowInstance.Status.IN_PROGRESS, 0L);
    statusStats.put(WorkflowInstance.Status.PAUSED, 0L);
    assertEquals(statusStats, response.getNonterminalInstances());
    assertEquals(0L, response.getFailedInstances().longValue());
    assertNull(response.getLatestInstanceId());
    // Deactivating removes the active version from the overview.
    workflowDao.deactivate(TEST_WORKFLOW_ID1, User.create("test"));
    WorkflowOverviewResponse response1 = workflowDao.getWorkflowOverview(TEST_WORKFLOW_ID1);
    assertNull(response1.getActiveVersionId());
    assertEquals(2L, response1.getLatestVersionId().longValue());
    assertEquals(2L, response1.getDefaultVersionId().longValue());
    AssertHelper.assertThrows(
        "Cannot get non-existing workflow overview",
        MaestroNotFoundException.class,
        "Cannot find workflow [non-existing], which is either not created or has been deleted.",
        () -> workflowDao.getWorkflowOverview("non-existing"));
}
/**
 * Executes CONFIG GET {@code pattern} on the given cluster node and returns
 * the result converted to key/value properties, or {@code null} when the
 * command yields no result.
 */
@Override
public Properties getConfig(RedisClusterNode node, String pattern) {
    // Issue the command asynchronously against the node's client, then block.
    RFuture<List<String>> future = executorService.writeAsync(
            getEntry(node), StringCodec.INSTANCE, RedisCommands.CONFIG_GET, pattern);
    List<String> keyValuePairs = syncFuture(future);
    // CONFIG GET returns alternating key/value entries; convert or propagate null.
    return keyValuePairs == null ? null : Converters.toProperties(keyValuePairs);
}
// CONFIG GET * on a master node should return a sizable set of settings.
@Test
public void testGetConfig() {
    RedisClusterNode master = getFirstMaster();
    Properties config = connection.getConfig(master, "*");
    assertThat(config.size()).isGreaterThan(20);
}
/**
 * Validates that the given file pattern is a well-formed, accessible GCS
 * path; the helpers raise when it is not.
 */
@Override
public void validateInputFilePatternSupported(String filepattern) {
    // Syntax check: must parse as a GCS path.
    getGcsPath(filepattern);
    verifyPath(filepattern);
    // Existence/permission check with the error message used on failure.
    verifyPathIsAccessible(filepattern, "Could not find file %s");
}
// An inaccessible bucket must surface as a RuntimeException caused by the
// underlying FileNotFoundException, with the path in the message.
@Test
public void testWhenBucketDoesNotExist() throws Exception {
    doThrow(new FileNotFoundException())
        .when(mockGcsUtil)
        .verifyBucketAccessible(any(GcsPath.class));
    expectedException.expect(RuntimeException.class);
    expectedException.expectCause(instanceOf(FileNotFoundException.class));
    expectedException.expectMessage("Could not find file gs://non-existent-bucket/location");
    validator.validateInputFilePatternSupported("gs://non-existent-bucket/location");
}
/**
 * Returns whether {@code actualVersion} satisfies {@code acceptableVersionRange}.
 *
 * <p>A bare version string ("1.4") is treated as an inclusive left bound,
 * i.e. equivalent to "[1.4,)". Otherwise the range must be an interval whose
 * '['/']' bounds are inclusive and '('/')' bounds exclusive; either side may
 * be omitted, but not both.
 */
public boolean compatibleVersion(String acceptableVersionRange, String actualVersion) {
    V pluginVersion = parseVersion(actualVersion);
    // Treat a single version "1.4" as a left bound, equivalent to "[1.4,)"
    if (acceptableVersionRange.matches(VERSION_REGEX)) {
        return ge(pluginVersion, parseVersion(acceptableVersionRange));
    }
    // Otherwise ensure it is a version range with bounds
    Matcher matcher = INTERVAL_PATTERN.matcher(acceptableVersionRange);
    Preconditions.checkArgument(matcher.matches(), "invalid version range");
    String leftBound = matcher.group("left");
    String rightBound = matcher.group("right");
    Preconditions.checkArgument(
        leftBound != null || rightBound != null,
        "left and right bounds cannot both be empty");
    // '[' / ']' select the inclusive comparators, '(' / ')' the strict ones.
    BiPredicate<V, V> leftComparator =
        acceptableVersionRange.startsWith("[") ? VersionChecker::ge : VersionChecker::gt;
    BiPredicate<V, V> rightComparator =
        acceptableVersionRange.endsWith("]") ? VersionChecker::le : VersionChecker::lt;
    if (leftBound != null && !leftComparator.test(pluginVersion, parseVersion(leftBound))) {
        return false;
    }
    if (rightBound != null && !rightComparator.test(pluginVersion, parseVersion(rightBound))) {
        return false;
    }
    return true;
}
// A version equal to a closed left bound is inside the range regardless of
// the right bound's form.
@Test
public void testRange_leftClosed_exact() {
    Assert.assertTrue(checker.compatibleVersion("[2.3,4.3]", "2.3"));
    Assert.assertTrue(checker.compatibleVersion("[2.3,4.3)", "2.3"));
    Assert.assertTrue(checker.compatibleVersion("[2.3,)", "2.3"));
    Assert.assertTrue(checker.compatibleVersion("[2.3,]", "2.3"));
}
/**
 * Decodes a BASE64-encoded string. Illegal characters are trimmed from the
 * start and end, '=' padding is honored, and MIME-style 76-character lines
 * with CRLF separators are tolerated.
 *
 * @param input the base64 string; null or empty yields an empty array.
 * @return the decoded bytes.
 */
public static byte[] decode(String input) {
    // Check special case
    if (input == null || input.equals("")) {
        return new byte[0];
    }
    char[] sArr = input.toCharArray();
    int sLen = sArr.length;
    if (sLen == 0) {
        return new byte[0];
    }
    int sIx = 0;
    // Start and end index after trimming.
    int eIx = sLen - 1;
    // Trim illegal chars from start
    while (sIx < eIx && IALPHABET[sArr[sIx]] < 0) {
        sIx++;
    }
    // Trim illegal chars from end
    while (eIx > 0 && IALPHABET[sArr[eIx]] < 0) {
        eIx--;
    }
    // get the padding count (=) (0, 1 or 2)
    // Count '=' at end.
    int pad = sArr[eIx] == '=' ? (sArr[eIx - 1] == '=' ? 2 : 1) : 0;
    // Content count including possible separators
    int cCnt = eIx - sIx + 1;
    // If the input is longer than one MIME line (76 chars) and position 76
    // holds a '\r', estimate the CRLF separator characters to subtract.
    int sepCnt = sLen > 76 ? (sArr[76] == '\r' ? cCnt / 78 : 0) << 1 : 0;
    // The number of decoded bytes
    int len = ((cCnt - sepCnt) * 6 >> 3) - pad;
    // Preallocate byte[] of exact length
    byte[] dArr = new byte[len];
    // Decode all but the last 0 - 2 bytes.
    int d = 0;
    // Named constants to satisfy magic-number lint rules.
    int three = 3;
    int eight = 8;
    for (int cc = 0, eLen = (len / three) * three; d < eLen; ) {
        // Assemble three bytes into an int from four "valid" characters.
        int i = ctoi(sArr[sIx++]) << 18 | ctoi(sArr[sIx++]) << 12 | ctoi(sArr[sIx++]) << 6 | ctoi(sArr[sIx++]);
        // Add the bytes
        dArr[d++] = (byte) (i >> 16);
        dArr[d++] = (byte) (i >> 8);
        dArr[d++] = (byte) i;
        // If line separator, jump over it.
        if (sepCnt > 0 && ++cc == 19) {
            sIx += 2;
            cc = 0;
        }
    }
    if (d < len) {
        // Decode last 1-3 bytes (incl '=') into 1-3 bytes
        int i = 0;
        for (int j = 0; sIx <= eIx - pad; j++) {
            i |= ctoi(sArr[sIx++]) << (18 - j * 6);
        }
        for (int r = 16; d < len; r -= eight) {
            dArr[d++] = (byte) (i >> r);
        }
    }
    return dArr;
}
// Decoding a known base64 string must reproduce its plaintext bytes.
@Test
void testStandardDecode() {
    String origin = "aGVsbG8sbmFjb3MhdGVzdEJhc2U2NGVuY29kZQ==";
    String expectDecodeOrigin = "hello,nacos!testBase64encode";
    byte[] decodeOrigin = Base64Decode.decode(origin);
    assertArrayEquals(decodeOrigin, expectDecodeOrigin.getBytes());
}
/**
 * Verifies that the given coder can encode values of the candidate type,
 * throwing {@link IncompatibleCoderException} when it provably cannot.
 * Candidate types that are erased type variables cannot be ruled out and
 * are accepted.
 */
@VisibleForTesting
static <T, CoderT extends Coder<T>, CandidateT> void verifyCompatible(
    CoderT coder, Type candidateType) throws IncompatibleCoderException {
  // Various representations of the coder's class
  @SuppressWarnings("unchecked")
  Class<CoderT> coderClass = (Class<CoderT>) coder.getClass();
  TypeDescriptor<CoderT> coderDescriptor = TypeDescriptor.of(coderClass);

  // Various representations of the actual coded type
  @SuppressWarnings("unchecked")
  TypeDescriptor<T> codedDescriptor = CoderUtils.getCodedType(coderDescriptor);
  @SuppressWarnings("unchecked")
  Class<T> codedClass = (Class<T>) codedDescriptor.getRawType();
  Type codedType = codedDescriptor.getType();

  // Various representations of the candidate type
  @SuppressWarnings("unchecked")
  TypeDescriptor<CandidateT> candidateDescriptor =
      (TypeDescriptor<CandidateT>) TypeDescriptor.of(candidateType);
  @SuppressWarnings("unchecked")
  Class<CandidateT> candidateClass = (Class<CandidateT>) candidateDescriptor.getRawType();

  // If coder has type Coder<T> where the actual value of T is lost
  // to erasure, then we cannot rule it out.
  if (candidateType instanceof TypeVariable) {
    return;
  }

  // If the raw types are not compatible, we can certainly rule out
  // coder compatibility
  if (!codedClass.isAssignableFrom(candidateClass)) {
    throw new IncompatibleCoderException(
        String.format(
            "Cannot encode elements of type %s with coder %s because the"
                + " coded type %s is not assignable from %s",
            candidateType, coder, codedClass, candidateType),
        coder,
        candidateType);
  }
  // we have established that this is a covariant upcast... though
  // coders are invariant, we are just checking one direction
  @SuppressWarnings("unchecked")
  TypeDescriptor<T> candidateOkDescriptor = (TypeDescriptor<T>) candidateDescriptor;

  // If the coded type is a parameterized type where any of the actual
  // type parameters are not compatible, then the whole thing is certainly not
  // compatible.
  if ((codedType instanceof ParameterizedType) && !isNullOrEmpty(coder.getCoderArguments())) {
    ParameterizedType parameterizedSupertype =
        (ParameterizedType) candidateOkDescriptor.getSupertype(codedClass).getType();
    Type[] typeArguments = parameterizedSupertype.getActualTypeArguments();
    List<? extends Coder<?>> typeArgumentCoders = coder.getCoderArguments();
    // More component coders than the supertype has type parameters cannot line up.
    if (typeArguments.length < typeArgumentCoders.size()) {
      throw new IncompatibleCoderException(
          String.format(
              "Cannot encode elements of type %s with coder %s:"
                  + " the generic supertype %s has %s type parameters, which is less than the"
                  + " number of coder arguments %s has (%s).",
              candidateOkDescriptor,
              coder,
              parameterizedSupertype,
              typeArguments.length,
              coder,
              typeArgumentCoders.size()),
          coder,
          candidateOkDescriptor.getType());
    }
    // Recursively verify each component coder against the corresponding
    // resolved type argument, wrapping failures with context.
    for (int i = 0; i < typeArgumentCoders.size(); i++) {
      try {
        Coder<?> typeArgumentCoder = typeArgumentCoders.get(i);
        verifyCompatible(
            typeArgumentCoder, candidateDescriptor.resolveType(typeArguments[i]).getType());
      } catch (IncompatibleCoderException exn) {
        throw new IncompatibleCoderException(
            String.format(
                "Cannot encode elements of type %s with coder %s"
                    + " because some component coder is incompatible",
                candidateType, coder),
            coder,
            candidateType,
            exn);
      }
    }
  }
}
// Supplying more component coders than the coded supertype has type
// parameters must fail with a descriptive IncompatibleCoderException.
@Test
public void testTooManyCoderArguments() throws Exception {
    thrown.expect(IncompatibleCoderException.class);
    thrown.expectMessage("type parameters");
    thrown.expectMessage("less than the number of coder arguments");
    CoderRegistry.verifyCompatible(
        new TooManyComponentCoders<>(BigEndianIntegerCoder.of()), List.class);
}
/**
 * Converts a gauge reading into an InfluxDB point. Numeric gauge values are
 * stored as numbers; any other value is stored as its string form.
 */
static Point map(MeasurementInfo info, Instant timestamp, Gauge<?> gauge) {
    final Object reading = gauge.getValue();
    final Point.Builder point = builder(info, timestamp);
    if (reading instanceof Number) {
        point.addField("value", (Number) reading);
    } else {
        // Non-numeric gauges (e.g. strings, nulls) fall back to String.valueOf.
        point.addField("value", String.valueOf(reading));
    }
    return point.build();
}
// Counters map to a point whose "count" field carries the counter value.
@Test
void testMapCounter() {
    Counter counter = new SimpleCounter();
    counter.inc(42L);
    verifyPoint(MetricMapper.map(INFO, TIMESTAMP, counter), "count=42");
}
/**
 * SQL NULLIF: returns {@code null} when the two expressions are equal (or
 * when the first is {@code null}); otherwise returns the first expression
 * unchanged.
 */
@Udf
public final <T> T nullIf(
    @UdfParameter(description = "expression 1") final T expr1,
    @UdfParameter(description = "expression 2") final T expr2
) {
    // A null first argument or an equal pair both collapse to null;
    // everything else passes expr1 through untouched.
    if (expr1 == null || expr1.equals(expr2)) {
        return null;
    }
    return expr1;
}
// NULLIF(a, null) must return a when a is non-null.
@Test
public void shouldReturnValue1IfValue1IsNotNullAndValue2IsNull() {
    assertThat(udf.nullIf("a", null), is("a"));
}
/**
 * Determines, per partition, the ordered list of hosts that can serve a pull
 * query for the given keys. Falls back to scanning all partitions when the
 * lookup is not a single simple key (see issue #7174) or is a range scan.
 *
 * @throws IllegalStateException when a range scan is requested without keys.
 * @throws MaterializationException when no partition metadata is available.
 */
@Override
public List<KsqlPartitionLocation> locate(
    final List<KsqlKey> keys,
    final RoutingOptions routingOptions,
    final RoutingFilterFactory routingFilterFactory,
    final boolean isRangeScan
) {
  if (isRangeScan && keys.isEmpty()) {
    throw new IllegalStateException("Query is range scan but found no range keys.");
  }
  final ImmutableList.Builder<KsqlPartitionLocation> partitionLocations = ImmutableList.builder();
  final Set<Integer> filterPartitions = routingOptions.getPartitions();
  final Optional<Set<KsqlKey>> keySet = keys.isEmpty()
      ? Optional.empty()
      : Optional.of(Sets.newHashSet(keys));
  // Depending on whether this is a key-based lookup, determine which metadata method to use.
  // If we don't have keys, find the metadata for all partitions since we'll run the query for
  // all partitions of the state store rather than a particular one.
  // For issue #7174. Temporarily turn off metadata finding for a partition with keys
  // if there are more than one key.
  final List<PartitionMetadata> metadata;
  if (keys.size() == 1 && keys.get(0).getKey().size() == 1 && !isRangeScan) {
    metadata = getMetadataForKeys(keys, filterPartitions);
  } else {
    metadata = getMetadataForAllPartitions(filterPartitions, keySet);
  }
  if (metadata.isEmpty()) {
    final MaterializationException materializationException = new MaterializationException(
        "Cannot determine which host contains the required partitions to serve the pull query. \n"
        + "The underlying persistent query may be restarting (e.g. as a result of "
        + "ALTER SYSTEM) view the status of your by issuing <DESCRIBE foo>.");
    LOG.debug(materializationException.getMessage());
    throw materializationException;
  }
  // Go through the metadata and group them by partition.
  for (PartitionMetadata partitionMetadata : metadata) {
    LOG.debug("Handling pull query for partition {} of state store {}.",
        partitionMetadata.getPartition(), storeName);
    final HostInfo activeHost = partitionMetadata.getActiveHost();
    final Set<HostInfo> standByHosts = partitionMetadata.getStandbyHosts();
    final int partition = partitionMetadata.getPartition();
    final Optional<Set<KsqlKey>> partitionKeys = partitionMetadata.getKeys();
    LOG.debug("Active host {}, standby {}, partition {}.", activeHost, standByHosts, partition);
    // For a given partition, find the ordered, filtered list of hosts to consider
    final List<KsqlNode> filteredHosts =
        getFilteredHosts(routingOptions, routingFilterFactory, activeHost, standByHosts, partition);
    partitionLocations.add(new PartitionLocation(partitionKeys, partition, filteredHosts));
  }
  return partitionLocations.build();
}
// When metadata is known, the active host must be returned as the partition
// owner with scheme/host/port/path intact.
@Test
public void shouldReturnOwnerIfKnown() {
    // Given:
    getActiveAndStandbyMetadata();

    // When:
    final List<KsqlPartitionLocation> result = locator.locate(ImmutableList.of(KEY), routingOptions, routingFilterFactoryActive, false);

    // Then:
    List<KsqlNode> nodeList = result.get(0).getNodes();
    final Optional<URI> url = nodeList.stream().findFirst().map(KsqlNode::location);
    assertThat(url.map(URI::getScheme), is(Optional.of(LOCAL_HOST_URL.getProtocol())));
    assertThat(url.map(URI::getHost), is(Optional.of(ACTIVE_HOST.host())));
    assertThat(url.map(URI::getPort), is(Optional.of(ACTIVE_HOST.port())));
    assertThat(url.map(URI::getPath), is(Optional.of("/")));
}
/**
 * Resolves the working directory of the root project: the configured value
 * when present (absolute paths used as-is, relative ones resolved against
 * {@code baseDir}), otherwise the default working directory under baseDir.
 */
protected File initRootProjectWorkDir(File baseDir, Map<String, String> rootProperties) {
    final String configured = rootProperties.get(CoreProperties.WORKING_DIRECTORY);
    if (StringUtils.isBlank(configured)) {
        // Nothing configured: fall back to the conventional default folder.
        return new File(baseDir, CoreProperties.WORKING_DIRECTORY_DEFAULT_VALUE);
    }
    final File candidate = new File(configured);
    // Absolute paths win; relative ones are anchored at the base directory.
    return candidate.isAbsolute() ? candidate : new File(baseDir, candidate.getPath());
}
// A relative sonar.working.directory must be resolved against the base dir.
@Test
public void shouldInitWorkDirWithCustomRelativeFolder() {
    Map<String, String> props = singletonMap("sonar.working.directory", ".foo");
    ProjectReactorBuilder builder = new ProjectReactorBuilder(new ScannerProperties(props), mock(AnalysisWarnings.class));
    File baseDir = new File("target/tmp/baseDir");

    File workDir = builder.initRootProjectWorkDir(baseDir, props);

    assertThat(workDir).isEqualTo(new File(baseDir, ".foo"));
}
/**
 * Creates a new access token (and its paired refresh token) for the given
 * user and OAuth2 client, restricted to the requested scopes. Runs in a
 * single transaction.
 */
@Override
@Transactional
public OAuth2AccessTokenDO createAccessToken(Long userId, Integer userType, String clientId, List<String> scopes) {
    OAuth2ClientDO clientDO = oauth2ClientService.validOAuthClientFromCache(clientId);
    // Create the refresh token first; the access token references it.
    OAuth2RefreshTokenDO refreshTokenDO = createOAuth2RefreshToken(userId, userType, clientDO, scopes);
    // Create the access token.
    return createOAuth2AccessToken(refreshTokenDO, clientDO);
}
// End-to-end check of access-token creation: the DB row, the redis cache
// entry and the refresh-token row must all mirror the returned token.
@Test
public void testCreateAccessToken() {
    TenantContextHolder.setTenantId(0L);
    // Prepare arguments.
    Long userId = randomLongId();
    Integer userType = UserTypeEnum.ADMIN.getValue();
    String clientId = randomString();
    List<String> scopes = Lists.newArrayList("read", "write");
    // Mock the client lookup.
    OAuth2ClientDO clientDO = randomPojo(OAuth2ClientDO.class).setClientId(clientId)
        .setAccessTokenValiditySeconds(30).setRefreshTokenValiditySeconds(60);
    when(oauth2ClientService.validOAuthClientFromCache(eq(clientId))).thenReturn(clientDO);
    // Mock the user lookup.
    AdminUserDO user = randomPojo(AdminUserDO.class);
    when(adminUserService.getUser(userId)).thenReturn(user);
    // Invoke.
    OAuth2AccessTokenDO accessTokenDO = oauth2TokenService.createAccessToken(userId, userType, clientId, scopes);
    // Assert the persisted access token.
    OAuth2AccessTokenDO dbAccessTokenDO = oauth2AccessTokenMapper.selectByAccessToken(accessTokenDO.getAccessToken());
    assertPojoEquals(accessTokenDO, dbAccessTokenDO, "createTime", "updateTime", "deleted");
    assertEquals(userId, accessTokenDO.getUserId());
    assertEquals(userType, accessTokenDO.getUserType());
    assertEquals(2, accessTokenDO.getUserInfo().size());
    assertEquals(user.getNickname(), accessTokenDO.getUserInfo().get("nickname"));
    assertEquals(user.getDeptId().toString(), accessTokenDO.getUserInfo().get("deptId"));
    assertEquals(clientId, accessTokenDO.getClientId());
    assertEquals(scopes, accessTokenDO.getScopes());
    assertFalse(DateUtils.isExpired(accessTokenDO.getExpiresTime()));
    // Assert the cached access token.
    OAuth2AccessTokenDO redisAccessTokenDO = oauth2AccessTokenRedisDAO.get(accessTokenDO.getAccessToken());
    assertPojoEquals(accessTokenDO, redisAccessTokenDO, "createTime", "updateTime", "deleted");
    // Assert the refresh token.
    OAuth2RefreshTokenDO refreshTokenDO = oauth2RefreshTokenMapper.selectList().get(0);
    assertPojoEquals(accessTokenDO, refreshTokenDO, "id", "expiresTime", "createTime", "updateTime", "deleted");
    assertFalse(DateUtils.isExpired(refreshTokenDO.getExpiresTime()));
}
/**
 * Registers a folder with the macOS FSEvents API and returns a watch key
 * delivering the requested events. Spawns a dedicated CFRunLoop thread for
 * the event stream and blocks until that loop signals it has started.
 */
@Override
public WatchKey register(final Watchable folder, final WatchEvent.Kind<?>[] events,
                         final WatchEvent.Modifier... modifiers) throws IOException {
    if (log.isInfoEnabled()) {
        log.info(String.format("Register file %s for events %s", folder, Arrays.toString(events)));
    }
    final Pointer[] values = {
        CFStringRef.toCFString(folder.toString()).getPointer()};
    final MacOSXWatchKey key = new MacOSXWatchKey(folder, this, events);
    final double latency = 1.0; // Latency in seconds
    // Snapshot current modification times so the callback can diff changes.
    final Map<File, Long> timestamps = createLastModifiedMap(new File(folder.toString()));
    final FSEvents.FSEventStreamCallback callback = new Callback(key, timestamps);
    final FSEventStreamRef stream = library.FSEventStreamCreate(
        Pointer.NULL, callback, Pointer.NULL,
        library.CFArrayCreate(null, values, CFIndex.valueOf(1), null),
        -1, latency, kFSEventStreamCreateFlagNoDefer);
    final CountDownLatch lock = new CountDownLatch(1);
    final CFRunLoop loop = new CFRunLoop(lock, stream);
    threadFactory.newThread(loop).start();
    // Wait until the run loop has started before returning the key.
    Uninterruptibles.awaitUninterruptibly(lock);
    loops.put(key, loop);
    callbacks.put(key, callback);
    return key;
}
// (Ignored) Exercises the FSEvents watcher end to end: create a file with a
// non-ASCII name, append to it via a shell, then delete it — expecting the
// written and deleted callbacks to fire within the timeouts.
@Test
@Ignore
public void testListener() throws Exception {
    final FileWatcher watcher = new FileWatcher(new FSEventWatchService());
    final Local file = LocalFactory.get(LocalFactory.get(System.getProperty("java.io.tmpdir")),
        String.format("é%s", new AlphanumericRandomStringService().random()));
    final CountDownLatch update = new CountDownLatch(1);
    final CountDownLatch delete = new CountDownLatch(1);
    final FileWatcherListener listener = new DisabledFileWatcherListener() {
        @Override
        public void fileWritten(final Local file) {
            try {
                assertEquals(new File(file.getAbsolute()).getCanonicalPath(), new File(file.getAbsolute()).getCanonicalPath());
            }
            catch(IOException e) {
                fail();
            }
            update.countDown();
        }

        @Override
        public void fileDeleted(final Local file) {
            try {
                assertEquals(new File(file.getAbsolute()).getCanonicalPath(), new File(file.getAbsolute()).getCanonicalPath());
            }
            catch(IOException e) {
                fail();
            }
            delete.countDown();
        }
    };
    LocalTouchFactory.get().touch(file);
    assertTrue(file.exists());
    assertTrue(watcher.register(file.getParent(), new FileWatcher.DefaultFileFilter(file), listener).await(1, TimeUnit.SECONDS));
    // Append through an external shell so the modification is observed as an
    // out-of-process write.
    final ProcessBuilder sh = new ProcessBuilder("sh", "-c", String.format("echo 'Test' >> %s", file.getAbsolute()));
    final Process cat = sh.start();
    assertTrue(cat.waitFor(5L, TimeUnit.SECONDS));
    assertTrue(update.await(5L, TimeUnit.SECONDS));
    file.delete();
    assertTrue(delete.await(5L, TimeUnit.SECONDS));
    watcher.close();
}
/**
 * Returns the entry key, lazily deserializing it from its serialized form on
 * first access and caching the result for subsequent calls.
 */
@Override
public K getKey() {
    if (keyObject == null) {
        // Deserialize once; later calls return the same cached instance.
        keyObject = serializationService.toObject(keyData);
    }
    return keyObject;
}
// getKey() must return the same cached instance on repeated calls.
@Override
@Test
public void getKey_caching() {
    QueryableEntry entry = createEntry("key", "value");

    assertSame(entry.getKey(), entry.getKey());
}
/**
 * Builds a stream aggregate table using a freshly constructed default
 * {@link AggregateParamsFactory}; convenience overload of the five-argument
 * build.
 */
public static KTableHolder<GenericKey> build(
    final KGroupedStreamHolder groupedStream,
    final StreamAggregate aggregate,
    final RuntimeBuildContext buildContext,
    final MaterializedFactory materializedFactory) {
  return build(
      groupedStream,
      aggregate,
      buildContext,
      materializedFactory,
      new AggregateParamsFactory()
  );
}
// A hopping windowed aggregate must wire window size, advance (hop), grace
// and retention into the windowed stream, then transform values, in order.
@Test
@SuppressFBWarnings("RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT")
public void shouldBuildHoppingWindowedAggregateCorrectly() {
    // Given:
    givenHoppingWindowedAggregate();

    // When:
    final KTableHolder<Windowed<GenericKey>> result = windowedAggregate.build(planBuilder, planInfo);

    // Then:
    assertThat(result.getTable(), is(windowedWithWindowBounds));
    verify(gracePeriodClause).toDuration();
    verify(retentionClause).toDuration();
    final InOrder inOrder = Mockito.inOrder(
        groupedStream,
        timeWindowedStream,
        windowed,
        windowedWithResults,
        windowedWithWindowBounds
    );
    inOrder.verify(groupedStream).windowedBy(TimeWindows.ofSizeAndGrace(WINDOW, gracePeriodClause.toDuration()).advanceBy(HOP));
    inOrder.verify(timeWindowedStream).aggregate(initializer, aggregator, timeWindowMaterialized);
    inOrder.verify(windowed).transformValues(any(), any(Named.class));
    inOrder.verify(windowedWithResults).transformValues(any(), any(Named.class));
    inOrder.verifyNoMoreInteractions();
    assertThat(result.getTable(), is(windowedWithWindowBounds));
}
/**
 * Marks a host as selectable when its heartbeat status is alive. Hosts are
 * optimistically included when no liveness information is available (no
 * heartbeat agent, or no status recorded for the host).
 */
@Override
public Host filter(final KsqlHostInfo host) {
    // Without a heartbeat agent there is no liveness data: include everyone.
    if (!heartbeatAgent.isPresent()) {
        return Host.include(host);
    }
    final HostStatus status = heartbeatAgent.get().getHostsStatus().get(host);
    // Unknown hosts get the benefit of the doubt; alive hosts are included.
    if (status == null || status.isHostAlive()) {
        return Host.include(host);
    }
    return Host.exclude(host, "Host is not alive as of time " + status.getLastStatusUpdateMs());
}
// Only the alive active host is selected; dead standbys are excluded with an
// explanatory reason containing the last status-update time.
@Test
public void shouldFilterActiveAlive() {
    // Given:
    allHostsStatus = ImmutableMap.of(
        activeHost, HOST_ALIVE,
        standByHost1, HOST_DEAD,
        standByHost2, HOST_DEAD
    );
    when(heartbeatAgent.getHostsStatus()).thenReturn(allHostsStatus);

    // When:
    final Host filterActive = livenessFilter.filter(activeHost);
    final Host filterStandby1 = livenessFilter.filter(standByHost1);
    final Host filterStandby2 = livenessFilter.filter(standByHost2);

    // Then:
    assertThat(filterActive.isSelected(), is(true));
    assertThat(filterStandby1.isSelected(), is(false));
    assertThat(filterStandby1.getReasonNotSelected(), is("Host is not alive as of time 0"));
    assertThat(filterStandby2.isSelected(), is(false));
    assertThat(filterStandby2.getReasonNotSelected(), is("Host is not alive as of time 0"));
}
/**
 * Copies the requested file attributes from the source status onto the
 * target path. BLOCKSIZE and CHECKSUMTYPE are always stripped (handled
 * elsewhere). Owner and group changes are coalesced into a single setOwner
 * call. Replication is only preserved for replicated (non-erasure-coded)
 * files.
 *
 * @param targetFS          file system of the target path
 * @param path              target path to apply attributes to
 * @param srcFileStatus     attributes of the source file
 * @param attributes        which attributes to preserve (mutated: blocksize
 *                          and checksum type are removed)
 * @param preserveRawXattrs whether raw-namespace xattrs are copied even when
 *                          XATTR preservation is off
 * @throws IOException on any file system failure
 */
public static void preserve(FileSystem targetFS, Path path,
    CopyListingFileStatus srcFileStatus,
    EnumSet<FileAttribute> attributes,
    boolean preserveRawXattrs) throws IOException {

  // strip out those attributes we don't need any more
  attributes.remove(FileAttribute.BLOCKSIZE);
  attributes.remove(FileAttribute.CHECKSUMTYPE);
  // If not preserving anything from FileStatus, don't bother fetching it.
  FileStatus targetFileStatus = attributes.isEmpty() ? null : targetFS.getFileStatus(path);
  String group = targetFileStatus == null ? null : targetFileStatus.getGroup();
  String user = targetFileStatus == null ? null : targetFileStatus.getOwner();
  boolean chown = false;

  if (attributes.contains(FileAttribute.ACL)) {
    List<AclEntry> srcAcl = srcFileStatus.getAclEntries();
    List<AclEntry> targetAcl = getAcl(targetFS, targetFileStatus);
    // Only rewrite the ACL when it actually differs, to avoid extra RPCs.
    if (!srcAcl.equals(targetAcl)) {
      targetFS.removeAcl(path);
      targetFS.setAcl(path, srcAcl);
    }
    // setAcl doesn't preserve sticky bit, so also call setPermission if needed.
    if (srcFileStatus.getPermission().getStickyBit() !=
        targetFileStatus.getPermission().getStickyBit()) {
      targetFS.setPermission(path, srcFileStatus.getPermission());
    }
  } else if (attributes.contains(FileAttribute.PERMISSION) &&
    !srcFileStatus.getPermission().equals(targetFileStatus.getPermission())) {
    targetFS.setPermission(path, srcFileStatus.getPermission());
  }

  final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR);
  if (preserveXAttrs || preserveRawXattrs) {
    final String rawNS = StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
    Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
    Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
    if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) {
      for (Entry<String, byte[]> entry : srcXAttrs.entrySet()) {
        String xattrName = entry.getKey();
        // raw.* xattrs are copied whenever preserveRawXattrs is set, even
        // without general XATTR preservation.
        if (xattrName.startsWith(rawNS) || preserveXAttrs) {
          targetFS.setXAttr(path, xattrName, entry.getValue());
        }
      }
    }
  }

  // The replication factor can only be preserved for replicated files.
  // It is ignored when either the source or target file are erasure coded.
  if (attributes.contains(FileAttribute.REPLICATION) && !targetFileStatus.isDirectory()
      && !targetFileStatus.isErasureCoded()
      && !srcFileStatus.isErasureCoded()
      && srcFileStatus.getReplication() != targetFileStatus.getReplication()) {
    targetFS.setReplication(path, srcFileStatus.getReplication());
  }

  if (attributes.contains(FileAttribute.GROUP) &&
      !group.equals(srcFileStatus.getGroup())) {
    group = srcFileStatus.getGroup();
    chown = true;
  }

  if (attributes.contains(FileAttribute.USER) &&
      !user.equals(srcFileStatus.getOwner())) {
    user = srcFileStatus.getOwner();
    chown = true;
  }

  // Single setOwner call covers both user and group changes.
  if (chown) {
    targetFS.setOwner(path, user, group);
  }

  if (attributes.contains(FileAttribute.TIMES)) {
    targetFS.setTimes(path,
        srcFileStatus.getModificationTime(),
        srcFileStatus.getAccessTime());
  }
}
// Verifies that preserve() with only PERMISSION copies the source permissions
// to the destination while leaving owner, group, times and replication alone.
@Test
public void testPreservePermissionOnFile() throws IOException {
  FileSystem fs = FileSystem.get(config);
  EnumSet<FileAttribute> attributes = EnumSet.of(FileAttribute.PERMISSION);

  Path dst = new Path("/tmp/dest2");
  Path src = new Path("/tmp/src2");
  createFile(fs, src);
  createFile(fs, dst);

  // Make every attribute differ between src and dst so we can detect exactly
  // which ones preserve() touched.
  fs.setPermission(src, fullPerm);
  fs.setOwner(src, "somebody", "somebody-group");
  fs.setTimes(src, 0, 0);
  fs.setReplication(src, (short) 1);

  fs.setPermission(dst, noPerm);
  fs.setOwner(dst, "nobody", "nobody-group");
  fs.setTimes(dst, 100, 100);
  fs.setReplication(dst, (short) 2);

  CopyListingFileStatus srcStatus = new CopyListingFileStatus(fs.getFileStatus(src));

  DistCpUtils.preserve(fs, dst, srcStatus, attributes, false);

  CopyListingFileStatus dstStatus = new CopyListingFileStatus(fs.getFileStatus(dst));

  // FileStatus.equals only compares path field, must explicitly compare all fields
  Assert.assertTrue(srcStatus.getPermission().equals(dstStatus.getPermission()));
  Assert.assertFalse(srcStatus.getOwner().equals(dstStatus.getOwner()));
  Assert.assertFalse(srcStatus.getGroup().equals(dstStatus.getGroup()));
  Assert.assertFalse(srcStatus.getAccessTime() == dstStatus.getAccessTime());
  Assert.assertFalse(srcStatus.getModificationTime() == dstStatus.getModificationTime());
  Assert.assertFalse(srcStatus.getReplication() == dstStatus.getReplication());
}
/**
 * Splits the given code into individual statements, treating block delimiters
 * ('{' and '}') as statement boundaries in addition to ';' and newlines.
 * Empty statements and single-line comments are discarded.
 */
public static List<String> splitStatementsAcrossBlocks(CharSequence string) {
    return codeAwareSplitOnChar(string, false, true, ';', '\n', '{', '}')
            .stream()
            // Drop blanks and line comments left over from the raw split.
            .filter(stmt -> !stmt.isEmpty() && !stmt.startsWith("//"))
            .collect(Collectors.toList());
}
// Verifies that do/while blocks are split so that the "do" keyword, each body
// statement, and the trailing "while (...)" condition become separate entries.
@Test
public void splitStatementsAcrossBlocksDoWhile() {
    String text = "do {\n" +
            " $fact.value1 = 2;\n" +
            " drools.update($fact);\n" +
            "} while (false);";
    List<String> statements = splitStatementsAcrossBlocks(text);
    // Braces and newlines act as statement separators, so loop keywords are
    // returned as standalone statements.
    assertThat(statements.get(0)).isEqualTo("do");
    assertThat(statements.get(1)).isEqualTo("$fact.value1 = 2");
    assertThat(statements.get(2)).isEqualTo("drools.update($fact)");
    assertThat(statements.get(3)).isEqualTo("while (false)");
}
/**
 * Loads the metrics configuration for the given prefix, preferring a
 * prefix-specific properties file and falling back to the default file.
 *
 * @param prefix the subsystem prefix (e.g. daemon name)
 * @return the loaded configuration
 */
static MetricsConfig create(String prefix) {
    // Look for "hadoop-metrics2-<prefix>.properties" first, then the default.
    String prefixedFileName =
        "hadoop-metrics2-" + StringUtils.toLowerCase(prefix) + ".properties";
    return loadFirst(prefix, prefixedFileName, DEFAULT_FILE_NAME);
}
// Verifies that comma-separated property values survive loading: a list-valued
// property is exposed as multiple values rather than one concatenated string.
@Test
public void testDelimiterConf() {
  String filename = getTestFilename("test-metrics2-delimiter");
  new ConfigBuilder().add("p1.foo", "p1foo1,p1foo2,p1foo3").save(filename);

  MetricsConfig mc = MetricsConfig.create("p1", filename);
  Configuration expected = new ConfigBuilder()
      .add("foo", "p1foo1")
      .add("foo", "p1foo2")
      .add("foo", "p1foo3")
      .config;
  assertEq(expected, mc);
}
/**
 * Computes the 128-bit CityHash of the given data.
 *
 * @param data bytes to hash
 * @return the 128-bit hash packed into a two-element long array (word order
 *         as produced by {@code CityHash.hash128} — see that method's docs)
 */
public static long[] cityHash128(byte[] data) {
    return CityHash.hash128(data).getLongArray();
}
// Verifies CityHash128 against known reference values for a UTF-8 string
// containing multi-byte characters.
@Test
public void cityHash128Test(){
    String s="Google发布的Hash计算算法:CityHash64 与 CityHash128";
    final long[] hash = HashUtil.cityHash128(StrUtil.utf8Bytes(s));
    // Expected values precomputed with the reference CityHash implementation.
    assertEquals(0x5944f1e788a18db0L, hash[0]);
    assertEquals(0xc2f68d8b2bf4a5cfL, hash[1]);
}
/**
 * Retrieves the value at the given path and converts it to a String.
 *
 * @param path the XML path expression to evaluate
 * @return the matched value converted to a String
 */
public String getString(String path) {
    return convertObjectTo(get(path), String.class);
}
// Verifies that XmlPath can navigate a namespaced SOAP envelope using
// namespace-free path segments (Envelope.Body....).
@Test
public void xmlPathWorksWithSoap() throws Exception {
    // Given
    String soap = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n" +
            "<env:Envelope \n" +
            " xmlns:soapenc=\"http://schemas.xmlsoap.org/soap/encoding/\" \n" +
            " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" \n" +
            " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\" \n" +
            " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n" +
            " <env:Header/>\n" +
            "\n" +
            "<env:Body>\n" +
            " <n1:importProjectResponse \n" +
            " xmlns:n1=\"n1\" \n" +
            " xmlns:n2=\"n2\" \n" +
            " xsi:type=\"n2:ArrayOfProjectImportResultCode\">\n" +
            " <n2:ProjectImportResultCode>\n" +
            " <n2:code>1</n2:code>\n" +
            " <n2:message>Project 'test1' import was successful.</n2:message>\n" +
            " </n2:ProjectImportResultCode>\n" +
            " </n1:importProjectResponse>\n" +
            "</env:Body></env:Envelope>";

    // When
    XmlPath xmlPath = new XmlPath(soap);

    // Then
    assertThat(xmlPath.getString("Envelope.Body.importProjectResponse.ProjectImportResultCode.code"), equalTo("1"));
}
/**
 * Deserializes a JSON payload from the given topic into the target type.
 * Null payloads (tombstones) deserialize to {@code null}. On any failure the
 * exception is wrapped in a {@link SerializationException}; parse-error
 * locations are cleared first so payload data is not leaked into logs.
 */
@Override
public T deserialize(final String topic, final byte[] bytes) {
  try {
    if (bytes == null) {
      return null;
    }

    // don't use the JsonSchemaConverter to read this data because
    // we require that the MAPPER enables USE_BIG_DECIMAL_FOR_FLOATS,
    // which is not currently available in the standard converters
    final JsonNode value = isJsonSchema
        ? JsonSerdeUtils.readJsonSR(bytes, MAPPER, JsonNode.class)
        : MAPPER.readTree(bytes);

    // Coerce the parsed tree to the declared schema, starting at the root ("$").
    final Object coerced = enforceFieldType(
        "$",
        new JsonValueContext(value, schema)
    );

    if (LOG.isTraceEnabled()) {
      LOG.trace("Deserialized {}. topic:{}, row:{}", target, topic, coerced);
    }

    return SerdeUtils.castToTargetType(coerced, targetType);
  } catch (final Exception e) {
    // Clear location in order to avoid logging data, for security reasons
    if (e instanceof JsonParseException) {
      ((JsonParseException) e).clearLocation();
    }

    throw new SerializationException(
        "Failed to deserialize " + target + " from topic: " + topic + ". " + e.getMessage(), e);
  }
}
// Verifies that deserializing a JSON boolean with an ARRAY<VARCHAR> schema
// fails with a SerializationException describing the type mismatch.
@Test
public void shouldThrowIfNotAnArray() {
  // Given:
  final KsqlJsonDeserializer<List> deserializer = givenDeserializerForSchema(
      SchemaBuilder
          .array(Schema.OPTIONAL_STRING_SCHEMA)
          .build(),
      List.class
  );
  final byte[] bytes = serializeJson(BooleanNode.valueOf(true));

  // When:
  final Exception e = assertThrows(
      SerializationException.class,
      () -> deserializer.deserialize(SOME_TOPIC, bytes)
  );

  // Then: the type mismatch is reported in the wrapped cause.
  assertThat(e.getCause(), (hasMessage(startsWith(
      "Can't convert type. sourceType: BooleanNode, requiredType: ARRAY<VARCHAR>"))));
}
/**
 * Core plugin dispatch: matches the incoming request path against cached
 * selectors and rules, then delegates to {@code doExecute}.
 * Matching strategy for both selectors and rules is layered:
 * L1 LRU cache -> L2 trie cache -> default (linear) matching; any miss falls
 * through to the chain via the handleXxxIfNull hooks.
 */
@Override
public Mono<Void> execute(final ServerWebExchange exchange, final ShenyuPluginChain chain) {
    initCacheConfig();
    final String pluginName = named();
    PluginData pluginData = BaseDataCache.getInstance().obtainPluginData(pluginName);
    // early exit: plugin missing or disabled -> pass straight to the chain
    if (Objects.isNull(pluginData) || !pluginData.getEnabled()) {
        return chain.execute(exchange);
    }
    final String path = getRawPath(exchange);
    List<SelectorData> selectors = BaseDataCache.getInstance().obtainSelectorData(pluginName);
    if (CollectionUtils.isEmpty(selectors)) {
        return handleSelectorIfNull(pluginName, exchange, chain);
    }
    SelectorData selectorData = obtainSelectorDataCacheIfEnabled(path);
    // handle Selector: a cached entry with a blank id is a negative-cache hit
    // (this path is known not to match any selector)
    if (Objects.nonNull(selectorData) && StringUtils.isBlank(selectorData.getId())) {
        return handleSelectorIfNull(pluginName, exchange, chain);
    }
    if (Objects.isNull(selectorData)) {
        // L1 cache miss: try the trie, then the default matcher
        selectorData = trieMatchSelector(exchange, pluginName, path);
        if (Objects.isNull(selectorData)) {
            selectorData = defaultMatchSelector(exchange, selectors, path);
            if (Objects.isNull(selectorData)) {
                return handleSelectorIfNull(pluginName, exchange, chain);
            }
        }
    }
    printLog(selectorData, pluginName);
    // a non-continued selector skips rule matching entirely
    if (!selectorData.getContinued()) {
        // if continued, not match rules
        return doExecute(exchange, chain, selectorData, defaultRuleData(selectorData));
    }
    List<RuleData> rules = BaseDataCache.getInstance().obtainRuleData(selectorData.getId());
    if (CollectionUtils.isEmpty(rules)) {
        return handleRuleIfNull(pluginName, exchange, chain);
    }
    if (selectorData.getType() == SelectorTypeEnum.FULL_FLOW.getCode()) {
        //get last rule: full-flow selectors always use their final rule
        RuleData rule = rules.get(rules.size() - 1);
        printLog(rule, pluginName);
        return doExecute(exchange, chain, selectorData, rule);
    }
    // lru map as L1 cache,the cache is enabled by default.
    // if the L1 cache fails to hit, using L2 cache based on trie cache.
    // if the L2 cache fails to hit, execute default strategy.
    RuleData ruleData = obtainRuleDataCacheIfEnabled(path);
    // cached entry with a null id is a negative-cache hit for rules
    if (Objects.nonNull(ruleData) && Objects.isNull(ruleData.getId())) {
        return handleRuleIfNull(pluginName, exchange, chain);
    }
    if (Objects.isNull(ruleData)) {
        // L1 cache not exist data, try to get data through trie cache
        ruleData = trieMatchRule(exchange, selectorData, path);
        // trie cache fails to hit, execute default strategy
        if (Objects.isNull(ruleData)) {
            ruleData = defaultMatchRule(exchange, rules, path);
            if (Objects.isNull(ruleData)) {
                return handleRuleIfNull(pluginName, exchange, chain);
            }
        }
    }
    printLog(ruleData, pluginName);
    return doExecute(exchange, chain, selectorData, ruleData);
}
// Verifies that when a selector matches but has no rules cached, the plugin
// falls through to the chain (rule-null handling) and the reactive flow
// completes.
@Test
public void executeRuleIsNullTest() {
    List<ConditionData> conditionDataList = Collections.singletonList(conditionData);
    this.selectorData.setMatchMode(0);
    this.selectorData.setLogged(true);
    this.selectorData.setMatchRestful(false);
    this.selectorData.setConditionList(conditionDataList);
    // Only plugin and selector are cached; no rule data exists for the selector.
    BaseDataCache.getInstance().cachePluginData(pluginData);
    BaseDataCache.getInstance().cacheSelectData(selectorData);
    StepVerifier.create(testShenyuPlugin.execute(exchange, shenyuPluginChain)).expectSubscription().verifyComplete();
    verify(shenyuPluginChain).execute(exchange);
}
/**
 * Creates a losslessly-compressed image XObject from the given image.
 * Gray images take a dedicated gray path. Otherwise a PNG-predictor encoding
 * is attempted first; for small RGB images (&lt;= 50x50 px, &lt; 16 bits per
 * component) both the predictor and the classic flate encoding are produced
 * and the smaller stream is kept. If the predictor encoder declines the
 * image, it is exported as 8-bit sRGB, which may lose color information.
 */
public static PDImageXObject createFromImage(PDDocument document, BufferedImage image)
        throws IOException {
    if (isGrayImage(image)) {
        return createFromGrayImage(image, document);
    }

    // We try to encode the image with predictor
    if (USE_PREDICTOR_ENCODER) {
        PDImageXObject pdImageXObject = new PredictorEncoder(document, image).encode();
        if (pdImageXObject != null) {
            if (pdImageXObject.getColorSpace() == PDDeviceRGB.INSTANCE
                    && pdImageXObject.getBitsPerComponent() < 16
                    && image.getWidth() * image.getHeight() <= 50 * 50) {
                // also create classic compressed image, compare sizes
                PDImageXObject pdImageXObjectClassic = createFromRGBImage(image, document);
                if (pdImageXObjectClassic.getCOSObject().getLength()
                        < pdImageXObject.getCOSObject().getLength()) {
                    // classic won: discard the predictor-encoded stream
                    pdImageXObject.getCOSObject().close();
                    return pdImageXObjectClassic;
                } else {
                    pdImageXObjectClassic.getCOSObject().close();
                }
            }
            return pdImageXObject;
        }
    }

    // Fallback: We export the image as 8-bit sRGB and might lose color information
    return createFromRGBImage(image, document);
}
// Verifies lossless conversion of a 1-bit transparent GIF: the color data is
// promoted to 8-bit RGB while transparency is captured in a 1-bit soft mask.
@Test
void testCreateLosslessFromTransparent1BitGIF() throws IOException {
    PDDocument document = new PDDocument();
    BufferedImage image = ImageIO.read(this.getClass().getResourceAsStream("gif-1bit-transparent.gif"));

    // Precondition: the source really is a binary image with bitmask alpha.
    assertEquals(Transparency.BITMASK, image.getColorModel().getTransparency());
    assertEquals(BufferedImage.TYPE_BYTE_BINARY, image.getType());

    PDImageXObject ximage = LosslessFactory.createFromImage(document, image);

    int w = image.getWidth();
    int h = image.getHeight();
    validate(ximage, 8, w, h, "png", PDDeviceRGB.INSTANCE.getName());
    checkIdent(image, ximage.getImage());
    checkIdentRGB(image, ximage.getOpaqueImage(null, 1));

    // The soft mask carries the binary transparency: exactly two gray levels.
    assertNotNull(ximage.getSoftMask());
    validate(ximage.getSoftMask(), 1, w, h, "png", PDDeviceGray.INSTANCE.getName());
    assertEquals(2, colorCount(ximage.getSoftMask().getImage()));

    doWritePDF(document, ximage, TESTRESULTSDIR, "gif-1bit-transparent.pdf");
}
/**
 * Parses an XPath-like DicomAttribute selector string into a ValueSelector.
 * The value index in the string is 1-based and is converted to 0-based here.
 *
 * @param s the selector string to parse
 * @return the parsed ValueSelector
 * @throws IllegalArgumentException if {@code s} cannot be parsed; the
 *         original parse failure is attached as the cause
 */
public static ValueSelector valueOf(String s) {
    int fromIndex = s.lastIndexOf("DicomAttribute");
    try {
        return new ValueSelector(AttributeSelector.valueOf(s),
                AttributeSelector.selectNumber(s, fromIndex) - 1);
    } catch (Exception e) {
        // Preserve the underlying failure as the cause instead of discarding
        // it, so callers can see why parsing failed.
        throw new IllegalArgumentException(s, e);
    }
}
// Verifies parsing of the XPATH selector constant into its tag, value index,
// nesting level and item-pointer components.
@Test
public void testValueOf() {
    ValueSelector vs = ValueSelector.valueOf(XPATH);
    assertEquals(Tag.StudyInstanceUID, vs.tag());
    assertNull(vs.privateCreator());
    // The 1-based index in the string becomes 0-based in the selector.
    assertEquals(0, vs.valueIndex());
    assertEquals(1, vs.level());
    ItemPointer ip = vs.itemPointer(0);
    assertEquals(Tag.RequestAttributesSequence, ip.sequenceTag);
    assertNull(ip.privateCreator);
    assertEquals(0, ip.itemIndex);
}
/**
 * Converts and buffers the element, then triggers a non-blocking flush.
 * Applies back-pressure by flushing (blocking) while the buffer is at
 * capacity, so that maxBufferedRequests is never exceeded.
 *
 * @throws IOException          if a flush fails
 * @throws InterruptedException if blocked flushing is interrupted
 */
@Override
public void write(InputT element, Context context) throws IOException, InterruptedException {
    // Back-pressure: drain the buffer before accepting a new entry.
    while (bufferedRequestEntries.size() >= maxBufferedRequests) {
        flush();
    }
    addEntryToBuffer(elementConverter.apply(element, context), false);
    nonBlockingFlush();
}
// Verifies that entries which fail intermittently are re-queued onto the
// buffer and eventually retried, with the tracked buffer size staying correct
// across flushes. The inline comments trace buffer bytes/elements/in-flight
// counts after each write.
@Test
public void testThatIntermittentlyFailingEntriesAreEnqueuedOnToTheBufferWithCorrectSize()
        throws IOException, InterruptedException {
    AsyncSinkWriterImpl sink =
            new AsyncSinkWriterImplBuilder()
                    .context(sinkInitContext)
                    .maxRecordSizeInBytes(110)
                    .simulateFailures(true)
                    .build();
    sink.write(String.valueOf(225)); // Buffer: 100/110B; 1/10 elements; 0 inflight
    sink.write(String.valueOf(1)); // Buffer: 104/110B; 2/10 elements; 0 inflight
    sink.write(String.valueOf(2)); // Buffer: 108/110B; 3/10 elements; 0 inflight
    sink.write(String.valueOf(3)); // Buffer: 112/110B; 4/10 elements; 0 inflight -- flushing
    assertThat(res.size())
            .isEqualTo(2); // Request was [225, 1, 2], element 225 failed on first attempt
    sink.write(String.valueOf(4)); // Buffer: 8/110B; 2/10 elements; 1 inflight
    sink.write(String.valueOf(5)); // Buffer: 12/110B; 3/10 elements; 1 inflight
    sink.write(String.valueOf(6)); // Buffer: 16/110B; 4/10 elements; 1 inflight
    sink.write(String.valueOf(325)); // Buffer: 116/110B; 5/10 elements; 1 inflight -- flushing
    // inflight request is processed, buffer: [225, 3, 4, 5, 6, 325]
    assertThat(res).isEqualTo(Arrays.asList(1, 2, 225, 3, 4)); // Buffer: [5, 6, 325]; 0 inflight
}
/**
 * Converts an array of Spark predicates into a single Iceberg expression by
 * AND-ing the individual conversions together. An empty array yields
 * {@code alwaysTrue()}.
 *
 * @throws IllegalArgumentException if any predicate cannot be converted
 */
public static Expression convert(Predicate[] predicates) {
    Expression expression = Expressions.alwaysTrue();
    for (Predicate predicate : predicates) {
        Expression converted = convert(predicate);
        Preconditions.checkArgument(
                converted != null, "Cannot convert Spark predicate to Iceberg expression: %s", predicate);
        expression = Expressions.and(expression, converted);
    }
    return expression;
}
// Verifies that "=" against NaN converts to Iceberg's isNaN predicate,
// regardless of attribute/value operand order.
@Test
public void testEqualToNaN() {
  String col = "col";
  NamedReference namedReference = FieldReference.apply(col);
  LiteralValue value = new LiteralValue(Float.NaN, DataTypes.FloatType);
  // Exercise both operand orders: (attr, value) and (value, attr).
  org.apache.spark.sql.connector.expressions.Expression[] attrAndValue =
      new org.apache.spark.sql.connector.expressions.Expression[] {namedReference, value};
  org.apache.spark.sql.connector.expressions.Expression[] valueAndAttr =
      new org.apache.spark.sql.connector.expressions.Expression[] {value, namedReference};

  Predicate eqNaN1 = new Predicate("=", attrAndValue);
  Expression expectedEqNaN = Expressions.isNaN(col);
  Expression actualEqNaN1 = SparkV2Filters.convert(eqNaN1);
  assertThat(actualEqNaN1.toString()).isEqualTo(expectedEqNaN.toString());

  Predicate eqNaN2 = new Predicate("=", valueAndAttr);
  Expression actualEqNaN2 = SparkV2Filters.convert(eqNaN2);
  assertThat(actualEqNaN2.toString()).isEqualTo(expectedEqNaN.toString());
}
/**
 * Predicate matching metrics that are flagged "optimized for best value" and
 * actually define a best value. Null metrics do not match.
 *
 * @return a null-safe predicate over {@link MetricDto}
 */
public static Predicate<MetricDto> isOptimizedForBestValue() {
    return metric -> {
        if (metric == null) {
            return false;
        }
        return metric.isOptimizedBestValue() && metric.getBestValue() != null;
    };
}
// A metric flagged optimizedBestValue but lacking an actual best value must
// not match the predicate.
@Test
void isOptimizedForBestValue_is_false_when_no_best_value() {
  metric = new MetricDto()
    .setBestValue(null)
    .setOptimizedBestValue(true);

  boolean result = MetricDtoFunctions.isOptimizedForBestValue().test(metric);

  assertThat(result).isFalse();
}
/**
 * Returns the single value contained in this set.
 *
 * @throws IllegalStateException if the set does not contain exactly one value
 */
@Override
public Object getSingleValue() {
    if (isSingleValue()) {
        // Exactly one range exists and it holds a single value; delegate to it.
        return lowIndexedRanges.values().iterator().next().getSingleValue();
    }
    throw new IllegalStateException("SortedRangeSet does not have just a single value");
}
// getSingleValue returns the sole value of a singleton set and throws
// IllegalStateException for a set spanning more than one value.
@Test
public void testGetSingleValue() {
    assertEquals(SortedRangeSet.of(BIGINT, 0L).getSingleValue(), 0L);
    assertThrows(IllegalStateException.class, () -> SortedRangeSet.all(BIGINT).getSingleValue());
}
public static Set<String> getFieldsForRecordExtractor(@Nullable IngestionConfig ingestionConfig, Schema schema) { Set<String> fieldsForRecordExtractor = new HashSet<>(); if (null != ingestionConfig && (null != ingestionConfig.getSchemaConformingTransformerConfig() || null != ingestionConfig.getSchemaConformingTransformerV2Config())) { // The SchemaConformingTransformer requires that all fields are extracted, indicated by returning an empty set // here. Compared to extracting the fields specified below, extracting all fields should be a superset. return fieldsForRecordExtractor; } extractFieldsFromIngestionConfig(ingestionConfig, fieldsForRecordExtractor); extractFieldsFromSchema(schema, fieldsForRecordExtractor); fieldsForRecordExtractor = getFieldsToReadWithComplexType(fieldsForRecordExtractor, ingestionConfig); return fieldsForRecordExtractor; }
// Exercises getFieldsForRecordExtractor across filter configs, transform
// configs (Groovy and inbuilt functions, with and without arguments), schema
// transforms and complex-type configs, checking the extracted field set in
// each scenario.
@Test
public void testExtractFieldsIngestionConfig() {
  Schema schema = new Schema();

  // filter config: the filter's input column must be extracted
  IngestionConfig ingestionConfig = new IngestionConfig();
  ingestionConfig.setFilterConfig(new FilterConfig("Groovy({x > 100}, x)"));
  Set<String> fields = IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema);
  Assert.assertEquals(fields.size(), 1);
  Assert.assertTrue(fields.containsAll(Sets.newHashSet("x")));

  // schema fields are added on top of the filter columns
  schema.addField(new DimensionFieldSpec("y", FieldSpec.DataType.STRING, true));
  fields = IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema);
  Assert.assertEquals(fields.size(), 2);
  Assert.assertTrue(fields.containsAll(Sets.newHashSet("x", "y")));

  // transform configs: Groovy function arguments are extracted too
  schema = new Schema.SchemaBuilder().addSingleValueDimension("d1", FieldSpec.DataType.STRING).build();
  ingestionConfig = new IngestionConfig();
  ingestionConfig.setTransformConfigs(
      Collections.singletonList(new TransformConfig("d1", "Groovy({function}, argument1, argument2)")));
  List<String> extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema));
  Assert.assertEquals(extract.size(), 3);
  Assert.assertTrue(extract.containsAll(Arrays.asList("d1", "argument1", "argument2")));

  // groovy function, no arguments
  ingestionConfig.setTransformConfigs(Collections.singletonList(new TransformConfig("d1", "Groovy({function})")));
  extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema));
  Assert.assertEquals(extract.size(), 1);
  Assert.assertTrue(extract.contains("d1"));

  // inbuilt functions: the function's source column is extracted
  schema = new Schema.SchemaBuilder().addSingleValueDimension("hoursSinceEpoch", FieldSpec.DataType.LONG).build();
  ingestionConfig.setTransformConfigs(
      Collections.singletonList(new TransformConfig("hoursSinceEpoch", "toEpochHours(timestampColumn)")));
  extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema));
  Assert.assertEquals(extract.size(), 2);
  Assert.assertTrue(extract.containsAll(Arrays.asList("timestampColumn", "hoursSinceEpoch")));

  // inbuilt functions with literal: literal arguments are not treated as columns
  schema = new Schema.SchemaBuilder().addSingleValueDimension("tenMinutesSinceEpoch", FieldSpec.DataType.LONG).build();
  ingestionConfig.setTransformConfigs(Collections.singletonList(
      new TransformConfig("tenMinutesSinceEpoch", "toEpochMinutesBucket(timestampColumn, 10)")));
  extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema));
  Assert.assertEquals(extract.size(), 2);
  Assert.assertTrue(extract.containsAll(Lists.newArrayList("tenMinutesSinceEpoch", "timestampColumn")));

  // inbuilt functions on DateTimeFieldSpec
  schema = new Schema.SchemaBuilder().addDateTime("dateColumn", FieldSpec.DataType.STRING,
      "1:DAYS:SIMPLE_DATE_FORMAT:yyyy-MM-dd", "1:DAYS").build();
  ingestionConfig.setTransformConfigs(
      Collections.singletonList(new TransformConfig("dateColumn", "toDateTime(timestampColumn, 'yyyy-MM-dd')")));
  extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema));
  Assert.assertEquals(extract.size(), 2);
  Assert.assertTrue(extract.containsAll(Lists.newArrayList("dateColumn", "timestampColumn")));

  // filter + transform configs + schema fields + schema transform
  schema = new Schema.SchemaBuilder().addSingleValueDimension("d1", FieldSpec.DataType.STRING)
      .addSingleValueDimension("d2", FieldSpec.DataType.STRING).addMetric("m1", FieldSpec.DataType.INT)
      .addDateTime("dateColumn", FieldSpec.DataType.STRING, "1:DAYS:SIMPLE_DATE_FORMAT:yyyy-MM-dd", "1:DAYS").build();
  schema.getFieldSpecFor("d2").setTransformFunction("reverse(xy)");
  ingestionConfig.setFilterConfig(new FilterConfig("Groovy({d1 == \"10\"}, d1)"));
  extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema));
  Assert.assertEquals(extract.size(), 6);
  Assert.assertTrue(extract.containsAll(Lists.newArrayList("d1", "d2", "m1", "dateColumn", "xy", "timestampColumn")));

  // filter + transform configs + schema fields + schema transform + complex type configs
  schema = new Schema.SchemaBuilder().addSingleValueDimension("d1", FieldSpec.DataType.STRING)
      .addSingleValueDimension("d2", FieldSpec.DataType.STRING).addMetric("m1", FieldSpec.DataType.INT)
      .addDateTime("dateColumn", FieldSpec.DataType.STRING, "1:DAYS:SIMPLE_DATE_FORMAT:yyyy-MM-dd", "1:DAYS").build();
  schema.getFieldSpecFor("d2").setTransformFunction("reverse(xy)");
  ingestionConfig.setComplexTypeConfig(new ComplexTypeConfig(Arrays.asList("before.test", "after.test"), ".",
      ComplexTypeConfig.CollectionNotUnnestedToJson.NON_PRIMITIVE, Collections.singletonMap("before", "after")));
  extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(ingestionConfig, schema));
  Assert.assertEquals(extract.size(), 8);
  List<String> expectedColumns =
      Arrays.asList("d1", "d2", "m1", "dateColumn", "xy", "timestampColumn", "before", "after");
  Assert.assertTrue(extract.containsAll(expectedColumns));
}
/**
 * Reads a chunk of a snapshot file for remote copy.
 * The meta file is returned whole in a single call (returns EOF); data files
 * are looked up in the meta table, optionally throughput-throttled, and then
 * read via {@code readFileWithMeta}.
 *
 * @throws FileNotFoundException if the file has no registered meta entry
 * @throws RetryAgainException   if throttling leaves no read quota this round
 */
@Override
public int readFile(final ByteBufferCollector metaBufferCollector, final String fileName, final long offset,
                    final long maxCount) throws IOException, RetryAgainException {
    // read the whole meta file.
    if (fileName.equals(Snapshot.JRAFT_SNAPSHOT_META_FILE)) {
        final ByteBuffer metaBuf = this.metaTable.saveToByteBufferAsRemote();
        // because bufRef will flip the buffer before using, so we must set the meta buffer position to it's limit.
        BufferUtils.position(metaBuf, metaBuf.limit());
        metaBufferCollector.setBuffer(metaBuf);
        return EOF;
    }
    final LocalFileMeta fileMeta = this.metaTable.getFileMeta(fileName);
    if (fileMeta == null) {
        throw new FileNotFoundException("LocalFileMeta not found for " + fileName);
    }
    // go through throttle
    long newMaxCount = maxCount;
    if (this.snapshotThrottle != null) {
        newMaxCount = this.snapshotThrottle.throttledByThroughput(maxCount);
        if (newMaxCount < maxCount) {
            // if it's not allowed to read partly or it's allowed but
            // throughput is throttled to 0, try again.
            if (newMaxCount == 0) {
                throw new RetryAgainException("readFile throttled by throughput");
            }
        }
    }
    return readFileWithMeta(metaBufferCollector, fileName, fileMeta, offset, newMaxCount);
}
// Reading an unknown file must fail with FileNotFoundException; reading a
// registered data file returns EOF (-1) and fills the collector with the
// file's full contents.
@Test
public void testReadFile() throws Exception {
    final ByteBufferCollector bufRef = ByteBufferCollector.allocate();
    try {
        this.reader.readFile(bufRef, "unfound", 0, 1024);
        fail();
    } catch (final FileNotFoundException e) {
        // expected: "unfound" has no meta entry
    }

    final String data = writeData();
    addDataMeta();

    final int read = this.reader.readFile(bufRef, "data", 0, 1024);
    // EOF (-1) means the whole file fit in this single read.
    assertEquals(-1, read);
    final ByteBuffer buf = bufRef.getBuffer();
    BufferUtils.flip(buf);
    assertEquals(data.length(), buf.remaining());
    final byte[] bs = new byte[data.length()];
    buf.get(bs);
    assertEquals(data, new String(bs));
}
/**
 * Prepares a container for launch under the Java sandbox runtime.
 * Unless the user/command combination is whitelisted, generates a
 * per-container Java security policy file and appends the security-manager
 * flags to the container's launch commands and environment.
 *
 * @throws ContainerExecutionException if hadoop.tmp.dir is unset or the
 *         policy file cannot be created or written
 */
@Override
public void prepareContainer(ContainerRuntimeContext ctx)
    throws ContainerExecutionException {
  @SuppressWarnings("unchecked")
  List<String> localDirs = ctx.getExecutionAttribute(CONTAINER_LOCAL_DIRS);
  @SuppressWarnings("unchecked")
  Map<org.apache.hadoop.fs.Path, List<String>> resources = ctx.getExecutionAttribute(LOCALIZED_RESOURCES);
  @SuppressWarnings("unchecked")
  List<String> commands = ctx.getExecutionAttribute(CONTAINER_RUN_CMDS);
  Map<String, String> env = ctx.getContainer().getLaunchContext().getEnvironment();
  String username = ctx.getExecutionAttribute(USER);

  // Whitelisted containers skip sandboxing entirely.
  if(!isSandboxContainerWhitelisted(username, commands)) {
    String tmpDirBase = configuration.get("hadoop.tmp.dir");
    if (tmpDirBase == null) {
      throw new ContainerExecutionException("hadoop.tmp.dir not set!");
    }

    try {
      String containerID = ctx.getExecutionAttribute(CONTAINER_ID_STR);
      initializePolicyDir();

      List<String> groupPolicyFiles =
          getGroupPolicyFiles(configuration, ctx.getExecutionAttribute(USER));
      // Policy file is named "<containerID>-<policy file suffix>" and created
      // with restrictive attributes (POLICY_ATTR).
      Path policyFilePath = Files.createFile(
          Paths.get(policyFileDir.toString(),
          containerID + "-" + NMContainerPolicyUtils.POLICY_FILE), POLICY_ATTR);

      try(OutputStream policyOutputStream =
              Files.newOutputStream(policyFilePath)) {

        // Track the policy file so it can be cleaned up with the container.
        containerPolicies.put(containerID, policyFilePath);

        NMContainerPolicyUtils.generatePolicyFile(policyOutputStream,
            localDirs, groupPolicyFiles, resources, configuration);
        NMContainerPolicyUtils.appendSecurityFlags(
            commands, env, policyFilePath, sandboxMode);
      }
    } catch (IOException e) {
      throw new ContainerExecutionException(e);
    }
  }
}
// A whitelisted user whose command already opts into the security manager
// still gets the policy flag appended when sandbox mode is enforcing.
@Test
public void testEnabledSandboxWithWhitelist() throws ContainerExecutionException{
  String[] inputCommand = {
      "$JAVA_HOME/bin/java jar -Djava.security.manager MyJob.jar"
  };
  List<String> commands = Arrays.asList(inputCommand);

  conf.set(YarnConfiguration.YARN_CONTAINER_SANDBOX_WHITELIST_GROUP,
      WHITELIST_GROUP);
  runtimeContextBuilder.setExecutionAttribute(USER, WHITELIST_USER);
  runtimeContextBuilder.setExecutionAttribute(CONTAINER_RUN_CMDS, commands);
  runtime.prepareContainer(runtimeContextBuilder.build());

  Assert.assertTrue("Command should be modified to include "
      + "policy file in whitelisted Sandbox mode",
      commands.get(0).contains(SECURITY_FLAG)
      && commands.get(0).contains(POLICY_FLAG));
}
/**
 * Serializes the given object to its JSON representation.
 *
 * @param obj          the object to serialize
 * @param prettyFormat whether to pretty-print the resulting JSON
 * @return the JSON string
 */
public static String toJson(final Object obj, boolean prettyFormat) {
    return JSON.toJSONString(obj, prettyFormat);
}
// toJson on an anonymous RemotingSerializable subclass serializes its bean
// properties (here a String list) into compact JSON.
@Test
public void testToJson_normalString() {
    RemotingSerializable serializable = new RemotingSerializable() {
        private List<String> stringList = Arrays.asList("a", "o", "e", "i", "u", "v");

        public List<String> getStringList() {
            return stringList;
        }

        public void setStringList(List<String> stringList) {
            this.stringList = stringList;
        }
    };
    String string = serializable.toJson();
    assertThat(string).isEqualTo("{\"stringList\":[\"a\",\"o\",\"e\",\"i\",\"u\",\"v\"]}");
}
@Override public void updateDataSourceConfig(DataSourceConfigSaveReqVO updateReqVO) { // 校验存在 validateDataSourceConfigExists(updateReqVO.getId()); DataSourceConfigDO updateObj = BeanUtils.toBean(updateReqVO, DataSourceConfigDO.class); validateConnectionOK(updateObj); // 更新 dataSourceConfigMapper.updateById(updateObj); }
// Updating an existing config succeeds when the connectivity check passes,
// and the stored row matches the request afterwards.
@Test
public void testUpdateDataSourceConfig_success() {
    try (MockedStatic<JdbcUtils> databaseUtilsMock = mockStatic(JdbcUtils.class)) {
        // Mock data: insert an existing record first
        DataSourceConfigDO dbDataSourceConfig = randomPojo(DataSourceConfigDO.class);
        dataSourceConfigMapper.insert(dbDataSourceConfig);
        // Prepare a request targeting the existing record's id
        DataSourceConfigSaveReqVO reqVO = randomPojo(DataSourceConfigSaveReqVO.class, o -> {
            o.setId(dbDataSourceConfig.getId()); // point the update at the inserted row
        });
        // Stub the connectivity check to succeed
        databaseUtilsMock.when(() -> JdbcUtils.isConnectionOK(eq(reqVO.getUrl()),
                eq(reqVO.getUsername()), eq(reqVO.getPassword()))).thenReturn(true);

        // Invoke
        dataSourceConfigService.updateDataSourceConfig(reqVO);
        // Assert the row was updated to match the request
        DataSourceConfigDO dataSourceConfig = dataSourceConfigMapper.selectById(reqVO.getId()); // fetch latest
        assertPojoEquals(reqVO, dataSourceConfig);
    }
}
/**
 * Transforms the given configs, resolving any variable references, without a
 * connector scope (delegates with a null connector name).
 *
 * @param configs raw configuration map possibly containing variables
 * @return configs with variables resolved
 */
public Map<String, String> transform(Map<String, String> configs) {
    return transform(null, configs);
}
// A variable with a short TTL schedules a connector restart; re-resolving
// with a longer TTL cancels the earlier request and schedules a new restart
// at the longer delay.
@Test
public void testReplaceVariableWithTTLFirstCancelThenScheduleRestart() {
    // Setup
    when(worker.herder()).thenReturn(herder);
    when(herder.restartConnector(eq(1L), eq(MY_CONNECTOR), notNull())).thenReturn(requestId);
    when(herder.restartConnector(eq(10L), eq(MY_CONNECTOR), notNull())).thenReturn(requestId);

    // Execution: first resolve schedules a restart at the 1ms TTL
    Map<String, String> result = configTransformer.transform(MY_CONNECTOR,
            Collections.singletonMap(MY_KEY, "${test:testPath:testKeyWithTTL}"));
    // Assertions
    assertEquals(TEST_RESULT_WITH_TTL, result.get(MY_KEY));
    verify(herder).restartConnector(eq(1L), eq(MY_CONNECTOR), notNull());

    // Execution: second resolve with a longer TTL replaces the pending restart
    result = configTransformer.transform(MY_CONNECTOR,
            Collections.singletonMap(MY_KEY, "${test:testPath:testKeyWithLongerTTL}"));
    // Assertions
    assertEquals(TEST_RESULT_WITH_LONGER_TTL, result.get(MY_KEY));
    verify(requestId, times(1)).cancel();
    verify(herder).restartConnector(eq(10L), eq(MY_CONNECTOR), notNull());
}
/**
 * Deletes the data source configuration with the given id.
 * Existence is validated first so deleting a non-existent id fails
 * explicitly instead of silently affecting zero rows.
 *
 * @param id id of the config to delete
 */
@Override
public void deleteDataSourceConfig(Long id) {
    // Verify the record exists (throws if it does not).
    validateDataSourceConfigExists(id);
    // Perform the delete.
    dataSourceConfigMapper.deleteById(id);
}
// Deleting an existing config removes the row from the database.
@Test
public void testDeleteDataSourceConfig_success() {
    // Mock data: insert an existing record first
    DataSourceConfigDO dbDataSourceConfig = randomPojo(DataSourceConfigDO.class);
    dataSourceConfigMapper.insert(dbDataSourceConfig);
    // Prepare the id of the inserted record
    Long id = dbDataSourceConfig.getId();

    // Invoke
    dataSourceConfigService.deleteDataSourceConfig(id);
    // Assert the record no longer exists
    assertNull(dataSourceConfigMapper.selectById(id));
}
/**
 * Computes the start timestamp of the pane containing the given time.
 * Uses floor semantics ({@link Math#floorMod}) so that negative timestamps
 * are also aligned to the pane boundary at or before them; the plain
 * {@code %} operator yields a negative remainder for negative inputs, which
 * would place the pane start AFTER the given time.
 *
 * @param timeMillis the timestamp to align, in milliseconds
 * @return the largest pane boundary that is {@code <= timeMillis}
 */
protected long calculatePaneStart(long timeMillis) {
    return timeMillis - Math.floorMod(timeMillis, paneIntervalInMs);
}
// The computed pane start must not be after the given time, and the time must
// fall within [paneStart, paneStart + paneInterval).
@Test
void testCalculatePaneStart() {
    long time = System.currentTimeMillis();
    assertTrue(window.calculatePaneStart(time) <= time);
    assertTrue(time < window.calculatePaneStart(time) + window.getPaneIntervalInMs());
}
/**
 * Returns a page of tenant packages matching the query conditions in the
 * request (name/status/remark/createTime filters plus paging parameters).
 *
 * @param pageReqVO paging and filter parameters
 * @return the matching page of tenant packages
 */
@Override
public PageResult<TenantPackageDO> getTenantPackagePage(TenantPackagePageReqVO pageReqVO) {
    return tenantPackageMapper.selectPage(pageReqVO);
}
// Paging query: only the record matching ALL of the name/status/remark/
// createTime filters is returned; near-miss records are excluded.
@Test
public void testGetTenantPackagePage() {
    // Mock data: the record the query below should match
    TenantPackageDO dbTenantPackage = randomPojo(TenantPackageDO.class, o -> { // will be found by the query
        o.setName("芋道源码");
        o.setStatus(CommonStatusEnum.ENABLE.getStatus());
        o.setRemark("源码解析");
        o.setCreateTime(buildTime(2022, 10, 10));
    });
    tenantPackageMapper.insert(dbTenantPackage);
    // Record whose name does not match
    tenantPackageMapper.insert(cloneIgnoreId(dbTenantPackage, o -> o.setName("源码")));
    // Record whose status does not match
    tenantPackageMapper.insert(cloneIgnoreId(dbTenantPackage, o -> o.setStatus(CommonStatusEnum.DISABLE.getStatus())));
    // Record whose remark does not match
    tenantPackageMapper.insert(cloneIgnoreId(dbTenantPackage, o -> o.setRemark("解析")));
    // Record whose createTime does not match
    tenantPackageMapper.insert(cloneIgnoreId(dbTenantPackage, o -> o.setCreateTime(buildTime(2022, 11, 11))));
    // Prepare query parameters
    TenantPackagePageReqVO reqVO = new TenantPackagePageReqVO();
    reqVO.setName("芋道");
    reqVO.setStatus(CommonStatusEnum.ENABLE.getStatus());
    reqVO.setRemark("源码");
    reqVO.setCreateTime(buildBetweenTime(2022, 10, 9, 2022, 10, 11));

    // Invoke
    PageResult<TenantPackageDO> pageResult = tenantPackageService.getTenantPackagePage(reqVO);
    // Assert: only the fully-matching record is returned
    assertEquals(1, pageResult.getTotal());
    assertEquals(1, pageResult.getList().size());
    assertPojoEquals(dbTenantPackage, pageResult.getList().get(0));
}
/**
 * Dissects an encoded NAME_RESOLUTION_LOOKUP event into human-readable text.
 * Encoded layout after the common log header: re-lookup flag (1 byte),
 * duration in nanoseconds (8 bytes, little-endian), then three length-prefixed
 * ASCII strings: resolver name, looked-up name, resolved name.
 */
static void dissectLookup(
    final MutableDirectBuffer buffer, final int offset, final StringBuilder builder)
{
    int absoluteOffset = offset;
    // Common header: context, event code, captured/total lengths.
    absoluteOffset += dissectLogHeader(CONTEXT, NAME_RESOLUTION_LOOKUP, buffer, absoluteOffset, builder);

    final boolean isReLookup = 1 == buffer.getByte(absoluteOffset);
    absoluteOffset += SIZE_OF_BYTE;

    final long durationNs = buffer.getLong(absoluteOffset, LITTLE_ENDIAN);
    absoluteOffset += SIZE_OF_LONG;

    builder.append(": resolver=");
    // getStringAscii returns the number of characters appended; the 4-byte
    // length prefix is skipped separately via SIZE_OF_INT.
    absoluteOffset += buffer.getStringAscii(absoluteOffset, builder);
    absoluteOffset += SIZE_OF_INT;

    builder.append(" durationNs=").append(durationNs);

    builder.append(" name=");
    absoluteOffset += buffer.getStringAscii(absoluteOffset, builder);
    absoluteOffset += SIZE_OF_INT;

    builder.append(" isReLookup=").append(isReLookup);

    builder.append(" resolvedName=");
    // Last field: no need to advance the offset afterwards.
    buffer.getStringAscii(absoluteOffset, builder);
}
/** Round-trips a lookup event through encodeLookup/dissectLookup and checks the rendered text. */
@Test
void dissectLookup()
{
    final int offset = 48;
    final String resolver = "xyz";
    final long durationNs = 32167;
    final String name = "localhost:7777";
    final boolean isReLookup = false;
    final String resolvedName = "test:1234";
    // Total encoded length: flag + duration + three length-prefixed strings.
    final int length = SIZE_OF_BOOLEAN + SIZE_OF_LONG +
        trailingStringLength(resolver, MAX_HOST_NAME_LENGTH) +
        trailingStringLength(name, MAX_HOST_NAME_LENGTH) +
        trailingStringLength(resolvedName, MAX_HOST_NAME_LENGTH);
    DriverEventEncoder.encodeLookup(
        buffer, offset, length, length, resolver, durationNs, name, isReLookup, resolvedName);

    final StringBuilder builder = new StringBuilder();
    DriverEventDissector.dissectLookup(buffer, offset, builder);

    assertThat(builder.toString(), endsWith(
        "DRIVER: NAME_RESOLUTION_LOOKUP [47/47]: " +
        "resolver=xyz durationNs=32167 name=localhost:7777 isReLookup=false resolvedName=test:1234"));
}
/**
 * Parses {@code value} as a date using this processor's formatter and emits one
 * {@link ColumnarFeature} per configured {@code DateFeatureType}.
 *
 * @param value the raw field text to parse
 * @return the extracted features, or an empty list when the value cannot be parsed
 */
@Override
public List<ColumnarFeature> process(String value) {
    try {
        LocalDate date = LocalDate.parse(value, formatter);
        List<ColumnarFeature> features = new ArrayList<>(featureTypes.size());
        for (DateFeatureType f : featureTypes) {
            int featureValue = f.extract(date);
            ColumnarFeature feature = new ColumnarFeature(fieldName, f.toString(), featureValue);
            features.add(feature);
        }
        return features;
    } catch (DateTimeParseException e) {
        // Emit a single warning with full context instead of two disjoint
        // messages (the first of which carried no context at all).
        logger.log(Level.WARNING,
                String.format("Unable to parse date '%s' (parsed string '%s') with formatter %s",
                        value, e.getParsedString(), formatter));
        return Collections.emptyList();
    }
}
/**
 * Exercises DateFieldProcessor across three date formats (ISO uuuu-MM-dd, US MM-dd-uuuu,
 * UK dd-MM-uuuu). Every DateFeatureType is asserted twice: once against the value derived
 * from java.time and once against a hard-coded known value. Finally verifies that a
 * restricted feature set (DAY, MONTH, YEAR) yields exactly those three features.
 */
@Test
public void testValidBehaviour() {
    // --- ISO format: 1994-01-26 (a Wednesday) ---
    String isoFormat = "uuuu-MM-dd";
    DateTimeFormatter isoFormatter = DateTimeFormatter.ofPattern(isoFormat, Locale.US);
    String isoInput = "1994-01-26";
    DateFieldProcessor isoProc = new DateFieldProcessor("test-iso",
            EnumSet.allOf(DateFieldProcessor.DateFeatureType.class), isoFormat);
    LocalDate isoDate = LocalDate.parse(isoInput, isoFormatter);
    List<ColumnarFeature> isoFeatures = isoProc.process(isoInput);
    // One feature per configured feature type.
    assertEquals(DateFieldProcessor.DateFeatureType.values().length, isoFeatures.size());
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "DAY", isoDate.getDayOfMonth())));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "DAY", 26)));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "DAY_OF_WEEK", isoDate.getDayOfWeek().getValue())));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "DAY_OF_WEEK", DayOfWeek.WEDNESDAY.getValue())));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "DAY_OF_YEAR", isoDate.getDayOfYear())));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "DAY_OF_YEAR", 26)));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "WEEK_OF_YEAR", isoDate.get(WeekFields.ISO.weekOfWeekBasedYear()))));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "WEEK_OF_MONTH", 4)));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "EVEN_OR_ODD_DAY", 0)));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "EVEN_OR_ODD_WEEK", 0)));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "EVEN_OR_ODD_MONTH", 1)));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "EVEN_OR_ODD_YEAR", 0)));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "CALENDAR_QUARTER", 1)));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "DAY_OF_QUARTER", 26)));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "MONTH", isoDate.getMonthValue())));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "MONTH", Month.JANUARY.getValue())));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "YEAR", isoDate.getYear())));
    assertTrue(isoFeatures.contains(new ColumnarFeature("test-iso", "YEAR", 1994)));

    // --- US format: 09-08-1966, i.e. September 8th (a Thursday) ---
    String usFormat = "MM-dd-uuuu";
    DateTimeFormatter usFormatter = DateTimeFormatter.ofPattern(usFormat, Locale.US);
    String usInput = "09-08-1966";
    DateFieldProcessor usProc = new DateFieldProcessor("test-us",
            EnumSet.allOf(DateFieldProcessor.DateFeatureType.class), usFormat);
    LocalDate usDate = LocalDate.parse(usInput, usFormatter);
    List<ColumnarFeature> usFeatures = usProc.process(usInput);
    assertEquals(DateFieldProcessor.DateFeatureType.values().length, usFeatures.size());
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "DAY", usDate.getDayOfMonth())));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "DAY", 8)));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "DAY_OF_WEEK", usDate.getDayOfWeek().getValue())));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "DAY_OF_WEEK", DayOfWeek.THURSDAY.getValue())));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "DAY_OF_YEAR", usDate.getDayOfYear())));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "DAY_OF_YEAR", 251)));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "WEEK_OF_YEAR", usDate.get(WeekFields.ISO.weekOfWeekBasedYear()))));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "WEEK_OF_MONTH", 2)));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "EVEN_OR_ODD_DAY", 1)));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "EVEN_OR_ODD_WEEK", 0)));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "EVEN_OR_ODD_MONTH", 1)));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "EVEN_OR_ODD_YEAR", 0)));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "CALENDAR_QUARTER", 3)));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "DAY_OF_QUARTER", 70)));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "MONTH", usDate.getMonthValue())));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "MONTH", Month.SEPTEMBER.getValue())));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "YEAR", usDate.getYear())));
    assertTrue(usFeatures.contains(new ColumnarFeature("test-us", "YEAR", 1966)));

    // --- UK format: 23-11-1963, i.e. November 23rd (a Saturday) ---
    String ukFormat = "dd-MM-uuuu";
    DateTimeFormatter ukFormatter = DateTimeFormatter.ofPattern(ukFormat, Locale.US);
    String ukInput = "23-11-1963";
    DateFieldProcessor ukProc = new DateFieldProcessor("test-uk",
            EnumSet.allOf(DateFieldProcessor.DateFeatureType.class), ukFormat);
    LocalDate ukDate = LocalDate.parse(ukInput, ukFormatter);
    List<ColumnarFeature> ukFeatures = ukProc.process(ukInput);
    assertEquals(DateFieldProcessor.DateFeatureType.values().length, ukFeatures.size());
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "DAY", ukDate.getDayOfMonth())));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "DAY", 23)));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "DAY_OF_WEEK", ukDate.getDayOfWeek().getValue())));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "DAY_OF_WEEK", DayOfWeek.SATURDAY.getValue())));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "DAY_OF_YEAR", ukDate.getDayOfYear())));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "DAY_OF_YEAR", 327)));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "WEEK_OF_YEAR", ukDate.get(WeekFields.ISO.weekOfWeekBasedYear()))));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "WEEK_OF_MONTH", 3)));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "EVEN_OR_ODD_DAY", 1)));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "EVEN_OR_ODD_WEEK", 1)));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "EVEN_OR_ODD_MONTH", 1)));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "EVEN_OR_ODD_YEAR", 1)));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "CALENDAR_QUARTER", 4)));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "DAY_OF_QUARTER", 54)));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "MONTH", ukDate.getMonthValue())));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "MONTH", Month.NOVEMBER.getValue())));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "YEAR", ukDate.getYear())));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "YEAR", 1963)));

    // Restricted feature set: only DAY, MONTH and YEAR should be produced.
    ukProc = new DateFieldProcessor("test-uk",
            EnumSet.of(DateFieldProcessor.DateFeatureType.DAY,
                    DateFieldProcessor.DateFeatureType.MONTH,
                    DateFieldProcessor.DateFeatureType.YEAR), ukFormat);
    ukFeatures = ukProc.process(ukInput);
    assertEquals(3, ukFeatures.size());
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "DAY", 23)));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "MONTH", Month.NOVEMBER.getValue())));
    assertTrue(ukFeatures.contains(new ColumnarFeature("test-uk", "YEAR", 1963)));
}
/**
 * Attempts to reserve {@code permits} permissions, updating the limiter state.
 *
 * @param permits number of permissions to reserve
 * @return 0 when the permission is available immediately; the nanoseconds to wait
 *         when it can be served within the configured timeout; -1 when it cannot
 */
@Override
public long reservePermission(final int permits) {
    final long timeoutInNanos = state.get().config.getTimeoutDuration().toNanos();
    final State modifiedState = updateStateWithBackOff(permits, timeoutInNanos);
    final long nanosToWait = modifiedState.nanosToWait;

    if (nanosToWait <= 0) {
        // Available right now.
        publishRateLimiterAcquisitionEvent(true, permits);
        return 0;
    }
    if (nanosToWait <= timeoutInNanos) {
        // Reservable within the configured timeout.
        publishRateLimiterAcquisitionEvent(true, permits);
        return nanosToWait;
    }
    // Cannot be served in time: report a failed acquisition.
    publishRateLimiterAcquisitionEvent(false, permits);
    return -1;
}
/**
 * Reserves permissions past the current cycle's capacity, verifies that a third
 * reservation is rejected (-1), then advances time several cycles and checks the
 * limiter refreshes its available permissions.
 */
@Test
public void reserveFewThenSkipCyclesBeforeRefreshNonBlocking() throws Exception {
    setup(Duration.ofNanos(CYCLE_IN_NANOS));
    setTimeOnNanos(CYCLE_IN_NANOS);
    // First reservation consumes the only permit of this cycle immediately.
    long permission = rateLimiter.reservePermission();
    then(permission).isZero();
    then(metrics.getAvailablePermissions()).isZero();
    then(metrics.getNanosToWait()).isEqualTo(CYCLE_IN_NANOS);
    then(metrics.getNumberOfWaitingThreads()).isZero();
    // Second reservation borrows from the next cycle: positive wait, bounded by one cycle.
    long reservation = rateLimiter.reservePermission();
    then(reservation).isPositive();
    then(reservation).isLessThanOrEqualTo(CYCLE_IN_NANOS);
    then(metrics.getAvailablePermissions()).isEqualTo(-1);
    then(metrics.getNanosToWait()).isEqualTo(CYCLE_IN_NANOS * 2);
    then(metrics.getNumberOfWaitingThreads()).isZero();
    // Third reservation exceeds the timeout and is rejected without changing state.
    long additionalReservation = rateLimiter.reservePermission();
    then(additionalReservation).isEqualTo(-1);
    then(metrics.getAvailablePermissions()).isEqualTo(-1);
    then(metrics.getNanosToWait()).isEqualTo(CYCLE_IN_NANOS * 2);
    then(metrics.getNumberOfWaitingThreads()).isZero();
    // Skip several cycles: the limiter refreshes and a permit becomes available again.
    setTimeOnNanos(CYCLE_IN_NANOS * 6 + 10);
    then(metrics.getAvailablePermissions()).isEqualTo(1);
    then(metrics.getNanosToWait()).isEqualTo(0L);
    then(metrics.getNumberOfWaitingThreads()).isZero();
}
public static long lowerHexToUnsignedLong(CharSequence lowerHex) { int length = lowerHex.length(); if (length < 1 || length > 32) throw isntLowerHexLong(lowerHex); // trim off any high bits int beginIndex = length > 16 ? length - 16 : 0; return lowerHexToUnsignedLong(lowerHex, beginIndex); }
/** A 32-char (128-bit) id must parse to the same value as its low 16 chars alone. */
@Test
void lowerHexToUnsignedLong_downgrades128bitIdsByDroppingHighBits() {
    assertThat(lowerHexToUnsignedLong("463ac35c9f6413ad48485a3953bb6124"))
        .isEqualTo(lowerHexToUnsignedLong("48485a3953bb6124"));
}
/**
 * Updates the app secret for an existing app key.
 * The {@code @Existed} constraint rejects unknown app keys before the service is called.
 *
 * @param appKey    the app key whose secret is being replaced (must already exist)
 * @param appSecret the new secret value
 * @return the service result of the update
 */
@GetMapping("/updateSk")
public ShenyuAdminResult updateSk(@RequestParam("appKey") @Existed(message = "app key not existed",
        provider = AppKeyProvider.class) final String appKey,
        @RequestParam("appSecret") final String appSecret) {
    return appAuthService.updateAppSecretByAppKey(appKey, appSecret);
}
/** Hits /appAuth/updateSk with valid parameters and expects an HTTP 200 response. */
@Test
public void testUpdateSk() throws Exception {
    this.mockMvc.perform(MockMvcRequestBuilders.get("/appAuth/updateSk")
            .param("appKey", "testAppKey")
            .param("appSecret", "updateAppSecret"))
        .andExpect(status().isOk())
        .andReturn();
}
/**
 * Builds the provider's {@code StateConfig} from the topology configuration.
 * Deserializes the JSON under {@code TOPOLOGY_STATE_PROVIDER_CONFIG} when present,
 * otherwise returns a default-constructed config.
 */
StateConfig getStateConfig(Map<String, Object> topoConf) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Bind JSON directly onto fields regardless of their visibility.
    mapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY);
    if (!topoConf.containsKey(Config.TOPOLOGY_STATE_PROVIDER_CONFIG)) {
        // No provider config supplied: fall back to defaults.
        return new StateConfig();
    }
    String providerConfig = (String) topoConf.get(Config.TOPOLOGY_STATE_PROVIDER_CONFIG);
    return mapper.readValue(providerConfig, StateConfig.class);
}
@Test public void testgetDefaultConfig() throws Exception { RedisKeyValueStateProvider provider = new RedisKeyValueStateProvider(); Map<String, Object> topoConf = new HashMap<>(); //topoConf.put(Config.TOPOLOGY_STATE_PROVIDER_CONFIG, "{\"keyClass\":\"String\"}"); RedisKeyValueStateProvider.StateConfig config = provider.getStateConfig(topoConf); assertNotNull(config); }
/**
 * Persists the serialized application id in ZooKeeper under this session's path.
 *
 * @param id the application id to store
 * @throws IllegalArgumentException when the id belongs to a different tenant
 */
public void writeApplicationId(ApplicationId id) {
    if (!id.tenant().equals(tenantName)) {
        throw new IllegalArgumentException(
                String.format("Cannot write application id '%s' for tenant '%s'", id, tenantName));
    }
    curator.set(applicationIdPath(), Utf8.toBytes(id.serializedForm()));
}
/** Writing an application id from another tenant must fail with a descriptive exception. */
@Test
public void require_that_wrong_application_gives_exception() {
    // Build an id whose tenant differs from the client's tenant ('default').
    ApplicationId id = new ApplicationId.Builder()
            .tenant("someOtherTenant")
            .applicationName("foo")
            .instanceName("bim")
            .build();
    int sessionId = 3;
    SessionZooKeeperClient zkc = createSessionZKClient(sessionId);
    assertEquals("Cannot write application id 'someOtherTenant.foo.bim' for tenant 'default'",
            assertThrows(IllegalArgumentException.class, () -> zkc.writeApplicationId(id)).getMessage());
}
/**
 * Looks up the processed SAML metadata for the given result id.
 *
 * @param resultId id of the metadata process result
 * @return the stored metadata, or {@code null} when no result exists for the id
 */
public String getProcessedMetadata(Long resultId) {
    return resultRepository.findById(resultId)
            .map(processResult -> processResult.getMetadata())
            .orElse(null);
}
/** When the repository has no result for the id, the service must return null. */
@Test
void processedMetadataNotFound() {
    when(resultRepositoryMock.findById(anyLong())).thenReturn(Optional.empty());
    String result = metadataRetrieverServiceMock.getProcessedMetadata(1L);
    // Exactly one repository lookup, and a null (not empty) result.
    verify(resultRepositoryMock, times(1)).findById(anyLong());
    assertNull(result);
}
/**
 * Sets the cipher suites to exclude from the enabled set.
 *
 * @param cipherSuites the excluded cipher suites; format (single name vs. list/pattern)
 *                     depends on how {@code excludedCipherSuites} is consumed elsewhere —
 *                     not visible from this setter
 */
public void setExcludedCipherSuites(String cipherSuites) {
    this.excludedCipherSuites = cipherSuites;
}
/** Excluding suite "A" from supported {A, B} must leave only "B" enabled. */
@Test
public void testSetExcludedCipherSuites() throws Exception {
    configurable.setSupportedCipherSuites(new String[] { "A", "B" });
    configuration.setExcludedCipherSuites("A");
    configuration.configure(configurable);
    assertTrue(Arrays.equals(new String[]{ "B" }, configurable.getEnabledCipherSuites()));
}
/**
 * Returns a comma-separated list of endpoint URIs whose filter predicates match the
 * exchange on the given channel. Filters are evaluated in the order produced by the
 * channel's filter map; every match increments that filter's statistics counter, and
 * evaluation stops after the first match when {@code firstMatchOnly} is set.
 * <p>
 * When nothing matches, the message is not dropped silently: the original body is
 * preserved in {@code ORIGINAL_BODY_HEADER}, the body is replaced with an explanatory
 * error string, and a log endpoint URI (WARN or DEBUG depending on
 * {@code warnDroppedMessage}) is returned instead.
 */
public String getMatchingEndpointsForExchangeByChannel(
        final Exchange exchange,
        final String channel,
        final boolean firstMatchOnly,
        final boolean warnDroppedMessage) {
    List<String> matchingEndpoints = new ArrayList<>();
    for (PrioritizedFilter filter : filterMap.get(channel)) {
        if (filter.predicate().matches(exchange)) {
            matchingEndpoints.add(filter.endpoint());
            // Side effect: record the hit on the matched filter.
            filter.statistics().incrementCount();
            if (firstMatchOnly) {
                break;
            }
        }
    }
    String recipients = String.join(",", matchingEndpoints);
    if (ObjectHelper.isEmpty(recipients)) {
        // No filter matched: stash the original body and route to a log endpoint.
        Message message = exchange.getMessage();
        message.setHeader(ORIGINAL_BODY_HEADER, message.getBody());
        recipients = String.format(DynamicRouterConstants.LOG_ENDPOINT, this.getClass().getCanonicalName(),
                channel, warnDroppedMessage ? LoggingLevel.WARN : LoggingLevel.DEBUG);
        String error = String.format(
                "DynamicRouter channel '%s': no filters matched for an exchange from route: '%s'. " +
                        "The 'originalBody' header contains the original message body.",
                channel, exchange.getFromEndpoint());
        message.setBody(error, String.class);
    }
    return recipients;
}
/** A single matching filter should yield exactly its endpoint as the recipient list. */
@Test
void testGetMatchingEndpointsForExchangeByChannel() {
    String channel = "test";
    // Stub a filter whose predicate always matches this exchange.
    Mockito.when(prioritizedFilter.id()).thenReturn("id");
    Mockito.when(prioritizedFilter.priority()).thenReturn(1);
    Mockito.when(prioritizedFilter.predicate()).thenReturn(predicate);
    Mockito.when(prioritizedFilter.statistics()).thenReturn(prioritizedFilterStatistics);
    Mockito.when(prioritizedFilter.endpoint()).thenReturn("testEndpoint");
    Mockito.doNothing().when(prioritizedFilterStatistics).incrementCount();
    Mockito.when(predicate.matches(exchange)).thenReturn(true);
    filterService.addFilterForChannel(prioritizedFilter, DYNAMIC_ROUTER_CHANNEL, false);
    String result = filterService.getMatchingEndpointsForExchangeByChannel(exchange, channel, true, false);
    Assertions.assertEquals("testEndpoint", result);
}
/**
 * Deletes records before the given offsets for each topic partition.
 * Uses the admin API driver machinery: a per-partition future is created, a
 * {@link DeleteRecordsHandler} issues the requests, and the aggregated result is returned.
 *
 * @param recordsToDelete offsets to delete up to, per partition
 * @param options         request options; a null timeout falls back to the default API timeout
 */
@Override
public DeleteRecordsResult deleteRecords(final Map<TopicPartition, RecordsToDelete> recordsToDelete,
                                         final DeleteRecordsOptions options) {
    SimpleAdminApiFuture<TopicPartition, DeletedRecords> future =
        DeleteRecordsHandler.newFuture(recordsToDelete.keySet());
    // Resolve the effective timeout: explicit option wins over the client default.
    int timeoutMs = defaultApiTimeoutMs;
    if (options.timeoutMs() != null) {
        timeoutMs = options.timeoutMs();
    }
    DeleteRecordsHandler handler = new DeleteRecordsHandler(recordsToDelete, logContext, timeoutMs);
    // NOTE(review): the driver is given the raw (possibly null) option value,
    // not the resolved timeoutMs — presumably the driver applies its own default; confirm.
    invokeDriver(handler, future, options.timeoutMs);
    return new DeleteRecordsResult(future.all());
}
/** A TOPIC_AUTHORIZATION_FAILED metadata error must surface as TopicAuthorizationException. */
@Test
public void testDeleteRecordsTopicAuthorizationError() {
    String topic = "foo";
    TopicPartition partition = new TopicPartition(topic, 0);

    try (AdminClientUnitTestEnv env = mockClientEnv()) {
        // Prepare a metadata response reporting the topic as unauthorized.
        List<MetadataResponse.TopicMetadata> topics = new ArrayList<>();
        topics.add(new MetadataResponse.TopicMetadata(Errors.TOPIC_AUTHORIZATION_FAILED, topic,
                false, Collections.emptyList()));

        env.kafkaClient().prepareResponse(RequestTestUtils.metadataResponse(env.cluster().nodes(),
                env.cluster().clusterResource().clusterId(), env.cluster().controller().id(), topics));

        Map<TopicPartition, RecordsToDelete> recordsToDelete = new HashMap<>();
        recordsToDelete.put(partition, RecordsToDelete.beforeOffset(10L));
        DeleteRecordsResult results = env.adminClient().deleteRecords(recordsToDelete);

        // The per-partition future must fail with the authorization exception.
        TestUtils.assertFutureThrows(results.lowWatermarks().get(partition), TopicAuthorizationException.class);
    }
}
/**
 * Creates the meta data contexts: bootstraps from local configuration when the
 * persistence repository holds no databases yet, otherwise loads from the repository.
 *
 * @throws SQLException on persistence or data source errors during creation
 */
public static MetaDataContexts create(final MetaDataPersistService persistService,
                                      final ContextManagerBuilderParameter param,
                                      final ComputeNodeInstanceContext computeNodeInstanceContext) throws SQLException {
    boolean repositoryEmpty = persistService.getDatabaseMetaDataService().loadAllDatabaseNames().isEmpty();
    if (repositoryEmpty) {
        return createByLocal(persistService, param, computeNodeInstanceContext);
    }
    return createByRepository(persistService, param, computeNodeInstanceContext);
}
/** Creating contexts for a JDBC instance must load the mocked rule and the foo_db database. */
@Test
void assertCreateWithJDBCInstanceMetaData() throws SQLException {
    ComputeNodeInstanceContext computeNodeInstanceContext = mock(ComputeNodeInstanceContext.class, RETURNS_DEEP_STUBS);
    when(computeNodeInstanceContext.getInstance().getMetaData()).thenReturn(mock(JDBCInstanceMetaData.class));
    try (MetaDataContexts actual = MetaDataContextsFactory.create(metaDataPersistService,
            createContextManagerBuilderParameter(), computeNodeInstanceContext)) {
        // Exactly one global rule, of the mocked type.
        assertThat(actual.getMetaData().getGlobalRuleMetaData().getRules().size(), is(1));
        assertThat(actual.getMetaData().getGlobalRuleMetaData().getRules().iterator().next(), instanceOf(MockedRule.class));
        // Exactly one database, named foo_db.
        assertTrue(actual.getMetaData().getDatabases().containsKey("foo_db"));
        assertThat(actual.getMetaData().getDatabases().size(), is(1));
    }
}
/**
 * Converts a Chinese monetary amount string (e.g. "陆万柒仟伍佰伍拾陆元叁角贰分")
 * into its numeric value. Delegates to {@code NumberChineseFormatter}.
 *
 * @param chineseMoneyAmount the Chinese money amount text
 * @return the parsed amount as a BigDecimal
 */
public static BigDecimal chineseMoneyToNumber(String chineseMoneyAmount) {
    return NumberChineseFormatter.chineseMoneyToNumber(chineseMoneyAmount);
}
/** Verifies Chinese money-text parsing for whole yuan, jiao (0.1), fen (0.01) and combinations. */
@Test
public void testChineseMoneyToNumber() {
    /*
     * Expected mappings (s = input text, n = numeric value):
     * s=陆万柒仟伍佰伍拾陆圆, n=67556
     * s=陆万柒仟伍佰伍拾陆元, n=67556
     * s=叁角, n=0.3
     * s=贰分, n=0.02
     * s=陆万柒仟伍佰伍拾陆元叁角, n=67556.3
     * s=陆万柒仟伍佰伍拾陆元贰分, n=67556.02
     * s=叁角贰分, n=0.32
     * s=陆万柒仟伍佰伍拾陆元叁角贰分, n=67556.32
     */
    assertEquals(67556, Convert.chineseMoneyToNumber("陆万柒仟伍佰伍拾陆圆").longValue());
    assertEquals(67556, Convert.chineseMoneyToNumber("陆万柒仟伍佰伍拾陆元").longValue());
    assertEquals(0.3D, Convert.chineseMoneyToNumber("叁角").doubleValue(), 0);
    assertEquals(0.02, Convert.chineseMoneyToNumber("贰分").doubleValue(), 0);
    assertEquals(67556.3, Convert.chineseMoneyToNumber("陆万柒仟伍佰伍拾陆元叁角").doubleValue(), 0);
    assertEquals(67556.02, Convert.chineseMoneyToNumber("陆万柒仟伍佰伍拾陆元贰分").doubleValue(), 0);
    assertEquals(0.32, Convert.chineseMoneyToNumber("叁角贰分").doubleValue(), 0);
    assertEquals(67556.32, Convert.chineseMoneyToNumber("陆万柒仟伍佰伍拾陆元叁角贰分").doubleValue(), 0);
}
/**
 * Maps a SeaTunnel {@link Column} definition back to an IRIS column type definition.
 * Handles length clamping for VARCHAR/BINARY, precision/scale normalization for DECIMAL,
 * and scale clamping for TIME; unsupported SQL types raise a connector conversion error.
 */
@Override
public BasicTypeDefine reconvert(Column column) {
    BasicTypeDefine.BasicTypeDefineBuilder builder =
            BasicTypeDefine.builder()
                    .name(column.getName())
                    .precision(column.getColumnLength())
                    .length(column.getColumnLength())
                    .nullable(column.isNullable())
                    .comment(column.getComment())
                    .scale(column.getScale())
                    .defaultValue(column.getDefaultValue());
    switch (column.getDataType().getSqlType()) {
        case NULL:
            builder.columnType(IRIS_NULL);
            builder.dataType(IRIS_NULL);
            break;
        case STRING:
            // Unknown/invalid length -> max VARCHAR; within range -> sized VARCHAR;
            // at/over the max -> LONG VARCHAR.
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                builder.columnType(String.format("%s(%s)", IRIS_VARCHAR, MAX_VARCHAR_LENGTH));
                builder.dataType(IRIS_VARCHAR);
            } else if (column.getColumnLength() < MAX_VARCHAR_LENGTH) {
                builder.columnType(
                        String.format("%s(%s)", IRIS_VARCHAR, column.getColumnLength()));
                builder.dataType(IRIS_VARCHAR);
            } else {
                builder.columnType(IRIS_LONG_VARCHAR);
                builder.dataType(IRIS_LONG_VARCHAR);
            }
            break;
        case BOOLEAN:
            builder.columnType(IRIS_BIT);
            builder.dataType(IRIS_BIT);
            break;
        case TINYINT:
            builder.columnType(IRIS_TINYINT);
            builder.dataType(IRIS_TINYINT);
            break;
        case SMALLINT:
            builder.columnType(IRIS_SMALLINT);
            builder.dataType(IRIS_SMALLINT);
            break;
        case INT:
            builder.columnType(IRIS_INTEGER);
            builder.dataType(IRIS_INTEGER);
            break;
        case BIGINT:
            builder.columnType(IRIS_BIGINT);
            builder.dataType(IRIS_BIGINT);
            break;
        case FLOAT:
            builder.columnType(IRIS_FLOAT);
            builder.dataType(IRIS_FLOAT);
            break;
        case DOUBLE:
            builder.columnType(IRIS_DOUBLE);
            builder.dataType(IRIS_DOUBLE);
            break;
        case DECIMAL:
            // Normalize scale first (clamp to [0, MAX_SCALE]), then precision
            // (at least the scale, clamped to (0, MAX_PRECISION]).
            DecimalType decimalType = (DecimalType) column.getDataType();
            long precision = decimalType.getPrecision();
            int scale = decimalType.getScale();
            if (scale < 0) {
                scale = 0;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is scale less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (scale > MAX_SCALE) {
                scale = MAX_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum scale of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_SCALE,
                        precision,
                        scale);
            }
            if (precision < scale) {
                precision = scale;
            }
            if (precision <= 0) {
                precision = DEFAULT_PRECISION;
                scale = DEFAULT_SCALE;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which is precision less than 0, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        precision,
                        scale);
            } else if (precision > MAX_PRECISION) {
                scale = MAX_SCALE;
                precision = MAX_PRECISION;
                log.warn(
                        "The decimal column {} type decimal({},{}) is out of range, "
                                + "which exceeds the maximum precision of {}, "
                                + "it will be converted to decimal({},{})",
                        column.getName(),
                        decimalType.getPrecision(),
                        decimalType.getScale(),
                        MAX_PRECISION,
                        precision,
                        scale);
            }
            builder.columnType(String.format("%s(%s,%s)", IRIS_DECIMAL, precision, scale));
            builder.dataType(IRIS_DECIMAL);
            builder.precision(precision);
            builder.scale(scale);
            break;
        case BYTES:
            // Unknown/invalid or too-long length -> LONG BINARY; otherwise sized BINARY.
            if (column.getColumnLength() == null || column.getColumnLength() <= 0) {
                builder.columnType(IRIS_LONG_BINARY);
                builder.dataType(IRIS_LONG_BINARY);
            } else if (column.getColumnLength() < MAX_BINARY_LENGTH) {
                builder.dataType(IRIS_BINARY);
                builder.columnType(
                        String.format("%s(%s)", IRIS_BINARY, column.getColumnLength()));
            } else {
                builder.columnType(IRIS_LONG_BINARY);
                builder.dataType(IRIS_LONG_BINARY);
            }
            break;
        case DATE:
            builder.columnType(IRIS_DATE);
            builder.dataType(IRIS_DATE);
            break;
        case TIME:
            builder.dataType(IRIS_TIME);
            // A positive scale is clamped to MAX_TIME_SCALE and rendered as TIME(scale).
            if (Objects.nonNull(column.getScale()) && column.getScale() > 0) {
                Integer timeScale = column.getScale();
                if (timeScale > MAX_TIME_SCALE) {
                    timeScale = MAX_TIME_SCALE;
                    log.warn(
                            "The time column {} type time({}) is out of range, "
                                    + "which exceeds the maximum scale of {}, "
                                    + "it will be converted to time({})",
                            column.getName(),
                            column.getScale(),
                            MAX_TIME_SCALE,
                            timeScale);
                }
                builder.columnType(String.format("%s(%s)", IRIS_TIME, timeScale));
                builder.scale(timeScale);
            } else {
                builder.columnType(IRIS_TIME);
            }
            break;
        case TIMESTAMP:
            builder.columnType(IRIS_TIMESTAMP2);
            builder.dataType(IRIS_TIMESTAMP2);
            break;
        default:
            throw CommonError.convertToConnectorTypeError(
                    DatabaseIdentifier.IRIS,
                    column.getDataType().getSqlType().name(),
                    column.getName());
    }
    return builder.build();
}
/**
 * Verifies BYTES reconversion: null length -> LONG BINARY, short length -> sized BINARY,
 * and length at the maximum -> LONG BINARY again.
 */
@Test
public void testReconvertBytes() {
    // Null length: falls back to LONG BINARY.
    Column column =
            PhysicalColumn.builder()
                    .name("test")
                    .dataType(PrimitiveByteArrayType.INSTANCE)
                    .columnLength(null)
                    .build();

    BasicTypeDefine typeDefine = IrisTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(IrisTypeConverter.IRIS_LONG_BINARY, typeDefine.getColumnType());
    Assertions.assertEquals(IrisTypeConverter.IRIS_LONG_BINARY, typeDefine.getDataType());

    // Small explicit length: sized BINARY(n).
    column =
            PhysicalColumn.builder()
                    .name("test")
                    .dataType(PrimitiveByteArrayType.INSTANCE)
                    .columnLength(2L)
                    .build();

    typeDefine = IrisTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(column.getColumnLength(), typeDefine.getLength());
    Assertions.assertEquals(
            String.format(IrisTypeConverter.IRIS_BINARY + "(%s)", typeDefine.getLength()),
            typeDefine.getColumnType());
    Assertions.assertEquals(IrisTypeConverter.IRIS_BINARY, typeDefine.getDataType());

    // Length at the maximum boundary: LONG BINARY (the sized branch requires strictly less).
    column =
            PhysicalColumn.builder()
                    .name("test")
                    .dataType(PrimitiveByteArrayType.INSTANCE)
                    .columnLength(MAX_BINARY_LENGTH)
                    .build();

    typeDefine = IrisTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(IrisTypeConverter.IRIS_LONG_BINARY, typeDefine.getColumnType());
    Assertions.assertEquals(IrisTypeConverter.IRIS_LONG_BINARY, typeDefine.getDataType());
}
/**
 * Formats an elapsed-time message of the form {@code "<message> took <elapsed> ns."}.
 *
 * @param startTimeNs start timestamp previously obtained from {@link System#nanoTime()}
 * @param message     description of the timed task
 * @return the formatted message including the elapsed nanoseconds
 */
public static String formatTimeTakenNs(long startTimeNs, String message) {
    final long elapsedNs = System.nanoTime() - startTimeNs;
    return String.format("%s took %d ns.", message, elapsedNs);
}
/**
 * Checks the formatted message shape and that the reported elapsed time is
 * bounded between the injected delta and twice the delta.
 */
@Test
public void formatTimeTakenNs() {
    // Local holder pairing an expected regex with the input message.
    class TestCase {
        Pattern mExpected;
        String mInputMessage;

        public TestCase(String expectedRE, String inputMessage) {
            mExpected = Pattern.compile(expectedRE);
            mInputMessage = inputMessage;
        }
    }
    List<TestCase> testCases = new ArrayList<>();
    testCases.add(new TestCase("^Task A took (.*) ns.$", "Task A"));
    testCases.add(new TestCase("^Task B took (.*) ns.$", "Task B"));
    // Pretend the task started 100ms ago by shifting the start time back.
    long delta = 100000000;
    for (TestCase testCase : testCases) {
        String result = FormatUtils.formatTimeTakenNs(System.nanoTime() - delta, testCase.mInputMessage);
        Matcher match = testCase.mExpected.matcher(result);
        assertTrue(match.matches());
        // Elapsed time must be at least delta and (generously) at most twice it.
        assertTrue(delta <= Long.parseLong(match.group(1)));
        assertTrue(Long.parseLong(match.group(1)) <= 2 * delta);
    }
}
/**
 * Returns the stable identifier of this preferences provider.
 *
 * @return the constant provider id "NextWordPrefsProvider"
 */
@Override
public String providerId() {
    return "NextWordPrefsProvider";
}
/** The provider id must be the expected constant regardless of constructor arguments. */
@Test
public void testId() {
    Assert.assertEquals(
            "NextWordPrefsProvider",
            new NextWordPrefsProvider(getApplicationContext(), emptyList()).providerId());
}
/**
 * Queries this sentinel for its known masters.
 *
 * @return the master servers reported by the SENTINEL MASTERS command
 */
@Override
public Collection<RedisServer> masters() {
    // Fetch the raw field maps from sentinel and convert them in one step.
    return toRedisServersList(connection.sync(StringCodec.INSTANCE, RedisCommands.SENTINEL_MASTERS));
}
/** The test sentinel setup is expected to report exactly one master. */
@Test
public void testMasters() {
    Collection<RedisServer> masters = connection.masters();
    assertThat(masters).hasSize(1);
}
/**
 * Returns the Jackson {@link ObjectMapper} held by this instance.
 *
 * @return the configured object mapper
 */
public ObjectMapper getObjectMapper() {
    return objectMapper;
}
/** The bootstrap must expose a non-null ObjectMapper. */
@Test
void hasAnObjectMapper() throws Exception {
    assertThat(bootstrap.getObjectMapper())
        .isNotNull();
}
/**
 * Runs Laplacian Eigenmap with default settings: 2 output dimensions and a
 * width parameter of -1 (presumably disabling the heat-kernel weighting in
 * favor of simple-minded weights — confirm against the 4-arg overload's docs).
 *
 * @param data the input data set
 * @param k    the number of nearest neighbors for graph construction
 */
public static LaplacianEigenmap of(double[][] data, int k) {
    return of(data, k, 2, -1);
}
@Test public void test() throws Exception { System.out.println("Laplacian Eigenmap"); double[][] points = { { 0.0150, 0.0125}, {-0.0186, 0.0120}, { 0.0434, -0.0024}, { 0.0332, 0.0175}, {-0.0110, 0.0075}, { 0.0096, 0.0215}, {-0.0201, 0.0163}, { 0.0347, -0.0175}, { 0.0075, -0.0239}, { 0.0388, -0.0354}, { 0.0410, 0.0466}, {-0.0329, -0.0094}, { 0.0344, 0.0499}, {-0.0411, 0.0123}, {-0.0317, 0.0254}, {-0.0198, -0.0092}, {-0.0339, 0.0222}, { 0.0358, -0.0073}, {-0.0433, -0.0106}, {-0.0459, 0.0274}, {-0.0035, -0.0188}, {-0.0459, 0.0287}, {-0.0468, 0.0341}, {-0.0233, -0.0168}, { 0.0232, 0.0137}, { 0.0395, 0.0569}, {-0.0222, -0.0397}, {-0.0260, 0.0221}, {-0.0211, 0.0034}, { 0.0325, 0.0457}, { 0.0298, 0.0372}, { 0.0318, 0.0088}, {-0.0187, 0.0100}, { 0.0344, 0.0231}, {-0.0448, 0.0048}, { 0.0399, -0.0331}, { 0.0297, -0.0421}, { 0.0237, -0.0391}, { 0.0236, 0.0319}, {-0.0305, -0.0312}, {-0.0011, -0.0530}, {-0.0031, -0.0328}, {-0.0399, 0.0122}, { 0.0163, -0.0073}, { 0.0369, -0.0367}, {-0.0053, -0.0444}, { 0.0041, -0.0282}, {-0.0471, 0.0332}, { 0.0239, 0.0114}, {-0.0456, 0.0233}, {-0.0221, -0.0025}, {-0.0126, -0.0112}, { 0.0162, -0.0391}, {-0.0400, 0.0106}, { 0.0137, -0.0044}, {-0.0213, -0.0254}, { 0.0244, 0.0356}, { 0.0393, 0.0599}, { 0.0205, 0.0265}, {-0.0312, -0.0071}, {-0.0104, -0.0502}, {-0.0466, 0.0285}, {-0.0474, 0.0279}, {-0.0254, -0.0141}, {-0.0171, 0.0062}, { 0.0275, -0.0473}, {-0.0474, 0.0324}, { 0.0383, -0.0383}, { 0.0252, -0.0480}, { 0.0151, 0.0188}, {-0.0111, 0.0162}, { 0.0016, 0.0175}, {-0.0124, 0.0057}, { 0.0408, 0.0443}, { 0.0076, 0.0198}, { 0.0426, 0.0116}, { 0.0402, 0.0047}, {-0.0397, 0.0090}, { 0.0373, -0.0348}, {-0.0413, -0.0140}, { 0.0222, 0.0333}, { 0.0307, -0.0142}, { 0.0246, 0.0392}, { 0.0162, -0.0391}, {-0.0036, -0.0344}, { 0.0293, -0.0152}, {-0.0234, -0.0148}, {-0.0107, -0.0500}, { 0.0366, -0.0171}, { 0.0328, 0.0152}, {-0.0434, -0.0097}, {-0.0104, -0.0147}, { 0.0333, 0.0215}, { 0.0126, -0.0016}, { 0.0427, 0.0014}, { 0.0387, 0.0623}, {-0.0436, 0.0133}, { 
0.0238, -0.0413}, {-0.0470, 0.0341}, { 0.0232, 0.0292}, { 0.0431, -0.0006}, {-0.0101, -0.0440}, {-0.0474, 0.0334}, {-0.0250, 0.0221}, {-0.0415, 0.0266}, {-0.0313, 0.0250}, {-0.0180, 0.0135}, { 0.0230, -0.0442}, { 0.0383, -0.0351}, {-0.0238, 0.0041}, {-0.0305, -0.0073}, { 0.0445, 0.0120}, {-0.0053, -0.0403}, { 0.0317, -0.0088}, { 0.0141, 0.0115}, {-0.0139, -0.0423}, { 0.0075, 0.0228}, {-0.0250, -0.0302}, { 0.0283, -0.0154}, { 0.0398, -0.0305}, { 0.0313, -0.0405}, { 0.0389, -0.0393}, { 0.0295, 0.0363}, { 0.0248, 0.0392}, {-0.0053, -0.0403}, {-0.0073, 0.0069}, { 0.0214, -0.0229}, { 0.0174, 0.0216}, { 0.0400, 0.0365}, {-0.0423, -0.0093}, { 0.0218, 0.0357}, { 0.0442, 0.0818}, { 0.0032, -0.0487}, { 0.0215, 0.0354}, { 0.0153, -0.0053}, { 0.0311, 0.0359}, { 0.0396, -0.0388}, { 0.0415, -0.0228}, {-0.0250, -0.0295}, { 0.0409, -0.0324}, {-0.0036, -0.0397}, {-0.0421, 0.0267}, {-0.0425, -0.0094}, { 0.0064, 0.0225}, { 0.0174, -0.0500}, {-0.0476, 0.0343}, {-0.0071, 0.0036}, {-0.0252, -0.0359}, { 0.0097, -0.0494}, { 0.0408, 0.0008}, {-0.0449, 0.0130}, { 0.0177, -0.0382}, { 0.0444, 0.0093}, { 0.0131, 0.0213}, { 0.0267, -0.0177}, {-0.0361, 0.0202}, { 0.0362, -0.0029}, { 0.0255, 0.0399}, { 0.0394, -0.0329}, {-0.0007, 0.0058}, {-0.0393, -0.0003}, { 0.0395, -0.0390}, {-0.0454, 0.0200}, { 0.0228, 0.0327}, { 0.0242, 0.0388}, {-0.0181, 0.0130}, { 0.0214, 0.0165}, {-0.0432, -0.0068}, {-0.0293, -0.0072}, { 0.0402, 0.0678}, {-0.0365, -0.0221}, {-0.0079, 0.0138}, {-0.0315, 0.0250}, {-0.0404, -0.0125}, { 0.0399, 0.0023}, { 0.0379, -0.0080}, { 0.0049, 0.0175}, { 0.0042, -0.0505}, {-0.0367, 0.0192}, { 0.0050, 0.0185}, {-0.0110, 0.0056}, {-0.0348, -0.0109}, {-0.0031, -0.0375}, { 0.0215, 0.0325}, { 0.0189, -0.0132}, { 0.0356, 0.0311}, {-0.0194, -0.0408}, { 0.0436, -0.0013}, {-0.0436, -0.0052}, {-0.0319, -0.0294}, { 0.0415, 0.0459}, { 0.0010, -0.0093}, { 0.0287, 0.0315}, {-0.0305, -0.0312}, { 0.0385, 0.0605}, { 0.0368, -0.0238}, { 0.0421, 0.0585}, { 0.0117, 0.0191}, { 0.0033, 0.0157}, { 0.0242, 
0.0386}, { 0.0259, 0.0394}, {-0.0434, 0.0327}, { 0.0133, -0.0003}, {-0.0484, 0.0344}, { 0.0409, -0.0324}, { 0.0390, -0.0346}, {-0.0013, -0.0159}, {-0.0228, -0.0345}, { 0.0006, 0.0173}, {-0.0210, -0.0406}, {-0.0232, -0.0159}, {-0.0473, 0.0265}, {-0.0326, -0.0116}, {-0.0470, 0.0251}, { 0.0352, -0.0346}, {-0.0436, -0.0086}, {-0.0286, -0.0238}, {-0.0270, 0.0082}, {-0.0423, -0.0130}, { 0.0386, -0.0279}, { 0.0009, -0.0528}, { 0.0446, 0.0161}, { 0.0058, -0.0265}, { 0.0447, 0.0149}, { 0.0036, -0.0519}, {-0.0026, -0.0400}, { 0.0432, -0.0053}, {-0.0463, 0.0282}, {-0.0390, 0.0276}, { 0.0236, -0.0461}, { 0.0091, -0.0480}, {-0.0013, 0.0188}, { 0.0179, -0.0386}, { 0.0439, 0.0077}, { 0.0408, 0.0697}, { 0.0400, 0.0022}, { 0.0238, -0.0486}, { 0.0313, -0.0441}, {-0.0367, -0.0097}, { 0.0332, 0.0429}, { 0.0162, -0.0067}, { 0.0436, -0.0002}, { 0.0351, 0.0294}, { 0.0169, -0.0026}, {-0.0338, 0.0192}, { 0.0427, 0.0014}, {-0.0405, 0.0229}, {-0.0226, -0.0014}, { 0.0400, 0.0591}, {-0.0448, 0.0337}, { 0.0410, 0.0616}, { 0.0055, 0.0178}, {-0.0044, -0.0130}, {-0.0437, -0.0076}, {-0.0455, 0.0337}, { 0.0263, -0.0471}, {-0.0182, -0.0421}, {-0.0452, 0.0200}, { 0.0020, 0.0168}, {-0.0470, 0.0304}, { 0.0390, 0.0007}, { 0.0433, -0.0030}, {-0.0189, -0.0432}, {-0.0463, 0.0275}, { 0.0342, -0.0380}, { 0.0154, 0.0001}, {-0.0427, -0.0086}, { 0.0408, 0.0697}, {-0.0289, -0.0328}, {-0.0358, 0.0195}, { 0.0060, 0.0223}, { 0.0391, -0.0299}, {-0.0483, 0.0338}, { 0.0409, -0.0324}, { 0.0345, -0.0377}, { 0.0393, -0.0390}, { 0.0061, -0.0046}, {-0.0255, -0.0295}, {-0.0232, -0.0265}, {-0.0436, -0.0016}, { 0.0142, 0.0010}, { 0.0424, 0.0638}, {-0.0316, -0.0075}, { 0.0283, 0.0166}, {-0.0203, -0.0076}, {-0.0476, 0.0318}, { 0.0246, 0.0124}, { 0.0402, 0.0677}, { 0.0248, 0.0148}, { 0.0204, 0.0342}, { 0.0213, -0.0221}, {-0.0159, 0.0056}, {-0.0383, -0.0081}, { 0.0027, -0.0489}, {-0.0158, -0.0110}, { 0.0156, 0.0255}, {-0.0086, -0.0099}, {-0.0344, 0.0197}, { 0.0215, 0.0354}, { 0.0312, 0.0416}, {-0.0281, 0.0236}, { 0.0270, -0.0469}, 
{ 0.0118, -0.0134}, {-0.0302, 0.0247}, { 0.0039, 0.0214}, {-0.0459, 0.0265}, {-0.0397, -0.0128}, {-0.0452, 0.0339}, { 0.0376, -0.0308}, { 0.0098, 0.0199}, { 0.0110, 0.0150}, { 0.0238, 0.0384}, {-0.0101, -0.0504}, {-0.0405, 0.0231}, { 0.0446, 0.0141}, { 0.0046, -0.0315}, { 0.0273, -0.0475}, { 0.0263, 0.0328}, {-0.0381, -0.0007}, { 0.0359, -0.0135}, { 0.0054, -0.0324}, { 0.0189, -0.0399}, {-0.0424, -0.0100}, { 0.0128, 0.0257}, { 0.0166, 0.0235}, { 0.0355, -0.0117}, {-0.0327, 0.0258}, {-0.0453, 0.0210}, { 0.0268, 0.0374}, {-0.0309, -0.0307}, { 0.0067, -0.0512}, {-0.0432, -0.0064}, { 0.0404, -0.0283}, { 0.0329, 0.0460}, {-0.0083, -0.0509}, { 0.0398, -0.0301}, {-0.0105, -0.0427}, {-0.0188, -0.0119}, {-0.0443, -0.0005}, { 0.0212, -0.0487}, {-0.0318, 0.0247}, {-0.0452, 0.0242}, { 0.0004, -0.0417}, { 0.0339, 0.0292}, { 0.0387, -0.0392}, { 0.0146, 0.0092}, {-0.0087, -0.0511}, { 0.0217, -0.0471}, {-0.0223, -0.0202}, { 0.0033, -0.0492}, {-0.0061, -0.0500}, {-0.0446, 0.0034}, { 0.0342, -0.0406}, {-0.0119, -0.0437}, { 0.0225, 0.0313}, { 0.0443, 0.0825}, { 0.0002, -0.0208}, {-0.0015, 0.0013}, { 0.0247, 0.0340}, { 0.0153, -0.0505}, { 0.0248, -0.0448}, {-0.0470, 0.0284}, {-0.0136, 0.0059}, {-0.0008, -0.0239}, {-0.0319, -0.0078}, {-0.0298, -0.0320}, {-0.0042, -0.0391}, {-0.0467, 0.0343}, {-0.0457, 0.0225}, {-0.0465, 0.0294}, {-0.0483, 0.0344}, { 0.0078, 0.0190}, {-0.0179, -0.0412}, {-0.0430, -0.0117}, {-0.0391, 0.0040}, { 0.0013, 0.0166}, { 0.0287, 0.0355}, { 0.0277, -0.0432}, { 0.0408, -0.0304}, { 0.0335, -0.0378}, { 0.0443, 0.0820}, { 0.0128, -0.0068}, { 0.0410, 0.0424}, { 0.0198, -0.0493}, {-0.0202, 0.0073}, { 0.0441, 0.0798}, { 0.0118, 0.0188}, {-0.0408, 0.0214}, { 0.0272, -0.0474}, { 0.0342, 0.0304}, {-0.0284, -0.0334}, { 0.0223, 0.0319}, {-0.0397, 0.0291}, { 0.0440, 0.0048}, { 0.0403, 0.0472}, {-0.0358, 0.0195}, { 0.0294, -0.0183}, {-0.0024, 0.0011}, { 0.0406, 0.0692}, {-0.0294, 0.0244}, {-0.0329, -0.0094}, { 0.0100, 0.0234}, { 0.0172, -0.0056}, { 0.0371, -0.0213}, { 0.0375, 
-0.0393}, {-0.0316, 0.0152}, {-0.0369, -0.0209}, {-0.0309, -0.0307}, {-0.0175, -0.0422}, {-0.0341, 0.0188}, {-0.0339, 0.0264}, { 0.0289, 0.0067}, {-0.0408, 0.0004}, {-0.0086, -0.0509}, { 0.0141, 0.0063}, {-0.0090, -0.0500}, { 0.0377, -0.0391}, {-0.0446, 0.0330}, { 0.0103, 0.0211}, { 0.0282, -0.0134}, {-0.0140, -0.0386}, {-0.0410, 0.0044}, { 0.0066, 0.0224}, { 0.0308, 0.0039}, { 0.0220, -0.0203}, { 0.0398, 0.0662}, {-0.0112, -0.0428}, {-0.0337, -0.0266}, {-0.0071, -0.0079}, {-0.0071, -0.0062}, { 0.0226, 0.0324}, { 0.0383, -0.0372}, {-0.0477, 0.0322}, {-0.0391, -0.0156}, { 0.0061, -0.0463}, {-0.0275, 0.0233}, { 0.0202, -0.0295}, {-0.0439, -0.0059}, {-0.0089, -0.0509}, {-0.0175, 0.0183}, {-0.0102, -0.0497}, {-0.0484, 0.0344}, {-0.0329, 0.0254}, { 0.0424, 0.0638}, {-0.0229, -0.0392}, { 0.0447, 0.0153}, {-0.0250, -0.0304}, {-0.0011, -0.0496}, { 0.0238, -0.0451}, { 0.0409, -0.0320}, { 0.0446, 0.0155}, {-0.0026, -0.0322}, {-0.0394, -0.0176}, {-0.0440, -0.0022}, {-0.0389, -0.0072}, {-0.0392, -0.0162}, {-0.0353, 0.0239}, { 0.0213, -0.0221}, {-0.0227, -0.0164}, { 0.0284, 0.0368}, { 0.0412, -0.0233}, { 0.0286, 0.0309}, { 0.0225, 0.0332}, {-0.0167, -0.0452}, {-0.0302, -0.0072}, {-0.0244, -0.0067}, { 0.0379, -0.0371}, { 0.0217, -0.0241}, {-0.0161, 0.0157}, {-0.0328, -0.0280}, { 0.0259, -0.0431}, { 0.0397, -0.0388}, { 0.0222, 0.0139}, { 0.0235, -0.0416}, { 0.0353, -0.0365}, { 0.0408, 0.0473}, {-0.0011, -0.0009}, { 0.0244, -0.0428}, {-0.0228, 0.0029}, {-0.0249, 0.0053}, { 0.0294, 0.0352}, {-0.0395, -0.0096}, { 0.0399, -0.0385}, { 0.0007, -0.0035}, {-0.0089, -0.0089}, { 0.0300, 0.0378}, { 0.0442, 0.0819}, {-0.0405, 0.0123}, { 0.0435, 0.0194}, {-0.0163, 0.0175}, { 0.0291, 0.0371}, { 0.0010, -0.0030}, {-0.0186, 0.0120}, {-0.0183, -0.0429}, {-0.0027, -0.0411}, { 0.0075, 0.0220}, {-0.0376, 0.0000}, {-0.0361, 0.0192}, { 0.0360, -0.0214}, { 0.0217, -0.0490}, {-0.0412, -0.0026}, {-0.0430, -0.0115}, { 0.0186, -0.0483}, { 0.0016, -0.0234}, { 0.0432, 0.0147}, { 0.0422, 0.0642}, { 0.0279, 
0.0258}, { 0.0442, 0.0168}, { 0.0310, 0.0083}, {-0.0473, 0.0341}, { 0.0193, 0.0017}, { 0.0016, -0.0487}, {-0.0158, 0.0174}, { 0.0195, -0.0497}, { 0.0105, -0.0498}, { 0.0015, -0.0533}, {-0.0404, -0.0161}, {-0.0428, -0.0122}, { 0.0246, 0.0392}, { 0.0343, 0.0270}, { 0.0442, 0.0078}, { 0.0235, 0.0138}, { 0.0443, 0.0825}, {-0.0077, -0.0059}, {-0.0475, 0.0285}, { 0.0289, -0.0464}, {-0.0446, 0.0272}, {-0.0301, -0.0080}, {-0.0430, 0.0253}, {-0.0356, -0.0236}, { 0.0440, 0.0047}, { 0.0132, -0.0489}, {-0.0386, 0.0283}, { 0.0358, -0.0406}, { 0.0398, -0.0287}, {-0.0435, 0.0112}, { 0.0401, -0.0379}, {-0.0366, -0.0095}, {-0.0269, -0.0258}, { 0.0063, -0.0247}, {-0.0452, 0.0115}, {-0.0243, 0.0031}, {-0.0437, -0.0078}, {-0.0229, -0.0253}, { 0.0401, 0.0421}, {-0.0043, -0.0290}, {-0.0384, 0.0250}, {-0.0475, 0.0327}, {-0.0308, 0.0241}, { 0.0396, -0.0381}, {-0.0165, -0.0106}, {-0.0431, 0.0124}, { 0.0391, -0.0299}, {-0.0463, 0.0281}, { 0.0282, -0.0134}, {-0.0480, 0.0320}, {-0.0385, -0.0184}, {-0.0482, 0.0344}, {-0.0162, 0.0178}, {-0.0476, 0.0292}, { 0.0382, -0.0366}, {-0.0436, -0.0060}, {-0.0036, 0.0032}, {-0.0413, -0.0143}, { 0.0414, 0.0183}, {-0.0465, 0.0274}, { 0.0027, 0.0171}, { 0.0025, 0.0141}, { 0.0418, -0.0204}, {-0.0033, -0.0350}, {-0.0009, -0.0127}, { 0.0256, -0.0390}, { 0.0193, 0.0170}, {-0.0451, 0.0338}, { 0.0438, 0.0795}, { 0.0420, 0.0613}, { 0.0344, -0.0355}, {-0.0346, -0.0093}, { 0.0001, -0.0488}, {-0.0038, -0.0378}, {-0.0036, -0.0352}, {-0.0020, 0.0169}, {-0.0393, -0.0068}, { 0.0335, -0.0423}, { 0.0177, 0.0014}, {-0.0351, 0.0198}, {-0.0364, -0.0073}, { 0.0023, 0.0202}, {-0.0235, -0.0383}, { 0.0438, 0.0010}, {-0.0449, 0.0065}, { 0.0174, -0.0377}, {-0.0066, -0.0097}, { 0.0443, 0.0817}, {-0.0106, -0.0429}, {-0.0464, 0.0343}, {-0.0459, 0.0191}, { 0.0053, 0.0220}, { 0.0377, 0.0338}, {-0.0400, -0.0036}, {-0.0479, 0.0311}, {-0.0385, 0.0176}, { 0.0438, 0.0114}, {-0.0469, 0.0315}, {-0.0288, 0.0240}, { 0.0443, 0.0825}, { 0.0223, 0.0310}, {-0.0065, -0.0466}, { 0.0440, 0.0186}, 
{-0.0454, 0.0179}, { 0.0345, -0.0412}, {-0.0227, -0.0383}, { 0.0423, -0.0147}, { 0.0431, -0.0063}, {-0.0370, -0.0088}, { 0.0284, 0.0329}, {-0.0025, -0.0403}, {-0.0409, 0.0242}, {-0.0433, -0.0042}, {-0.0275, -0.0059}, {-0.0216, -0.0160}, {-0.0049, 0.0151}, { 0.0096, -0.0183}, { 0.0039, -0.0485}, { 0.0045, -0.0527}, { 0.0430, 0.0187}, { 0.0362, -0.0083}, {-0.0241, 0.0026}, {-0.0445, 0.0092}, { 0.0387, 0.0623}, { 0.0023, -0.0532}, {-0.0362, 0.0190}, {-0.0246, -0.0321}, { 0.0019, -0.0475}, {-0.0230, -0.0173}, { 0.0402, -0.0307}, { 0.0014, 0.0051}, {-0.0481, 0.0320}, {-0.0396, -0.0061}, { 0.0149, -0.0020}, { 0.0406, -0.0350}, { 0.0004, -0.0175}, {-0.0329, 0.0254}, {-0.0257, -0.0287}, { 0.0291, -0.0457}, {-0.0314, -0.0093}, { 0.0414, 0.0476}, { 0.0365, -0.0346}, { 0.0443, 0.0819}, { 0.0400, 0.0022}, {-0.0439, 0.0331}, {-0.0397, -0.0124}, { 0.0143, 0.0270}, {-0.0429, -0.0118}, { 0.0437, 0.0161}, { 0.0307, 0.0047}, {-0.0178, 0.0178}, {-0.0438, -0.0045}, { 0.0202, 0.0286}, { 0.0362, -0.0080}, {-0.0446, 0.0336}, { 0.0164, 0.0149}, {-0.0012, 0.0176}, {-0.0250, -0.0362}, { 0.0286, -0.0439}, { 0.0141, -0.0028}, {-0.0376, 0.0246}, { 0.0006, 0.0182}, { 0.0230, -0.0218}, { 0.0394, 0.0630}, { 0.0397, 0.0009}, {-0.0458, 0.0341}, { 0.0097, 0.0233}, {-0.0064, -0.0016}, { 0.0395, 0.0016}, {-0.0401, 0.0174}, { 0.0442, 0.0817}, {-0.0115, -0.0424}, {-0.0225, -0.0188}, { 0.0370, -0.0401}, { 0.0424, -0.0137}, {-0.0483, 0.0344}, { 0.0307, -0.0142}, {-0.0334, -0.0050}, { 0.0405, 0.0688}, { 0.0398, 0.0602}, {-0.0483, 0.0338}, { 0.0030, -0.0351}, {-0.0430, -0.0117}, { 0.0165, -0.0067}, { 0.0425, 0.0698}, { 0.0257, 0.0140}, { 0.0357, -0.0093}, {-0.0399, -0.0166}, { 0.0441, 0.0798}, { 0.0299, 0.0390}, { 0.0302, 0.0378}, {-0.0462, 0.0310}, { 0.0397, 0.0013}, { 0.0237, -0.0391}, {-0.0257, -0.0284}, { 0.0138, -0.0081}, { 0.0266, -0.0430}, { 0.0112, 0.0194}, { 0.0435, 0.0123}, {-0.0073, -0.0424}, {-0.0203, 0.0053}, {-0.0407, 0.0234}, { 0.0408, -0.0283}, { 0.0271, -0.0476}, {-0.0232, 0.0045}, 
{-0.0393, -0.0068}, { 0.0313, 0.0456}, {-0.0044, 0.0009}, { 0.0424, 0.0652}, {-0.0203, -0.0415}, {-0.0234, -0.0164}, {-0.0202, -0.0419}, {-0.0420, 0.0277}, {-0.0402, 0.0112}, {-0.0224, -0.0221}, {-0.0091, -0.0438}, { 0.0183, -0.0440}, {-0.0061, -0.0517}, {-0.0421, -0.0112}, { 0.0292, 0.0180}, { 0.0228, 0.0370}, { 0.0288, -0.0428}, { 0.0381, -0.0320}, {-0.0130, -0.0484}, {-0.0313, 0.0250}, {-0.0224, -0.0054}, { 0.0395, 0.0601}, {-0.0117, -0.0431}, {-0.0484, 0.0344}, { 0.0297, 0.0111}, {-0.0432, -0.0113}, { 0.0395, -0.0390}, {-0.0383, -0.0095}, { 0.0250, 0.0110}, { 0.0012, -0.0532}, {-0.0266, 0.0225}, { 0.0191, -0.0413}, { 0.0267, 0.0318}, {-0.0463, 0.0283}, {-0.0466, 0.0286}, { 0.0220, 0.0349}, {-0.0131, 0.0056}, { 0.0366, 0.0536}, { 0.0446, 0.0160}, { 0.0442, 0.0816}, { 0.0437, 0.0161}, { 0.0045, -0.0413}, {-0.0425, 0.0083}, { 0.0141, 0.0115}, {-0.0175, 0.0185}, {-0.0315, -0.0075}, {-0.0008, -0.0187}, { 0.0283, -0.0470}, { 0.0265, -0.0150}, { 0.0265, 0.0101}, { 0.0284, -0.0468}, { 0.0031, -0.0243}, { 0.0175, -0.0500}, { 0.0362, -0.0117}, {-0.0378, 0.0282}, {-0.0438, -0.0062}, {-0.0329, 0.0171}, {-0.0437, -0.0056}, { 0.0130, 0.0158}, { 0.0057, -0.0491}, { 0.0402, -0.0371}, { 0.0442, 0.0078}, { 0.0436, 0.0788}, {-0.0042, 0.0166}, {-0.0423, -0.0095}, {-0.0456, 0.0208}, { 0.0351, -0.0304}, { 0.0241, -0.0441}, { 0.0445, 0.0163}, {-0.0035, -0.0339}, { 0.0361, 0.0553}, { 0.0401, -0.0379}, {-0.0446, 0.0336}, {-0.0449, 0.0202}, { 0.0294, 0.0347}, { 0.0353, -0.0072}, { 0.0339, -0.0068}, {-0.0478, 0.0304}, {-0.0430, 0.0141}, { 0.0285, 0.0331}, {-0.0311, -0.0075}, {-0.0341, 0.0174}, {-0.0200, -0.0407}, { 0.0354, -0.0274}, {-0.0120, 0.0057}, {-0.0025, -0.0157}, {-0.0391, 0.0253}, { 0.0223, 0.0308}, { 0.0042, -0.0296}, {-0.0015, -0.0481}, { 0.0111, -0.0499}, {-0.0019, 0.0026}, { 0.0349, -0.0353}, {-0.0402, -0.0036}, {-0.0451, 0.0087}, { 0.0430, 0.0023}, {-0.0005, -0.0000}, {-0.0250, -0.0284}, {-0.0346, 0.0206}, {-0.0008, -0.0531}, { 0.0218, -0.0484}, {-0.0450, 0.0077}, {-0.0362, 
-0.0070}, {-0.0406, 0.0221}, { 0.0069, -0.0466}, { 0.0318, 0.0014}, {-0.0042, -0.0383}, { 0.0087, 0.0235}, {-0.0037, -0.0407}, {-0.0372, 0.0013}, {-0.0394, 0.0261}, {-0.0358, -0.0093}, { 0.0226, 0.0369}, { 0.0020, -0.0533}, { 0.0358, -0.0406}, { 0.0216, 0.0346}, { 0.0400, 0.0365}, { 0.0178, -0.0397}, {-0.0346, -0.0093}, {-0.0403, 0.0253}, {-0.0394, 0.0078}, { 0.0109, 0.0182}, { 0.0276, 0.0218}, {-0.0029, -0.0483}, {-0.0459, 0.0341}, {-0.0354, -0.0117}, {-0.0403, 0.0272}, {-0.0349, 0.0268}, {-0.0475, 0.0320}, { 0.0217, 0.0127}, { 0.0348, -0.0385}, { 0.0422, 0.0642}, { 0.0001, -0.0488}, {-0.0004, -0.0032}, { 0.0360, -0.0197}, { 0.0405, 0.0688}, {-0.0294, 0.0242}, { 0.0089, -0.0244}, {-0.0322, -0.0086}, {-0.0018, -0.0397}, {-0.0007, 0.0170}, { 0.0312, 0.0147}, {-0.0325, -0.0063}, { 0.0446, 0.0139}, { 0.0411, -0.0304}, {-0.0476, 0.0303}, { 0.0048, -0.0264}, {-0.0401, 0.0105}, {-0.0402, 0.0108}, {-0.0408, 0.0004}, {-0.0136, 0.0066}, {-0.0182, -0.0421}, { 0.0246, 0.0390}, {-0.0237, -0.0022}, {-0.0152, -0.0115}, {-0.0362, 0.0229}, { 0.0408, 0.0473}, { 0.0171, -0.0387}, {-0.0439, -0.0057}, {-0.0224, -0.0102}, {-0.0435, -0.0092}, { 0.0236, 0.0099}, {-0.0230, 0.0024}, {-0.0137, -0.0479}, { 0.0233, -0.0476}, { 0.0440, 0.0047}, {-0.0107, -0.0460}, { 0.0430, -0.0069}, { 0.0228, -0.0293}, {-0.0455, 0.0222}, { 0.0380, 0.0588}, { 0.0018, -0.0262}, { 0.0341, 0.0117}, { 0.0311, 0.0407}, {-0.0459, 0.0170}, { 0.0409, -0.0333}, {-0.0442, 0.0161}, {-0.0407, 0.0266}, {-0.0463, 0.0272}, {-0.0091, 0.0030}, {-0.0400, 0.0213}, { 0.0446, 0.0155}, { 0.0427, 0.0014}, { 0.0119, 0.0231}, { 0.0218, -0.0391}, { 0.0406, 0.0542}, {-0.0471, 0.0331}, {-0.0240, -0.0027}, {-0.0150, -0.0430}, {-0.0465, 0.0296}, { 0.0401, -0.0379}, {-0.0451, 0.0099}, { 0.0152, -0.0007}, {-0.0036, 0.0032}, { 0.0190, -0.0406}, {-0.0481, 0.0324}, { 0.0345, 0.0313}, {-0.0450, 0.0338}, { 0.0286, -0.0024}, { 0.0033, -0.0527}, { 0.0398, -0.0284}, { 0.0423, 0.0342}, { 0.0247, -0.0472}, {-0.0439, 0.0331}, {-0.0224, -0.0054}, 
{-0.0426, -0.0123}, {-0.0356, 0.0206}, {-0.0163, 0.0115}, { 0.0138, 0.0214}, { 0.0228, -0.0466}, { 0.0368, -0.0018}, {-0.0281, 0.0236}, { 0.0227, -0.0486}, {-0.0294, 0.0244}, {-0.0406, 0.0302}, { 0.0431, 0.0012}, { 0.0162, -0.0371}, {-0.0247, -0.0283}, { 0.0419, 0.0726}, { 0.0405, 0.0049}, { 0.0345, 0.0280}, {-0.0138, 0.0177}, { 0.0011, -0.0077}, {-0.0396, 0.0238}, {-0.0126, -0.0112}, { 0.0429, -0.0090}, { 0.0323, -0.0006}, {-0.0386, -0.0188}, { 0.0218, 0.0084}, { 0.0031, -0.0470}, {-0.0402, 0.0144}, { 0.0142, -0.0501}, { 0.0333, -0.0408}, { 0.0144, 0.0065}, {-0.0454, 0.0268}, { 0.0261, -0.0430}, { 0.0177, -0.0044}, { 0.0427, 0.0671}, {-0.0213, -0.0254}, { 0.0077, 0.0229}, { 0.0311, -0.0153}, {-0.0171, 0.0167}, { 0.0442, 0.0820}, { 0.0385, 0.0368}, { 0.0252, 0.0386}, {-0.0191, -0.0418}, {-0.0147, 0.0168}, { 0.0204, 0.0048}, {-0.0417, -0.0140}, {-0.0471, 0.0341}, {-0.0436, -0.0071}, { 0.0051, 0.0219}, {-0.0256, -0.0310}, { 0.0396, 0.0585}, {-0.0437, 0.0277}, { 0.0195, 0.0330}, {-0.0391, -0.0156}, {-0.0416, -0.0107}, {-0.0378, 0.0282}, { 0.0298, -0.0143}, {-0.0086, -0.0428}, { 0.0415, 0.0459}, { 0.0443, 0.0819}, {-0.0130, -0.0485}, {-0.0063, -0.0519}, { 0.0255, -0.0433}, { 0.0327, -0.0127}, {-0.0237, 0.0045}, {-0.0435, -0.0062}, {-0.0399, -0.0166}, { 0.0380, -0.0374}, {-0.0440, 0.0121}, { 0.0433, 0.0081}, { 0.0357, -0.0093}, { 0.0429, 0.0034}, { 0.0092, -0.0420}, {-0.0221, -0.0174}, { 0.0405, 0.0687}, { 0.0067, 0.0227}, { 0.0279, -0.0431}, {-0.0163, -0.0457}, { 0.0015, -0.0533}, { 0.0036, 0.0210}, { 0.0188, -0.0404}, {-0.0432, 0.0115}, {-0.0472, 0.0265}, { 0.0142, -0.0499}, { 0.0297, -0.0421}, {-0.0432, -0.0103} }; double[][] data = new double[1000][]; System.arraycopy(SwissRoll.data, 0, data, 0, data.length); LaplacianEigenmap laplacianEigenmap = LaplacianEigenmap.of(data, 7); for (int i = 0; i < points.length; i++) { for (int j = 0; j < points[0].length; j++) { assertEquals(Math.abs(points[i][j]), Math.abs(laplacianEigenmap.coordinates[i][j]), 1E-4); } } }
/**
 * {@inheritDoc}
 *
 * <p>Delegates directly to the wrapped JDBC {@code ResultSetMetaData}.
 */
@Override
public boolean isAutoIncrement(final int columnIndex) throws SQLException {
    return resultSetMetaData.isAutoIncrement(columnIndex);
}
/** Column 1 is reported as auto-increment by the underlying metadata. */
@Test
void assertIsAutoIncrement() throws SQLException {
    assertTrue(queryResultMetaData.isAutoIncrement(1));
}
/**
 * Sets an attribute by string key.
 *
 * @param key the attribute key
 * @param value the attribute value; delegates to {@code setAttributeObject},
 *        which presumably enforces the type/overwrite rules -- see its contract.
 */
public void setAttribute(final String key, Object value) {
    setAttributeObject(key, value);
}
/**
 * Setting the same class-keyed attribute twice with a different value must
 * fail with AttributeAlreadySetException rather than silently overwrite.
 */
@Test(expected=AttributeAlreadySetException.class)
public void cannotSetDifferentClassAttributeValue() {
    Entry entry = new Entry();
    entry.setAttribute(String.class, "value");
    // Second write with a different value must throw.
    entry.setAttribute(String.class, "value2");
}
/**
 * Builds the {@code BlockStmt} that declares a {@code KiePMMLRow} variable,
 * by cloning the row template method body and rewriting two variables in it:
 * the column-values map initializer (populated from the given {@code Row}'s
 * data) and the row variable itself (renamed and pointed at the new map).
 *
 * @param variableName the name to give the generated row variable; also used
 *        to derive the name of the companion column-values variable
 * @param row the source PMML row whose key/value pairs seed the map
 * @return the rewritten template body, ready to be inserted into generated code
 * @throws KiePMMLException if the template is missing the expected body,
 *         variables, or initializers
 */
static BlockStmt getRowVariableDeclaration(final String variableName, final Row row) {
    // Work on a clone so the shared template is never mutated.
    final MethodDeclaration methodDeclaration =
            ROW_TEMPLATE.getMethodsByName(GETKIEPMMLROW).get(0).clone();
    final BlockStmt toReturn = methodDeclaration.getBody()
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration)));
    // --- Part 1: rename and repopulate the column-values variable. ---
    final String columnValuesVariableName =
            String.format(VARIABLE_NAME_TEMPLATE, variableName, COLUMN_VALUES);
    final VariableDeclarator columnValuesVariableDeclarator =
            getVariableDeclarator(toReturn, COLUMN_VALUES)
                    .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, ROW, toReturn)));
    columnValuesVariableDeclarator.setName(columnValuesVariableName);
    // Drill down through the template's initializer expression:
    // methodCall -> scope methodCall -> array creation -> array initializer.
    final MethodCallExpr columnValuesVariableInit = columnValuesVariableDeclarator.getInitializer()
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, COLUMN_VALUES, toReturn)))
            .asMethodCallExpr();
    final MethodCallExpr columnValuesVariableScope = columnValuesVariableInit.getScope()
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, COLUMN_VALUES, toReturn)))
            .asMethodCallExpr();
    final ArrayCreationExpr columnValuesVariableArray =
            columnValuesVariableScope.getArguments().get(0).asArrayCreationExpr();
    final ArrayInitializerExpr columnValuesVariableArrayInit = columnValuesVariableArray.getInitializer()
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, COLUMN_VALUES, toReturn)))
            .asArrayInitializerExpr();
    // One {key, value} pair per entry of the row's data map.
    Map<String, Object> rowDataMap = getRowDataMap(row);
    NodeList<Expression> arguments = new NodeList<>();
    rowDataMap.entrySet().forEach(entry -> {
        ArrayInitializerExpr argument = new ArrayInitializerExpr();
        NodeList<Expression> values =
                NodeList.nodeList(new StringLiteralExpr(entry.getKey()),
                                  getExpressionForObject(entry.getValue()));
        argument.setValues(values);
        arguments.add(argument);
    });
    columnValuesVariableArrayInit.setValues(arguments);
    // --- Part 2: rename the row variable and wire it to the new map. ---
    final VariableDeclarator variableDeclarator = getVariableDeclarator(toReturn, ROW)
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, ROW, toReturn)));
    variableDeclarator.setName(variableName);
    final ObjectCreationExpr objectCreationExpr = variableDeclarator.getInitializer()
            .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, ROW, toReturn)))
            .asObjectCreationExpr();
    // The row constructor's first argument becomes the renamed map variable.
    final NameExpr nameExpr = new NameExpr(columnValuesVariableName);
    objectCreationExpr.getArguments().set(0, nameExpr);
    return toReturn;
}
/**
 * The generated row declaration for a data-encoded row must match the expected
 * source template and compile with the required imports.
 */
@Test
void getDataEncodedRowVariableDeclaration() throws IOException {
    String variableName = "variableName";
    BlockStmt retrieved = KiePMMLRowFactory.getRowVariableDeclaration(variableName, DATAENCODED_ROW);
    // The expected source is a file template parameterized on the variable name.
    String text = getFileContent(TEST_02_SOURCE);
    Statement expected = JavaParserUtils.parseBlock(String.format(text, variableName));
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
    List<Class<?>> imports = Arrays.asList(Collectors.class, KiePMMLRow.class, Map.class, Stream.class);
    commonValidateCompilationWithImports(retrieved, imports);
}
/**
 * Writes the textual (smali) representation of an arbitrary encoded value,
 * dispatching on its {@code ValueType} constant.
 *
 * @param encodedValue the value to render
 * @throws IOException if the underlying writer fails
 * @throws IllegalArgumentException if the value's type is not a known
 *         {@code ValueType} constant
 */
public void writeEncodedValue(EncodedValue encodedValue) throws IOException {
    switch (encodedValue.getValueType()) {
        case ValueType.BOOLEAN:
            writeBooleanEncodedValue((BooleanEncodedValue) encodedValue);
            break;
        case ValueType.BYTE:
            // Integral types carry a one-character suffix identifying their width.
            writeIntegralValue(((ByteEncodedValue) encodedValue).getValue(), 't');
            break;
        case ValueType.CHAR:
            writeCharEncodedValue((CharEncodedValue) encodedValue);
            break;
        case ValueType.SHORT:
            writeIntegralValue(((ShortEncodedValue) encodedValue).getValue(), 's');
            break;
        case ValueType.INT:
            // Plain int has no suffix.
            writeIntegralValue(((IntEncodedValue) encodedValue).getValue(), null);
            break;
        case ValueType.LONG:
            writeIntegralValue(((LongEncodedValue) encodedValue).getValue(), 'L');
            break;
        case ValueType.FLOAT:
            writeFloatEncodedValue((FloatEncodedValue) encodedValue);
            break;
        case ValueType.DOUBLE:
            writeDoubleEncodedValue((DoubleEncodedValue) encodedValue);
            break;
        case ValueType.ANNOTATION:
            writeAnnotation((AnnotationEncodedValue) encodedValue);
            break;
        case ValueType.ARRAY:
            writeArray((ArrayEncodedValue) encodedValue);
            break;
        case ValueType.STRING:
            writeQuotedString(((StringEncodedValue) encodedValue).getValue());
            break;
        case ValueType.FIELD:
            writeFieldDescriptor(((FieldEncodedValue) encodedValue).getValue());
            break;
        case ValueType.ENUM:
            writeEnum((EnumEncodedValue) encodedValue);
            break;
        case ValueType.METHOD:
            writeMethodDescriptor(((MethodEncodedValue) encodedValue).getValue());
            break;
        case ValueType.TYPE:
            writeType(((TypeEncodedValue) encodedValue).getValue());
            break;
        case ValueType.METHOD_TYPE:
            writeMethodProtoDescriptor(((MethodTypeEncodedValue) encodedValue).getValue());
            break;
        case ValueType.METHOD_HANDLE:
            writeMethodHandle(((MethodHandleEncodedValue) encodedValue).getValue());
            break;
        case ValueType.NULL:
            writer.write("null");
            break;
        default:
            // Include the offending constant so unknown/new types are diagnosable.
            throw new IllegalArgumentException(
                    "Unknown encoded value type: " + encodedValue.getValueType());
    }
}
/**
 * Array values render one element per line, and descriptors containing spaces
 * must be backtick-quoted in the smali output.
 */
@Test
public void testWriteEncodedValue_array_withSpaces() throws IOException {
    BaksmaliWriter writer = new BaksmaliWriter(output);

    writer.writeEncodedValue(new ImmutableArrayEncodedValue(ImmutableList.of(
            new ImmutableFieldEncodedValue(getFieldReferenceWithSpaces()),
            new ImmutableMethodEncodedValue(getMethodReferenceWithSpaces()))));

    Assert.assertEquals(
            "{\n" +
                    "    Ldefining/class/`with spaces`;->`fieldName with spaces`:Lfield/`type with spaces`;,\n" +
                    "    Ldefining/class/`with spaces`;->`methodName with spaces`(L`param with spaces 1`;L`param with spaces 2`;)Lreturn/type/`with spaces`;\n" +
                    "}",
            output.toString());
}
/**
 * Two instances are equal iff they carry the same polling interval and
 * timeout values.
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (o instanceof WaitForOptions) {
        WaitForOptions other = (WaitForOptions) o;
        return other.mIntervalMs == mIntervalMs && other.mTimeoutMs == mTimeoutMs;
    }
    return false;
}
/** Exercises the equals/hashCode contract via the shared reflection-based helper. */
@Test
public void equalsTest() throws Exception {
    CommonUtils.testEquals(WaitForOptions.class);
}
/**
 * Prepares and sends fetch requests for all fetchable partitions.
 *
 * @return the number of fetch requests prepared (one per target node)
 */
public synchronized int sendFetches() {
    final Map<Node, FetchSessionHandler.FetchRequestData> fetchRequests = prepareFetchRequests();
    sendFetchesInternal(
            fetchRequests,
            (fetchTarget, data, clientResponse) -> {
                // Completion callbacks presumably fire on a different thread than the
                // caller's, hence the explicit re-synchronization on the Fetcher.
                synchronized (Fetcher.this) {
                    handleFetchSuccess(fetchTarget, data, clientResponse);
                }
            },
            (fetchTarget, data, error) -> {
                synchronized (Fetcher.this) {
                    handleFetchFailure(fetchTarget, data, error);
                }
            });
    return fetchRequests.size();
}
/**
 * After a fetch error, the fetcher must drop the previously established
 * preferred read replica and fall back to the leader (-1).
 */
@Test
public void testFetchErrorShouldClearPreferredReadReplica() {
    buildFetcher(new MetricConfig(), OffsetResetStrategy.EARLIEST, new BytesDeserializer(), new BytesDeserializer(),
            Integer.MAX_VALUE, IsolationLevel.READ_COMMITTED, Duration.ofMinutes(5).toMillis());
    subscriptions.assignFromUser(singleton(tp0));
    client.updateMetadata(RequestTestUtils.metadataUpdateWithIds(2, singletonMap(topicName, 4),
            tp -> validLeaderEpoch, topicIds, false));
    subscriptions.seek(tp0, 0);
    assertEquals(1, sendFetches());

    // Successful fetch whose response sets the preferred read replica to node=1.
    client.prepareResponse(fullFetchResponse(tidp0, records, Errors.NONE, 100L,
            FetchResponse.INVALID_LAST_STABLE_OFFSET, 0, Optional.of(1)));
    consumerClient.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    fetchRecords();

    // Verify the replica selection picked node 1.
    Node selected = fetcher.selectReadReplica(tp0, Node.noNode(), time.milliseconds());
    assertEquals(1, selected.id());

    assertEquals(1, sendFetches());
    assertFalse(fetcher.hasCompletedFetches());

    // Error - preferred read replica should be cleared. An actual error response will contain -1 as the
    // preferred read replica. In the test we want to ensure that we are handling the error.
    client.prepareResponse(fullFetchResponse(tidp0, MemoryRecords.EMPTY, Errors.NOT_LEADER_OR_FOLLOWER, -1L,
            FetchResponse.INVALID_LAST_STABLE_OFFSET, 0, Optional.of(1)));
    consumerClient.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());
    fetchRecords();
    selected = fetcher.selectReadReplica(tp0, Node.noNode(), time.milliseconds());
    assertEquals(-1, selected.id());
}
/**
 * Rebuilds a struct value against this translator's target schema.
 *
 * <p>The target schema may declare more fields than the incoming struct;
 * only the fields present in the source are copied, and the rest are left
 * unset by default (formats that need them, e.g. Avro, handle this in a
 * subclass). Non-struct values pass through untouched.
 *
 * @param ksqlData the value to translate
 * @return a struct conforming to the target schema, or the input unchanged
 */
@Override
public Object toConnectRow(final Object ksqlData) {
    // Non-struct values need no re-shaping.
    if (!(ksqlData instanceof Struct)) {
        return ksqlData;
    }
    final Struct source = (Struct) ksqlData;
    final Schema targetSchema = getSchema();
    validate(source.schema(), targetSchema);
    final Struct rebuilt = new Struct(targetSchema);
    for (final Field field : source.schema().fields()) {
        rebuilt.put(field.name(), source.get(field));
    }
    return rebuilt;
}
@Test public void shouldThrowIfConvertInvalidValue() { // Given: final Schema schema = SchemaBuilder.struct() .field("f1", SchemaBuilder.STRING_SCHEMA) .field("f2", SchemaBuilder.OPTIONAL_INT32_SCHEMA) .field("f3", SchemaBuilder.OPTIONAL_INT64_SCHEMA) .build(); final Struct struct = new Struct(ORIGINAL_SCHEMA) .put("f1", null) .put("f2", 12); // When: final Exception e = assertThrows( DataException.class, () -> new ConnectSRSchemaDataTranslator(schema).toConnectRow(struct) ); // Then: assertThat(e.getMessage(), is("Invalid value: null used for required field: \"f1\", " + "schema type: STRING")); }
/**
 * Flushes all registered unit-of-work actions to the database.
 *
 * <p>Does nothing when the context is null or empty. Actions are applied in
 * a fixed order: inserts, then modifications, then deletes.
 */
@Override
public void commit() {
    if (context == null || context.isEmpty()) {
        return;
    }
    LOGGER.info("Commit started");
    runIfPresent(UnitActions.INSERT, this::commitInsert);
    runIfPresent(UnitActions.MODIFY, this::commitModify);
    runIfPresent(UnitActions.DELETE, this::commitDelete);
    LOGGER.info("Commit finished.");
}

/** Runs the given task only if the context holds entries for the action. */
private void runIfPresent(UnitActions action, Runnable task) {
    if (context.containsKey(action.getActionValue())) {
        task.run();
    }
}
/** Committing a repository built with a null context must not touch the database. */
@Test
void shouldNotWriteToDbIfContextIsNull() {
    new ArmsDealer(null, weaponDatabase).commit();
    verifyNoMoreInteractions(weaponDatabase);
}
/**
 * Offloads the given ledger to the configured blob store as a multi-part "data"
 * object plus a single "index" object, keyed by the ledger id and the supplied
 * uuid. The work runs asynchronously on the scheduler thread chosen for the
 * ledger id; the returned future completes when both objects are written, or
 * completes exceptionally on the first failure (after best-effort cleanup).
 */
@Override
public CompletableFuture<Void> offload(ReadHandle readHandle, UUID uuid,
                                       Map<String, String> extraMetadata) {
    final String managedLedgerName = extraMetadata.get(MANAGED_LEDGER_NAME);
    final String topicName = TopicName.fromPersistenceNamingEncoding(managedLedgerName);
    CompletableFuture<Void> promise = new CompletableFuture<>();
    scheduler.chooseThread(readHandle.getId()).execute(() -> {
        final BlobStore writeBlobStore = getBlobStore(config.getBlobStoreLocation());
        log.info("offload {} uuid {} extraMetadata {} to {} {}", readHandle.getId(), uuid,
                extraMetadata, config.getBlobStoreLocation(), writeBlobStore);
        // Only fully closed, non-empty ledgers may be offloaded.
        if (readHandle.getLength() == 0 || !readHandle.isClosed()
                || readHandle.getLastAddConfirmed() < 0) {
            promise.completeExceptionally(
                    new IllegalArgumentException("An empty or open ledger should never be offloaded"));
            return;
        }
        // Index builder accumulates (startEntry, partId, blockSize) triples as parts upload.
        OffloadIndexBlockBuilder indexBuilder = OffloadIndexBlockBuilder.create()
                .withLedgerMetadata(readHandle.getLedgerMetadata())
                .withDataBlockHeaderLength(BlockAwareSegmentInputStreamImpl.getHeaderSize());
        String dataBlockKey = DataBlockUtils.dataBlockOffloadKey(readHandle.getId(), uuid);
        String indexBlockKey = DataBlockUtils.indexBlockOffloadKey(readHandle.getId(), uuid);
        log.info("ledger {} dataBlockKey {} indexBlockKey {}",
                readHandle.getId(), dataBlockKey, indexBlockKey);
        MultipartUpload mpu = null;
        List<MultipartPart> parts = Lists.newArrayList();

        // init multi part upload for data block.
        try {
            BlobBuilder blobBuilder = writeBlobStore.blobBuilder(dataBlockKey);
            // User metadata is tagged with role=data and merged with any caller-supplied metadata.
            Map<String, String> objectMetadata = new HashMap<>(userMetadata);
            objectMetadata.put("role", "data");
            if (extraMetadata != null) {
                objectMetadata.putAll(extraMetadata);
            }
            DataBlockUtils.addVersionInfo(blobBuilder, objectMetadata);
            Blob blob = blobBuilder.build();
            log.info("initiateMultipartUpload bucket {}, metadata {} ",
                    config.getBucket(), blob.getMetadata());
            mpu = writeBlobStore.initiateMultipartUpload(config.getBucket(),
                    blob.getMetadata(), new PutOptions());
        } catch (Throwable t) {
            promise.completeExceptionally(t);
            return;
        }

        long dataObjectLength = 0;
        // start multi part upload for data block.
        try {
            long startEntry = 0;
            int partId = 1;
            long start = System.nanoTime(); // NOTE(review): unused — presumably leftover timing code; confirm before removing.
            long entryBytesWritten = 0;
            // Upload the ledger as consecutive blocks until past the last confirmed entry.
            while (startEntry <= readHandle.getLastAddConfirmed()) {
                int blockSize = BlockAwareSegmentInputStreamImpl
                        .calculateBlockSize(config.getMaxBlockSizeInBytes(), readHandle,
                                startEntry, entryBytesWritten);
                try (BlockAwareSegmentInputStream blockStream = new BlockAwareSegmentInputStreamImpl(
                        readHandle, startEntry, blockSize, this.offloaderStats, managedLedgerName)) {
                    Payload partPayload = Payloads.newInputStreamPayload(blockStream);
                    partPayload.getContentMetadata().setContentLength((long) blockSize);
                    partPayload.getContentMetadata().setContentType("application/octet-stream");
                    parts.add(writeBlobStore.uploadMultipartPart(mpu, partId, partPayload));
                    log.debug("UploadMultipartPart. container: {}, blobName: {}, partId: {}, mpu: {}",
                            config.getBucket(), dataBlockKey, partId, mpu.id());
                    indexBuilder.addBlock(startEntry, partId, blockSize);
                    if (blockStream.getEndEntryId() != -1) {
                        startEntry = blockStream.getEndEntryId() + 1;
                    } else {
                        // could not read entry from ledger.
                        // NOTE(review): breaking here skips the dataObjectLength += blockSize
                        // below for this final block — confirm the index length stays consistent.
                        break;
                    }
                    entryBytesWritten += blockStream.getBlockEntryBytesCount();
                    partId++;
                    this.offloaderStats.recordOffloadBytes(topicName,
                            blockStream.getBlockEntryBytesCount());
                }
                dataObjectLength += blockSize;
            }
            String etag = writeBlobStore.completeMultipartUpload(mpu, parts);
            log.info("Ledger {}, upload finished, etag {}", readHandle.getId(), etag);
            // Null out mpu so the catch block below knows there is nothing left to abort.
            mpu = null;
        } catch (Throwable t) {
            // Best-effort abort of the in-flight multipart upload before failing the promise.
            try {
                if (mpu != null) {
                    writeBlobStore.abortMultipartUpload(mpu);
                }
            } catch (Throwable throwable) {
                log.error("Failed abortMultipartUpload in bucket - {} with key - {}, uploadId - {}.",
                        config.getBucket(), dataBlockKey, mpu.id(), throwable);
            }
            this.offloaderStats.recordWriteToStorageError(topicName);
            this.offloaderStats.recordOffloadError(topicName);
            promise.completeExceptionally(t);
            return;
        }

        // upload index block
        try (OffloadIndexBlock index = indexBuilder.withDataObjectLength(dataObjectLength).build();
             IndexInputStream indexStream = index.toStream()) {
            // write the index block
            BlobBuilder blobBuilder = writeBlobStore.blobBuilder(indexBlockKey);
            Map<String, String> objectMetadata = new HashMap<>(userMetadata);
            objectMetadata.put("role", "index");
            if (extraMetadata != null) {
                objectMetadata.putAll(extraMetadata);
            }
            DataBlockUtils.addVersionInfo(blobBuilder, objectMetadata);
            Payload indexPayload = Payloads.newInputStreamPayload(indexStream);
            indexPayload.getContentMetadata().setContentLength((long) indexStream.getStreamSize());
            indexPayload.getContentMetadata().setContentType("application/octet-stream");
            Blob blob = blobBuilder
                    .payload(indexPayload)
                    .contentLength((long) indexStream.getStreamSize())
                    .build();
            writeBlobStore.putBlob(config.getBucket(), blob);
            promise.complete(null);
        } catch (Throwable t) {
            // Index upload failed: remove the already-written data object so no orphan remains.
            try {
                writeBlobStore.removeBlob(config.getBucket(), dataBlockKey);
            } catch (Throwable throwable) {
                log.error("Failed deleteObject in bucket - {} with key - {}.",
                        config.getBucket(), dataBlockKey, throwable);
            }
            this.offloaderStats.recordWriteToStorageError(topicName);
            this.offloaderStats.recordOffloadError(topicName);
            promise.completeExceptionally(t);
            return;
        }
    });
    return promise;
}
@Test(timeOut = 600000) // 10 minutes. public void testBucketDoesNotExist() throws Exception { if (provider == JCloudBlobStoreProvider.TRANSIENT) { // Skip this test, since it isn't applicable. return; } LedgerOffloader offloader = getOffloader("some-non-existant-bucket-name"); try { offloader.offload(buildReadHandle(), UUID.randomUUID(), new HashMap<>()).get(); Assert.fail("Shouldn't be able to add to bucket"); } catch (ExecutionException e) { log.error("Exception: ", e); Assert.assertTrue(e.getMessage().toLowerCase().contains("not found")); } }
void resolveSelectors(EngineDiscoveryRequest request, CucumberEngineDescriptor engineDescriptor) {
    // Resolve the request's selectors into the descriptor tree, restrict it to
    // the requested packages, then drop any branches left without tests.
    Predicate<String> includedPackages = buildPackageFilter(request);
    resolve(request, engineDescriptor, includedPackages);
    filter(engineDescriptor, includedPackages);
    pruneTree(engineDescriptor);
}
@Test
void resolveRequestWithUniqueIdSelectorFromJarFileUri() {
    // A feature packaged inside a jar must be discoverable through its file URI.
    URI jarUri = new File("src/test/resources/feature.jar").toURI();
    EngineDiscoveryRequest discoveryRequest = new SelectorRequest(selectUri(jarUri));

    resolver.resolveSelectors(discoveryRequest, testDescriptor);

    assertEquals(1, testDescriptor.getChildren().size());
}
/**
 * Records that the watermark with value {@code wmValue} was observed on the
 * input queue at {@code queueIndex} and returns the coalesced watermark to
 * forward.
 *
 * <p>NOTE(review): judging from the accompanying test, this appears to return
 * the lowest watermark seen across all queues, and {@code Long.MIN_VALUE}
 * until every queue has reported at least once — confirm against the concrete
 * implementation.
 *
 * @param queueIndex index of the input queue the watermark arrived on
 * @param wmValue    the observed watermark value
 * @return the new coalesced watermark, or {@code Long.MIN_VALUE} when none
 *         can be forwarded yet
 */
public abstract long observeWm(int queueIndex, long wmValue);
@Test
public void when_i1_active_i2_active_then_wmForwardedImmediately() {
    // First queue reports: no coalesced watermark yet (queue 1 still silent).
    long afterQueue0 = wc.observeWm(0, 100);
    assertEquals(Long.MIN_VALUE, afterQueue0);

    // Second queue reports: the minimum across queues (100) is forwarded.
    long afterQueue1 = wc.observeWm(1, 101);
    assertEquals(100, afterQueue1);

    // Queue 0 catches up: the watermark advances immediately to 101.
    long afterCatchUp = wc.observeWm(0, 101);
    assertEquals(101, afterCatchUp);
}
/**
 * Returns whether leader information is registered for the given component.
 *
 * @param componentId the component to look up
 * @return {@code true} if the register holds an entry for {@code componentId}
 */
public boolean hasLeaderInformation(String componentId) {
    // Membership check through the key view — equivalent to containsKey.
    return leaderInformationPerComponentId.keySet().contains(componentId);
}
@Test
void testHasLeaderInformation() {
    // Register leader information under a known component id and verify lookup.
    final String registeredComponent = "component-id";
    final LeaderInformation leader =
            LeaderInformation.known(UUID.randomUUID(), "address");
    final LeaderInformationRegister register =
            LeaderInformationRegister.of(registeredComponent, leader);

    assertThat(register.hasLeaderInformation(registeredComponent)).isTrue();
}