focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/** Returns this field's default value as supplied at construction time. */
@Override
public Object getDefaultValue() {
    return this.defaultValue;
}
// Verifies that DropdownField exposes the default value ("fooval") passed to its constructor.
@Test public void testGetDefaultValue() throws Exception { DropdownField f = new DropdownField("test", "Name", "fooval", new HashMap<String, String>(), ConfigurationField.Optional.NOT_OPTIONAL); assertEquals("fooval", f.getDefaultValue()); }
/**
 * Wraps the given delegate in a {@code ConnectorStatusListener} decorator so the
 * group can observe status callbacks before/while forwarding them to the delegate.
 */
ConnectorStatus.Listener wrapStatusListener(ConnectorStatus.Listener delegateListener) {
    final ConnectorStatus.Listener decorated = new ConnectorStatusListener(delegateListener);
    return decorated;
}
// Verifies that a task failure reported through the wrapped listener is both recorded
// in the worker metrics and forwarded to the delegate listener.
@Test public void testTaskFailureBeforeStartupRecordedMetrics() { WorkerMetricsGroup workerMetricsGroup = new WorkerMetricsGroup(new HashMap<>(), new HashMap<>(), connectMetrics); final TaskStatus.Listener taskListener = workerMetricsGroup.wrapStatusListener(delegateTaskListener); taskListener.onFailure(task, exception); verify(delegateTaskListener).onFailure(task, exception); verifyRecordTaskFailure(); }
// Content-negotiation filter for the message export endpoint (requests whose path ends
// with targetPath). Matches the request's acceptable media types against the supported
// export formats; aborts with 415 when nothing is compatible, or — when every compatible
// format reports an error — aborts with 415 and a newline-joined plain-text error body.
// Otherwise rewrites the Accept header to only the error-free media types so downstream
// resources pick a usable format.
@Override public void filter(ContainerRequestContext requestContext) throws IOException { if (!requestContext.getUriInfo().getPath().endsWith(targetPath)) { return; } final List<MediaType> acceptedFormats = requestContext.getAcceptableMediaTypes(); final Map<MediaType, ExportFormat> exportFormatCandidates = supportedFormats.entrySet() .stream() .filter(entry -> acceptedFormats.stream().anyMatch(acceptedFormat -> entry.getKey().isCompatible(acceptedFormat))) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); if (exportFormatCandidates.isEmpty()) { requestContext.abortWith(Response.status(Response.Status.UNSUPPORTED_MEDIA_TYPE).build()); return; } final Map<MediaType, Optional<String>> candidateErrors = exportFormatCandidates.entrySet() .stream() .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().hasError())); if (candidateErrors.values().stream().allMatch(Optional::isPresent)) { final String errorMessage = candidateErrors.values().stream() .map(optionalError -> optionalError.orElse("")) .collect(Collectors.joining("\n")); requestContext.abortWith(Response.status(Response.Status.UNSUPPORTED_MEDIA_TYPE) .entity(errorMessage) .type(MoreMediaTypes.TEXT_PLAIN_TYPE) .build()); return; } final List<String> allowedMediaTypes = candidateErrors.entrySet().stream() .filter(entry -> !entry.getValue().isPresent()) .map(Map.Entry::getKey) .map(MediaType::toString) .collect(Collectors.toList()); requestContext.getHeaders().put(HttpHeaders.ACCEPT, allowedMediaTypes); }
// Verifies the filter aborts the request (415) when the client specifies no acceptable media types.
@Test void returns415IfNoAcceptedFormatIsSpecified() throws Exception { final ContainerRequestFilter filter = new MessageExportFormatFilter(Collections.singleton(() -> MoreMediaTypes.TEXT_PLAIN_TYPE)); final ContainerRequestContext requestContext = mockRequestContext(Collections.emptyList()); filter.filter(requestContext); verifyRequestAborted(requestContext); }
// OAUTHBEARER SASL client state machine. In SEND_CLIENT_FIRST_MESSAGE an empty challenge
// is required and the initial response (token + custom extensions) is returned. In
// RECEIVE_SERVER_FIRST_MESSAGE a non-empty challenge carries a JSON error, so a single
// %x01 byte is sent back and the client waits for the server's post-failure message; an
// empty challenge means success and completes authentication. Any unexpected state or
// callback failure transitions the state to FAILED before (re)throwing as SaslException.
@Override public byte[] evaluateChallenge(byte[] challenge) throws SaslException { try { OAuthBearerTokenCallback callback = new OAuthBearerTokenCallback(); switch (state) { case SEND_CLIENT_FIRST_MESSAGE: if (challenge != null && challenge.length != 0) throw new SaslException("Expected empty challenge"); callbackHandler().handle(new Callback[] {callback}); SaslExtensions extensions = retrieveCustomExtensions(); setState(State.RECEIVE_SERVER_FIRST_MESSAGE); return new OAuthBearerClientInitialResponse(callback.token().value(), extensions).toBytes(); case RECEIVE_SERVER_FIRST_MESSAGE: if (challenge != null && challenge.length != 0) { String jsonErrorResponse = new String(challenge, StandardCharsets.UTF_8); if (log.isDebugEnabled()) log.debug("Sending %%x01 response to server after receiving an error: {}", jsonErrorResponse); setState(State.RECEIVE_SERVER_MESSAGE_AFTER_FAILURE); return new byte[] {BYTE_CONTROL_A}; } callbackHandler().handle(new Callback[] {callback}); if (log.isDebugEnabled()) log.debug("Successfully authenticated as {}", callback.token().principalName()); setState(State.COMPLETE); return null; default: throw new IllegalSaslStateException("Unexpected challenge in Sasl client state " + state); } } catch (SaslException e) { setState(State.FAILED); throw e; } catch (IOException | UnsupportedCallbackException e) { setState(State.FAILED); throw new SaslException(e.getMessage(), e); } }
// Verifies that when no SASL extensions are configured, the client's first message equals
// the bare initial response (token only, nothing appended).
@Test public void testNoExtensionsDoesNotAttachAnythingToFirstClientMessage() throws Exception { TEST_PROPERTIES.clear(); testExtensions = new SaslExtensions(TEST_PROPERTIES); String expectedToken = new String(new OAuthBearerClientInitialResponse("", new SaslExtensions(TEST_PROPERTIES)).toBytes(), StandardCharsets.UTF_8); OAuthBearerSaslClient client = new OAuthBearerSaslClient(new ExtensionsCallbackHandler(false)); String message = new String(client.evaluateChallenge("".getBytes()), StandardCharsets.UTF_8); assertEquals(expectedToken, message); }
// Builds the Jetty readiness-probe handler. Responds 204 when the observed broker state
// is at or past BROKER_RUNNING_STATE (and not BROKER_UNKNOWN_STATE), 503 with a
// plain-text reason when the broker is not yet running, and 404 when the broker-state
// metric gauge is unavailable.
Handler getReadinessHandler() { return new AbstractHandler() { @Override public void handle(String s, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException { response.setContentType("application/json"); response.setCharacterEncoding("UTF-8"); baseRequest.setHandled(true); if (brokerState != null) { byte observedState = (byte) brokerState.value(); boolean stateIsRunning = BROKER_RUNNING_STATE <= observedState && BROKER_UNKNOWN_STATE != observedState; if (stateIsRunning) { LOGGER.trace("Broker is in running according to {}. The current state is {}", brokerStateName, observedState); response.setStatus(HttpServletResponse.SC_NO_CONTENT); } else { LOGGER.trace("Broker is not running according to {}. The current state is {}", brokerStateName, observedState); response.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE); response.getWriter().print("Readiness failed: brokerState is " + observedState); } } else { LOGGER.warn("Broker state metric not found"); response.setStatus(HttpServletResponse.SC_NOT_FOUND); response.getWriter().print("Broker state metric not found"); } } }; }
// Verifies the readiness handler returns 503 when the broker-state gauge reports a
// not-yet-running state (2).
@Test public void testReadinessFail() throws Exception { @SuppressWarnings({ "rawtypes" }) final Gauge brokerState = mock(Gauge.class); when(brokerState.value()).thenReturn((byte) 2); KafkaAgent agent = new KafkaAgent(brokerState, null, null, null); context.setHandler(agent.getReadinessHandler()); server.setHandler(context); server.start(); HttpResponse<String> response = HttpClient.newBuilder() .build() .send(req, HttpResponse.BodyHandlers.ofString()); assertThat(HttpServletResponse.SC_SERVICE_UNAVAILABLE, is(response.statusCode())); }
/**
 * Drains the given source channel to completion and returns everything read.
 * Wraps the Undertow channel in a blocking adapter and copies 1 KiB at a time
 * into an in-memory buffer until EOF (read returns -1).
 *
 * Bug fix: the length argument to {@code out.write} was
 * {@code buffer.arrayOffset() + buffer.limit()} — i.e. an absolute index, not a
 * byte count. It only worked because {@code ByteBuffer.wrap(new byte[1024])}
 * happens to have arrayOffset 0; the correct length is the number of readable
 * bytes, {@code limit() - position()}.
 */
byte[] readFromChannel(StreamSourceChannel source) throws IOException {
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    final ByteBuffer buffer = ByteBuffer.wrap(new byte[1024]);
    ReadableByteChannel blockingSource = new BlockingReadableByteChannel(source);
    for (;;) {
        int res = blockingSource.read(buffer);
        if (res == -1) {
            // EOF: everything received so far is the payload
            return out.toByteArray();
        } else if (res == 0) {
            // The adapter is supposed to block until data is available
            LOG.error("Channel did not block");
        } else {
            cast(buffer).flip();
            // Copy exactly the readable region: start at arrayOffset+position,
            // length = limit - position (NOT arrayOffset + limit).
            out.write(buffer.array(),
                    buffer.arrayOffset() + cast(buffer).position(),
                    cast(buffer).limit() - cast(buffer).position());
            cast((Buffer) buffer).clear();
        }
    }
}
// Verifies readFromChannel reads the whole payload even when the source delivers it in
// delayed chunks (an empty chunk followed by data).
@Timeout(10) @Test public void readEntireDelayedPayload() throws Exception { String[] delayedPayloads = new String[] { "", "chunk", }; StreamSourceChannel source = source(delayedPayloads); DefaultUndertowHttpBinding binding = new DefaultUndertowHttpBinding(); String result = new String(binding.readFromChannel(source)); checkResult(result, delayedPayloads); }
/**
 * Tells whether an invocation is a Dubbo generic call: the method must be
 * {@code $invoke} or {@code $invokeAsync} AND the parameter descriptor must be
 * the generic-call signature.
 */
public static boolean isGenericCall(String parameterTypesDesc, String method) {
    final boolean isGenericMethodName = $INVOKE.equals(method) || $INVOKE_ASYNC.equals(method);
    return isGenericMethodName && GENERIC_PARAMETER_DESC.equals(parameterTypesDesc);
}
// Verifies generic-call detection: true for $invoke/$invokeAsync with the generic
// parameter descriptor, false for ordinary method names.
@Test void testIsGenericCall() { Assertions.assertTrue( RpcUtils.isGenericCall("Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/Object;", "$invoke")); Assertions.assertTrue( RpcUtils.isGenericCall("Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/Object;", "$invokeAsync")); Assertions.assertFalse( RpcUtils.isGenericCall("Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/Object;", "testMethod")); }
/**
 * Returns the overlap of this interval with {@code other}: it starts at the later
 * of the two starts and ends at the earlier of the two ends. When the intervals
 * are disjoint the returned interval has start > end (i.e. it is not valid).
 */
public Interval intersect(Interval other) {
    final long overlapStart = Math.max(mStartMs, other.mStartMs);
    final long overlapEnd = Math.min(mEndMs, other.mEndMs);
    return between(overlapStart, overlapEnd);
}
// Randomized property test for Interval.intersect: with four sorted random times it
// checks (1) overlapping intervals intersect symmetrically at the inner pair,
// (2) disjoint intervals produce an invalid intersection, and (3) intervals sharing
// only an endpoint also produce an invalid intersection.
@Test public void intersect() { for (int i = 0; i < 100; i++) { List<Long> sortedTimes = ThreadLocalRandom.current().longs(0, Long.MAX_VALUE).limit(4).sorted().boxed() .collect(Collectors.toList()); // the intersection does exist Interval i1 = Interval.between(sortedTimes.get(0), sortedTimes.get(2)); Interval i2 = Interval.between(sortedTimes.get(1), sortedTimes.get(3)); Interval intersect = i1.intersect(i2); Assert.assertTrue(intersect.isValid()); Assert.assertEquals(sortedTimes.get(1).longValue(), intersect.getStartMs()); Assert.assertEquals(sortedTimes.get(2).longValue(), intersect.getEndMs()); intersect = i2.intersect(i1); Assert.assertTrue(intersect.isValid()); Assert.assertEquals(sortedTimes.get(1).longValue(), intersect.getStartMs()); Assert.assertEquals(sortedTimes.get(2).longValue(), intersect.getEndMs()); // the intersection does not exist i1 = Interval.between(sortedTimes.get(0), sortedTimes.get(1)); i2 = Interval.between(sortedTimes.get(2), sortedTimes.get(3)); intersect = i1.intersect(i2); Assert.assertFalse(intersect.isValid()); intersect = i2.intersect(i1); Assert.assertFalse(intersect.isValid()); // the intervals share the same value, intersection does not exist i1 = Interval.between(sortedTimes.get(0), sortedTimes.get(1)); i2 = Interval.between(sortedTimes.get(1), sortedTimes.get(3)); intersect = i1.intersect(i2); Assert.assertFalse(intersect.isValid()); intersect = i2.intersect(i1); Assert.assertFalse(intersect.isValid()); } }
/**
 * Equality contract: identical reference, or same concrete class with no
 * differing fields (as determined by {@code fieldsDiffer}).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // Reject nulls and any other concrete type in one guard.
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    return !fieldsDiffer((LineRange) obj);
}
// Verifies the LineRange equals contract: reflexive, value-equal to an identical range,
// unequal to ranges with different bounds, to null, and to unrelated types.
@Test public void testEquals() { LineRange range = new LineRange(12, 15); assertThat(range) .isEqualTo(range) .isEqualTo(new LineRange(12, 15)) .isNotEqualTo(new LineRange(12, 2000)) .isNotEqualTo(new LineRange(1000, 2000)) .isNotNull() .isNotEqualTo(new StringBuffer()); }
// Computes the path that, resolved against this path, yields `other`. Requires both
// paths to share the same root (mixing absolute and relative throws
// IllegalArgumentException via checkArgument). Strips the longest common name prefix,
// prepends one ".." (Name.PARENT) per name remaining in this path, then appends the
// names remaining in `other`. Equal paths yield the empty path; an empty `this`
// yields `other` unchanged.
@Override public JimfsPath relativize(Path other) { JimfsPath otherPath = checkPath(other); if (otherPath == null) { throw new ProviderMismatchException(other.toString()); } checkArgument( Objects.equals(root, otherPath.root), "Paths have different roots: %s, %s", this, other); if (equals(other)) { return pathService.emptyPath(); } if (isEmptyPath()) { return otherPath; } ImmutableList<Name> otherNames = otherPath.names; int sharedSubsequenceLength = 0; for (int i = 0; i < Math.min(getNameCount(), otherNames.size()); i++) { if (names.get(i).equals(otherNames.get(i))) { sharedSubsequenceLength++; } else { break; } } int extraNamesInThis = Math.max(0, getNameCount() - sharedSubsequenceLength); ImmutableList<Name> extraNamesInOther = (otherNames.size() <= sharedSubsequenceLength) ? ImmutableList.<Name>of() : otherNames.subList(sharedSubsequenceLength, otherNames.size()); List<Name> parts = new ArrayList<>(extraNamesInThis + extraNamesInOther.size()); // add .. for each extra name in this path parts.addAll(Collections.nCopies(extraNamesInThis, Name.PARENT)); // add each extra name in the other path parts.addAll(extraNamesInOther); return pathService.createRelativePath(parts); }
// Verifies relativize rejects mixed absolute/relative arguments (different roots) with
// IllegalArgumentException, in both directions.
@Test public void testRelativize_oneAbsoluteOneRelative() { try { pathService.parsePath("/foo/bar").relativize(pathService.parsePath("foo")); fail(); } catch (IllegalArgumentException expected) { } try { pathService.parsePath("foo").relativize(pathService.parsePath("/foo/bar")); fail(); } catch (IllegalArgumentException expected) { } }
// Applies routing rules to a schema-change event. Every matching route produces a
// derived table: for a single-source mapping the event's table ID is simply rewritten;
// when multiple sources merge into one derived table, the event is translated against
// the derived table's latest evolved schema (drop-column, drop-table and truncate-table
// events are deliberately NOT propagated to the route destination). If no route
// matches, the original event is returned unchanged.
public List<SchemaChangeEvent> applySchemaChange(SchemaChangeEvent schemaChangeEvent) { List<SchemaChangeEvent> events = new ArrayList<>(); TableId originalTable = schemaChangeEvent.tableId(); boolean noRouteMatched = true; for (Tuple3<Selectors, String, String> route : routes) { // Check routing table if (!route.f0.isMatch(originalTable)) { continue; } noRouteMatched = false; // Matched a routing rule TableId derivedTable = resolveReplacement(originalTable, route); Set<TableId> originalTables = derivationMapping.computeIfAbsent(derivedTable, t -> new HashSet<>()); originalTables.add(originalTable); if (originalTables.size() == 1) { // single source mapping, replace the table ID directly SchemaChangeEvent derivedSchemaChangeEvent = ChangeEventUtils.recreateSchemaChangeEvent(schemaChangeEvent, derivedTable); events.add(derivedSchemaChangeEvent); } else { // multiple source mapping (merging tables) Schema derivedTableSchema = schemaManager.getLatestEvolvedSchema(derivedTable).get(); events.addAll( Objects.requireNonNull( SchemaChangeEventVisitor.visit( schemaChangeEvent, addColumnEvent -> handleAddColumnEvent( addColumnEvent, derivedTableSchema, derivedTable), alterColumnTypeEvent -> handleAlterColumnTypeEvent( alterColumnTypeEvent, derivedTableSchema, derivedTable), createTableEvent -> handleCreateTableEvent( createTableEvent, derivedTableSchema, derivedTable), dropColumnEvent -> Collections.emptyList(), // Column drop shouldn't be // spread to route // destination. dropTableEvent -> Collections.emptyList(), // Table drop shouldn't be // spread to route // destination. renameColumnEvent -> handleRenameColumnEvent( renameColumnEvent, derivedTableSchema, derivedTable), truncateTableEvent -> Collections.emptyList() // // Table truncation // shouldn't be spread to route // destination. ))); } } if (noRouteMatched) { // No routes are matched, leave it as-is return Collections.singletonList(schemaChangeEvent); } else { return events; } }
// End-to-end test of schema derivation when two source tables with different schemas
// merge into one route destination: creating table 2 yields add-column + widen-type
// events; later add/alter/drop/rename events from either source are translated (or
// suppressed) against the merged table, and the final evolved schema contains the
// union of all columns.
@Test void testMergingTableWithDifferentSchemas() { SchemaManager schemaManager = new SchemaManager(); SchemaDerivation schemaDerivation = new SchemaDerivation(schemaManager, ROUTES, new HashMap<>()); // Create table 1 List<SchemaChangeEvent> derivedChangesAfterCreateTable = schemaDerivation.applySchemaChange(new CreateTableEvent(TABLE_1, SCHEMA)); assertThat(derivedChangesAfterCreateTable).hasSize(1); assertThat(derivedChangesAfterCreateTable.get(0)) .asCreateTableEvent() .hasTableId(MERGED_TABLE) .hasSchema(SCHEMA); derivedChangesAfterCreateTable.forEach(schemaManager::applyEvolvedSchemaChange); // Create table 2 List<SchemaChangeEvent> derivedChangesAfterCreateTable2 = schemaDerivation.applySchemaChange( new CreateTableEvent(TABLE_2, COMPATIBLE_SCHEMA)); assertThat(derivedChangesAfterCreateTable2).hasSize(2); assertThat(derivedChangesAfterCreateTable2) .containsExactlyInAnyOrder( new AddColumnEvent( MERGED_TABLE, Collections.singletonList( new AddColumnEvent.ColumnWithPosition( new PhysicalColumn( "gender", DataTypes.STRING(), null)))), new AlterColumnTypeEvent( MERGED_TABLE, ImmutableMap.of("age", DataTypes.BIGINT()))); derivedChangesAfterCreateTable2.forEach(schemaManager::applyEvolvedSchemaChange); // Add column for table 1 AddColumnEvent.ColumnWithPosition newCol1 = new AddColumnEvent.ColumnWithPosition( new PhysicalColumn("new_col1", DataTypes.VARCHAR(255), null)); AddColumnEvent.ColumnWithPosition newCol2 = new AddColumnEvent.ColumnWithPosition( new PhysicalColumn("new_col2", DataTypes.VARCHAR(255), null)); List<AddColumnEvent.ColumnWithPosition> newColumns = Arrays.asList(newCol1, newCol2); List<SchemaChangeEvent> derivedChangesAfterAddColumn = schemaDerivation.applySchemaChange(new AddColumnEvent(TABLE_1, newColumns)); assertThat(derivedChangesAfterAddColumn).hasSize(1); assertThat(derivedChangesAfterAddColumn.get(0)) .asAddColumnEvent() .hasTableId(MERGED_TABLE) .containsAddedColumns(newCol1, newCol2); 
derivedChangesAfterAddColumn.forEach(schemaManager::applyEvolvedSchemaChange); // Add column for table 2 List<SchemaChangeEvent> derivedChangesAfterAddColumnForTable2 = schemaDerivation.applySchemaChange( new AddColumnEvent( TABLE_2, Arrays.asList( new AddColumnEvent.ColumnWithPosition( new PhysicalColumn( "new_col1", DataTypes.STRING(), null)), new AddColumnEvent.ColumnWithPosition( new PhysicalColumn( "new_col2", DataTypes.STRING(), null))))); assertThat(derivedChangesAfterAddColumnForTable2).hasSize(1); assertThat(derivedChangesAfterAddColumnForTable2.get(0)) .asAlterColumnTypeEvent() .containsTypeMapping( ImmutableMap.of( "new_col1", DataTypes.STRING(), "new_col2", DataTypes.STRING())); derivedChangesAfterAddColumnForTable2.forEach(schemaManager::applyEvolvedSchemaChange); // Alter column type for table 1 ImmutableMap<String, DataType> typeMapping = ImmutableMap.of("age", DataTypes.BIGINT()); List<SchemaChangeEvent> derivedChangesAfterAlterColumnType = schemaDerivation.applySchemaChange(new AlterColumnTypeEvent(TABLE_1, typeMapping)); assertThat(derivedChangesAfterAlterColumnType).isEmpty(); // Alter column type for table 2 List<SchemaChangeEvent> derivedChangesAfterAlterColumnTypeForTable2 = schemaDerivation.applySchemaChange( new AlterColumnTypeEvent( TABLE_2, ImmutableMap.of("age", DataTypes.TINYINT()))); assertThat(derivedChangesAfterAlterColumnTypeForTable2).isEmpty(); // Drop column for table 1 List<String> droppedColumns = Arrays.asList("new_col1", "new_col2"); assertThat(schemaDerivation.applySchemaChange(new DropColumnEvent(TABLE_1, droppedColumns))) .isEmpty(); // Drop column for table 2 assertThat(schemaDerivation.applySchemaChange(new DropColumnEvent(TABLE_2, droppedColumns))) .isEmpty(); // Rename column for table 1 Map<String, String> renamedColumns = ImmutableMap.of("name", "last_name"); List<SchemaChangeEvent> derivedChangesAfterRenameColumn = schemaDerivation.applySchemaChange(new RenameColumnEvent(TABLE_1, renamedColumns)); 
assertThat(derivedChangesAfterRenameColumn).hasSize(1); assertThat(derivedChangesAfterRenameColumn.get(0)) .asAddColumnEvent() .hasTableId(MERGED_TABLE) .containsAddedColumns( new AddColumnEvent.ColumnWithPosition( new PhysicalColumn("last_name", DataTypes.STRING(), null))); derivedChangesAfterRenameColumn.forEach(schemaManager::applyEvolvedSchemaChange); // Rename column for table 2 List<SchemaChangeEvent> derivedChangesAfterRenameColumnForTable2 = schemaDerivation.applySchemaChange( new RenameColumnEvent(TABLE_2, ImmutableMap.of("name", "first_name"))); assertThat(derivedChangesAfterRenameColumnForTable2).hasSize(1); assertThat(derivedChangesAfterRenameColumnForTable2.get(0)) .asAddColumnEvent() .hasTableId(MERGED_TABLE) .containsAddedColumns( new AddColumnEvent.ColumnWithPosition( new PhysicalColumn("first_name", DataTypes.STRING(), null))); derivedChangesAfterRenameColumnForTable2.forEach(schemaManager::applyEvolvedSchemaChange); assertThat(schemaManager.getLatestEvolvedSchema(MERGED_TABLE)) .contains( Schema.newBuilder() .column(Column.physicalColumn("id", DataTypes.BIGINT())) .column(Column.physicalColumn("name", DataTypes.STRING())) .column(Column.physicalColumn("age", DataTypes.BIGINT())) .column(Column.physicalColumn("gender", DataTypes.STRING())) .column(Column.physicalColumn("new_col1", DataTypes.STRING())) .column(Column.physicalColumn("new_col2", DataTypes.STRING())) .column(Column.physicalColumn("last_name", DataTypes.STRING())) .column(Column.physicalColumn("first_name", DataTypes.STRING())) .build()); }
// Serializes the editable cruise config to XML on the given stream. Rejects configs
// whose origin is not local (merged configs with remote partials must not be saved),
// optionally runs preprocessing + validation, then performs XSD and DOM validation on
// the generated document before writing it out.
public void write(CruiseConfig configForEdit, OutputStream output, boolean skipPreprocessingAndValidation) throws Exception { LOGGER.debug("[Serializing Config] Starting to write. Validation skipped? {}", skipPreprocessingAndValidation); MagicalGoConfigXmlLoader loader = new MagicalGoConfigXmlLoader(configCache, registry); if (!configForEdit.getOrigin().isLocal()) { throw new GoConfigInvalidException(configForEdit, "Attempted to save merged configuration with partials"); } if (!skipPreprocessingAndValidation) { loader.preprocessAndValidate(configForEdit); LOGGER.debug("[Serializing Config] Done with cruise config validators."); } Document document = createEmptyCruiseConfigDocument(); write(configForEdit, document.getRootElement(), configCache, registry); LOGGER.debug("[Serializing Config] XSD and DOM validation."); verifyXsdValid(document); MagicalGoConfigXmlLoader.validateDom(document.getRootElement(), registry); LOGGER.info("[Serializing Config] Generating config partial."); XmlUtils.writeXml(document, output); LOGGER.debug("[Serializing Config] Finished writing config partial."); }
// Verifies that writing a config containing a package repository with an invalid name
// ("name with space") fails validation with the expected error message.
@Test public void shouldNotAllowPackagesRepositoryWithInvalidName() throws Exception { Configuration packageConfiguration = new Configuration(getConfigurationProperty("name", false, "go-agent")); Configuration repositoryConfiguration = new Configuration(getConfigurationProperty("url", false, "http://go")); PackageRepository packageRepository = createPackageRepository("plugin-id", "version", "id", "name with space", repositoryConfiguration, new Packages(new PackageDefinition("id", "name", packageConfiguration))); cruiseConfig.setPackageRepositories(new PackageRepositories(packageRepository)); try { xmlWriter.write(cruiseConfig, output, false); fail("should not have allowed two repositories with same id"); } catch (GoConfigInvalidException e) { assertThat(e.getMessage(), is("Invalid PackageRepository name 'name with space'. This must be alphanumeric and can contain underscores, hyphens and periods (however, it cannot start with a period). The maximum allowed length is 255 characters.")); } }
public VaultRegistry create(final HostPasswordStore keychain, final PasswordCallback callback) { if(null == clazz) { throw new FactoryException(String.format("No implementation given for factory %s", this.getClass().getSimpleName())); } try { final Constructor<? extends VaultRegistry> constructor = ConstructorUtils .getMatchingAccessibleConstructor(clazz, keychain.getClass(), callback.getClass()); if(null == constructor) { log.warn(String.format("No matching constructor for parameters %s,%s", keychain.getClass(), callback.getClass())); // Call default constructor for disabled implementations return clazz.getDeclaredConstructor().newInstance(); } return constructor.newInstance(keychain, callback); } catch(InstantiationException | InvocationTargetException | IllegalAccessException | NoSuchMethodException e) { log.error(String.format("Failure loading callback class %s. %s", clazz, e.getMessage())); return VaultRegistry.DISABLED; } }
// Verifies the factory produces a usable (non-null, not DISABLED) vault registry.
@Test public void testCreate() { assertNotNull(VaultRegistryFactory.get(new DisabledPasswordCallback())); assertNotSame(VaultRegistry.DISABLED, VaultRegistryFactory.get(new DisabledPasswordCallback())); }
// REST endpoint (POST /assetProfile/{id}/default): marks the given asset profile as the
// tenant's default. Loads the profile with WRITE permission, fetches the tenant's
// current default so it can be unset, and delegates the swap to the profile service.
@ApiOperation(value = "Make Asset Profile Default (setDefaultAssetProfile)", notes = "Marks asset profile as default within a tenant scope." + TENANT_AUTHORITY_PARAGRAPH) @PreAuthorize("hasAnyAuthority('TENANT_ADMIN')") @RequestMapping(value = "/assetProfile/{assetProfileId}/default", method = RequestMethod.POST) @ResponseBody public AssetProfile setDefaultAssetProfile( @Parameter(description = ASSET_PROFILE_ID_PARAM_DESCRIPTION) @PathVariable(ASSET_PROFILE_ID) String strAssetProfileId) throws ThingsboardException { checkParameter(ASSET_PROFILE_ID, strAssetProfileId); AssetProfileId assetProfileId = new AssetProfileId(toUUID(strAssetProfileId)); AssetProfile assetProfile = checkAssetProfileId(assetProfileId, Operation.WRITE); AssetProfile previousDefaultAssetProfile = assetProfileService.findDefaultAssetProfile(getTenantId()); return tbAssetProfileService.setDefaultAssetProfile(assetProfile, previousDefaultAssetProfile, getCurrentUser()); }
// Verifies the set-default endpoint: after POSTing, the default-profile query returns
// the saved profile, and no edge notification is emitted for the UPDATED action.
@Test public void testSetDefaultAssetProfile() throws Exception { AssetProfile assetProfile = this.createAssetProfile("Asset Profile 1"); AssetProfile savedAssetProfile = doPost("/api/assetProfile", assetProfile, AssetProfile.class); Mockito.reset(tbClusterService, auditLogService); AssetProfile defaultAssetProfile = doPost("/api/assetProfile/" + savedAssetProfile.getId().getId().toString() + "/default", AssetProfile.class); Assert.assertNotNull(defaultAssetProfile); AssetProfileInfo foundDefaultAssetProfile = doGet("/api/assetProfileInfo/default", AssetProfileInfo.class); Assert.assertNotNull(foundDefaultAssetProfile); Assert.assertEquals(savedAssetProfile.getName(), foundDefaultAssetProfile.getName()); Assert.assertEquals(savedAssetProfile.getId(), foundDefaultAssetProfile.getId()); testNotifyEntityOneTimeMsgToEdgeServiceNever(defaultAssetProfile, defaultAssetProfile.getId(), defaultAssetProfile.getId(), savedTenant.getId(), tenantAdmin.getCustomerId(), tenantAdmin.getId(), tenantAdmin.getEmail(), ActionType.UPDATED); }
/**
 * Deregisters the given service instance from Nacos: converts it to a Nacos
 * {@code Instance} and removes it under the configured group via the naming service.
 */
@Override
public void doUnregister(ServiceInstance serviceInstance) throws RuntimeException {
    execute(namingService, service -> {
        final Instance nacosInstance = toInstance(serviceInstance);
        service.deregisterInstance(nacosInstance.getServiceName(), group, nacosInstance);
    });
}
// Verifies doUnregister forwards the converted Instance (same service name, host and
// port) to the naming service's deregisterInstance exactly once.
@Test void testDoUnRegister() throws NacosException { // register DefaultServiceInstance serviceInstance = createServiceInstance(SERVICE_NAME, LOCALHOST, NetUtils.getAvailablePort()); // register nacosServiceDiscovery.doRegister(serviceInstance); // unRegister nacosServiceDiscovery.doUnregister(serviceInstance); ArgumentCaptor<Instance> instanceCaptor = ArgumentCaptor.forClass(Instance.class); verify(namingServiceWrapper, times(1)).deregisterInstance(any(), eq(group), instanceCaptor.capture()); Instance capture = instanceCaptor.getValue(); assertEquals(SERVICE_NAME, capture.getServiceName()); assertEquals(LOCALHOST, capture.getIp()); assertEquals(serviceInstance.getPort(), capture.getPort()); }
/**
 * Converts this quantity to the given unit of the same dimension.
 *
 * Fix: compute {@code value * this.unit.factor() / unit.factor()} instead of
 * {@code value * (this.unit.factor() / unit.factor())}. Dividing the factors first
 * truncates to zero whenever the factors are integral and the target unit's factor
 * is larger (e.g. cm -> m). Multiplying first preserves the conversion for both
 * directions. NOTE(review): assumes factor()'s type tolerates the intermediate
 * product without overflow — confirm against the Unit implementation.
 */
public Quantity<U> in(U unit) {
    return new Quantity<U>(value * this.unit.factor() / unit.factor(), unit);
}
// Verifies unit conversion: 1 meter converts to a quantity equal to 100 centimeters.
@Test public void convertUnits() throws Exception { Quantity<Metrics> quantityInMeters = new Quantity<Metrics>(1, Metrics.m); Quantity<Metrics> quantityInCm = quantityInMeters.in(Metrics.cm); assertThat(quantityInCm).isEqualTo(new Quantity<Metrics>(100, Metrics.cm)); }
/**
 * Tells whether the given date falls on a weekend, i.e. its day of week is
 * Saturday or Sunday.
 */
public static boolean isWeekend(Date date) {
    final Week dayOfWeek = dayOfWeekEnum(date);
    return dayOfWeek == Week.SATURDAY || dayOfWeek == Week.SUNDAY;
}
// Verifies weekend detection: 2021-07-28 (Wednesday) is not a weekend; 2021-07-25
// (Sunday) and 2021-07-24 (Saturday) are.
@Test public void isWeekendTest() { DateTime parse = DateUtil.parse("2021-07-28"); assertFalse(DateUtil.isWeekend(parse)); parse = DateUtil.parse("2021-07-25"); assertTrue(DateUtil.isWeekend(parse)); parse = DateUtil.parse("2021-07-24"); assertTrue(DateUtil.isWeekend(parse)); }
/**
 * LZ4-compresses the given bytes using the fastest available compressor.
 * Throws NullPointerException on null input. Best-effort: an IOException during
 * compression is logged and whatever was written so far is returned.
 */
public static byte[] compress(byte[] bytes) {
    if (null == bytes) {
        throw new NullPointerException("bytes is null");
    }
    final ByteArrayOutputStream sink = new ByteArrayOutputStream();
    final LZ4Compressor compressor = LZ4Factory.fastestInstance().fastCompressor();
    try (LZ4BlockOutputStream lz4Out = new LZ4BlockOutputStream(sink, ARRAY_SIZE, compressor)) {
        lz4Out.write(bytes);
    }
    catch (IOException e) {
        LOGGER.error("compress bytes error", e);
    }
    return sink.toByteArray();
}
// Verifies compress(null) throws NullPointerException.
@Test public void testCompress() { Assertions.assertThrows(NullPointerException.class, () -> { Lz4Util.compress(null); }); }
/**
 * Writes {@code v} into the buffer as an unsigned varlong: seven payload bits per
 * byte, least-significant group first, with the high bit set on every byte except
 * the last. A value of zero encodes as the single byte 0x00.
 */
public static void writeUnsignedVarlong(long v, ByteBuffer buffer) {
    byte encoded;
    do {
        // Take the low seven bits, then decide whether a continuation follows.
        encoded = (byte) (v & 0x7f);
        v >>>= 7;
        if (v != 0L) {
            encoded |= (byte) 0x80;
        }
        buffer.put(encoded);
    } while (v != 0L);
}
// Disabled differential test: compares writeUnsignedVarlong against the classic
// reference encoding over powers of two; intended to be enabled when the
// implementation changes.
@Test @Disabled // Enable this when we change the implementation of UnsignedVarlong public void testCorrectnessWriteUnsignedVarlong() { // The old well-known implementation for writeVarlong. LongFunction<ByteBuffer> simpleImplementation = (long value) -> { ByteBuffer buffer = ByteBuffer.allocate(MAX_LENGTH_VARLONG); while ((value & 0xffffffffffffff80L) != 0L) { byte b = (byte) ((value & 0x7f) | 0x80); buffer.put(b); value >>>= 7; } buffer.put((byte) value); return buffer; }; // compare the full range of values final ByteBuffer actual = ByteBuffer.allocate(MAX_LENGTH_VARLONG); for (long i = 1; i < Long.MAX_VALUE && i >= 0; i = i << 1) { ByteUtils.writeUnsignedVarlong(i, actual); final ByteBuffer expected = simpleImplementation.apply(i); assertArrayEquals(expected.array(), actual.array(), "Implementations do not match for number=" + i); actual.clear(); } }
// Heuristic release-schedule guesser. Works on the most recent MAX_DATA_POINTS release
// dates; needs at least two dates, otherwise UNKNOWN with no prediction. Uses median
// inter-release distance plus its average deviation to classify DAILY, WEEKLY,
// BIWEEKLY, MONTHLY or FOURWEEKLY, anchoring the next expected date at the median
// release hour and advancing day-by-day so a delayed release never skips a period.
// Falling through, it collects days of the week with enough releases: a uniform
// multi-day pattern yields WEEKDAYS (exactly Mon-Fri) or SPECIFIC_DAYS; a single
// dominant day falls back to WEEKLY; otherwise UNKNOWN with a rough 0.6*median guess.
public static Guess performGuess(List<Date> releaseDates) { if (releaseDates.size() <= 1) { return new Guess(Schedule.UNKNOWN, null, null); } else if (releaseDates.size() > MAX_DATA_POINTS) { releaseDates = releaseDates.subList(releaseDates.size() - MAX_DATA_POINTS, releaseDates.size()); } Stats stats = getStats(releaseDates); final int maxTotalWrongDays = Math.max(1, releaseDates.size() / 5); final int maxSingleDayOff = releaseDates.size() / 10; GregorianCalendar last = new GregorianCalendar(); last.setTime(releaseDates.get(releaseDates.size() - 1)); last.set(Calendar.HOUR_OF_DAY, (int) stats.medianHour); last.set(Calendar.MINUTE, (int) ((stats.medianHour - Math.floor(stats.medianHour)) * 60)); last.set(Calendar.SECOND, 0); last.set(Calendar.MILLISECOND, 0); if (Math.abs(stats.medianDistance - ONE_DAY) < 2 * ONE_HOUR && stats.avgDeltaToMedianDistance < 2 * ONE_HOUR) { addTime(last, ONE_DAY); return new Guess(Schedule.DAILY, Arrays.asList(Calendar.MONDAY, Calendar.TUESDAY, Calendar.WEDNESDAY, Calendar.THURSDAY, Calendar.FRIDAY, Calendar.SATURDAY, Calendar.SUNDAY), last.getTime()); } else if (Math.abs(stats.medianDistance - ONE_WEEK) < ONE_DAY && stats.avgDeltaToMedianDistance < 2 * ONE_DAY) { // Just using last.set(Calendar.DAY_OF_WEEK) could skip a week // when the last release is delayed over week boundaries addTime(last, 3 * ONE_DAY); do { addTime(last, ONE_DAY); } while (last.get(Calendar.DAY_OF_WEEK) != stats.mostOftenDayOfWeek); return new Guess(Schedule.WEEKLY, List.of(stats.mostOftenDayOfWeek), last.getTime()); } else if (Math.abs(stats.medianDistance - 2 * ONE_WEEK) < ONE_DAY && stats.avgDeltaToMedianDistance < 2 * ONE_DAY) { // Just using last.set(Calendar.DAY_OF_WEEK) could skip a week // when the last release is delayed over week boundaries addTime(last, 10 * ONE_DAY); do { addTime(last, ONE_DAY); } while (last.get(Calendar.DAY_OF_WEEK) != stats.mostOftenDayOfWeek); return new Guess(Schedule.BIWEEKLY, List.of(stats.mostOftenDayOfWeek), last.getTime()); 
} else if (Math.abs(stats.medianDistance - ONE_MONTH) < 5 * ONE_DAY && stats.avgDeltaToMedianDistance < 5 * ONE_DAY) { if (stats.daysOfMonth[stats.mostOftenDayOfMonth] >= releaseDates.size() - maxTotalWrongDays) { // Just using last.set(Calendar.DAY_OF_MONTH) could skip a week // when the last release is delayed over week boundaries addTime(last, 2 * ONE_WEEK); do { addTime(last, ONE_DAY); } while (last.get(Calendar.DAY_OF_MONTH) != stats.mostOftenDayOfMonth); return new Guess(Schedule.MONTHLY, null, last.getTime()); } addTime(last, 3 * ONE_WEEK + 3 * ONE_DAY); do { addTime(last, ONE_DAY); } while (last.get(Calendar.DAY_OF_WEEK) != stats.mostOftenDayOfWeek); return new Guess(Schedule.FOURWEEKLY, List.of(stats.mostOftenDayOfWeek), last.getTime()); } // Find release days List<Integer> largeDays = new ArrayList<>(); for (int i = Calendar.SUNDAY; i <= Calendar.SATURDAY; i++) { if (stats.daysOfWeek[i] > maxSingleDayOff) { largeDays.add(i); } } // Ensure that all release days are used similarly often int averageDays = releaseDates.size() / largeDays.size(); boolean matchesAverageDays = true; for (int day : largeDays) { if (stats.daysOfWeek[day] < averageDays - maxSingleDayOff) { matchesAverageDays = false; break; } } if (matchesAverageDays && stats.medianDistance < ONE_WEEK) { // Fixed daily release schedule (eg Mo, Thu, Fri) addUntil(last, largeDays); if (largeDays.size() == 5 && largeDays.containsAll(Arrays.asList( Calendar.MONDAY, Calendar.TUESDAY, Calendar.WEDNESDAY, Calendar.THURSDAY, Calendar.FRIDAY))) { return new Guess(Schedule.WEEKDAYS, largeDays, last.getTime()); } return new Guess(Schedule.SPECIFIC_DAYS, largeDays, last.getTime()); } else if (largeDays.size() == 1) { // Probably still weekly with more exceptions than others addUntil(last, largeDays); return new Guess(Schedule.WEEKLY, largeDays, last.getTime()); } addTime(last, (long) (0.6f * stats.medianDistance)); return new Guess(Schedule.UNKNOWN, null, last.getTime()); }
// Verifies a Monday-through-Friday release history is classified as WEEKDAYS, with the
// next expected date on the following weekday — skipping the weekend when the last
// release is a Friday.
@Test public void testWeekdays() { ArrayList<Date> releaseDates = new ArrayList<>(); releaseDates.add(makeDate("2024-01-01 16:30")); // Monday releaseDates.add(makeDate("2024-01-02 16:25")); releaseDates.add(makeDate("2024-01-03 16:35")); releaseDates.add(makeDate("2024-01-04 16:40")); releaseDates.add(makeDate("2024-01-05 16:20")); // Friday releaseDates.add(makeDate("2024-01-08 16:20")); // Monday releaseDates.add(makeDate("2024-01-09 16:30")); releaseDates.add(makeDate("2024-01-10 16:40")); releaseDates.add(makeDate("2024-01-11 16:45")); // Thursday // Next day ReleaseScheduleGuesser.Guess guess = performGuess(releaseDates); assertEquals(ReleaseScheduleGuesser.Schedule.WEEKDAYS, guess.schedule); assertClose(makeDate("2024-01-12 16:30"), guess.nextExpectedDate, ONE_HOUR); // After weekend releaseDates.add(makeDate("2024-01-12 16:30")); // Friday guess = performGuess(releaseDates); assertClose(makeDate("2024-01-15 16:30"), guess.nextExpectedDate, ONE_HOUR); }
@Override public void delete(final Map<Path, TransferStatus> files, final PasswordCallback prompt, final Callback callback) throws BackgroundException { try { final EueApiClient client = new EueApiClient(session); // Move to trash first as precondition of delete this.delete(super.trash(files, prompt, callback)); for(Path f : files.keySet()) { fileid.cache(f, null); } } catch(ApiException e) { for(Path f : files.keySet()) { throw new EueExceptionMappingService().map("Cannot delete {0}", e, f); } } }
@Test(expected = NotfoundException.class)
public void testNotfoundMultipleFiles() throws Exception {
    // Deleting two random, never-created files must surface NotfoundException.
    final EueResourceIdProvider fileid = new EueResourceIdProvider(session);
    final Path first = new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    final Path second = new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    new EueDeleteFeature(session, fileid).delete(Arrays.asList(first, second), new DisabledLoginCallback(), new Delete.DisabledCallback());
    fail();
}
/**
 * Returns an approximate simple name for the given class by delegating to the
 * string-based overload.
 *
 * @throws IllegalArgumentException if the class is anonymous (it has no usable simple name)
 */
private static String approximateSimpleName(Class<?> clazz, boolean dropOuterClassNames) {
    if (clazz.isAnonymousClass()) {
        throw new IllegalArgumentException("Attempted to get simple name of anonymous class");
    }
    return approximateSimpleName(clazz.getName(), dropOuterClassNames);
}
@Test
public void testAnonSimpleName() throws Exception {
    // Anonymous classes have no simple name; the helper substitutes "Anonymous".
    final String simpleName = NameUtils.approximateSimpleName(new EmbeddedDoFn() {});
    assertEquals("Anonymous", simpleName);
}
/**
 * Unsupported: entries may not be removed once added to this map.
 *
 * @throws UnsupportedOperationException always
 */
@Deprecated
@Override
public V remove(final Object key) {
    throw new UnsupportedOperationException("Removing from registeredStores is not allowed");
}
@SuppressWarnings("deprecation")
@Test
public void shouldForbidRemove() {
    // remove() is unsupported; the existing mapping must survive the failed call.
    final FixedOrderMap<String, Integer> map = new FixedOrderMap<>();
    map.put("a", 0);
    assertThrows(UnsupportedOperationException.class, () -> map.remove("a"));
    assertEquals(0, map.get("a"));
}
/**
 * Casts an integer to a decimal with the given precision and scale.
 * A null input propagates as a null result.
 */
public static BigDecimal cast(final Integer value, final int precision, final int scale) {
    return value == null ? null : cast(value.longValue(), precision, scale);
}
@Test
public void shouldCastDecimalRoundingDown() {
    // When: cast 1.12 to precision 2 / scale 1 — the extra fractional digit is dropped.
    final BigDecimal decimal = DecimalUtil.cast(new BigDecimal("1.12"), 2, 1);
    // Then:
    assertThat(decimal, is(new BigDecimal("1.1")));
}
@Subscribe
public void onScriptCallbackEvent(ScriptCallbackEvent event) {
    String eventName = event.getEventName();
    // The client script stacks are read and written in place; indices are relative to the top.
    int[] intStack = client.getIntStack();
    String[] stringStack = client.getStringStack();
    int intStackSize = client.getIntStackSize();
    int stringStackSize = client.getStringStackSize();
    switch (eventName) {
        case "setSearchBankInputText":
            // Replace the prompt text shown in the bank search input.
            stringStack[stringStackSize - 1] = SEARCH_BANK_INPUT_TEXT;
            break;
        case "setSearchBankInputTextFound": {
            // The script pushes the number of matches; format it into the prompt text.
            int matches = intStack[intStackSize - 1];
            stringStack[stringStackSize - 1] = String.format(SEARCH_BANK_INPUT_TEXT_FOUND, matches);
            break;
        }
        case "bankSearchFilter":
            final int itemId = intStack[intStackSize - 1];
            String searchfilter = stringStack[stringStackSize - 1];
            BankTag tag = activeTag;
            boolean tagSearch = true;
            // Shared storage uses ~bankmain_filteritem too. Allow using tag searches in it but don't
            // apply the tag search from the active tab.
            final boolean bankOpen = client.getItemContainer(InventoryID.BANK) != null;
            if (tag == null || !bankOpen) {
                if (searchfilter.isEmpty()) {
                    return;
                }
                tagSearch = searchfilter.startsWith(TAG_SEARCH);
                if (tagSearch) {
                    searchfilter = searchfilter.substring(TAG_SEARCH.length()).trim();
                }
                // Build a temporary BankTag using the search filter
                tag = buildSearchFilterBankTag(searchfilter);
            }
            if (itemId == -1 && tag.layout() != null) {
                // item -1 always passes on a laid out tab so items can be dragged to it
                return;
            }
            if (itemId > -1 && tag.contains(itemId)) {
                // return true
                intStack[intStackSize - 2] = 1;
            } else if (tagSearch) {
                // if the item isn't tagged we return false to prevent the item matching if the item name happens
                // to contain the tag name.
                intStack[intStackSize - 2] = 0;
            }
            break;
        case "getSearchingTagTab":
            // Report to the script whether a tag tab search is active (1) or not (0).
            intStack[intStackSize - 1] = activeTag != null ? 1 : 0;
            break;
        case "bankBuildTab":
            // Use the per-tab view when we want to hide the separators to avoid having to reposition items &
            // recomputing the scroll height.
            if (activeTag != null && (tabInterface.isTagTabActive() || config.removeSeparators() || activeTag.layout() != null)) {
                var stack = client.getIntStack();
                var sz = client.getIntStackSize();
                stack[sz - 1] = 1; // use single tab view mode
            }
            break;
    }
}
@Test
public void testFallThrough() {
    // The item is tagged "herb,bossing", so a plain name search for "whip" must not match via
    // tags; the script callback still completes and the int stack value at index 0 is preserved.
    when(client.getIntStack()).thenReturn(new int[]{1, ABYSSAL_WHIP});
    when(client.getStringStack()).thenReturn(new String[]{"whip"});
    when(configManager.getConfiguration(BankTagsPlugin.CONFIG_GROUP, TagManager.ITEM_KEY_PREFIX + ABYSSAL_WHIP)).thenReturn("herb,bossing");
    assertFalse(tagManager.findTag(ABYSSAL_WHIP, "whip"));
    bankTagsPlugin.onScriptCallbackEvent(EVENT);
    assertEquals(1, client.getIntStack()[0]);
}
/**
 * Analyzes a JAR dependency: removes non-scannable archives, then extracts evidence
 * from the manifest, the embedded pom.xml, and (as a weaker fallback) package names.
 *
 * @param dependency the dependency being analyzed
 * @param engine     the scanning engine (used to remove skipped dependencies)
 * @throws AnalysisException if manifest or POM parsing failed; evidence gathered before
 *                           the failure is still attached to the dependency
 */
@Override
public void analyzeDependency(Dependency dependency, Engine engine) throws AnalysisException {
    final List<ClassNameInformation> classNames = collectClassNames(dependency);
    final String fileName = dependency.getFileName().toLowerCase();
    // Skip source/javadoc archives, macOS "._" metadata files, and anything that is not a zip.
    if ((classNames.isEmpty()
            && (fileName.endsWith("-sources.jar")
            || fileName.endsWith("-javadoc.jar")
            || fileName.endsWith("-src.jar")
            || fileName.endsWith("-doc.jar")
            || isMacOSMetaDataFile(dependency, engine)))
            || !isZipFile(dependency)) {
        engine.removeDependency(dependency);
        return;
    }
    Exception exception = null;
    boolean hasManifest = false;
    try {
        hasManifest = parseManifest(dependency, classNames);
    } catch (IOException ex) {
        LOGGER.debug("Invalid Manifest", ex);
        exception = ex;
    }
    boolean hasPOM = false;
    try {
        hasPOM = analyzePOM(dependency, classNames, engine);
    } catch (AnalysisException ex) {
        LOGGER.debug("Error parsing pom.xml", ex);
        // Keep the first failure as the primary cause; attach later failures as suppressed
        // instead of silently overwriting the earlier exception.
        if (exception == null) {
            exception = ex;
        } else {
            exception.addSuppressed(ex);
        }
    }
    // Package-name evidence is weaker; only add it when manifest or POM evidence is incomplete.
    final boolean addPackagesAsEvidence = !(hasManifest && hasPOM);
    analyzePackageNames(classNames, dependency, addPackagesAsEvidence);
    dependency.setEcosystem(DEPENDENCY_ECOSYSTEM);
    if (exception != null) {
        throw new AnalysisException(String.format("An error occurred extracting evidence from "
                + "%s, analysis may be incomplete; please see the log for more details.",
                dependency.getDisplayFileName()), exception);
    }
}
@Test
public void testAnalyzeDependency_SkipsMacOSMetaDataFile() throws Exception {
    // AppleDouble "._" metadata files contain no classes; the analyzer must remove them
    // from the engine, leaving only the real jar behind.
    JarAnalyzer instance = new JarAnalyzer();
    Dependency macOSMetaDataFile = new Dependency();
    macOSMetaDataFile
            .setActualFilePath(Paths.get("src", "test", "resources", "._avro-ipc-1.5.0.jar").toFile().getAbsolutePath());
    macOSMetaDataFile.setFileName("._avro-ipc-1.5.0.jar");
    Dependency actualJarFile = new Dependency();
    actualJarFile.setActualFilePath(BaseTest.getResourceAsFile(this, "avro-ipc-1.5.0.jar").getAbsolutePath());
    actualJarFile.setFileName("avro-ipc-1.5.0.jar");
    try (Engine engine = new Engine(getSettings())) {
        engine.setDependencies(Arrays.asList(macOSMetaDataFile, actualJarFile));
        instance.analyzeDependency(macOSMetaDataFile, engine);
        assertEquals(1, engine.getDependencies().length);
    }
}
/**
 * Single-argument form: delegates to the two-argument overload with a scale of zero.
 */
public FEELFnResult<BigDecimal> invoke(@ParameterName( "n" ) BigDecimal n) {
    return invoke(n, BigDecimal.ZERO);
}
@Test
void invokeRoundingOdd() {
    // Rounding 10.35 down: default scale 0 gives 10; scale 1 gives 10.3.
    FunctionTestUtil.assertResult(roundDownFunction.invoke(BigDecimal.valueOf(10.35)), BigDecimal.valueOf(10));
    FunctionTestUtil.assertResult(roundDownFunction.invoke(BigDecimal.valueOf(10.35), BigDecimal.ONE), BigDecimal.valueOf(10.3));
}
/**
 * Unsupported: state store access is not permitted in this context.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public <S extends StateStore> S getStateStore(final String name) {
    throw new UnsupportedOperationException("StateStores can't access getStateStore.");
}
@Test
public void shouldThrowOnGetStateStore() {
    // State store access is forbidden in this context and must fail fast.
    assertThrows(UnsupportedOperationException.class, () -> context.getStateStore("store"));
}
/**
 * Creates a new {@link Reader} over this file's content.
 *
 * @return a fresh reader positioned at the start of the content; the caller closes it
 * @throws FileNotFoundException if the underlying file does not exist
 */
public abstract Reader createReader() throws FileNotFoundException;
@Test
public void testReadingFromInputStream() throws Exception {
    // Read the packaged application file through its Reader and verify the content survives.
    String data = IOUtils.readAll(getApplicationFile(Path.fromString("files/foo.json")).createReader());
    assertTrue(data.contains("foo : foo"));
}
/**
 * Invokes the given method on {@code obj} with the supplied arguments.
 * Delegates to the overload with the leading flag set to {@code false}
 * (presumably the non-"special" invoke mode — confirm against that overload).
 */
public static <T> T invoke(Object obj, Method method, Object... args) {
    return invoke(false, obj, method, args);
}
@Test
public void invokeTest(){
    // Invoke a plain instance method through a method handle.
    final int size = MethodHandleUtil.invokeSpecial(new BigDuck(), ReflectUtil.getMethod(BigDuck.class, "getSize"));
    assertEquals(36, size);
}
/**
 * Returns the cluster topics matched by the requested exclusion pattern, falling back
 * to the default excluded-topics pattern when no pattern is requested.
 */
public Set<String> excludedTopics(ClusterModel clusterModel, Pattern requestedExcludedTopics) {
    final Pattern topicsToExclude;
    if (requestedExcludedTopics == null) {
        topicsToExclude = _defaultExcludedTopics;
    } else {
        topicsToExclude = requestedExcludedTopics;
    }
    return Utils.getTopicNamesMatchedWithPattern(topicsToExclude, clusterModel::topics);
}
@Test
public void testGetNoExcludedTopics() {
    // clusterModel.topics() must not be consulted when the effective pattern matches nothing:
    // the mock throws if the method is ever invoked.
    GoalOptimizer goalOptimizer = createGoalOptimizer();
    ClusterModel clusterModel = EasyMock.mock(ClusterModel.class);
    EasyMock.expect(clusterModel.topics()).andThrow(new AssertionFailedError("Not expect this method gets called")).anyTimes();
    EasyMock.replay(clusterModel);
    Set<String> excludedTopics = goalOptimizer.excludedTopics(clusterModel, null);
    Assert.assertTrue(excludedTopics.isEmpty());
    EasyMock.verify(clusterModel);
    EasyMock.reset(clusterModel);
    EasyMock.replay(clusterModel);
    Pattern matchNothingPattern = Pattern.compile("");
    excludedTopics = goalOptimizer.excludedTopics(clusterModel, matchNothingPattern);
    Assert.assertTrue(excludedTopics.isEmpty());
    EasyMock.verify(clusterModel);
}
/**
 * Counts aggregated config info rows matching the given data id, group and tenant.
 *
 * @param dataId data id to match
 * @param group  group id to match
 * @param tenant tenant id; blank values are normalized to the empty default namespace
 * @return number of matching rows (0 when the driver reports no count)
 */
@Override
public int aggrConfigInfoCount(String dataId, String group, String tenant) {
    String tenantTmp = StringUtils.isBlank(tenant) ? StringUtils.EMPTY : tenant;
    ConfigInfoAggrMapper configInfoAggrMapper = mapperManager.findMapper(dataSourceService.getDataSourceType(), TableConstant.CONFIG_INFO_AGGR);
    String sql = configInfoAggrMapper.count(Arrays.asList("data_id", "group_id", "tenant_id"));
    // Pass args via varargs; the explicit Object[] form maps to a deprecated overload in Spring 5.3+.
    Integer result = jt.queryForObject(sql, Integer.class, dataId, group, tenantTmp);
    // Guard against a null result instead of throwing an opaque NPE on unboxing.
    return result == null ? 0 : result;
}
@Test
void testAggrConfigInfoCount() {
    String dataId = "dataId11122";
    String group = "group";
    String tenant = "tenant";
    // Mock the aggr row-count query to return 101 for exactly these arguments.
    when(jdbcTemplate.queryForObject(anyString(), eq(Integer.class), eq(dataId), eq(group), eq(tenant))).thenReturn(new Integer(101));
    int result = externalConfigInfoAggrPersistService.aggrConfigInfoCount(dataId, group, tenant);
    assertEquals(101, result);
}
/**
 * Walks the cause chain of {@code source} (starting with {@code source} itself) and returns
 * the first throwable that is an instance of any of the given exception classes.
 *
 * @param source  the throwable whose cause chain is searched; may be {@code null}
 * @param clazzes candidate exception classes; with none given, nothing can match
 * @return the first matching throwable in the chain, or {@code null} if none matches
 */
@SafeVarargs
public static Exception lookupExceptionInCause(Throwable source, Class<? extends Exception>... clazzes) {
    // Only reads the varargs array, so @SafeVarargs is justified and removes the
    // "unchecked generic array creation" warning at call sites. Iterating with a
    // dedicated cursor also avoids reassigning the parameter.
    for (Throwable current = source; current != null; current = current.getCause()) {
        for (Class<? extends Exception> clazz : clazzes) {
            if (clazz.isAssignableFrom(current.getClass())) {
                return (Exception) current;
            }
        }
    }
    return null;
}
@Test
void givenCause_whenLookupExceptionInCauseByMany_thenReturnFirstCause() {
    // With multiple candidate classes, the first throwable in the cause chain matching any of
    // them is returned, regardless of the order in which the classes are listed; no candidates
    // or no match yields null.
    final Exception causeIAE = new IllegalAccessException();
    assertThat(ExceptionUtil.lookupExceptionInCause(new Exception(causeIAE))).isNull();
    assertThat(ExceptionUtil.lookupExceptionInCause(new Exception(causeIAE), IOException.class, NoSuchFieldException.class)).isNull();
    assertThat(ExceptionUtil.lookupExceptionInCause(new Exception(causeIAE), IllegalAccessException.class, IOException.class, NoSuchFieldException.class)).isSameAs(causeIAE);
    assertThat(ExceptionUtil.lookupExceptionInCause(new Exception(causeIAE), IOException.class, NoSuchFieldException.class, IllegalAccessException.class)).isSameAs(causeIAE);
    final Exception causeIOE = new IOException(causeIAE);
    assertThat(ExceptionUtil.lookupExceptionInCause(new Exception(causeIOE))).isNull();
    assertThat(ExceptionUtil.lookupExceptionInCause(new Exception(causeIAE), ClassNotFoundException.class, NoSuchFieldException.class)).isNull();
    assertThat(ExceptionUtil.lookupExceptionInCause(new Exception(causeIOE), IOException.class, NoSuchFieldException.class)).isSameAs(causeIOE);
    assertThat(ExceptionUtil.lookupExceptionInCause(new Exception(causeIOE), IllegalAccessException.class, IOException.class, NoSuchFieldException.class)).isSameAs(causeIOE);
    assertThat(ExceptionUtil.lookupExceptionInCause(new Exception(causeIOE), IOException.class, NoSuchFieldException.class, IllegalAccessException.class)).isSameAs(causeIOE);
}
/**
 * Returns a function that maps a row of keys (produced in the internal field order
 * computed by {@code orderFields}) back into the order in which the fields were
 * originally requested. Returns the identity function when no reordering is needed.
 */
public static Function<List<String>, List<String>> reorderFieldsFunction(List<String> fields, List<SortSpec> sorts) {
    if (!needsReorderingFields(fields, sorts)) {
        return Function.identity();
    }
    final List<String> orderedFields = orderFields(fields, sorts);
    // positions[i] is the index within the reordered row that holds the value of fields[i].
    final int[] positions = new int[fields.size()];
    for (int i = 0; i < positions.length; i++) {
        positions[i] = orderedFields.indexOf(fields.get(i));
    }
    return keys -> IntStream.range(0, positions.length)
            .mapToObj(i -> keys.get(positions[i]))
            .collect(Collectors.toList());
}
@Test
void reordersKeysBasedOnSortConfiguration() {
    // Internally the sorted fields come first (baz, bar, foo); the returned function must map
    // result rows back into the requested field order (foo, bar, baz).
    final Function<List<String>, List<String>> reorderKeys = ValuesBucketOrdering.reorderFieldsFunction(List.of("foo", "bar", "baz"),
            List.of(
                    PivotSort.create("baz", SortSpec.Direction.Descending),
                    PivotSort.create("bar", SortSpec.Direction.Ascending)
            ));
    assertThat(reorderKeys.apply(List.of("baz", "bar", "foo"))).containsExactly("foo", "bar", "baz");
}
@Override public <KEY> URIMappingResult<KEY> mapUris(List<URIKeyPair<KEY>> requestUriKeyPairs) throws ServiceUnavailableException { if (requestUriKeyPairs == null || requestUriKeyPairs.isEmpty()) { return new URIMappingResult<>(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); } // API assumes that all requests will be made to the same service, just use the first request to get the service name and act as sample uri URI sampleURI = requestUriKeyPairs.get(0).getRequestUri(); String serviceName = LoadBalancerUtil.getServiceNameFromUri(sampleURI); // To achieve scatter-gather, we require the following information PartitionAccessor accessor = _partitionInfoProvider.getPartitionAccessor(serviceName); Map<Integer, Ring<URI>> rings = _hashRingProvider.getRings(sampleURI); HashFunction<Request> hashFunction = _hashRingProvider.getRequestHashFunction(serviceName); Map<Integer, Set<KEY>> unmapped = new HashMap<>(); // Pass One Map<Integer, List<URIKeyPair<KEY>>> requestsByPartition = distributeToPartitions(requestUriKeyPairs, accessor, unmapped); // Pass Two Map<URI, Integer> hostToParitionId = new HashMap<>(); Map<URI, Set<KEY>> hostToKeySet = distributeToHosts(requestsByPartition, rings, hashFunction, hostToParitionId, unmapped); return new URIMappingResult<>(hostToKeySet, unmapped, hostToParitionId); }
@Test
public void testStickyAndPartitioning() throws ServiceUnavailableException {
    // 10 partitions x 100 requests against 100 hosts: every key must be mapped (nothing
    // unmapped) and partition info must be reported for each of the 100 hosts.
    int partitionCount = 10;
    int requestPerPartition = 100;
    int totalHostCount = 100;
    HashRingProvider ringProvider = createStaticHashRingProvider(totalHostCount, partitionCount, getHashFunction(true));
    PartitionInfoProvider infoProvider = createRangeBasedPartitionInfoProvider(partitionCount);
    URIMapper mapper = new RingBasedUriMapper(ringProvider, infoProvider);
    List<URIKeyPair<Integer>> requests = testUtil.generateRequests(partitionCount, requestPerPartition);
    URIMappingResult<Integer> results = mapper.mapUris(requests);
    Map<URI, Set<Integer>> mapping = results.getMappedKeys();
    Map<Integer, Set<Integer>> unmappedKeys = results.getUnmappedKeys();
    Map<URI, Integer> hostToPartition = results.getHostPartitionInfo();
    Assert.assertTrue(unmappedKeys.isEmpty());
    Assert.assertEquals(100, mapping.size());
    Assert.assertEquals(100, hostToPartition.size());
}
public Object toIdObject(String baseId) throws AmqpProtocolException { if (baseId == null) { return null; } try { if (hasAmqpUuidPrefix(baseId)) { String uuidString = strip(baseId, AMQP_UUID_PREFIX_LENGTH); return UUID.fromString(uuidString); } else if (hasAmqpUlongPrefix(baseId)) { String longString = strip(baseId, AMQP_ULONG_PREFIX_LENGTH); return UnsignedLong.valueOf(longString); } else if (hasAmqpStringPrefix(baseId)) { return strip(baseId, AMQP_STRING_PREFIX_LENGTH); } else if (hasAmqpBinaryPrefix(baseId)) { String hexString = strip(baseId, AMQP_BINARY_PREFIX_LENGTH); byte[] bytes = convertHexStringToBinary(hexString); return new Binary(bytes); } else { // We have a string without any type prefix, transmit it as-is. return baseId; } } catch (IllegalArgumentException e) { throw new AmqpProtocolException("Unable to convert ID value"); } }
@Test
public void testToIdObjectWithStringContainingStringEncodingPrefixAndThenUuidPrefix() throws Exception {
    // The explicit string prefix wins: the remainder must come back as a literal string,
    // even though that remainder itself starts with the UUID prefix.
    String encodedUuidString = AMQPMessageIdHelper.AMQP_UUID_PREFIX + UUID.randomUUID().toString();
    String stringId = AMQPMessageIdHelper.AMQP_STRING_PREFIX + encodedUuidString;
    Object idObject = messageIdHelper.toIdObject(stringId);
    assertNotNull("null object should not have been returned", idObject);
    assertEquals("expected id object was not returned", encodedUuidString, idObject);
}
/**
 * Serializes this entry, extending the parent JSON with the HOTP counter.
 */
@Override
public JSONObject toJson() {
    final JSONObject json = super.toJson();
    try {
        json.put("counter", getCounter());
    } catch (JSONException e) {
        // put() failing on a plain numeric value is unexpected; escalate if it happens.
        throw new RuntimeException(e);
    }
    return json;
}
@Test
public void testHotpMd5Override() throws OtpInfoException {
    // MOTP legitimately uses MD5 and keeps it across JSON round-trips, while HOTP must fall
    // back to the default algorithm when MD5 is deserialized; supported algorithms such as
    // SHA256 round-trip unchanged.
    final byte[] secret = new byte[]{1, 2, 3, 4};
    MotpInfo motpInfo = new MotpInfo(secret, "1234");
    motpInfo = (MotpInfo) OtpInfo.fromJson("motp", motpInfo.toJson());
    assertEquals("MD5", motpInfo.getAlgorithm(false));
    HotpInfo info = new HotpInfo(secret);
    info.setAlgorithm("MD5");
    info = (HotpInfo) OtpInfo.fromJson("hotp", info.toJson());
    assertEquals(OtpInfo.DEFAULT_ALGORITHM, info.getAlgorithm(false));
    info.setAlgorithm("SHA256");
    info = (HotpInfo) OtpInfo.fromJson("hotp", info.toJson());
    assertEquals("SHA256", info.getAlgorithm(false));
}
@Override
protected void processOptions(LinkedList<String> args) {
    // Recognized flags; -t additionally takes a value (comma-separated storage types).
    CommandFormat cf = new CommandFormat(1, Integer.MAX_VALUE,
        OPTION_QUOTA, OPTION_HUMAN, OPTION_HEADER, OPTION_QUOTA_AND_USAGE,
        OPTION_EXCLUDE_SNAPSHOT, OPTION_ECPOLICY, OPTION_SNAPSHOT_COUNT);
    cf.addOptionWithValue(OPTION_TYPE);
    cf.parse(args);
    if (args.isEmpty()) { // default path is the current working directory
        args.add(".");
    }
    showQuotas = cf.getOpt(OPTION_QUOTA);
    humanReadable = cf.getOpt(OPTION_HUMAN);
    showQuotasAndUsageOnly = cf.getOpt(OPTION_QUOTA_AND_USAGE);
    excludeSnapshots = cf.getOpt(OPTION_EXCLUDE_SNAPSHOT);
    displayECPolicy = cf.getOpt(OPTION_ECPOLICY);
    showSnapshot = cf.getOpt(OPTION_SNAPSHOT_COUNT);
    if (showQuotas || showQuotasAndUsageOnly) {
        String types = cf.getOptValue(OPTION_TYPE);
        if (null != types) {
            // -t with a value restricts the quota report to the listed storage types.
            showQuotabyType = true;
            storageTypes = getAndCheckStorageTypes(types);
        } else {
            showQuotabyType = false;
        }
        if (excludeSnapshots) {
            // Quota output always includes snapshots, so -x is meaningless here; warn and drop it.
            out.println(OPTION_QUOTA + " or " + OPTION_QUOTA_AND_USAGE + " option " + "is given, the -x option is ignored.");
            excludeSnapshots = false;
        }
    }
    if (cf.getOpt(OPTION_HEADER)) {
        // Assemble the header row matching exactly the columns that will be printed.
        StringBuilder headString = new StringBuilder();
        if (showQuotabyType) {
            headString.append(QuotaUsage.getStorageTypeHeader(storageTypes));
        } else {
            if (showQuotasAndUsageOnly) {
                headString.append(QuotaUsage.getHeader());
            } else {
                headString.append(ContentSummary.getHeader(showQuotas));
            }
        }
        if (displayECPolicy) {
            headString.append(ContentSummary.getErasureCodingPolicyHeader());
        }
        if (showSnapshot) {
            headString.append(ContentSummary.getSnapshotHeader());
        }
        headString.append("PATHNAME");
        out.println(headString.toString());
    }
}
@Test
public void processPathWithQuotasByMultipleStorageTypes() throws Exception {
    // -q -v -t SSD,DISK must print a header containing only the requested storage-type columns.
    Path path = new Path("mockfs:/test");
    when(mockFs.getFileStatus(eq(path))).thenReturn(fileStat);
    PrintStream out = mock(PrintStream.class);
    Count count = new Count();
    count.out = out;
    LinkedList<String> options = new LinkedList<String>();
    options.add("-q");
    options.add("-v");
    options.add("-t");
    options.add("SSD,DISK");
    options.add("dummy");
    count.processOptions(options);
    String withStorageTypeHeader =
            // <----14----> <------18-------->
            " SSD_QUOTA REM_SSD_QUOTA " + " DISK_QUOTA REM_DISK_QUOTA " + "PATHNAME";
    verify(out).println(withStorageTypeHeader);
    verifyNoMoreInteractions(out);
}
public void decode(ByteBuf buffer) {
    boolean last;
    int statusCode;
    // Consume as many complete frames/sections as the buffer allows; each case returns
    // when it needs more bytes, leaving the state machine ready to resume on the next call.
    while (true) {
        switch(state) {
        case READ_COMMON_HEADER:
            if (buffer.readableBytes() < SPDY_HEADER_SIZE) {
                return;
            }
            int frameOffset = buffer.readerIndex();
            int flagsOffset = frameOffset + SPDY_HEADER_FLAGS_OFFSET;
            int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET;
            buffer.skipBytes(SPDY_HEADER_SIZE);
            boolean control = (buffer.getByte(frameOffset) & 0x80) != 0;
            int version;
            int type;
            if (control) {
                // Decode control frame common header
                version = getUnsignedShort(buffer, frameOffset) & 0x7FFF;
                type = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET);
                streamId = 0; // Default to session Stream-ID
            } else {
                // Decode data frame common header
                version = spdyVersion; // Default to expected version
                type = SPDY_DATA_FRAME;
                streamId = getUnsignedInt(buffer, frameOffset);
            }
            flags = buffer.getByte(flagsOffset);
            length = getUnsignedMedium(buffer, lengthOffset);
            // Check version first then validity
            if (version != spdyVersion) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SPDY Version");
            } else if (!isValidFrameHeader(streamId, type, flags, length)) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid Frame Error");
            } else {
                state = getNextState(type, length);
            }
            break;
        case READ_DATA_FRAME:
            if (length == 0) {
                state = State.READ_COMMON_HEADER;
                delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0));
                break;
            }
            // Generate data frames that do not exceed maxChunkSize
            int dataLength = Math.min(maxChunkSize, length);
            // Wait until entire frame is readable
            if (buffer.readableBytes() < dataLength) {
                return;
            }
            ByteBuf data = buffer.alloc().buffer(dataLength);
            data.writeBytes(buffer, dataLength);
            length -= dataLength;
            if (length == 0) {
                state = State.READ_COMMON_HEADER;
            }
            // FIN is only reported with the final chunk of the frame.
            last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN);
            delegate.readDataFrame(streamId, last, data);
            break;
        case READ_SYN_STREAM_FRAME:
            if (buffer.readableBytes() < 10) {
                return;
            }
            int offset = buffer.readerIndex();
            streamId = getUnsignedInt(buffer, offset);
            int associatedToStreamId = getUnsignedInt(buffer, offset + 4);
            byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07);
            last = hasFlag(flags, SPDY_FLAG_FIN);
            boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL);
            buffer.skipBytes(10);
            length -= 10;
            if (streamId == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SYN_STREAM Frame");
            } else {
                state = State.READ_HEADER_BLOCK;
                delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional);
            }
            break;
        case READ_SYN_REPLY_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            last = hasFlag(flags, SPDY_FLAG_FIN);
            buffer.skipBytes(4);
            length -= 4;
            if (streamId == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SYN_REPLY Frame");
            } else {
                state = State.READ_HEADER_BLOCK;
                delegate.readSynReplyFrame(streamId, last);
            }
            break;
        case READ_RST_STREAM_FRAME:
            if (buffer.readableBytes() < 8) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
            buffer.skipBytes(8);
            if (streamId == 0 || statusCode == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid RST_STREAM Frame");
            } else {
                state = State.READ_COMMON_HEADER;
                delegate.readRstStreamFrame(streamId, statusCode);
            }
            break;
        case READ_SETTINGS_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR);
            numSettings = getUnsignedInt(buffer, buffer.readerIndex());
            buffer.skipBytes(4);
            length -= 4;
            // Validate frame length against number of entries. Each ID/Value entry is 8 bytes.
            if ((length & 0x07) != 0 || length >> 3 != numSettings) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid SETTINGS Frame");
            } else {
                state = State.READ_SETTING;
                delegate.readSettingsFrame(clear);
            }
            break;
        case READ_SETTING:
            if (numSettings == 0) {
                state = State.READ_COMMON_HEADER;
                delegate.readSettingsEnd();
                break;
            }
            if (buffer.readableBytes() < 8) {
                return;
            }
            byte settingsFlags = buffer.getByte(buffer.readerIndex());
            int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
            int value = getSignedInt(buffer, buffer.readerIndex() + 4);
            boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE);
            boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED);
            buffer.skipBytes(8);
            --numSettings;
            delegate.readSetting(id, value, persistValue, persisted);
            break;
        case READ_PING_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            int pingId = getSignedInt(buffer, buffer.readerIndex());
            buffer.skipBytes(4);
            state = State.READ_COMMON_HEADER;
            delegate.readPingFrame(pingId);
            break;
        case READ_GOAWAY_FRAME:
            if (buffer.readableBytes() < 8) {
                return;
            }
            int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex());
            statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
            buffer.skipBytes(8);
            state = State.READ_COMMON_HEADER;
            delegate.readGoAwayFrame(lastGoodStreamId, statusCode);
            break;
        case READ_HEADERS_FRAME:
            if (buffer.readableBytes() < 4) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            last = hasFlag(flags, SPDY_FLAG_FIN);
            buffer.skipBytes(4);
            length -= 4;
            if (streamId == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid HEADERS Frame");
            } else {
                state = State.READ_HEADER_BLOCK;
                delegate.readHeadersFrame(streamId, last);
            }
            break;
        case READ_WINDOW_UPDATE_FRAME:
            if (buffer.readableBytes() < 8) {
                return;
            }
            streamId = getUnsignedInt(buffer, buffer.readerIndex());
            int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4);
            buffer.skipBytes(8);
            if (deltaWindowSize == 0) {
                state = State.FRAME_ERROR;
                delegate.readFrameError("Invalid WINDOW_UPDATE Frame");
            } else {
                state = State.READ_COMMON_HEADER;
                delegate.readWindowUpdateFrame(streamId, deltaWindowSize);
            }
            break;
        case READ_HEADER_BLOCK:
            if (length == 0) {
                state = State.READ_COMMON_HEADER;
                delegate.readHeaderBlockEnd();
                break;
            }
            if (!buffer.isReadable()) {
                return;
            }
            // Forward the compressed header block to the delegate in readable-sized chunks.
            int compressedBytes = Math.min(buffer.readableBytes(), length);
            ByteBuf headerBlock = buffer.alloc().buffer(compressedBytes);
            headerBlock.writeBytes(buffer, compressedBytes);
            length -= compressedBytes;
            delegate.readHeaderBlock(headerBlock);
            break;
        case DISCARD_FRAME:
            int numBytes = Math.min(buffer.readableBytes(), length);
            buffer.skipBytes(numBytes);
            length -= numBytes;
            if (length == 0) {
                state = State.READ_COMMON_HEADER;
                break;
            }
            return;
        case FRAME_ERROR:
            // Once in the error state, drain everything; the session is unusable.
            buffer.skipBytes(buffer.readableBytes());
            return;
        default:
            throw new Error("Shouldn't reach here.");
        }
    }
}
@Test
public void testSpdyHeadersFrame() throws Exception {
    // Control frame type 8 (HEADERS) carrying only a stream-id and no header block: the
    // decoder must emit the frame callback followed immediately by the header-block end,
    // fully consuming the buffer.
    short type = 8;
    byte flags = 0;
    int length = 4;
    int streamId = RANDOM.nextInt() & 0x7FFFFFFF | 0x01;
    ByteBuf buf = Unpooled.buffer(SPDY_HEADER_SIZE + length);
    encodeControlFrameHeader(buf, type, flags, length);
    buf.writeInt(streamId);
    decoder.decode(buf);
    verify(delegate).readHeadersFrame(streamId, false);
    verify(delegate).readHeaderBlockEnd();
    assertFalse(buf.isReadable());
    buf.release();
}
/**
 * Applies a client black/white list configuration by installing the matching
 * selector on the client engine.
 *
 * @param configDTO the configuration; mode and entries must be non-null
 * @throws NullPointerException     if the config, its mode, or its entries are null
 * @throws IllegalArgumentException if the mode is not a recognized value
 */
public void applyConfig(ClientBwListDTO configDTO) {
    requireNonNull(configDTO, "Client filtering config must not be null");
    requireNonNull(configDTO.mode, "Config mode must not be null");
    requireNonNull(configDTO.entries, "Config entries must not be null");
    ClientSelector selector;
    switch (configDTO.mode) {
        case DISABLED:
            // No filtering: every client is accepted.
            selector = ClientSelectors.any();
            break;
        case WHITELIST:
            // Only clients matching one of the entries are accepted.
            selector = createSelector(configDTO.entries);
            break;
        case BLACKLIST:
            // Clients matching an entry are rejected; everyone else is accepted.
            selector = ClientSelectors.inverse(createSelector(configDTO.entries));
            break;
        default:
            throw new IllegalArgumentException("Unknown client B/W list mode: " + configDTO.mode);
    }
    clientEngine.applySelector(selector);
}
@Test
public void testApplyConfig_nullEntryType_throws() {
    // An entry without a type cannot be turned into a selector; applyConfig must fail fast.
    ClientBwListDTO config = createConfig(Mode.WHITELIST, new ClientBwListEntryDTO(null, "127.0.0.*"));
    assertThrows(NullPointerException.class, () -> handler.applyConfig(config));
}
/**
 * Creates a builder backed by the AutoValue-generated implementation.
 */
static Builder newBuilder() {
    return new AutoValue_HttpEventPublisher.Builder();
}
@Test
public void invalidRootCaTest() {
    // A root CA blob that is not a parseable certificate must fail the build with
    // a CertificateException mentioning the parse failure.
    CertificateException thrown =
            assertThrows(
                    CertificateException.class,
                    () -> {
                        HttpEventPublisher.newBuilder()
                                .withUrl("https://example.com")
                                .withToken("test-token")
                                .withDisableCertificateValidation(false)
                                .withRootCaCertificate("invalid_ca".getBytes(StandardCharsets.UTF_8))
                                .withEnableGzipHttpCompression(true)
                                .build();
                    });
    assertThat(thrown.getMessage(), containsString("parse certificate"));
}
/**
 * Dispatches an application event to its type-specific handler via a cast to the
 * concrete event class. Unexpected event types are logged and dropped rather than
 * failing the background thread.
 */
@SuppressWarnings({"CyclomaticComplexity"})
@Override
public void process(ApplicationEvent event) {
    switch (event.type()) {
        case COMMIT_ASYNC:
            process((AsyncCommitEvent) event);
            return;
        case COMMIT_SYNC:
            process((SyncCommitEvent) event);
            return;
        case POLL:
            process((PollEvent) event);
            return;
        case FETCH_COMMITTED_OFFSETS:
            process((FetchCommittedOffsetsEvent) event);
            return;
        case NEW_TOPICS_METADATA_UPDATE:
            process((NewTopicsMetadataUpdateRequestEvent) event);
            return;
        case ASSIGNMENT_CHANGE:
            process((AssignmentChangeEvent) event);
            return;
        case TOPIC_METADATA:
            process((TopicMetadataEvent) event);
            return;
        case ALL_TOPICS_METADATA:
            process((AllTopicsMetadataEvent) event);
            return;
        case LIST_OFFSETS:
            process((ListOffsetsEvent) event);
            return;
        case RESET_POSITIONS:
            process((ResetPositionsEvent) event);
            return;
        case VALIDATE_POSITIONS:
            process((ValidatePositionsEvent) event);
            return;
        case SUBSCRIPTION_CHANGE:
            process((SubscriptionChangeEvent) event);
            return;
        case UNSUBSCRIBE:
            process((UnsubscribeEvent) event);
            return;
        case CONSUMER_REBALANCE_LISTENER_CALLBACK_COMPLETED:
            process((ConsumerRebalanceListenerCallbackCompletedEvent) event);
            return;
        case COMMIT_ON_CLOSE:
            process((CommitOnCloseEvent) event);
            return;
        case SHARE_FETCH:
            process((ShareFetchEvent) event);
            return;
        case SHARE_ACKNOWLEDGE_SYNC:
            process((ShareAcknowledgeSyncEvent) event);
            return;
        case SHARE_ACKNOWLEDGE_ASYNC:
            process((ShareAcknowledgeAsyncEvent) event);
            return;
        case SHARE_SUBSCRIPTION_CHANGE:
            process((ShareSubscriptionChangeEvent) event);
            return;
        case SHARE_UNSUBSCRIBE:
            process((ShareUnsubscribeEvent) event);
            return;
        case SHARE_ACKNOWLEDGE_ON_CLOSE:
            process((ShareAcknowledgeOnCloseEvent) event);
            return;
        default:
            log.warn("Application event type {} was not expected", event.type());
    }
}
@Test
public void testProcessUnsubscribeEventWithGroupId() {
    // With a group id configured, an UnsubscribeEvent must make the membership manager leave the group.
    setupProcessor(true);
    when(heartbeatRequestManager.membershipManager()).thenReturn(membershipManager);
    when(membershipManager.leaveGroup()).thenReturn(CompletableFuture.completedFuture(null));
    processor.process(new UnsubscribeEvent(0));
    verify(membershipManager).leaveGroup();
}
/**
 * Evaluates a SEL expression with the given parameters and returns the unboxed Java value.
 *
 * @param expr   the expression source (sanitized before evaluation)
 * @param params variables visible to the expression
 * @return the scalar internal value, or the unboxed array/map for container results
 * @throws MaestroInvalidExpressionException for expression errors, unsupported result
 *                                           types, or evaluation ExecutionExceptions
 * @throws MaestroInternalError              for any other unexpected failure
 */
public Object eval(String expr, Map<String, Object> params) {
    try {
        // Extensions are optional; resolve lazily from the repo when configured.
        Extension ext = extensionRepo == null ? null : extensionRepo.get();
        SelType result = evaluator.evaluate(sanitize(expr), params, ext);
        switch (result.type()) {
            case STRING:
            case LONG:
            case DOUBLE:
            case BOOLEAN:
                return result.getInternalVal();
            case STRING_ARRAY:
            case LONG_ARRAY:
            case DOUBLE_ARRAY:
            case BOOLEAN_ARRAY:
            case MAP:
                // Container results are unboxed into plain Java values.
                return result.unbox();
            case ERROR:
                throw new MaestroInvalidExpressionException(
                    "Expression throws an error [%s] for expr=[%s]", result, expr);
            default:
                throw new MaestroInvalidExpressionException(
                    "Invalid return type [%s] for expr=[%s]", result.type(), expr);
        }
    } catch (MaestroRuntimeException me) {
        // Already a domain error (including the two thrown above); propagate unchanged.
        throw me;
    } catch (ExecutionException ee) {
        throw new MaestroInvalidExpressionException(
            ee, "Expression evaluation throws an exception for expr=[%s]", expr);
    } catch (Exception e) {
        throw new MaestroInternalError(
            e, "Expression evaluation is failed with an exception for expr=[%s]", expr);
    }
}
@Test
public void testMissingSemicolon() {
    // The evaluator accepts an expression without a trailing semicolon; x + 1 with x=10 yields 11L.
    assertEquals(11L, evaluator.eval("x + 1", Collections.singletonMap("x", 10)));
}
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) {
    String highway = way.getTag("highway");
    String vehicle = way.getTag("vehicle", "");
    // The rider must dismount unless bicycle access is explicitly intended: generic
    // get-off-bike highway types, railway platforms, vehicle=no (except on cycleways),
    // forestry/agricultural vehicle restrictions, or unsegregated foot-designated paths.
    boolean notIntended = !way.hasTag("bicycle", INTENDED)
        && (GET_OFF_BIKE.contains(highway)
        || way.hasTag("railway", "platform")
        || !"cycleway".equals(highway) && way.hasTag("vehicle", "no")
        || vehicle.contains("forestry")
        || vehicle.contains("agricultural")
        || "path".equals(highway) && way.hasTag("foot", "designated") && !way.hasTag("segregated", "yes"));
    if ("steps".equals(highway) || way.hasTag("bicycle", "dismount") || notIntended) {
        // Dismount applies in both travel directions.
        getOffBikeEnc.setBool(false, edgeId, edgeIntAccess, true);
        getOffBikeEnc.setBool(true, edgeId, edgeIntAccess, true);
    }
    boolean fwd = bikeAccessEnc.getBool(false, edgeId, edgeIntAccess);
    boolean bwd = bikeAccessEnc.getBool(true, edgeId, edgeIntAccess);
    // get off bike for reverse oneways
    if (fwd != bwd) {
        if (!fwd) getOffBikeEnc.setBool(false, edgeId, edgeIntAccess, true);
        if (!bwd) getOffBikeEnc.setBool(true, edgeId, edgeIntAccess, true);
    }
}
// One-way primary road: "get off bike" must be set only against the oneway direction (reverse), not forward.
@Test public void testOneway() { ReaderWay way = new ReaderWay(1); way.setTag("highway", "primary"); way.setTag("oneway", "yes"); EdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1); int edgeId = 0; IntsRef rel = new IntsRef(1); accessParser.handleWayTags(edgeId, edgeIntAccess, way, new IntsRef(1)); getOffParser.handleWayTags(edgeId, edgeIntAccess, way, new IntsRef(1)); assertFalse(offBikeEnc.getBool(false, edgeId, edgeIntAccess)); assertTrue(offBikeEnc.getBool(true, edgeId, edgeIntAccess)); }
/**
 * Collects the split segments into a list.
 *
 * @param trim whether to trim each segment (via StrUtil.trim)
 * @return the list of (optionally trimmed) segments
 */
public List<String> toList(boolean trim) { return toList((str) -> trim ? StrUtil.trim(str) : str); }
// Splitting "a, ,,efedsfs, ddf," on ',' without ignoring empty segments must yield 6 elements.
@Test public void splitByCharTest(){ String str1 = "a, ,,efedsfs, ddf,"; //不忽略"" SplitIter splitIter = new SplitIter(str1, new CharFinder(',', false), Integer.MAX_VALUE, false ); assertEquals(6, splitIter.toList(false).size()); }
/**
 * Resolves the discovery request into the engine descriptor, then filters the
 * resulting descriptor tree by the package filter and prunes empty branches.
 */
void resolveSelectors(EngineDiscoveryRequest request, CucumberEngineDescriptor engineDescriptor) { Predicate<String> packageFilter = buildPackageFilter(request); resolve(request, engineDescriptor, packageFilter); filter(engineDescriptor, packageFilter); pruneTree(engineDescriptor); }
// For every descriptor discovered from a directory selector, re-resolving by its unique id must yield exactly that subtree.
@Test void resolveRequestWithUniqueIdSelectorFromFileUri() { DiscoverySelector resource = selectDirectory("src/test/resources/io/cucumber/junit/platform/engine"); EngineDiscoveryRequest discoveryRequest = new SelectorRequest(resource); resolver.resolveSelectors(discoveryRequest, testDescriptor); Set<? extends TestDescriptor> descendants = testDescriptor.getDescendants(); descendants.forEach(targetDescriptor -> { resetTestDescriptor(); resolveRequestWithUniqueIdSelector(targetDescriptor.getUniqueId()); assertEquals(1, testDescriptor.getChildren().size()); assertThat(testDescriptor, allDescriptorsPrefixedBy(targetDescriptor.getUniqueId())); }); }
/**
 * Builds a relative path from canonicalBaseFile to canonicalFileToRelativize.
 * If the two paths have different roots (e.g. different Windows drives) no relative
 * path exists, so the target's canonical path is returned unchanged. One ".." is
 * emitted per non-common base component, then the target's remaining components
 * are appended. Returns "." when both files resolve to the same location.
 */
static String getRelativeFileInternal(File canonicalBaseFile, File canonicalFileToRelativize) { List<String> basePath = getPathComponents(canonicalBaseFile); List<String> pathToRelativize = getPathComponents(canonicalFileToRelativize); //if the roots aren't the same (i.e. different drives on a windows machine), we can't construct a relative //path from one to the other, so just return the canonical file if (!basePath.get(0).equals(pathToRelativize.get(0))) { return canonicalFileToRelativize.getPath(); } int commonDirs; StringBuilder sb = new StringBuilder(); for (commonDirs=1; commonDirs<basePath.size() && commonDirs<pathToRelativize.size(); commonDirs++) { if (!basePath.get(commonDirs).equals(pathToRelativize.get(commonDirs))) { break; } } boolean first = true; for (int i=commonDirs; i<basePath.size(); i++) { if (!first) { sb.append(File.separatorChar); } else { first = false; } sb.append(".."); } first = true; for (int i=commonDirs; i<pathToRelativize.size(); i++) { if (first) { if (sb.length() != 0) { sb.append(File.separatorChar); } first = false; } else { sb.append(File.separatorChar); } sb.append(pathToRelativize.get(i)); } if (sb.length() == 0) { return "."; } return sb.toString(); }
// Relativizing a path against itself (differing only by a trailing separator) must yield ".".
@Test public void pathUtilTest6() { File[] roots = File.listRoots(); File basePath = new File(roots[0] + "some" + File.separatorChar + "dir" + File.separatorChar); File relativePath = new File(roots[0] + "some" + File.separatorChar + "dir"); String path = PathUtil.getRelativeFileInternal(basePath, relativePath); Assert.assertEquals(path, "."); }
/**
 * Strips identifier-quoting characters (backticks, single quotes, and double
 * quotes) from a column name.
 *
 * @param column raw column name, possibly quoted; may be null
 * @return the column name without any quote characters, or null if the input was null
 */
public static String cleanColumn(String column) {
    if (column == null) {
        return null;
    }
    // String.replace performs literal (non-regex) replacement and is a no-op
    // when the target is absent, so the previous contains() guards and the
    // regex-based replaceAll() calls were redundant.
    return column.replace("`", "").replace("'", "").replace("\"", "");
}
// A null column name must be returned as null, not cause an NPE.
@Test public void cleanColumnInputNullOutputNull() { // Arrange final String column = null; // Act final String actual = Util.cleanColumn(column); // Assert result Assert.assertNull(actual); }
/**
 * Sends one or more SubmitSm PDUs built from the exchange and collects the
 * message id of each successful submission. Any submission failure is wrapped
 * in SmppException. The collected ids and their count are stored on the result
 * message under SmppConstants.ID and SmppConstants.SENT_MESSAGE_COUNT.
 */
@Override public void execute(Exchange exchange) throws SmppException { SubmitSm[] submitSms = createSubmitSm(exchange); List<String> messageIDs = new ArrayList<>(submitSms.length); String messageID = null; for (int i = 0; i < submitSms.length; i++) { SubmitSm submitSm = submitSms[i]; messageID = null; if (log.isDebugEnabled()) { log.debug("Sending short message {} for exchange id '{}'...", i, exchange.getExchangeId()); } try { SubmitSmResult result = session.submitShortMessage( submitSm.getServiceType(), TypeOfNumber.valueOf(submitSm.getSourceAddrTon()), NumberingPlanIndicator.valueOf(submitSm.getSourceAddrNpi()), submitSm.getSourceAddr(), TypeOfNumber.valueOf(submitSm.getDestAddrTon()), NumberingPlanIndicator.valueOf(submitSm.getDestAddrNpi()), submitSm.getDestAddress(), new ESMClass(submitSm.getEsmClass()), submitSm.getProtocolId(), submitSm.getPriorityFlag(), submitSm.getScheduleDeliveryTime(), submitSm.getValidityPeriod(), new RegisteredDelivery(submitSm.getRegisteredDelivery()), submitSm.getReplaceIfPresent(), DataCodings.newInstance(submitSm.getDataCoding()), (byte) 0, submitSm.getShortMessage(), submitSm.getOptionalParameters()); if (result != null) { messageID = result.getMessageId(); } } catch (Exception e) { throw new SmppException(e); } if (messageID != null) { messageIDs.add(messageID); } } if (log.isDebugEnabled()) { log.debug("Sent short message for exchange id '{}' and received message ids '{}'", exchange.getExchangeId(), messageIDs); } Message message = ExchangeHelper.getResultMessage(exchange); message.setHeader(SmppConstants.ID, messageIDs); message.setHeader(SmppConstants.SENT_MESSAGE_COUNT, messageIDs.size()); }
// Submitting with both standard and vendor-specific optional parameters supplied as a
// Map (new style) must pass each entry as a typed OptionalParameter and return the id.
@Test public void executeWithOptionalParameterNewStyle() throws Exception { Exchange exchange = new DefaultExchange(new DefaultCamelContext(), ExchangePattern.InOut); exchange.getIn().setHeader(SmppConstants.COMMAND, "SubmitSm"); exchange.getIn().setHeader(SmppConstants.ID, "1"); exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_TON, TypeOfNumber.NATIONAL.value()); exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR_NPI, NumberingPlanIndicator.NATIONAL.value()); exchange.getIn().setHeader(SmppConstants.SOURCE_ADDR, "1818"); exchange.getIn().setHeader(SmppConstants.DEST_ADDR_TON, TypeOfNumber.INTERNATIONAL.value()); exchange.getIn().setHeader(SmppConstants.DEST_ADDR_NPI, NumberingPlanIndicator.INTERNET.value()); exchange.getIn().setHeader(SmppConstants.DEST_ADDR, "1919"); exchange.getIn().setHeader(SmppConstants.SCHEDULE_DELIVERY_TIME, new Date(1111111)); exchange.getIn().setHeader(SmppConstants.VALIDITY_PERIOD, new Date(2222222)); exchange.getIn().setHeader(SmppConstants.PROTOCOL_ID, (byte) 1); exchange.getIn().setHeader(SmppConstants.PRIORITY_FLAG, (byte) 2); exchange.getIn().setHeader(SmppConstants.REGISTERED_DELIVERY, new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE).value()); exchange.getIn().setHeader(SmppConstants.REPLACE_IF_PRESENT_FLAG, ReplaceIfPresentFlag.REPLACE.value()); exchange.getIn().setBody("short message body"); Map<Short, Object> optionalParameters = new LinkedHashMap<>(); // standard optional parameter optionalParameters.put((short) 0x0202, "1292".getBytes("UTF-8")); optionalParameters.put((short) 0x001D, "urgent"); optionalParameters.put((short) 0x0005, Byte.valueOf("4")); optionalParameters.put((short) 0x0008, (short) 2); optionalParameters.put((short) 0x0017, 3600000); optionalParameters.put((short) 0x130C, null); // vendor specific optional parameter optionalParameters.put((short) 0x2150, "0815".getBytes("UTF-8")); optionalParameters.put((short) 0x2151, "0816"); optionalParameters.put((short) 0x2152, Byte.valueOf("6")); 
optionalParameters.put((short) 0x2153, (short) 9); optionalParameters.put((short) 0x2154, 7400000); optionalParameters.put((short) 0x2155, null); exchange.getIn().setHeader(SmppConstants.OPTIONAL_PARAMETER, optionalParameters); when(session.submitShortMessage(eq("CMT"), eq(TypeOfNumber.NATIONAL), eq(NumberingPlanIndicator.NATIONAL), eq("1818"), eq(TypeOfNumber.INTERNATIONAL), eq(NumberingPlanIndicator.INTERNET), eq("1919"), eq(new ESMClass()), eq((byte) 1), eq((byte) 2), eq("-300101001831100+"), eq("-300101003702200+"), eq(new RegisteredDelivery(SMSCDeliveryReceipt.FAILURE)), eq(ReplaceIfPresentFlag.REPLACE.value()), eq(DataCodings.newInstance((byte) 0)), eq((byte) 0), eq("short message body".getBytes()), eq(new OptionalParameter.OctetString(Tag.SOURCE_SUBADDRESS, "1292")), eq(new OptionalParameter.COctetString(Tag.ADDITIONAL_STATUS_INFO_TEXT.code(), "urgent")), eq(new OptionalParameter.Byte(Tag.DEST_ADDR_SUBUNIT, (byte) 4)), eq(new OptionalParameter.Short(Tag.DEST_TELEMATICS_ID.code(), (short) 2)), eq(new OptionalParameter.Int(Tag.QOS_TIME_TO_LIVE, 3600000)), eq(new OptionalParameter.Null(Tag.ALERT_ON_MESSAGE_DELIVERY)), eq(new OptionalParameter.OctetString((short) 0x2150, "1292", "UTF-8")), eq(new OptionalParameter.COctetString((short) 0x2151, "0816")), eq(new OptionalParameter.Byte((short) 0x2152, (byte) 6)), eq(new OptionalParameter.Short((short) 0x2153, (short) 9)), eq(new OptionalParameter.Int((short) 0x2154, 7400000)), eq(new OptionalParameter.Null((short) 0x2155)))) .thenReturn(new SubmitSmResult(new MessageId("1"), null)); command.execute(exchange); assertEquals(Collections.singletonList("1"), exchange.getMessage().getHeader(SmppConstants.ID)); assertEquals(1, exchange.getMessage().getHeader(SmppConstants.SENT_MESSAGE_COUNT)); }
/**
 * Returns a deserializer for ICMPv6 packets. The header (type, code, checksum)
 * is read first; the remainder is delegated to a type-specific payload
 * deserializer, falling back to an opaque Data payload for unknown types.
 */
public static Deserializer<ICMP6> deserializer() {
    return (data, offset, length) -> {
        checkInput(data, offset, length, HEADER_LENGTH);

        ICMP6 icmp6 = new ICMP6();

        ByteBuffer bb = ByteBuffer.wrap(data, offset, length);
        icmp6.icmpType = bb.get();
        icmp6.icmpCode = bb.get();
        icmp6.checksum = bb.getShort();

        // Single map lookup instead of the previous containsKey()+get() pair
        // (which also mixed qualified and unqualified references to the map).
        Deserializer<? extends IPacket> deserializer = TYPE_DESERIALIZER_MAP.get(icmp6.icmpType);
        if (deserializer == null) {
            deserializer = Data.deserializer();
        }
        icmp6.payload = deserializer.deserialize(data, bb.position(), bb.limit() - bb.position());
        icmp6.payload.setParent(icmp6);

        return icmp6;
    };
}
// Deserializing every truncated prefix of a valid packet must fail cleanly (delegated to PacketTestUtils).
@Test public void testDeserializeTruncated() throws Exception { PacketTestUtils.testDeserializeTruncated(ICMP6.deserializer(), bytePacket); }
/**
 * Loads the migration config, failing fast with exit code 1 when the config
 * file is absent or invalid, then delegates to the overload that takes the
 * config, a ksql client factory, and the migrations directory.
 */
@Override protected int command() { if (!validateConfigFilePresent()) { return 1; } final MigrationConfig config; try { config = MigrationConfig.load(getConfigFile()); } catch (KsqlException | MigrationException e) { LOGGER.error(e.getMessage()); return 1; } return command( config, MigrationsUtil::getKsqlClient, getMigrationsDir(getConfigFile(), config) ); }
// A checksum mismatch for an applied migration must fail with exit code 1; verification
// stops at the failure, so earlier versions ("1") are never queried.
@Test public void shouldFailOnChecksumMismatch() throws Exception { // Given: final List<String> versions = ImmutableList.of("1", "2", "3"); final List<String> checksums = givenExistingMigrationFiles(versions); givenAppliedMigrations(versions, ImmutableList.of(checksums.get(0), "mismatched_checksum", checksums.get(2))); // When: final int result = command.command(config, cfg -> ksqlClient, migrationsDir); // Then: assertThat(result, is(1)); // verification stops on failure, so version "1" is never queried verifyClientCallsForVersions(ImmutableList.of("2", "3")); }
/**
 * Adds the given storage to this block's storage list.
 *
 * @param storage the datanode storage reporting the block
 * @param reportedBlock the block as reported by the datanode
 * @return presumably whether the storage set changed — confirm against concrete subclasses
 */
abstract boolean addStorage(DatanodeStorageInfo storage, Block reportedBlock);
// Adding a storage while reporting a different block must be rejected with IllegalArgumentException.
@Test(expected=IllegalArgumentException.class) public void testAddStorageWithDifferentBlock() throws Exception { BlockInfo blockInfo1 = new BlockInfoContiguous(new Block(1000L), (short) 3); BlockInfo blockInfo2 = new BlockInfoContiguous(new Block(1001L), (short) 3); final DatanodeStorageInfo storage = DFSTestUtil.createDatanodeStorageInfo( "storageID", "127.0.0.1"); blockInfo1.addStorage(storage, blockInfo2); }
public static String prepareUrl(@NonNull String url) { url = url.trim(); String lowerCaseUrl = url.toLowerCase(Locale.ROOT); // protocol names are case insensitive if (lowerCaseUrl.startsWith("feed://")) { Log.d(TAG, "Replacing feed:// with http://"); return prepareUrl(url.substring("feed://".length())); } else if (lowerCaseUrl.startsWith("pcast://")) { Log.d(TAG, "Removing pcast://"); return prepareUrl(url.substring("pcast://".length())); } else if (lowerCaseUrl.startsWith("pcast:")) { Log.d(TAG, "Removing pcast:"); return prepareUrl(url.substring("pcast:".length())); } else if (lowerCaseUrl.startsWith("itpc")) { Log.d(TAG, "Replacing itpc:// with http://"); return prepareUrl(url.substring("itpc://".length())); } else if (lowerCaseUrl.startsWith(AP_SUBSCRIBE)) { Log.d(TAG, "Removing antennapod-subscribe://"); return prepareUrl(url.substring(AP_SUBSCRIBE.length())); } else if (lowerCaseUrl.contains(AP_SUBSCRIBE_DEEPLINK)) { Log.d(TAG, "Removing " + AP_SUBSCRIBE_DEEPLINK); String query = Uri.parse(url).getQueryParameter("url"); try { return prepareUrl(URLDecoder.decode(query, "UTF-8")); } catch (UnsupportedEncodingException e) { return prepareUrl(query); } } else if (!(lowerCaseUrl.startsWith("http://") || lowerCaseUrl.startsWith("https://"))) { Log.d(TAG, "Adding http:// at the beginning of the URL"); return "http://" + url; } else { return url; } }
// A bare host with a null base URL must get an http:// prefix.
@Test public void testProtocolRelativeUrlBaseUrlNull() { final String in = "example.com"; final String out = UrlChecker.prepareUrl(in, null); assertEquals("http://example.com", out); }
/**
 * Rekeys a stream by evaluating the key expression against each row. Rows that
 * are null, or whose expression evaluates to null, are filtered out before the
 * selectKey. Note the expression evaluator runs twice per surviving row (once
 * in filter, once in selectKey); evaluation errors are routed to the
 * processing logger with a descriptive message.
 */
public static KStreamHolder<GenericKey> build( final KStreamHolder<?> stream, final StreamSelectKeyV1 selectKey, final RuntimeBuildContext buildContext ) { final LogicalSchema sourceSchema = stream.getSchema(); final CompiledExpression expression = buildExpressionEvaluator( selectKey, buildContext, sourceSchema ); final ProcessingLogger processingLogger = buildContext .getProcessingLogger(selectKey.getProperties().getQueryContext()); final String errorMsg = "Error extracting new key using expression " + selectKey.getKeyExpression(); final Function<GenericRow, Object> evaluator = val -> expression .evaluate(val, null, processingLogger, () -> errorMsg); final LogicalSchema resultSchema = new StepSchemaResolver(buildContext.getKsqlConfig(), buildContext.getFunctionRegistry()).resolve(selectKey, sourceSchema); final KStream<?, GenericRow> kstream = stream.getStream(); final KStream<GenericKey, GenericRow> rekeyed = kstream .filter((key, val) -> val != null && evaluator.apply(val) != null) .selectKey((key, val) -> GenericKey.genericKey(evaluator.apply(val))); return new KStreamHolder<>( rekeyed, resultSchema, ExecutionKeyFactory.unwindowed(buildContext) ); }
// Rows whose key expression evaluates to a non-null key must pass the rekey filter.
@Test public void shouldNotFilterOutNonNullKeyColumns() { // When: selectKey.build(planBuilder, planInfo); // Then: verify(kstream).filter(predicateCaptor.capture()); final Predicate<GenericKey, GenericRow> predicate = getPredicate(); assertThat( predicate.test(SOURCE_KEY, value(A_BIG, A_BOI, 0, "dre")), is(true) ); }
/**
 * Updates a dict data record after validating that the record exists, its dict
 * type is valid, and its value remains unique within that type.
 */
@Override public void updateDictData(DictDataSaveReqVO updateReqVO) { // 校验自己存在 validateDictDataExists(updateReqVO.getId()); // 校验字典类型有效 validateDictTypeExists(updateReqVO.getDictType()); // 校验字典数据的值的唯一性 validateDictDataValueUnique(updateReqVO.getId(), updateReqVO.getDictType(), updateReqVO.getValue()); // 更新字典类型 DictDataDO updateObj = BeanUtils.toBean(updateReqVO, DictDataDO.class); dictDataMapper.updateById(updateObj); }
// Updating an existing dict data with a valid dict type must persist all request fields.
@Test public void testUpdateDictData_success() { // mock 数据 DictDataDO dbDictData = randomDictDataDO(); dictDataMapper.insert(dbDictData);// @Sql: 先插入出一条存在的数据 // 准备参数 DictDataSaveReqVO reqVO = randomPojo(DictDataSaveReqVO.class, o -> { o.setId(dbDictData.getId()); // 设置更新的 ID o.setStatus(randomCommonStatus()); }); // mock 方法,字典类型 when(dictTypeService.getDictType(eq(reqVO.getDictType()))).thenReturn(randomDictTypeDO(reqVO.getDictType())); // 调用 dictDataService.updateDictData(reqVO); // 校验是否更新正确 DictDataDO dictData = dictDataMapper.selectById(reqVO.getId()); // 获取最新的 assertPojoEquals(reqVO, dictData); }
/**
 * Shuts the engine down: closes the query registry (optionally closing the
 * queries themselves), waits up to the configured timeout for the cleanup
 * service — logging which application ids may be left with orphaned external
 * resources on timeout — then closes engine metrics and the metrics collector.
 */
public void close(final boolean closeQueries) { primaryContext.getQueryRegistry().close(closeQueries); try { cleanupService.stopAsync().awaitTerminated( this.primaryContext.getKsqlConfig() .getLong(KsqlConfig.KSQL_QUERY_CLEANUP_SHUTDOWN_TIMEOUT_MS), TimeUnit.MILLISECONDS); } catch (final TimeoutException e) { log.warn("Timed out while closing cleanup service. " + "External resources for the following applications may be orphaned: {}", cleanupService.pendingApplicationIds() ); } engineMetrics.close(); aggregateMetricsCollector.shutdown(); }
// Closing a transient push query must trigger deletion of its transient consumer group.
@Test public void shouldCleanUpTransientConsumerGroupsOnClose() { // Given: setupKsqlEngineWithSharedRuntimeDisabled(); final QueryMetadata query = KsqlEngineTestUtil.executeQuery( serviceContext, ksqlEngine, "select * from test1 EMIT CHANGES;", ksqlConfig, Collections.emptyMap() ); query.start(); // When: query.close(); // Then: awaitCleanupComplete(); final Set<String> deletedConsumerGroups = ( (FakeKafkaConsumerGroupClient) serviceContext.getConsumerGroupClient() ).getDeletedConsumerGroups(); assertThat( Iterables.getOnlyElement(deletedConsumerGroups), containsString("_confluent-ksql-default_transient_")); }
/**
 * Resolves job master RPC addresses with fallbacks, in priority order:
 * explicitly configured job RPC addresses; regular master RPC addresses with
 * the port overridden to the job RPC port; embedded journal addresses with
 * the same port override.
 */
public static List<InetSocketAddress> getJobMasterRpcAddresses(AlluxioConfiguration conf) { // First check whether job rpc addresses are explicitly configured. if (conf.isSet(PropertyKey.JOB_MASTER_RPC_ADDRESSES)) { return parseInetSocketAddresses( conf.getList(PropertyKey.JOB_MASTER_RPC_ADDRESSES)); } int jobRpcPort = NetworkAddressUtils.getPort(NetworkAddressUtils.ServiceType.JOB_MASTER_RPC, conf); // Fall back on explicitly configured regular master rpc addresses. if (conf.isSet(PropertyKey.MASTER_RPC_ADDRESSES)) { List<InetSocketAddress> addrs = parseInetSocketAddresses(conf.getList(PropertyKey.MASTER_RPC_ADDRESSES)); return overridePort(addrs, jobRpcPort); } // Fall back on server-side journal configuration. return overridePort(getEmbeddedJournalAddresses(conf, ServiceType.JOB_MASTER_RAFT), jobRpcPort); }
// With only embedded journal addresses configured, hosts come from the journal and the port from JOB_MASTER_RPC_PORT.
@Test public void getJobMasterRpcAddressesServerFallback() { AlluxioConfiguration conf = createConf(ImmutableMap.of( PropertyKey.JOB_MASTER_EMBEDDED_JOURNAL_ADDRESSES, "host1:99,host2:100", PropertyKey.JOB_MASTER_RPC_PORT, 50)); assertEquals( Arrays.asList(InetSocketAddress.createUnresolved("host1", 50), InetSocketAddress.createUnresolved("host2", 50)), ConfigurationUtils.getJobMasterRpcAddresses(conf)); }
/**
 * Looks up a dependency in Artifactory by its SHA-1 checksum.
 * NOTE(review): the HttpURLConnection is never explicitly disconnected —
 * presumably relying on connection pooling; confirm intended.
 *
 * @return the matching Maven artifacts when the server responds with HTTP 200
 * @throws IOException on any non-200 response (message includes URL and status)
 */
public List<MavenArtifact> search(Dependency dependency) throws IOException { final String sha1sum = dependency.getSha1sum(); final URL url = buildUrl(sha1sum); final HttpURLConnection conn = connect(url); final int responseCode = conn.getResponseCode(); if (responseCode == 200) { return processResponse(dependency, conn); } throw new IOException("Could not connect to Artifactory " + url + " (" + responseCode + "): " + conn.getResponseMessage()); }
// An unreachable Artifactory host must surface as UnknownHostException (or a timeout /
// connection refusal depending on the network), never as a successful search.
@Test public void shouldFailWhenHostUnknown() throws IOException { // Given Dependency dependency = new Dependency(); dependency.setSha1sum("c5b4c491aecb72e7c32a78da0b5c6b9cda8dee0f"); dependency.setSha256sum("512b4bf6927f4864acc419b8c5109c23361c30ed1f5798170248d33040de068e"); dependency.setMd5sum("2d1dd0fc21ee96bccfab4353d5379649"); final Settings settings = getSettings(); settings.setString(Settings.KEYS.ANALYZER_ARTIFACTORY_URL, "https://artifactory.techno.ingenico.com.non-existing/artifactory"); final ArtifactorySearch artifactorySearch = new ArtifactorySearch(settings); // When try { artifactorySearch.search(dependency); fail(); } catch (UnknownHostException exception) { // Then assertEquals("artifactory.techno.ingenico.com.non-existing", exception.getMessage()); } catch (SocketTimeoutException exception) { // Then assertEquals("connect timed out", exception.getMessage()); } catch (IOException ex) { assertEquals("Connection refused (Connection refused)", ex.getMessage()); } }
/**
 * Decodes the whole byte array as an ASN.1 object identifier; convenience
 * overload of {@code decodeObjectIdentifier(byte[], int, int)}.
 */
public static String decodeObjectIdentifier(byte[] data) { return decodeObjectIdentifier(data, 0, data.length); }
// The offset/length overload must decode only the designated slice, ignoring surrounding bytes.
@Test public void decodeObjectIdentifierWithOffsetAndLength() { assertEquals("1.2.3", Asn1Utils.decodeObjectIdentifier( new byte[] { (byte) 0xff, (byte) 0x2a, 3, (byte) 0xff }, 1, 2 )); }
/** Returns this object's name. */
public String getName() { return name; }
// getName() must return the name the entry was created with.
@Test public void testGetName() { assertEquals(TestParameters.entryName, dle.getName()); }
/** Returns the number of links currently held in the store. */
@Override public int getLinkCount() { return links.size(); }
// Re-putting an identical link must not increase the count: three puts over two distinct links yields 2.
@Test public final void testGetLinkCount() { assertEquals("initialy empty", 0, linkStore.getLinkCount()); putLink(DID1, P1, DID2, P2, DIRECT); putLink(DID2, P2, DID1, P1, DIRECT); putLink(DID1, P1, DID2, P2, DIRECT); assertEquals("expecting 2 unique link", 2, linkStore.getLinkCount()); }
/**
 * Returns a batch source that emits the entries of the named IMap
 * (delegates to the readMapP processor).
 *
 * @param mapName name of the map to read
 */
@Nonnull public static <K, V> BatchSource<Entry<K, V>> map(@Nonnull String mapName) { return batchFromProcessor("mapSource(" + mapName + ')', readMapP(mapName)); }
// Entries whose projection yields null must be skipped by the map source; only odd keys survive here.
@Test public void map_withProjectionToNull_then_nullsSkipped() { // given String mapName = randomName(); IMap<Integer, Entry<Integer, String>> sourceMap = hz().getMap(mapName); range(0, itemCount).forEach(i -> sourceMap.put(i, entry(i, i % 2 == 0 ? null : String.valueOf(i)))); // when BatchSource<String> source = Sources.map(mapName, truePredicate(), singleAttribute("value")); // then p.readFrom(source).writeTo(sink); hz().getJet().newJob(p); assertTrueEventually(() -> assertEquals( range(0, itemCount) .filter(i -> i % 2 != 0) .mapToObj(String::valueOf) .sorted() .collect(joining("\n")), hz().<String>getList(sinkName) .stream() .sorted() .collect(joining("\n")) )); }
/**
 * Ensures the truth of {@code expression}.
 *
 * @param expression a boolean expression that must hold
 * @param errorMessage the message for the failure exception; must not be null
 * @throws IllegalArgumentException if {@code errorMessage} is null (checked
 *         first, even when the expression holds), or if {@code expression}
 *         is false
 */
public static void checkArgument(boolean expression, Object errorMessage) {
    // The message object is validated unconditionally, before the expression.
    if (errorMessage == null) {
        throw new IllegalArgumentException("errorMessage cannot be null.");
    }
    if (!expression) {
        throw new IllegalArgumentException(String.valueOf(errorMessage));
    }
}
// The 3-arg overload with a false expression and a null message template must throw IllegalArgumentException.
@Test void testCheckArgument3Args1false2null() { assertThrows(IllegalArgumentException.class, () -> { Preconditions.checkArgument(false, null, ARG); }); }
/**
 * Aborts a running transaction. Negative ids are ignored (legacy load jobs).
 * If the id is not in the running map, processNotFoundTxn runs and a
 * TransactionNotFoundException is thrown. Otherwise the before/after state
 * transition callbacks bracket the abort, the new state is persisted while the
 * txn-level write lock is held, and — only when the abort actually took effect —
 * each involved table's state listener is notified via postAbort.
 */
public void abortTransaction(long transactionId, boolean abortPrepared, String reason, TxnCommitAttachment txnCommitAttachment, List<TabletCommitInfo> finishedTablets, List<TabletFailInfo> failedTablets) throws UserException { if (transactionId < 0) { LOG.info("transaction id is {}, less than 0, maybe this is an old type load job, ignore abort operation", transactionId); return; } TransactionState transactionState = null; readLock(); try { transactionState = idToRunningTransactionState.get(transactionId); } finally { readUnlock(); } if (transactionState == null) { // If the transaction state does not exist, this task might have been aborted by // the txntimeoutchecker thread. We need to perform some additional work. processNotFoundTxn(transactionId, reason, txnCommitAttachment); throw new TransactionNotFoundException(transactionId); } // update transaction state extra if exists if (txnCommitAttachment != null) { transactionState.setTxnCommitAttachment(txnCommitAttachment); } // before state transform TxnStateChangeCallback callback = transactionState.beforeStateTransform(TransactionStatus.ABORTED); boolean txnOperated = false; transactionState.writeLock(); try { writeLock(); try { txnOperated = unprotectAbortTransaction(transactionId, abortPrepared, reason); } finally { writeUnlock(); transactionState.afterStateTransform(TransactionStatus.ABORTED, txnOperated, callback, reason); } persistTxnStateInTxnLevelLock(transactionState); } finally { transactionState.writeUnlock(); } if (!txnOperated || transactionState.getTransactionStatus() != TransactionStatus.ABORTED) { return; } LOG.info("transaction:[{}] successfully rollback", transactionState); Database db = GlobalStateMgr.getCurrentState().getDb(dbId); if (db == null) { return; } for (Long tableId : transactionState.getTableIdList()) { Table table = db.getTable(tableId); if (table == null) { continue; } TransactionStateListener listener = stateListenerFactory.create(this, table); if (listener != null) { 
listener.postAbort(transactionState, finishedTablets, failedTablets); } } }
// Aborting a transaction that is no longer in the running map must raise a
// "transaction not found" UserException, even when a commit attachment is supplied.
@Test public void testAbortTransactionWithAttachment() throws UserException { DatabaseTransactionMgr masterDbTransMgr = masterTransMgr.getDatabaseTransactionMgr(GlobalStateMgrTestUtil.testDbId1); long txnId1 = lableToTxnId.get(GlobalStateMgrTestUtil.testTxnLable1); expectedEx.expect(UserException.class); expectedEx.expectMessage("transaction not found"); TxnCommitAttachment txnCommitAttachment = new RLTaskTxnCommitAttachment(); masterDbTransMgr.abortTransaction(txnId1, "test abort transaction", txnCommitAttachment); }
/**
 * Returns a human-readable representation in the form
 * {@code width=<width>, height=<height>}.
 */
@Override
public String toString() {
    // Simple concatenation is equivalent to the previous manual
    // StringBuilder appends and reads more directly.
    return "width=" + this.width + ", height=" + this.height;
}
// Dimension(1, 2).toString() must match the expected POINT_TO_STRING constant.
@Test public void toStringTest() { Dimension dimension = new Dimension(1, 2); Assert.assertEquals(POINT_TO_STRING, dimension.toString()); }
/**
 * Persists a new checklist for the user inside one transaction: saves the room,
 * then the checklist, then its options, questions, and included maintenances.
 *
 * @return the id of the newly created checklist
 */
@Transactional public long createChecklist(User user, ChecklistRequest checklistRequest) { Room room = roomRepository.save(checklistRequest.toRoomEntity()); Checklist checklist = checklistRequest.toChecklistEntity(room, user); checklistRepository.save(checklist); createChecklistOptions(checklistRequest, checklist); createChecklistQuestions(checklistRequest, checklist); createChecklistIncludedMaintenances(checklistRequest, checklist); return checklist.getId(); }
// Checklist creation with duplicated option ids must fail with OPTION_DUPLICATED.
// (Display name in Korean: "checklist creation fails when option ids are duplicated".)
@DisplayName("체크리스트 작성 실패: 옵션 id가 중복일 경우") @Test void createChecklist_duplicatedOptionId_exception() { //given & when & then assertThatThrownBy( () -> checklistService.createChecklist(UserFixture.USER1, ChecklistFixture.CHECKLIST_CREATE_REQUEST_DUPLICATED_OPTION_ID)) .isInstanceOf(BangggoodException.class) .hasMessage(ExceptionCode.OPTION_DUPLICATED.getMessage()); }
/**
 * Converts a key to URL form: lower-cases it, then replaces separator
 * characters (matching SEPARATOR_REGEX) with HIDE_KEY_PREFIX.
 * NOTE(review): toLowerCase() uses the default locale; consider
 * toLowerCase(Locale.ROOT) to avoid locale-dependent results (e.g. the
 * Turkish dotless i) — confirm intended.
 */
public static String toURLKey(String key) { return key.toLowerCase().replaceAll(SEPARATOR_REGEX, HIDE_KEY_PREFIX); }
// Separator characters ('_' and '-') must be replaced by '.' when converting to a URL key.
@Test void testToURLKey() { assertEquals("dubbo.tag1", StringUtils.toURLKey("dubbo_tag1")); assertEquals("dubbo.tag1.tag11", StringUtils.toURLKey("dubbo-tag1_tag11")); }
/**
 * Parses a single "field:value" filter expression against the known entity
 * attributes. Values containing the range separator become a RangeFilter —
 * with a leading/trailing separator producing an open-ended range (null
 * bound) — while any other value becomes a SingleValueFilter. Values are
 * converted according to the attribute's declared field type.
 *
 * @throws IllegalArgumentException if the field/value separator, the field
 *         part, or the value part is missing
 */
public Filter parseSingleExpression(final String filterExpression, final List<EntityAttribute> attributes) { if (!filterExpression.contains(FIELD_AND_VALUE_SEPARATOR)) { throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG); } final String[] split = filterExpression.split(FIELD_AND_VALUE_SEPARATOR, 2); final String fieldPart = split[0]; if (fieldPart == null || fieldPart.isEmpty()) { throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG); } final String valuePart = split[1]; if (valuePart == null || valuePart.isEmpty()) { throw new IllegalArgumentException(WRONG_FILTER_EXPR_FORMAT_ERROR_MSG); } final EntityAttribute attributeMetaData = getAttributeMetaData(attributes, fieldPart); final SearchQueryField.Type fieldType = attributeMetaData.type(); if (isRangeValueExpression(valuePart, fieldType)) { if (valuePart.startsWith(RANGE_VALUES_SEPARATOR)) { return new RangeFilter(attributeMetaData.id(), null, extractValue(fieldType, valuePart.substring(RANGE_VALUES_SEPARATOR.length())) ); } else if (valuePart.endsWith(RANGE_VALUES_SEPARATOR)) { return new RangeFilter(attributeMetaData.id(), extractValue(fieldType, valuePart.substring(0, valuePart.length() - RANGE_VALUES_SEPARATOR.length())), null ); } else { final String[] ranges = valuePart.split(RANGE_VALUES_SEPARATOR); return new RangeFilter(attributeMetaData.id(), extractValue(fieldType, ranges[0]), extractValue(fieldType, ranges[1]) ); } } else { return new SingleValueFilter(attributeMetaData.id(), extractValue(fieldType, valuePart)); } }
// "num:42" against an INT-typed attribute must parse into SingleValueFilter("num", 42).
@Test void parsesFilterExpressionCorrectlyForIntType() { assertEquals(new SingleValueFilter("num", 42), toTest.parseSingleExpression("num:42", List.of(EntityAttribute.builder() .id("num") .title("Num") .type(SearchQueryField.Type.INT) .filterable(true) .build()) )); }
/**
 * Fetches a versioned profile, reading through the Redis cache first and
 * falling back to the backing store on a miss; a store hit repopulates the
 * Redis cache before being returned.
 */
public Optional<VersionedProfile> get(UUID uuid, String version) { Optional<VersionedProfile> profile = redisGet(uuid, version); if (profile.isEmpty()) { profile = profiles.get(uuid, version); profile.ifPresent(versionedProfile -> redisSet(uuid, versionedProfile)); } return profile; }
// On a Redis cache miss the profile must be loaded from the store and written back to the cache.
@Test public void testGetProfileNotInCache() { final UUID uuid = UUID.randomUUID(); final byte[] name = TestRandomUtil.nextBytes(81); final VersionedProfile profile = new VersionedProfile("someversion", name, "someavatar", null, null, null, null, "somecommitment".getBytes()); when(commands.hget(eq("profiles::" + uuid), eq("someversion"))).thenReturn(null); when(profiles.get(eq(uuid), eq("someversion"))).thenReturn(Optional.of(profile)); Optional<VersionedProfile> retrieved = profilesManager.get(uuid, "someversion"); assertTrue(retrieved.isPresent()); assertSame(retrieved.get(), profile); verify(commands, times(1)).hget(eq("profiles::" + uuid), eq("someversion")); verify(commands, times(1)).hset(eq("profiles::" + uuid), eq("someversion"), anyString()); verifyNoMoreInteractions(commands); verify(profiles, times(1)).get(eq(uuid), eq("someversion")); verifyNoMoreInteractions(profiles); }
/**
 * Parses an RFC 822 email stream with mime4j. The mime4j config and the
 * detector are taken from the ParseContext when present (the detector is
 * otherwise lazily obtained via EmbeddedDocumentUtil and cached). Content
 * decoding is enabled, recursion disabled. IOExceptions are rethrown with
 * their original cause when possible; MimeExceptions are unwrapped when they
 * carry a Tika or SAX cause, otherwise wrapped as TikaException.
 */
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException { // Get the mime4j configuration, or use a default one MimeConfig config = new MimeConfig.Builder().setMaxLineLen(100000).setMaxHeaderLen(100000).build(); config = context.get(MimeConfig.class, config); Detector localDetector = context.get(Detector.class); if (localDetector == null) { //lazily load this if necessary if (detector == null) { EmbeddedDocumentUtil embeddedDocumentUtil = new EmbeddedDocumentUtil(context); detector = embeddedDocumentUtil.getDetector(); } localDetector = detector; } MimeStreamParser parser = new MimeStreamParser(config, null, new DefaultBodyDescriptorBuilder()); XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata); MailContentHandler mch = new MailContentHandler(xhtml, localDetector, metadata, context, config.isStrictParsing(), extractAllAlternatives); parser.setContentHandler(mch); parser.setContentDecoding(true); parser.setNoRecurse(); xhtml.startDocument(); TikaInputStream tstream = TikaInputStream.get(stream); try { parser.parse(tstream); } catch (IOException e) { tstream.throwIfCauseOf(e); throw new TikaException("Failed to parse an email message", e); } catch (MimeException e) { // Unwrap the exception in case it was not thrown by mime4j Throwable cause = e.getCause(); if (cause instanceof TikaException) { throw (TikaException) cause; } else if (cause instanceof SAXException) { throw (SAXException) cause; } else { throw new TikaException("Failed to parse an email message", e); } } xhtml.endDocument(); }
// With multiple Subject headers, the later value is the one exposed as TikaCoreProperties.TITLE.
@Test public void testMultipleSubjects() throws Exception { //adapted from govdocs1 303710.txt String s = "From: Shawn Jones [chiroshawn@yahoo.com]\n" + "Subject: 2006N-3502\n" + "Subject: I Urge You to Require Notice of Mercury"; Parser p = new RFC822Parser(); Metadata m = new Metadata(); p.parse(TikaInputStream.get(s.getBytes(StandardCharsets.UTF_8)), new DefaultHandler(), m, new ParseContext()); assertEquals("I Urge You to Require Notice of Mercury", m.get(TikaCoreProperties.TITLE)); }
/**
 * Supplies the processor that receives foreign-key subscription requests, persists them
 * in the subscription store, and forwards them keyed by CombinedKey(foreignKey, primaryKey)
 * together with a Change(newValue, oldValue) derived from the store.
 */
@Override
public Processor<KO, SubscriptionWrapper<K>, CombinedKey<KO, K>, Change<ValueAndTimestamp<SubscriptionWrapper<K>>>> get() {
    return new ContextualProcessor<KO, SubscriptionWrapper<K>, CombinedKey<KO, K>, Change<ValueAndTimestamp<SubscriptionWrapper<K>>>>() {

        // Subscription store; also read by the prefix scanner on the foreign-table side.
        private TimestampedKeyValueStore<Bytes, SubscriptionWrapper<K>> store;
        private Sensor droppedRecordsSensor;

        @Override
        public void init(final ProcessorContext<CombinedKey<KO, K>, Change<ValueAndTimestamp<SubscriptionWrapper<K>>>> context) {
            super.init(context);
            final InternalProcessorContext<?, ?> internalProcessorContext = (InternalProcessorContext<?, ?>) context;
            droppedRecordsSensor = TaskMetrics.droppedRecordsSensor(
                    Thread.currentThread().getName(),
                    internalProcessorContext.taskId().toString(),
                    internalProcessorContext.metrics()
            );
            store = internalProcessorContext.getStateStore(storeName);
            keySchema.init(context);
        }

        @Override
        public void process(final Record<KO, SubscriptionWrapper<K>> record) {
            // A null foreign key is only acceptable when the instruction explicitly asks
            // for null propagation; otherwise the record is dropped (and metered).
            if (record.key() == null && !SubscriptionWrapper.Instruction.PROPAGATE_NULL_IF_NO_FK_VAL_AVAILABLE.equals(record.value().getInstruction())) {
                dropRecord();
                return;
            }
            if (record.value().getVersion() > SubscriptionWrapper.CURRENT_VERSION) {
                //Guard against modifications to SubscriptionWrapper. Need to ensure that there is compatibility
                //with previous versions to enable rolling upgrades. Must develop a strategy for upgrading
                //from older SubscriptionWrapper versions to newer versions.
                throw new UnsupportedVersionException("SubscriptionWrapper is of an incompatible version.");
            }
            context().forward(
                    record.withKey(new CombinedKey<>(record.key(), record.value().getPrimaryKey()))
                            .withValue(inferChange(record))
                            .withTimestamp(record.timestamp())
            );
        }

        // A null foreign key carries no prior state; otherwise derive old/new from the store.
        private Change<ValueAndTimestamp<SubscriptionWrapper<K>>> inferChange(final Record<KO, SubscriptionWrapper<K>> record) {
            if (record.key() == null) {
                return new Change<>(ValueAndTimestamp.make(record.value(), record.timestamp()), null);
            } else {
                return inferBasedOnState(record);
            }
        }

        private Change<ValueAndTimestamp<SubscriptionWrapper<K>>> inferBasedOnState(final Record<KO, SubscriptionWrapper<K>> record) {
            final Bytes subscriptionKey = keySchema.toBytes(record.key(), record.value().getPrimaryKey());
            final ValueAndTimestamp<SubscriptionWrapper<K>> newValue = ValueAndTimestamp.make(record.value(), record.timestamp());
            final ValueAndTimestamp<SubscriptionWrapper<K>> oldValue = store.get(subscriptionKey);
            //This store is used by the prefix scanner in ForeignTableJoinProcessorSupplier
            if (record.value().getInstruction().equals(SubscriptionWrapper.Instruction.DELETE_KEY_AND_PROPAGATE)
                    || record.value().getInstruction().equals(SubscriptionWrapper.Instruction.DELETE_KEY_NO_PROPAGATE)) {
                store.delete(subscriptionKey);
            } else {
                store.put(subscriptionKey, newValue);
            }
            return new Change<>(newValue, oldValue);
        }

        // Logs (with topic/partition/offset when available) and meters a dropped record.
        private void dropRecord() {
            if (context().recordMetadata().isPresent()) {
                final RecordMetadata recordMetadata = context().recordMetadata().get();
                LOG.warn(
                        "Skipping record due to null foreign key. " + "topic=[{}] partition=[{}] offset=[{}]",
                        recordMetadata.topic(), recordMetadata.partition(), recordMetadata.offset()
                );
            } else {
                LOG.warn(
                        "Skipping record due to null foreign key. Topic, partition, and offset not known."
                );
            }
            droppedRecordsSensor.record();
        }
    };
}
/**
 * With a V1 PROPAGATE_ONLY_IF_FK_VAL_AVAILABLE subscription, a new value for an existing
 * subscription key must overwrite the stored value and be forwarded as Change(new, old).
 */
@Test
public void shouldPropagateOnlyIfFKValAvailableV1() {
    final StoreBuilder<TimestampedKeyValueStore<Bytes, SubscriptionWrapper<String>>> storeBuilder = storeBuilder();
    final SubscriptionReceiveProcessorSupplier<String, String> supplier = supplier(storeBuilder);
    final Processor<String, SubscriptionWrapper<String>, CombinedKey<String, String>, Change<ValueAndTimestamp<SubscriptionWrapper<String>>>> processor = supplier.get();
    stateStore = storeBuilder.build();
    context.addStateStore(stateStore);
    stateStore.init((StateStoreContext) context, stateStore);
    // Seed the store with an older subscription under the same combined key.
    final SubscriptionWrapper<String> oldWrapper = new SubscriptionWrapper<>(
            new long[]{1L, 2L},
            Instruction.PROPAGATE_ONLY_IF_FK_VAL_AVAILABLE,
            PK2,
            SubscriptionWrapper.VERSION_1,
            1
    );
    final ValueAndTimestamp<SubscriptionWrapper<String>> oldValue = ValueAndTimestamp.make(oldWrapper, 0);
    final Bytes key = COMBINED_KEY_SCHEMA.toBytes(FK, PK1);
    stateStore.put(key, oldValue);
    processor.init(context);
    final SubscriptionWrapper<String> newWrapper = new SubscriptionWrapper<>(
            new long[]{1L, 2L},
            Instruction.PROPAGATE_ONLY_IF_FK_VAL_AVAILABLE,
            PK1,
            SubscriptionWrapper.VERSION_1,
            1
    );
    final ValueAndTimestamp<SubscriptionWrapper<String>> newValue = ValueAndTimestamp.make(
            newWrapper, 1L);
    final Record<String, SubscriptionWrapper<String>> record = new Record<>(
            FK,
            newWrapper,
            1L
    );
    processor.process(record);
    final List<CapturedForward<? extends CombinedKey<String, String>, ? extends Change<ValueAndTimestamp<SubscriptionWrapper<String>>>>> forwarded = context.forwarded();
    // The store now holds the new value, and exactly one Change(new, old) was forwarded.
    assertEquals(newValue, stateStore.get(key));
    assertEquals(1, forwarded.size());
    assertEquals(
            record.withKey(new CombinedKey<>(FK, PK1))
                    .withValue(new Change<>(newValue, oldValue)),
            forwarded.get(0).record()
    );
}
/**
 * Samples up to {@code bytesToTest} bytes from the (resettable) stream and classifies
 * it as text/plain when the sample is mostly ASCII or looks like UTF-8, otherwise as
 * application/octet-stream. The stream position is restored before returning.
 */
public MediaType detect(InputStream input, Metadata metadata) throws IOException {
    if (input == null) {
        // No content to look at: fall back to the generic binary type.
        return MediaType.OCTET_STREAM;
    }
    input.mark(bytesToTest);
    try {
        TextStatistics stats = new TextStatistics();
        byte[] chunk = new byte[1024];
        int totalRead = 0;
        while (totalRead < bytesToTest) {
            int read = input.read(chunk, 0, Math.min(bytesToTest - totalRead, chunk.length));
            if (read == -1) {
                break;
            }
            stats.addData(chunk, 0, read);
            totalRead += read;
        }
        boolean looksTextual = stats.isMostlyAscii() || stats.looksLikeUTF8();
        return looksTextual ? MediaType.TEXT_PLAIN : MediaType.OCTET_STREAM;
    } finally {
        // Rewind so downstream consumers see the stream from the start.
        input.reset();
    }
}
/** A null stream must be classified as application/octet-stream. */
@Test
public void testDetectNull() throws Exception {
    assertEquals(MediaType.OCTET_STREAM, detector.detect(null, new Metadata()));
}
/**
 * Dispatches an xDS discovery response to the handler matching its resource type.
 * Throws AssertionError if a new ResourceType is introduced without a handler here.
 */
@VisibleForTesting
void handleResponse(DiscoveryResponseData response) {
    ResourceType resourceType = response.getResourceType();
    if (resourceType == ResourceType.NODE) {
        handleD2NodeResponse(response);
    } else if (resourceType == ResourceType.D2_URI_MAP) {
        handleD2URIMapResponse(response);
    } else if (resourceType == ResourceType.D2_URI) {
        handleD2URICollectionResponse(response);
    } else {
        throw new AssertionError("Missing case in enum switch: " + resourceType);
    }
}
/**
 * Bad node discovery data must be NACKed when expected and must never clobber
 * previously received valid node data.
 */
@Test(dataProvider = "badNodeUpdateTestCases")
public void testHandleD2NodeUpdateWithBadData(DiscoveryResponseData badData, boolean nackExpected) {
    XdsClientImplFixture fixture = new XdsClientImplFixture();
    // Case 1: no prior data — watcher sees empty data, node data stays null.
    fixture._nodeSubscriber.setData(null);
    fixture._xdsClientImpl.handleResponse(badData);
    fixture.verifyAckOrNack(nackExpected, 1);
    verify(fixture._resourceWatcher).onChanged(eq(NODE.emptyData()));
    XdsClient.NodeUpdate actualData = (XdsClient.NodeUpdate) fixture._nodeSubscriber.getData();
    Assert.assertNull(Objects.requireNonNull(actualData).getNodeData());
    // Case 2: valid data already present — bad data must not overwrite it.
    fixture._nodeSubscriber.setData(NODE_UPDATE1);
    fixture._xdsClientImpl.handleResponse(badData);
    fixture.verifyAckOrNack(nackExpected, 2);
    verify(fixture._resourceWatcher).onChanged(eq(NODE_UPDATE1));
    actualData = (XdsClient.NodeUpdate) fixture._nodeSubscriber.getData();
    // bad data will not overwrite the original valid data
    Assert.assertEquals(actualData.getNodeData(), NODE_UPDATE1.getNodeData());
}
/**
 * Packs the owner/group/other permission triples into the canonical 9-bit
 * POSIX mode value (owner bits highest, other bits lowest).
 */
public short toShort() {
    final int ownerBits = mOwnerBits.ordinal() << 6;
    final int groupBits = mGroupBits.ordinal() << 3;
    final int otherBits = mOtherBits.ordinal();
    return (short) (ownerBits | groupBits | otherBits);
}
/** getUMask must parse octal umask strings back to the same numeric mode. */
@Test
public void umask() {
    assertEquals(0700, ModeUtils.getUMask("0700").toShort());
    assertEquals(0755, ModeUtils.getUMask("0755").toShort());
    assertEquals(0644, ModeUtils.getUMask("0644").toShort());
}
/**
 * Runs the base check, then aborts the current CE task with a
 * CeTaskTimeoutException if its configured timeout has elapsed.
 *
 * @throws CeTaskInterruptedException from the base check or on timeout
 */
@Override
public void check(Thread currentThread) throws CeTaskInterruptedException {
    super.check(currentThread);
    // A present value means the task exceeded its timeout; abort it.
    computeTimeOutOf(taskOf(currentThread))
        .ifPresent(timeout -> {
            throw new CeTaskTimeoutException(format("Execution of task timed out after %s ms", timeout));
        });
}
/** A worker executing a task that never recorded a start time must fail fast with ISE. */
@Test
public void check_fails_with_ISE_if_thread_is_executing_a_CeTask_but_on_start_has_not_been_called_on_it() {
    String taskUuid = randomAlphabetic(15);
    Thread t = new Thread();
    mockWorkerOnThread(t, ceWorker);
    mockWorkerWithTask(ceTask);
    when(ceTask.getUuid()).thenReturn(taskUuid);
    assertThatThrownBy(() -> underTest.check(t))
        .isInstanceOf(IllegalStateException.class)
        .hasMessage("No start time recorded for task " + taskUuid);
}
/**
 * Folds one input into the running maximum. Matches SQL MAX semantics:
 * NULL inputs are ignored, and the first non-null value seeds the result.
 */
@Override
public void accumulate(Object value) {
    if (value == null) {
        return;
    }
    final Object currentMax = this.value;
    if (currentMax == null || compare(currentMax, value) < 0) {
        this.value = value;
    }
}
/** MaxSqlAggregation must survive a serialization round-trip with its state intact. */
@Test
public void test_serialization() {
    MaxSqlAggregation original = new MaxSqlAggregation();
    original.accumulate(1);
    InternalSerializationService ss = new DefaultSerializationServiceBuilder().build();
    MaxSqlAggregation serialized = ss.toObject(ss.toData(original));
    assertThat(serialized).isEqualToComparingFieldByField(original);
}
/**
 * "Closes" the pooled session: resets it (listener cleared, consumers/browsers closed,
 * local transaction rolled back) and returns it to the pool, or invalidates it when
 * cleanup failed so the pool creates a fresh session on demand. Only the first call
 * has an effect; subsequent calls are no-ops.
 *
 * @throws JMSException if the session cannot be returned to the pool
 */
@Override
public void close() throws JMSException {
    if (ignoreClose) {
        return;
    }
    if (closed.compareAndSet(false, true)) {
        boolean invalidate = false;
        try {
            // lets reset the session
            getInternalSession().setMessageListener(null);
            // Close any consumers and browsers that may have been created.
            for (Iterator<MessageConsumer> iter = consumers.iterator(); iter.hasNext();) {
                MessageConsumer consumer = iter.next();
                consumer.close();
            }
            for (Iterator<QueueBrowser> iter = browsers.iterator(); iter.hasNext();) {
                QueueBrowser browser = iter.next();
                browser.close();
            }
            // Roll back any in-flight local transaction so the next user gets a clean session.
            if (transactional && !isXa) {
                try {
                    getInternalSession().rollback();
                } catch (JMSException e) {
                    invalidate = true;
                    LOG.warn("Caught exception trying rollback() when putting session back into the pool, will invalidate. " + e, e);
                }
            }
        } catch (JMSException ex) {
            invalidate = true;
            LOG.warn("Caught exception trying close() when putting session back into the pool, will invalidate. " + ex, ex);
        } finally {
            // Always drop references and notify listeners, even when cleanup failed.
            consumers.clear();
            browsers.clear();
            for (PooledSessionEventListener listener : this.sessionEventListeners) {
                listener.onSessionClosed(this);
            }
            sessionEventListeners.clear();
        }
        if (invalidate) {
            // lets close the session and not put the session back into the pool
            // instead invalidate it so the pool can create a new one on demand.
            if (sessionHolder != null) {
                try {
                    sessionHolder.close();
                } catch (JMSException e1) {
                    LOG.trace("Ignoring exception on close as discarding session: " + e1, e1);
                }
            }
            try {
                sessionPool.invalidateObject(key, sessionHolder);
            } catch (Exception e) {
                LOG.trace("Ignoring exception on invalidateObject as discarding session: " + e, e);
            }
        } else {
            try {
                sessionPool.returnObject(key, sessionHolder);
            } catch (Exception e) {
                jakarta.jms.IllegalStateException illegalStateException = new jakarta.jms.IllegalStateException(e.toString());
                illegalStateException.initCause(e);
                throw illegalStateException;
            }
        }
        sessionHolder = null;
    }
}
/** Session open/close must be reflected in the connection's active/idle counters. */
@Test(timeout = 60000)
public void testPooledSessionStats() throws Exception {
    PooledConnection connection = (PooledConnection) pooledFactory.createConnection();
    assertEquals(0, connection.getNumActiveSessions());
    Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
    assertEquals(1, connection.getNumActiveSessions());
    session.close();
    // Closing returns the session to the pool rather than destroying it.
    assertEquals(0, connection.getNumActiveSessions());
    assertEquals(1, connection.getNumtIdleSessions());
    assertEquals(1, connection.getNumSessions());
    connection.close();
}
/**
 * Joins the rendered forms of the list elements with {@code delim}.
 * Element rendering is delegated to the class's toString helper.
 */
public static String join(List<?> list, String delim) {
    final int size = list.size();
    if (size == 0) {
        return "";
    }
    // Seed with the first element, then prepend the delimiter to each subsequent one.
    final StringBuilder joined = new StringBuilder(toString(list.get(0), delim));
    for (int index = 1; index < size; index++) {
        joined.append(delim).append(toString(list.get(index), delim));
    }
    return joined.toString();
}
/** A leading null element renders as an empty string before the delimiter. */
@Test
public void testTwoElementWithLeadingNullJoin() throws IOException {
    assertEquals(",foo", KeyNode.join(Arrays.asList(null, "foo"), ","));
}
/**
 * Binds every polymorphic choice against the given type variables and packages
 * them into a single BuiltInScalarFunctionImplementation.
 */
@Override
public BuiltInScalarFunctionImplementation specialize(BoundVariables boundVariables, int arity, FunctionAndTypeManager functionAndTypeManager) {
    ImmutableList.Builder<ScalarFunctionImplementationChoice> boundChoices = ImmutableList.builder();
    for (PolymorphicScalarFunctionChoice choice : choices) {
        boundChoices.add(getScalarFunctionImplementationChoice(boundVariables, functionAndTypeManager, choice));
    }
    return new BuiltInScalarFunctionImplementation(boundChoices.build());
}
/** The engine must pick the overload whose return type matches the bound SQL return type. */
@Test
public void testSelectsMethodBasedOnReturnType() throws Throwable {
    SqlScalarFunction function = SqlScalarFunction.builder(TestMethods.class)
        .signature(SIGNATURE)
        .deterministic(true)
        .calledOnNullInput(false)
        .choice(choice -> choice
            .implementation(methodsGroup -> methodsGroup.methods("varcharToVarcharCreateSliceWithExtraParameterLength"))
            .implementation(methodsGroup -> methodsGroup
                .methods("varcharToBigintReturnExtraParameter")
                .withExtraParameters(context -> ImmutableList.of(42))))
        .build();
    BuiltInScalarFunctionImplementation functionImplementation = function.specialize(BOUND_VARIABLES, 1, FUNCTION_AND_TYPE_MANAGER);
    assertEquals(functionImplementation.getMethodHandle().invoke(INPUT_SLICE), VARCHAR_TO_BIGINT_RETURN_VALUE);
}
/**
 * Executes a Telegram Bot API request synchronously and returns its typed response.
 *
 * @param request the request to send
 * @return the response type paired with the request type
 */
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    // Pure delegation to the underlying API client.
    return api.send(request);
}
/**
 * Exercises EditMessageCaption by chat id, by channel name, and by inline message id,
 * verifying caption text and the parsed HTML entities.
 */
@Test
public void editMessageCaption() {
    String text = "Update " + System.currentTimeMillis() + " <b>bold</b>";
    SendResponse sendResponse = (SendResponse) bot.execute(new EditMessageCaption(chatId, 8124)
        .caption(text)
        .parseMode(ParseMode.HTML)
        .replyMarkup(new InlineKeyboardMarkup()));
    assertTrue(sendResponse.isOk());
    Message message = sendResponse.message();
    // HTML tags are stripped from the stored caption; the bold run becomes an entity.
    assertEquals(text.replace("<b>", "").replace("</b>", ""), message.caption());
    MessageEntity captionEntity = message.captionEntities()[0];
    assertEquals(MessageEntity.Type.bold, captionEntity.type());
    assertEquals((Integer) 21, captionEntity.offset());
    assertEquals((Integer) 4, captionEntity.length());
    BaseResponse response = bot.execute(new EditMessageCaption(channelName, 511).caption(text));
    assertTrue(response.isOk());
    response = bot.execute(new EditMessageCaption("AgAAAPrwAQCj_Q4D2s-51_8jsuU").caption(text));
    if (!response.isOk()) {
        // The hard-coded inline-message id may be stale; accept the API's invalid-id error.
        assertEquals(400, response.errorCode());
        assertEquals("Bad Request: MESSAGE_ID_INVALID", response.description());
    }
}
/**
 * Builds the instruction-index -> code-offset table: each entry holds the code-unit
 * offset at which the instruction with that index starts.
 */
public InstructionOffsetMap(@Nonnull List<? extends Instruction> instructions) {
    final int count = instructions.size();
    this.instructionCodeOffsets = new int[count];
    int codeUnitsSoFar = 0;
    for (int index = 0; index < count; index++) {
        instructionCodeOffsets[index] = codeUnitsSoFar;
        codeUnitsSoFar += instructions.get(index).getCodeUnits();
    }
}
@Test public void testInstructionOffsetMap() { ImmutableList<ImmutableInstruction> instructions = ImmutableList.of( /*00: 0x00*/ new ImmutableInstruction10t(Opcode.GOTO, 1), /*01: 0x01*/ new ImmutableInstruction10x(Opcode.NOP), /*02: 0x02*/ new ImmutableInstruction11n(Opcode.CONST_4, 2, 3), /*03: 0x03*/ new ImmutableInstruction11x(Opcode.RETURN, 4), /*04: 0x04*/ new ImmutableInstruction12x(Opcode.ARRAY_LENGTH, 5, 6), /*05: 0x05*/ new ImmutableInstruction20t(Opcode.GOTO_16, 7), /*06: 0x07*/ new ImmutableInstruction21c(Opcode.CONST_STRING, 8, new ImmutableStringReference("blah")), /*07: 0x09*/ new ImmutableInstruction21ih(Opcode.CONST_HIGH16, 9, 0x10000), /*08: 0x0b*/ new ImmutableInstruction21lh(Opcode.CONST_WIDE_HIGH16, 10, 0x1000000000000L), /*09: 0x0d*/ new ImmutableInstruction21s(Opcode.CONST_16, 11, 12), /*10: 0x0f*/ new ImmutableInstruction21t(Opcode.IF_EQZ, 12, 13), /*11: 0x11*/ new ImmutableInstruction22b(Opcode.ADD_INT_LIT8, 14, 15, 16), /*12: 0x13*/ new ImmutableInstruction22c(Opcode.INSTANCE_OF, 0, 1, new ImmutableTypeReference("Ltype;")), /*13: 0x15*/ new ImmutableInstruction22s(Opcode.ADD_INT_LIT16, 2, 3, 17), /*14: 0x17*/ new ImmutableInstruction22t(Opcode.IF_EQ, 4, 5, 18), /*15: 0x19*/ new ImmutableInstruction22x(Opcode.MOVE_FROM16, 19, 20), /*16: 0x1b*/ new ImmutableInstruction23x(Opcode.AGET, 21, 22, 23), /*17: 0x1d*/ new ImmutableInstruction30t(Opcode.GOTO_32, 24), /*18: 0x20*/ new ImmutableInstruction31c(Opcode.CONST_STRING_JUMBO, 25, new ImmutableStringReference("this is a string")), /*19: 0x23*/ new ImmutableInstruction31i(Opcode.CONST, 26, 27), /*20: 0x26*/ new ImmutableInstruction31t(Opcode.FILL_ARRAY_DATA, 28, 29), /*21: 0x29*/ new ImmutableInstruction32x(Opcode.MOVE_16, 30, 31), /*22: 0x2c*/ new ImmutableInstruction35c(Opcode.FILLED_NEW_ARRAY, 0, 0, 0, 0, 0, 0, new ImmutableTypeReference("Ltype;")), /*23: 0x2f*/ new ImmutableInstruction3rc(Opcode.FILLED_NEW_ARRAY_RANGE, 0, 0, new ImmutableTypeReference("Ltype;")), /*24: 0x32*/ new 
ImmutableInstruction51l(Opcode.CONST_WIDE, 32, 33), /*25: 0x37*/ new ImmutableInstruction10t(Opcode.GOTO, 1) ); ImmutableMethodImplementation impl = new ImmutableMethodImplementation(33, instructions, null, null); InstructionOffsetMap instructionOffsetMap = new InstructionOffsetMap(instructions); int[] expectedOffsets = new int[] { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x07, 0x09, 0x0b, 0x0d, 0x0f, 0x11, 0x13, 0x15, 0x17, 0x19, 0x1b, 0x1d, 0x20, 0x23, 0x26, 0x29, 0x2c, 0x2f, 0x32, 0x37}; for (int i=0; i<instructions.size(); i++) { Assert.assertEquals(expectedOffsets[i], instructionOffsetMap.getInstructionCodeOffset(i)); Assert.assertEquals(i, instructionOffsetMap.getInstructionIndexAtCodeOffset(expectedOffsets[i], true)); Assert.assertEquals(i, instructionOffsetMap.getInstructionIndexAtCodeOffset(expectedOffsets[i], false)); } int instructionIndex = -1; for (int codeOffset=0; codeOffset<=expectedOffsets[expectedOffsets.length-1]; codeOffset++) { if (codeOffset == expectedOffsets[instructionIndex+1]) { // this offset is at the beginning of an instruction instructionIndex++; } else { // this offset is in the middle of an instruction Assert.assertEquals(instructionIndex, instructionOffsetMap.getInstructionIndexAtCodeOffset(codeOffset, false)); try { instructionOffsetMap.getInstructionIndexAtCodeOffset(codeOffset, true); Assert.fail(String.format("Exception exception didn't occur for code offset 0x%x", codeOffset)); } catch (ExceptionWithContext ex) { // expected exception } } } Assert.assertEquals(expectedOffsets.length-1, instructionOffsetMap.getInstructionIndexAtCodeOffset(expectedOffsets[expectedOffsets.length-1]+1, false)); Assert.assertEquals(expectedOffsets.length-1, instructionOffsetMap.getInstructionIndexAtCodeOffset(expectedOffsets[expectedOffsets.length-1]+10, false)); }
/**
 * Executes an INSERT VALUES statement by building and producing a single Kafka record.
 * Authorization failures are rewrapped so error messages stay consistent across the
 * different exception shapes Kafka can throw.
 */
@SuppressWarnings("unused") // Part of required API.
public void execute(
    final ConfiguredStatement<InsertValues> statement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext
) {
    final InsertValues insertValues = statement.getStatement();
    final MetaStore metaStore = executionContext.getMetaStore();
    final KsqlConfig config = statement.getSessionConfig().getConfig(true);
    final DataSource dataSource = getDataSource(config, metaStore, insertValues);
    validateInsert(insertValues.getColumns(), dataSource);
    final ProducerRecord<byte[], byte[]> record = buildRecord(statement, metaStore, dataSource, serviceContext);
    try {
        producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps());
    } catch (final TopicAuthorizationException e) {
        // TopicAuthorizationException does not give much detailed information about why it failed,
        // except which topics are denied. Here we just add the ACL to make the error message
        // consistent with other authorization error messages.
        final Exception rootCause = new KsqlTopicAuthorizationException(
            AclOperation.WRITE,
            e.unauthorizedTopics()
        );
        throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause);
    } catch (final ClusterAuthorizationException e) {
        // ClusterAuthorizationException is thrown when using idempotent producers
        // and either a topic write permission or a cluster-level idempotent write
        // permission (only applicable for broker versions no later than 2.8) is
        // missing. In this case, we include additional context to help the user
        // distinguish this type of failure from other permissions exceptions
        // such as the ones thrown above when TopicAuthorizationException is caught.
        throw new KsqlException(
            createInsertFailedExceptionMessage(insertValues),
            createClusterAuthorizationExceptionRootCause(dataSource)
        );
    } catch (final KafkaException e) {
        if (e.getCause() != null && e.getCause() instanceof ClusterAuthorizationException) {
            // The error message thrown when an idempotent producer is missing permissions
            // is (nondeterministically) inconsistent: it is either a raw ClusterAuthorizationException,
            // as checked for above, or a ClusterAuthorizationException wrapped inside a KafkaException.
            // ksqlDB handles these two the same way, accordingly.
            // See https://issues.apache.org/jira/browse/KAFKA-14138 for more.
            throw new KsqlException(
                createInsertFailedExceptionMessage(insertValues),
                createClusterAuthorizationExceptionRootCause(dataSource)
            );
        } else {
            throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
        }
    } catch (final Exception e) {
        throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e);
    }
}
/** A failed producer send must surface as a KsqlException with an insert-failure message. */
@Test
public void shouldThrowOnProducerSendError() throws ExecutionException, InterruptedException {
    // Given:
    final ConfiguredStatement<InsertValues> statement = givenInsertValues(
        allAndPseudoColumnNames(SCHEMA),
        ImmutableList.of(
            new LongLiteral(1L),
            new StringLiteral("str"),
            new StringLiteral("str"),
            new LongLiteral(2L)
        )
    );
    final Future<?> failure = mock(Future.class);
    when(failure.get()).thenThrow(ExecutionException.class);
    doReturn(failure).when(producer).send(any());
    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> executor.execute(statement, mock(SessionProperties.class), engine, serviceContext)
    );
    // Then:
    assertThat(e.getMessage(), containsString(
        "Failed to insert values into "));
}
public static String decode(Long sqlMode) throws DdlException { // 0 parse to empty string if (sqlMode == 0) { return ""; } if ((sqlMode & ~MODE_ALLOWED_MASK) != 0) { ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_VALUE_FOR_VAR, SessionVariable.SQL_MODE, sqlMode); } List<String> names = new ArrayList<String>(); for (Map.Entry<String, Long> mode : getSupportedSqlMode().entrySet()) { if ((sqlMode & mode.getValue()) != 0) { names.add(mode.getKey()); } } return Joiner.on(',').join(names); }
/** Decoding a mask with an unsupported bit (MODE_LAST) must raise DdlException. */
@Test(expected = DdlException.class)
public void testInvalidDecode() throws DdlException {
    long sqlMode = SqlModeHelper.MODE_LAST;
    SqlModeHelper.decode(sqlMode);
    Assert.fail("No exception throws");
}
/**
 * Builds a failure response carrying the FAIL result code and the given message.
 *
 * @param message human-readable failure reason
 * @return a ServiceListResponse marked as failed
 */
public static ServiceListResponse buildFailResponse(String message) {
    final ServiceListResponse response = new ServiceListResponse();
    response.setErrorInfo(ResponseCode.FAIL.getCode(), message);
    return response;
}
/** A failure response must serialize with FAIL codes, the message, and success=false. */
@Test
void testSerializeFailResponse() throws JsonProcessingException {
    ServiceListResponse response = ServiceListResponse.buildFailResponse("test");
    String json = mapper.writeValueAsString(response);
    assertTrue(json.contains("\"resultCode\":500"));
    assertTrue(json.contains("\"errorCode\":500"));
    assertTrue(json.contains("\"message\":\"test\""));
    assertTrue(json.contains("\"success\":false"));
}
@Override public void pickSuggestionManually( int index, CharSequence suggestion, boolean withAutoSpaceEnabled) { if (getCurrentComposedWord().isAtTagsSearchState()) { if (index == 0) { // this is a special case for tags-searcher // since we append a magnifying glass to the suggestions, the "suggestion" // value is not a valid output suggestion suggestion = getCurrentComposedWord().getTypedWord().toString(); } else { // regular emoji. Storing in history. getQuickKeyHistoryRecords().store(suggestion.toString(), suggestion.toString()); } } super.pickSuggestionManually(index, suggestion, withAutoSpaceEnabled); }
/** Picking an emoji suggestion must commit the emoji itself into the input connection. */
@Test
public void testPickingEmojiOutputsToInput() throws Exception {
    verifyNoSuggestionsInteractions();
    mAnySoftKeyboardUnderTest.simulateTextTyping(":face");
    mAnySoftKeyboardUnderTest.pickSuggestionManually(1, "\uD83D\uDE00");
    verifySuggestions(true);
    Assert.assertEquals("\uD83D\uDE00", mAnySoftKeyboardUnderTest.getCurrentInputConnectionText());
    // deleting
    // correctly, this is a bug with TestInputConnection: it reports that there is one character
    // in the input
    // but that's because it does not support deleting multi-character emojis.
    Assert.assertEquals(2, mAnySoftKeyboardUnderTest.getCurrentInputConnectionText().length());
    mAnySoftKeyboardUnderTest.simulateKeyPress(KeyCodes.DELETE);
    // so, it was two characters, and now it's one
    Assert.assertEquals(1, mAnySoftKeyboardUnderTest.getCurrentInputConnectionText().length());
}
/**
 * Checks table existence by delegating to the native Paimon catalog.
 */
@Override
public boolean tableExists(String dbName, String tableName) {
    Identifier identifier = Identifier.create(dbName, tableName);
    return paimonNativeCatalog.tableExists(identifier);
}
/** tableExists must delegate to the native Paimon catalog. */
@Test
public void testTableExists(@Mocked FileStoreTable paimonNativeTable) {
    new Expectations() {
        {
            paimonNativeCatalog.tableExists((Identifier) any);
            result = true;
        }
    };
    Assert.assertTrue(metadata.tableExists("db1", "tbl1"));
}
/**
 * Maps CSV header column positions to the schema fields they populate.
 * Fields absent from the header are skipped (getIndex returns a negative value).
 */
static Map<Integer, Schema.Field> mapFieldPositions(CSVFormat format, Schema schema) {
    Map<Integer, Schema.Field> positionToField = new HashMap<>();
    List<String> header = Arrays.asList(format.getHeader());
    for (Schema.Field field : schema.getFields()) {
        int position = getIndex(header, field);
        if (position >= 0) {
            positionToField.put(position, field);
        }
    }
    return positionToField;
}
/** Nullable schema fields absent from the CSV header must simply be skipped in the mapping. */
@Test
public void givenSchemaContainsNullableFieldTypes() {
    Schema schema = Schema.builder()
        .addNullableStringField("a_string")
        .addDoubleField("a_double")
        .addInt32Field("an_integer")
        .addDateTimeField("a_datetime")
        .addNullableStringField("another_string")
        .build();
    ImmutableMap<Integer, Schema.Field> want = ImmutableMap.of(
        0, schema.getField("an_integer"),
        1, schema.getField("a_double"),
        2, schema.getField("a_datetime"));
    Map<Integer, Schema.Field> got = CsvIOParseHelpers.mapFieldPositions(
        csvFormat().withHeader("an_integer", "a_double", "a_datetime"), schema);
    assertEquals(want, got);
}
/**
 * Builds an IntArrayList containing the half-open integer range [startIncl, endExcl).
 */
public static IntArrayList range(int startIncl, int endExcl) {
    final int count = endExcl - startIncl;
    IntArrayList list = new IntArrayList(count);
    // Size the backing storage up front, then fill each slot in place.
    list.elementsCount = count;
    for (int offset = 0; offset < count; ++offset) {
        list.set(offset, startIncl + offset);
    }
    return list;
}
/** range(start, endExcl) must produce the half-open sequence [start, endExcl). */
@Test
public void testRange() {
    assertEquals(from(3, 4, 5, 6), ArrayUtil.range(3, 7));
    // Negative bounds and the empty range are handled too.
    assertEquals(from(-3, -2), ArrayUtil.range(-3, -1));
    assertEquals(from(), ArrayUtil.range(5, 5));
}
/**
 * Encodes the OSM "tracktype" tag onto the edge; edges without a recognized
 * tracktype value are left untouched (MISSING).
 */
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay readerWay, IntsRef relationFlags) {
    TrackType trackType = TrackType.find(readerWay.getTag("tracktype"));
    if (trackType == MISSING) {
        return;
    }
    trackTypeEnc.setEnum(false, edgeId, edgeIntAccess, trackType);
}
/** Each OSM tracktype grade tag must map to the corresponding TrackType enum value. */
@Test
public void testSimpleTags() {
    ReaderWay readerWay = new ReaderWay(1);
    EdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1);
    int edgeId = 0;
    readerWay.setTag("tracktype", "grade1");
    parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
    assertEquals(TrackType.GRADE1, ttEnc.getEnum(false, edgeId, edgeIntAccess));
    // A fresh edge storage per grade keeps the assertions independent.
    edgeIntAccess = new ArrayEdgeIntAccess(1);
    readerWay.setTag("tracktype", "grade2");
    parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
    assertEquals(TrackType.GRADE2, ttEnc.getEnum(false, edgeId, edgeIntAccess));
    edgeIntAccess = new ArrayEdgeIntAccess(1);
    readerWay.setTag("tracktype", "grade3");
    parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
    assertEquals(TrackType.GRADE3, ttEnc.getEnum(false, edgeId, edgeIntAccess));
    edgeIntAccess = new ArrayEdgeIntAccess(1);
    readerWay.setTag("tracktype", "grade4");
    parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
    assertEquals(TrackType.GRADE4, ttEnc.getEnum(false, edgeId, edgeIntAccess));
    edgeIntAccess = new ArrayEdgeIntAccess(1);
    readerWay.setTag("tracktype", "grade5");
    parser.handleWayTags(edgeId, edgeIntAccess, readerWay, relFlags);
    assertEquals(TrackType.GRADE5, ttEnc.getEnum(false, edgeId, edgeIntAccess));
}
/**
 * Exposes a DOM NodeList as an ordered Stream of its nodes; a null list yields
 * an empty stream.
 *
 * BUG FIX: the previous implementation used Stream.generate(...).limit(n) driven by a
 * side-effecting AtomicInteger. Stream.generate produces an UNORDERED stream, so node
 * order was not guaranteed (and the shared counter raced) if the stream was ever used
 * in parallel. Stream.iterate is ordered and side-effect-free.
 *
 * @param nodeList the node list to wrap, may be null
 * @return a stream over the nodes in document order
 */
protected static Stream<Node> asStream(NodeList nodeList) {
    if (nodeList == null) {
        return Stream.empty();
    }
    return Stream.iterate(0, i -> i + 1)
            .limit(nodeList.getLength())
            .map(nodeList::item);
}
/** asStream must expose the nodes of top-level, child, and nested NodeLists. */
@Test
public void asStream() throws Exception {
    Document document = DOMParserUtil.getDocument(XML);
    final NodeList mainNodeList = document.getElementsByTagName("Main");
    commonCheckNodeStream(mainNodeList);
    final NodeList childNodesList = mainNodeList.item(0).getChildNodes();
    commonCheckNodeStream(childNodesList);
    final NodeList innerNodesList = childNodesList.item(0).getChildNodes();
    commonCheckNodeStream(innerNodesList);
}
public static long nextStartTimestamp(TimeUnit timeUnit, long start) { long nextTimestamp = start; switch (timeUnit) { case MILLISECONDS: break; case SECONDS: nextTimestamp = 1000 + 1000 * (start / 1000); // the next second is the start timestamp. break; case MINUTES: nextTimestamp = 60000 + 60000 * (start / 60000); // next minute is the start timestamp break; case HOURS: nextTimestamp = 3600000 + 3600000 * (start / 3600000); // next hour is the start timestamp break; case DAYS: nextTimestamp = 86400000 + 86400000 * (start / 86400000); // next day is the start timestamp break; } return nextTimestamp; }
/**
 * nextStartTimestamp(DAYS, now) must return the first millisecond of the following day.
 *
 * BUG FIX: the original test only printed values and asserted nothing, so it could
 * never fail. It now verifies the boundary is after 'start', within one day of it,
 * and aligned to a whole day.
 */
@Test
public void testNextDay() {
    long start = System.currentTimeMillis();
    long nextTimestamp = TimeUtil.nextStartTimestamp(TimeUnit.DAYS, start);
    long dayMillis = TimeUnit.DAYS.toMillis(1);
    org.junit.Assert.assertTrue("next day boundary must be strictly after start", nextTimestamp > start);
    org.junit.Assert.assertTrue("next day boundary must be within one day of start", nextTimestamp - start <= dayMillis);
    org.junit.Assert.assertEquals("next day boundary must be day-aligned", 0L, nextTimestamp % dayMillis);
}
/**
 * Validates a LOAD SINGLE TABLE statement before it is applied: the referenced
 * storage units exist, no single table is duplicated, and the actual tables exist.
 */
@Override
public void checkBeforeUpdate(final LoadSingleTableStatement sqlStatement) {
    checkStorageUnits(sqlStatement);
    // Tables without an explicit schema resolve against the protocol type's default schema.
    String defaultSchemaName = new DatabaseTypeRegistry(database.getProtocolType()).getDefaultSchemaName(database.getName());
    checkDuplicatedTables(sqlStatement, defaultSchemaName);
    checkActualTableExist(sqlStatement, defaultSchemaName);
}
/** Loading a single table that references an unknown storage unit must be rejected. */
@Test
void assertCheckWithInvalidStorageUnit() {
    when(database.getName()).thenReturn("foo_db");
    executor.setDatabase(database);
    LoadSingleTableStatement sqlStatement = new LoadSingleTableStatement(Collections.singleton(new SingleTableSegment("ds_0", null, "foo")));
    assertThrows(MissingRequiredStorageUnitsException.class, () -> executor.checkBeforeUpdate(sqlStatement));
}
/**
 * Searches service names matching {@code expr}. When {@code namespaceId} is blank
 * the search spans every namespace; otherwise only that namespace is queried.
 *
 * @return JSON object with per-namespace name lists under "services" and the total "count"
 * @throws NacosException if the underlying service operator fails
 */
@RequestMapping("/names")
@Secured(action = ActionTypes.READ)
public ObjectNode searchService(@RequestParam(defaultValue = StringUtils.EMPTY) String namespaceId,
        @RequestParam(defaultValue = StringUtils.EMPTY) String expr) throws NacosException {
    Map<String, Collection<String>> serviceNameMap = new HashMap<>(16);
    int totalCount = 0;
    if (StringUtils.isNotBlank(namespaceId)) {
        Collection<String> names = getServiceOperator().searchServiceName(namespaceId, expr);
        serviceNameMap.put(namespaceId, names);
        totalCount = names.size();
    } else {
        for (String each : getServiceOperator().listAllNamespace()) {
            Collection<String> names = getServiceOperator().searchServiceName(each, expr);
            serviceNameMap.put(each, names);
            totalCount += names.size();
        }
    }
    ObjectNode result = JacksonUtils.createEmptyJsonNode();
    // BUG FIX: the response key had been corrupted to "META-INF/services" (most likely a
    // shade/relocation step rewriting the "services" literal); the API contract uses "services".
    result.replace("services", JacksonUtils.transferToJsonNode(serviceNameMap));
    result.put("count", totalCount);
    return result;
}
/** Search with a namespace counts that namespace only; without one it sums all namespaces. */
@Test
void testSearchService() {
    try {
        Mockito.when(serviceOperatorV2.searchServiceName(Mockito.anyString(), Mockito.anyString()))
            .thenReturn(Collections.singletonList("result"));
        ObjectNode objectNode = serviceController.searchService(TEST_NAMESPACE, "");
        assertEquals(1, objectNode.get("count").asInt());
    } catch (NacosException e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
    try {
        // Two namespaces x two matches each = 4 total.
        Mockito.when(serviceOperatorV2.searchServiceName(Mockito.anyString(), Mockito.anyString()))
            .thenReturn(Arrays.asList("re1", "re2"));
        Mockito.when(serviceOperatorV2.listAllNamespace()).thenReturn(Arrays.asList("re1", "re2"));
        ObjectNode objectNode = serviceController.searchService(null, "");
        assertEquals(4, objectNode.get("count").asInt());
    } catch (NacosException e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
/**
 * Handles a job submission: validates the uploaded files against the request body,
 * loads the serialized JobGraph, uploads jars/artifacts alongside it, and submits
 * the finalized graph to the dispatcher.
 *
 * @throws RestHandlerException if the file count mismatches or the job graph file name is missing
 */
@Override
protected CompletableFuture<JobSubmitResponseBody> handleRequest(
        @Nonnull HandlerRequest<JobSubmitRequestBody> request, @Nonnull DispatcherGateway gateway)
        throws RestHandlerException {
    final Collection<File> uploadedFiles = request.getUploadedFiles();
    final Map<String, Path> nameToFile =
        uploadedFiles.stream()
            .collect(Collectors.toMap(File::getName, Path::fromLocalFile));
    // Duplicate file names collapse in the map, so a size mismatch also catches collisions.
    if (uploadedFiles.size() != nameToFile.size()) {
        throw new RestHandlerException(
            String.format(
                "The number of uploaded files was %s than the expected count. Expected: %s Actual %s",
                uploadedFiles.size() < nameToFile.size() ? "lower" : "higher",
                nameToFile.size(),
                uploadedFiles.size()),
            HttpResponseStatus.BAD_REQUEST);
    }
    final JobSubmitRequestBody requestBody = request.getRequestBody();
    if (requestBody.jobGraphFileName == null) {
        throw new RestHandlerException(
            String.format(
                "The %s field must not be omitted or be null.",
                JobSubmitRequestBody.FIELD_NAME_JOB_GRAPH),
            HttpResponseStatus.BAD_REQUEST);
    }
    CompletableFuture<JobGraph> jobGraphFuture = loadJobGraph(requestBody, nameToFile);
    Collection<Path> jarFiles = getJarFilesToUpload(requestBody.jarFileNames, nameToFile);
    Collection<Tuple2<String, Path>> artifacts =
        getArtifactFilesToUpload(requestBody.artifactFileNames, nameToFile);
    // Upload graph + jars + artifacts to the blob server before submitting.
    CompletableFuture<JobGraph> finalizedJobGraphFuture =
        uploadJobGraphFiles(gateway, jobGraphFuture, jarFiles, artifacts, configuration);
    CompletableFuture<Acknowledge> jobSubmissionFuture =
        finalizedJobGraphFuture.thenCompose(
            jobGraph -> gateway.submitJob(jobGraph, timeout));
    return jobSubmissionFuture.thenCombine(
        jobGraphFuture,
        (ack, jobGraph) -> new JobSubmitResponseBody("/jobs/" + jobGraph.getJobID()));
}
/**
 * Verifies that uploading more files than the request body declares is rejected
 * with a {@code RestHandlerException} whose message mentions the count mismatch.
 *
 * <p>The original version discarded the boolean result of
 * {@code ExceptionUtils.findThrowable} and also passed silently when no exception
 * was thrown at all; both conditions are now asserted explicitly.
 */
@TestTemplate
void testRejectionOnCountMismatch() throws Exception {
    // Persist a valid (empty) job graph so the declared file is genuine.
    final Path jobGraphFile = TempDirUtils.newFile(temporaryFolder).toPath();
    try (ObjectOutputStream objectOut =
            new ObjectOutputStream(Files.newOutputStream(jobGraphFile))) {
        objectOut.writeObject(JobGraphTestUtils.emptyJobGraph());
    }
    // An extra uploaded file that the request body does not declare.
    final Path countExceedingFile = TempDirUtils.newFile(temporaryFolder).toPath();

    TestingDispatcherGateway.Builder builder = TestingDispatcherGateway.newBuilder();
    builder.setBlobServerPort(blobServer.getPort())
            .setSubmitFunction(jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
            .setHostname("localhost");
    DispatcherGateway mockGateway = builder.build();

    JobSubmitHandler handler =
            new JobSubmitHandler(
                    () -> CompletableFuture.completedFuture(mockGateway),
                    RpcUtils.INF_TIMEOUT,
                    Collections.emptyMap(),
                    Executors.directExecutor(),
                    configuration);

    JobSubmitRequestBody request =
            new JobSubmitRequestBody(
                    jobGraphFile.getFileName().toString(),
                    Collections.emptyList(),
                    Collections.emptyList());

    try {
        handler.handleRequest(
                        HandlerRequest.create(
                                request,
                                EmptyMessageParameters.getInstance(),
                                Arrays.asList(jobGraphFile.toFile(), countExceedingFile.toFile())),
                        mockGateway)
                .get();
        // AssertionError is not an Exception, so the catch below does not swallow it.
        throw new AssertionError("Expected the submission to fail with a file count mismatch.");
    } catch (Exception e) {
        if (!ExceptionUtils.findThrowable(
                        e,
                        candidate ->
                                candidate instanceof RestHandlerException
                                        && candidate.getMessage().contains("count"))
                .isPresent()) {
            throw new AssertionError("Failure was not caused by the expected count mismatch.", e);
        }
    }
}
/**
 * Creates a template engine instance resolved by {@link TemplateFactory}.
 *
 * @return the engine produced by the factory
 */
public static TemplateEngine createEngine() {
    // Engine selection is fully delegated to the factory.
    final TemplateEngine engine = TemplateFactory.create();
    return engine;
}
@Test public void rythmEngineTest() { // 字符串模板 TemplateEngine engine = TemplateUtil.createEngine( new TemplateConfig("templates").setCustomEngine(RythmEngine.class)); Template template = engine.getTemplate("hello,@name"); String result = template.render(Dict.create().set("name", "hutool")); assertEquals("hello,hutool", result); // classpath中获取模板 Template template2 = engine.getTemplate("rythm_test.tmpl"); String result2 = template2.render(Dict.create().set("name", "hutool")); assertEquals("hello,hutool", result2); }
/**
 * Judges whether all of the given logic tables are covered by a single binding table rule.
 *
 * @param logicTableNames logic table names to be judged
 * @return {@code true} if a binding table rule exists whose table set contains every given name
 */
public boolean isAllBindingTables(final Collection<String> logicTableNames) {
    if (logicTableNames.isEmpty()) {
        return false;
    }
    return findBindingTableRule(logicTableNames).map(bindingTableRule -> {
        // Case-insensitive set: table names may differ in casing between SQL and configuration.
        Collection<String> boundTables = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        boundTables.addAll(bindingTableRule.getAllLogicTables());
        return !boundTables.isEmpty() && boundTables.containsAll(logicTableNames);
    }).orElse(false);
}
// Verifies that two tables joined on both their database- and table-sharding columns
// are recognized as all-binding tables.
@Test
void assertIsAllBindingTableWithJoinQueryWithDatabaseTableJoinCondition() {
    // Join condition on the database sharding column: user_id = user_id.
    ColumnSegment leftDatabaseJoin = createColumnSegment("user_id", "logic_Table");
    ColumnSegment rightDatabaseJoin = createColumnSegment("user_id", "sub_Logic_Table");
    BinaryOperationExpression databaseJoin = createBinaryOperationExpression(leftDatabaseJoin, rightDatabaseJoin, EQUAL);
    // Join condition on the table sharding column: order_id = order_id.
    ColumnSegment leftTableJoin = createColumnSegment("order_id", "logic_Table");
    ColumnSegment rightTableJoin = createColumnSegment("order_id", "sub_Logic_Table");
    BinaryOperationExpression tableJoin = createBinaryOperationExpression(leftTableJoin, rightTableJoin, EQUAL);
    JoinTableSegment joinTable = mock(JoinTableSegment.class);
    // Both conditions combined with AND, as they would appear in a real join query.
    BinaryOperationExpression condition = createBinaryOperationExpression(databaseJoin, tableJoin, AND);
    when(joinTable.getCondition()).thenReturn(condition);
    MySQLSelectStatement selectStatement = mock(MySQLSelectStatement.class);
    when(selectStatement.getFrom()).thenReturn(Optional.of(joinTable));
    // Deep stubs let the chained getTablesContext() calls below be stubbed directly.
    SelectStatementContext sqlStatementContext = mock(SelectStatementContext.class, RETURNS_DEEP_STUBS);
    when(sqlStatementContext.getSqlStatement()).thenReturn(selectStatement);
    when(sqlStatementContext.isContainsJoinQuery()).thenReturn(true);
    when(sqlStatementContext.getDatabaseType()).thenReturn(TypedSPILoader.getService(DatabaseType.class, "FIXTURE"));
    when(sqlStatementContext.getTablesContext().getSchemaName()).thenReturn(Optional.empty());
    when(sqlStatementContext.getWhereSegments()).thenReturn(Collections.singleton(new WhereSegment(0, 0, condition)));
    ShardingSphereSchema schema = mock(ShardingSphereSchema.class);
    // Map each pair of join columns back to their owning tables so the rule can resolve bindings.
    when(sqlStatementContext.getTablesContext().findTableNames(Arrays.asList(leftDatabaseJoin, rightDatabaseJoin), schema)).thenReturn(createColumnTableNameMap());
    when(sqlStatementContext.getTablesContext().findTableNames(Arrays.asList(leftTableJoin, rightTableJoin), schema)).thenReturn(createColumnTableNameMap());
    ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS);
    when(database.getName()).thenReturn(DefaultDatabase.LOGIC_NAME);
    when(database.getSchema(DefaultDatabase.LOGIC_NAME)).thenReturn(schema);
    // With both sharding columns joined, the two tables qualify as all-binding tables.
    assertTrue(createMaximumShardingRule().isAllBindingTables(database, sqlStatementContext, Arrays.asList("logic_Table", "sub_Logic_Table")));
}
/**
 * Serializes a row (one value per column) into a single CSV record without a
 * trailing record separator.
 *
 * @param topic the topic the record is bound for (unused by this serializer)
 * @param data the column values for one record; {@code null} yields {@code null}
 * @return UTF-8 bytes of the CSV record, or {@code null} for null input
 * @throws SerializationException if CSV printing fails
 */
@Override
public byte[] serialize(final String topic, final List<?> data) {
    if (data == null) {
        return null;
    }
    try {
        final StringWriter stringWriter = new StringWriter();
        // try-with-resources flushes and closes the printer before the buffer is read.
        try (CSVPrinter csvPrinter = new CSVPrinter(stringWriter, csvFormat)) {
            csvPrinter.printRecord(() -> new FieldIterator(data, schema));
        }
        final String result = stringWriter.toString();
        // Strip exactly the format's record separator instead of a hard-coded 2 chars,
        // which silently truncated data for formats using a single-char separator ("\n").
        final String recordSeparator = csvFormat.getRecordSeparator();
        final int separatorLength = recordSeparator == null ? 0 : recordSeparator.length();
        return result.substring(0, result.length() - separatorLength)
                .getBytes(StandardCharsets.UTF_8);
    } catch (final Exception e) {
        throw new SerializationException("Error serializing CSV message", e);
    }
}
@Test public void shouldSerializeDecimal() { // Given: givenSingleColumnSerializer(SqlTypes.decimal(4, 2)); final List<?> values = Collections.singletonList(new BigDecimal("11.12")); // When: final byte[] bytes = serializer.serialize("", values); // Then: assertThat(new String(bytes, StandardCharsets.UTF_8), is("11.12")); }
/**
 * Parses the given command-line arguments.
 *
 * @param originalArgs the raw arguments as received
 * @param logger the logger passed through to the parsed representation
 * @return the parsed command line
 */
@Override
public CommandLineImpl parse(final List<String> originalArgs, final Logger logger) {
    // All parsing work is delegated to the CommandLineImpl factory.
    final CommandLineImpl parsed = CommandLineImpl.of(originalArgs, logger);
    return parsed;
}
// Verifies that an unrecognized option ("-p") results in Command.NONE, the full usage
// text on stdout, and an "Unrecognized option" message on stderr.
@Test
public void testInvalidOption() throws Exception {
    final CommandLineParserImpl parser = new CommandLineParserImpl();
    // "-p" is not a recognized common option, so parsing should fall back gracefully.
    final CommandLineImpl commandLine = parse(parser, "-p");
    assertEquals(Command.NONE, commandLine.getCommand());
    // The complete usage/help text is expected verbatim on stdout.
    assertEquals(
            "Usage: embulk [common options] <command> [command options]" + NEWLINE
            + NEWLINE
            + "Commands:" + NEWLINE
            + "   run          Run a bulk load transaction." + NEWLINE
            + "   cleanup      Cleanup resume state." + NEWLINE
            + "   preview      Dry-run a bulk load transaction, and preview it." + NEWLINE
            + "   guess        Guess missing parameters to complete configuration." + NEWLINE
            + "   install      Installs a Maven artifact, typically an Embulk plugin." + NEWLINE
            + "   example      Create example files for a quick trial of Embulk." + NEWLINE
            + "   license      Print out the license notice." + NEWLINE
            + "   selfupdate   Upgrade Embulk to the specified version." + NEWLINE
            + "   gem          Run \"gem\" to install a RubyGem plugin." + NEWLINE
            + "   mkbundle     Create a new plugin bundle environment." + NEWLINE
            + "   bundle       Update a plugin bundle environment." + NEWLINE
            + NEWLINE
            + "Common options:" + NEWLINE
            + "   -h, --help                       Print help" + NEWLINE
            + "   -version, --version              Show Embulk version" + NEWLINE
            + "   -l, --log-level LEVEL            Set log level (error, warn, info, debug, trace)" + NEWLINE
            + "       --log-path PATH              Output log messages to a file (default: -)" + NEWLINE
            + "   -X KEY=VALUE                     Set Embulk system properties" + NEWLINE
            + "   -R OPTION                        Command-line option for JRuby. (Only '--dev')" + NEWLINE
            + NEWLINE,
            commandLine.getStdOut());
    // The offending option is reported on stderr.
    assertEquals("embulk: Unrecognized option: -p" + NEWLINE, commandLine.getStdErr());
}
@Override
public void execute(Context context) {
    // Collect the key of every quality profile involved in this analysis.
    Set<String> qualityProfileKeys = analysisMetadataHolder.getQProfilesByLanguage().values().stream()
        .map(QualityProfile::getQpKey)
        .collect(Collectors.toSet());
    // Load the prioritized rules active in those profiles and expose them via the holder.
    try (DbSession session = dbClient.openSession(false)) {
        Set<RuleKey> rules = dbClient.activeRuleDao().selectPrioritizedRules(session, qualityProfileKeys);
        prioritizedRulesHolder.setPrioritizedRules(rules);
    }
}
@Test
void execute_whenPrioritizedRules_shouldHaveNonEmptyHolder() {
    // Stub the DAO chain so the step finds exactly one prioritized rule.
    when(dbClient.activeRuleDao()).thenReturn(mock());
    Set<RuleKey> prioritized = Set.of(RuleKey.of("repositoryKey", "ruleKey"));
    when(dbClient.activeRuleDao().selectPrioritizedRules(any(), any())).thenReturn(prioritized);

    underTest.execute(mock());

    // The holder must now expose the rule loaded from the database.
    assertThat(prioritizedRulesHolder.getPrioritizedRules()).isNotEmpty();
}
/**
 * Synchronous variant of {@code tryAddAsync}: attempts to add the given values
 * and blocks until the result is available.
 *
 * @param values values to add
 * @return the result reported by the async operation
 */
@Override
public boolean tryAdd(V... values) {
    // Delegate to the async implementation and wait for completion.
    boolean added = get(tryAddAsync(values));
    return added;
}
@Test
public void testTryAdd() {
    RSet<String> set = redisson.getSet("list", IntegerCodec.INSTANCE);
    int elements = 200000;

    // First batch: 200000 distinct members — tryAdd must accept all of them.
    Set<String> firstBatch = new HashSet<>();
    for (int i = 0; i < elements; i++) {
        firstBatch.add("name" + i);
    }
    assertThat(set.tryAdd(firstBatch.toArray(new String[]{}))).isTrue();
    assertThat(set.size()).isEqualTo(elements);

    // Second batch: fresh members plus one duplicate ("name10").
    // tryAdd is all-or-nothing, so the whole batch must be rejected
    // and the set size must remain unchanged.
    Set<String> secondBatch = new HashSet<>();
    for (int i = elements + 1; i < elements + 10000; i++) {
        secondBatch.add("name" + i);
    }
    secondBatch.add("name10");
    assertThat(set.tryAdd(secondBatch.toArray(new String[]{}))).isFalse();
    assertThat(set.size()).isEqualTo(elements);
}