focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Reports whether this pipeline-config collection contains no pipelines at
 * all; delegates to {@link #size()} so the aggregation logic stays in one place.
 */
@Override
public boolean isEmpty() {
    return size() == 0;
}
/** Merging two empty part groups must yield a group that reports itself empty. */
@Test
public void shouldReturnTrueWhenAllPartsEmpty() {
    PipelineConfigs group = new MergePipelineConfigs(
            new BasicPipelineConfigs(), new BasicPipelineConfigs());
    assertThat(group.isEmpty(), is(true));
}
/**
 * Reads a long value from an environment variable.
 *
 * @param envName      name of the environment variable to read
 * @param defaultValue value returned when the variable is unset or unparseable
 * @return the parsed value, or {@code defaultValue} as a fallback
 */
public static long getLong(String envName, long defaultValue) {
    String raw = System.getenv(envName);
    if (raw == null) {
        return defaultValue;
    }
    try {
        return Long.parseLong(raw);
    } catch (NumberFormatException ignored) {
        // Malformed values deliberately fall back to the caller-supplied default.
        return defaultValue;
    }
}
/**
 * NOTE(review): the first assertion only passes when the environment variable
 * "myLong" is pre-set to 12345678901234567 by the surrounding test harness —
 * confirm the build exports it. "wrongLong" exercises the unset/default path.
 */
@Test
public void getLong() {
    assertEquals(12345678901234567L, EnvUtil.getLong("myLong", 123L));
    assertEquals(987654321987654321L, EnvUtil.getLong("wrongLong", 987654321987654321L));
}
/**
 * Serializes a {@code Load} into a JSON object carrying its rate, latest
 * sample, validity flag and timestamp.
 *
 * @param load    load to encode; must not be null
 * @param context codec context supplying the JSON object mapper
 * @return JSON object with the RATE/LATEST/VALID/TIME fields
 */
@Override
public ObjectNode encode(Load load, CodecContext context) {
    checkNotNull(load, "Load cannot be null");
    return context.mapper().createObjectNode()
            .put(RATE, load.rate())
            .put(LATEST, load.latest())
            .put(VALID, load.isValid())
            .put(TIME, load.time());
}
/** Encodes a DefaultLoad and verifies every JSON field round-trips. */
@Test
public void testLoadEncode() {
    final long startTime = System.currentTimeMillis();
    final Load load = new DefaultLoad(20, 10, 1);
    final JsonNode node = new LoadCodec()
            .encode(load, new MockCodecContext());
    assertThat(node.get("valid").asBoolean(), is(true));
    assertThat(node.get("latest").asLong(), is(20L));
    assertThat(node.get("rate").asLong(), is(10L));
    // the timestamp is captured inside encode(), so it can only be bounded from below
    assertThat(node.get("time").asLong(), greaterThanOrEqualTo(startTime));
}
/**
 * Gate-keeps admin-console pages: applies IP allow/block lists, skips
 * authentication for excluded URLs, and redirects unauthenticated or
 * non-admin users to the login page.
 */
@Override
public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain) throws IOException, ServletException {
    HttpServletRequest request = (HttpServletRequest)req;
    HttpServletResponse response = (HttpServletResponse)res;
    // Do not allow framing; OF-997
    response.setHeader("X-Frame-Options", JiveGlobals.getProperty("adminConsole.frame-options", "SAMEORIGIN"));
    // Reset the defaultLoginPage variable
    String loginPage = defaultLoginPage;
    if (loginPage == null) {
        loginPage = request.getContextPath() + (AuthFactory.isOneTimeAccessTokenEnabled() ? "/loginToken.jsp" : "/login.jsp" );
    }
    // Get the page we're on:
    String url = request.getRequestURI().substring(1);
    if (url.startsWith("plugins/")) {
        url = url.substring("plugins/".length());
    }
    // See if it's contained in the exclude list. If so, skip filter execution
    boolean doExclude = false;
    for (String exclude : excludes) {
        if (testURLPassesExclude(url, exclude)) {
            doExclude = true;
            break;
        }
    }
    // IP filtering still applies to excluded pages when configured to ignore excludes.
    if (!doExclude || IP_ACCESS_IGNORE_EXCLUDES.getValue()) {
        if (!passesBlocklist(req) || !passesAllowList(req)) {
            response.sendError(HttpServletResponse.SC_FORBIDDEN);
            return;
        }
    }
    if (!doExclude) {
        WebManager manager = new WebManager();
        manager.init(request, response, request.getSession(), context);
        boolean haveOneTimeToken = manager.getAuthToken() instanceof AuthToken.OneTimeAuthToken;
        User loggedUser = manager.getUser();
        boolean loggedAdmin = loggedUser == null ? false : adminManager.isUserAdmin(loggedUser.getUsername(), true);
        // A one-time token, a logged-in admin, or request-based auth each grant access.
        if (!haveOneTimeToken && !loggedAdmin && !authUserFromRequest(request)) {
            response.sendRedirect(getRedirectURL(request, loginPage, null));
            return;
        }
    }
    chain.doFilter(req, res);
}
/** An authenticator that yields no user must cause a redirect to the login page. */
@Test
public void willRedirectARequestIfTheServletRequestAuthenticatorReturnsNoUser() throws Exception {
    AuthCheckFilter.SERVLET_REQUEST_AUTHENTICATOR.setValue(NoUserServletAuthenticatorClass.class);
    final AuthCheckFilter filter = new AuthCheckFilter(adminManager, loginLimitManager);
    filter.doFilter(request, response, filterChain);
    verify(response).sendRedirect(anyString());
}
/**
 * Copies the file at {@code source} to {@code target}, creating the target's
 * parent directories and the target file itself if needed.
 *
 * <p>Fixes: the original issued a single {@code transferTo} call, but the
 * FileChannel contract allows it to transfer fewer bytes than requested, so
 * large copies could silently truncate — the transfer now loops until the
 * whole file is copied. Resource handling uses try-with-resources.
 *
 * @param source path of an existing file to copy
 * @param target path to copy to; parent directories are created as needed
 * @throws IllegalArgumentException if the source file does not exist
 * @throws RuntimeException         if the target file cannot be created
 * @throws IOException              on any I/O failure during the copy
 */
static public void copyFile(String source, String target) throws IOException {
    File sf = new File(source);
    if (!sf.exists()) {
        throw new IllegalArgumentException("source file does not exist.");
    }
    File tf = new File(target);
    tf.getParentFile().mkdirs();
    if (!tf.exists() && !tf.createNewFile()) {
        throw new RuntimeException("failed to create target file.");
    }
    // Closing a channel obtained from a stream also closes the stream.
    try (FileChannel sc = new FileInputStream(sf).getChannel();
         FileChannel tc = new FileOutputStream(tf).getChannel()) {
        long position = 0;
        final long size = sc.size();
        // transferTo may copy fewer bytes than requested; loop until done.
        while (position < size) {
            position += sc.transferTo(position, size - position, tc);
        }
    }
}
/** Copies a freshly-written file and checks the destination comes into existence. */
@Test
public void testCopyFile() throws Exception {
    File source = new File(testRootDir, "source");
    String target = testRootDir + File.separator + "dest";
    IOTinyUtils.writeStringToFile(source, "testCopyFile", StandardCharsets.UTF_8.name());
    IOTinyUtils.copyFile(source.getCanonicalPath(), target);
    File dest = new File(target);
    assertTrue(dest.exists());
}
/**
 * Looks up the ACI (and optionally E.164 and username-hash) search keys in the
 * key transparency log, issuing the three lookups concurrently and joining on
 * all of them before building the response.
 */
@Operation(
    summary = "Search for the given search keys in the key transparency log",
    description = """
        Enforced unauthenticated endpoint. Returns a response if all search keys exist in the key transparency log.
        """
)
@ApiResponse(responseCode = "200", description = "All search key lookups were successful", useReturnTypeSchema = true)
@ApiResponse(responseCode = "403", description = "At least one search key lookup to value mapping was invalid")
@ApiResponse(responseCode = "404", description = "At least one search key lookup did not find the key")
@ApiResponse(responseCode = "413", description = "Ratelimited")
@ApiResponse(responseCode = "422", description = "Invalid request format")
@POST
@Path("/search")
@RateLimitedByIp(RateLimiters.For.KEY_TRANSPARENCY_SEARCH_PER_IP)
@Produces(MediaType.APPLICATION_JSON)
public KeyTransparencySearchResponse search(
    @ReadOnly @Auth final Optional<AuthenticatedDevice> authenticatedAccount,
    @NotNull @Valid final KeyTransparencySearchRequest request) {
  // Disallow clients from making authenticated requests to this endpoint
  requireNotAuthenticated(authenticatedAccount);
  try {
    // ACI lookup is mandatory; the other two are optional and resolve to null futures when absent.
    final CompletableFuture<byte[]> aciSearchKeyResponseFuture = keyTransparencyServiceClient.search(
        getFullSearchKeyByteString(ACI_PREFIX, request.aci().toCompactByteArray()),
        request.lastTreeHeadSize(),
        request.distinguishedTreeHeadSize(),
        KEY_TRANSPARENCY_RPC_TIMEOUT);
    final CompletableFuture<byte[]> e164SearchKeyResponseFuture = request.e164()
        .map(e164 -> keyTransparencyServiceClient.search(
            getFullSearchKeyByteString(E164_PREFIX, e164.getBytes(StandardCharsets.UTF_8)),
            request.lastTreeHeadSize(),
            request.distinguishedTreeHeadSize(),
            KEY_TRANSPARENCY_RPC_TIMEOUT))
        .orElse(CompletableFuture.completedFuture(null));
    // NOTE(review): request.usernameHash().get() inside this lambda re-reads the Optional
    // instead of using the `usernameHash` parameter — same value, but confirm and simplify.
    final CompletableFuture<byte[]> usernameHashSearchKeyResponseFuture = request.usernameHash()
        .map(usernameHash -> keyTransparencyServiceClient.search(
            getFullSearchKeyByteString(USERNAME_PREFIX, request.usernameHash().get()),
            request.lastTreeHeadSize(),
            request.distinguishedTreeHeadSize(),
            KEY_TRANSPARENCY_RPC_TIMEOUT))
        .orElse(CompletableFuture.completedFuture(null));
    // Wait for all three lookups, then assemble the response; absent lookups yield empty Optionals.
    return CompletableFuture.allOf(aciSearchKeyResponseFuture, e164SearchKeyResponseFuture,
            usernameHashSearchKeyResponseFuture)
        .thenApply(ignored ->
            new KeyTransparencySearchResponse(aciSearchKeyResponseFuture.join(),
                Optional.ofNullable(e164SearchKeyResponseFuture.join()),
                Optional.ofNullable(usernameHashSearchKeyResponseFuture.join())))
        .join();
  } catch (final CancellationException exception) {
    LOGGER.error("Unexpected cancellation from key transparency service", exception);
    throw new ServerErrorException(Response.Status.SERVICE_UNAVAILABLE, exception);
  } catch (final CompletionException exception) {
    // Translates service errors into the appropriate HTTP responses (always throws).
    handleKeyTransparencyServiceError(exception);
  }
  // This is unreachable
  return null;
}
/** When the per-IP rate limiter fails the request, the endpoint must return 429 and never call the service. */
@Test
void searchRatelimited() {
    MockUtils.updateRateLimiterResponseToFail(
        rateLimiters, RateLimiters.For.KEY_TRANSPARENCY_SEARCH_PER_IP, "127.0.0.1", Duration.ofMinutes(10), true);
    final Invocation.Builder request = resources.getJerseyTest()
        .target("/v1/key-transparency/search")
        .request();
    try (Response response = request.post(Entity.json(createSearchRequestJson(ACI, Optional.empty(),
        Optional.empty(), Optional.empty(), Optional.empty())))) {
        assertEquals(429, response.getStatus());
        verify(keyTransparencyServiceClient, never()).search(any(), any(), any(), any());
    }
}
/**
 * Returns the default packet reply timeout in milliseconds, lazily resetting a
 * non-positive value back to the 5000 ms default.
 *
 * NOTE(review): this mutates the shared static field as a side effect —
 * confirm callers expect the self-healing behavior.
 */
public static int getDefaultReplyTimeout() {
    // The timeout value must be greater than 0 otherwise we will answer the default value
    if (defaultPacketReplyTimeout <= 0) {
        defaultPacketReplyTimeout = 5000;
    }
    return defaultPacketReplyTimeout;
}
/** The default reply timeout must always be a positive number of milliseconds. */
@Test
public void testSmackConfiguration() {
    // The original test discarded the return value and could never fail;
    // assert the documented invariant (the getter repairs non-positive values).
    assertTrue(SmackConfiguration.getDefaultReplyTimeout() > 0);
}
/**
 * Re-wraps the physical JDBC connection held inside a JBoss or GlassFish
 * connection wrapper with a monitoring proxy, using reflection to reach the
 * wrapped field. No-op for other containers or already-proxied connections.
 *
 * @param connection container-level connection wrapper; must not be null
 * @throws IllegalAccessException if the reflective field access fails
 */
void rewrapConnection(Connection connection) throws IllegalAccessException {
    assert connection != null;
    if (jboss && connection.getClass().getSimpleName().startsWith("WrappedConnection")) {
        // for JBoss:
        // the wrapper is an instance of WrappedConnectionJDK6 or WrappedConnectionJDK5
        // ("mc" attribute on the parent class)
        final Object baseWrapperManagedConnection = JdbcWrapperHelper.getFieldValue(connection, "mc");
        final String conFieldName = "con";
        Connection con = (Connection) JdbcWrapperHelper
                .getFieldValue(baseWrapperManagedConnection, conFieldName);
        // isProxyAlready is checked here for performance reasons
        if (!isProxyAlready(con)) {
            con = createConnectionProxy(con);
            JdbcWrapperHelper.setFieldValue(baseWrapperManagedConnection, conFieldName, con);
        }
    } else if (glassfish && ("com.sun.gjc.spi.jdbc40.ConnectionHolder40"
            .equals(connection.getClass().getName())
            || "com.sun.gjc.spi.jdbc40.ConnectionWrapper40"
                    .equals(connection.getClass().getName())
            || "com.sun.gjc.spi.jdbc40.ProfiledConnectionWrapper40"
                    .equals(connection.getClass().getName()))) {
        // for GlassFish:
        // the wrapper is an instance of com.sun.gjc.spi.jdbc40.ConnectionHolder40
        // or com.sun.gjc.spi.jdbc40.ConnectionWrapper40 per a message in the users' group
        // ("con" attribute on the parent class)
        final String conFieldName = "con";
        Connection con = (Connection) JdbcWrapperHelper.getFieldValue(connection, conFieldName);
        // isProxyAlready is checked here for performance reasons
        if (!isProxyAlready(con)) {
            con = createConnectionProxy(con);
            JdbcWrapperHelper.setFieldValue(connection, conFieldName, con);
        }
    }
}
/** Smoke test: re-wrapping a plain H2 connection must not throw. */
@Test
public void testRewrapConnection() throws SQLException, IllegalAccessException {
    DriverManager.registerDriver(driver);
    // requires the H2 database dependency
    final Connection connection = DriverManager.getConnection(H2_DATABASE_URL);
    jdbcWrapper.rewrapConnection(connection);
    connection.close();
}
/**
 * Determines the database type of a data source from its JDBC URL, preferring
 * the live connection metadata and falling back to pool-specific URL fields
 * when the driver does not support metadata access.
 *
 * @param dataSource data source to inspect
 * @return the resolved database type
 * @throws SQLWrapperException wrapping any SQL failure that cannot be worked around
 */
public static DatabaseType getStorageType(final DataSource dataSource) {
    try (Connection connection = dataSource.getConnection()) {
        return DatabaseTypeFactory.get(connection.getMetaData().getURL());
    } catch (final SQLFeatureNotSupportedException sqlFeatureNotSupportedException) {
        // Fallback 1: catalog-switchable wrappers expose the URL directly.
        if (dataSource instanceof CatalogSwitchableDataSource) {
            return DatabaseTypeFactory.get(((CatalogSwitchableDataSource) dataSource).getUrl());
        }
        // Fallback 2: read the jdbcUrl field off a Hikari pool via reflection.
        if (dataSource.getClass().getName().equals(new HikariDataSourcePoolMetaData().getType())) {
            HikariDataSourcePoolFieldMetaData dataSourcePoolFieldMetaData = new HikariDataSourcePoolFieldMetaData();
            String jdbcUrlFieldName =
                    ReflectionUtils.<String>getFieldValue(dataSource, dataSourcePoolFieldMetaData.getJdbcUrlFieldName())
                            .orElseThrow(() -> new SQLWrapperException(sqlFeatureNotSupportedException));
            return DatabaseTypeFactory.get(jdbcUrlFieldName);
        }
        throw new SQLWrapperException(sqlFeatureNotSupportedException);
    } catch (final SQLException ex) {
        throw new SQLWrapperException(ex);
    }
}
/** A plain SQLException from getConnection must surface as SQLWrapperException. */
@Test
void assertGetStorageTypeWhenGetConnectionError() throws SQLException {
    DataSource dataSource = mock(DataSource.class);
    when(dataSource.getConnection()).thenThrow(SQLException.class);
    assertThrows(SQLWrapperException.class, () -> DatabaseTypeEngine.getStorageType(dataSource));
}
/**
 * Two device descriptions are equal when their superclass state and every
 * identifying field (URI, type, manufacturer, hw/sw versions, serial,
 * chassis id, default availability) match.
 */
@Override
public boolean equals(Object object) {
    if (!(object instanceof DefaultDeviceDescription)) {
        return false;
    }
    if (!super.equals(object)) {
        return false;
    }
    DefaultDeviceDescription other = (DefaultDeviceDescription) object;
    return Objects.equal(this.uri, other.uri)
            && Objects.equal(this.type, other.type)
            && Objects.equal(this.manufacturer, other.manufacturer)
            && Objects.equal(this.hwVersion, other.hwVersion)
            && Objects.equal(this.swVersion, other.swVersion)
            && Objects.equal(this.serialNumber, other.serialNumber)
            && Objects.equal(this.chassisId, other.chassisId)
            && Objects.equal(this.defaultAvailable, other.defaultAvailable);
}
/** Each single-field variation must land in its own equality group. */
@Test
public void testEquals() {
    DeviceDescription device1 = new DefaultDeviceDescription(DURI, SWITCH, MFR, HW, SW, SN, CID, DA);
    DeviceDescription sameAsDevice1 = new DefaultDeviceDescription(DURI, SWITCH, MFR, HW, SW, SN, CID, DA);
    DeviceDescription device2 = new DefaultDeviceDescription(DURI2, SWITCH, MFR, HW, SW, SN, CID, DA);
    DeviceDescription device3 = new DefaultDeviceDescription(DURI, ROUTER, MFR2, HW, SW, SN, CID, DA);
    DeviceDescription device4 = new DefaultDeviceDescription(DURI, SWITCH, MFR, HW2, SW, SN, CID, DA);
    DeviceDescription device5 = new DefaultDeviceDescription(DURI, SWITCH, MFR, HW, SW2, SN, CID, DA);
    DeviceDescription device6 = new DefaultDeviceDescription(DURI, SWITCH, MFR, HW, SW, SN2, CID, DA);
    DeviceDescription device7 = new DefaultDeviceDescription(DURI, SWITCH, MFR, HW, SW, SN, CID2, DA);
    DeviceDescription device8 = new DefaultDeviceDescription(DURI, SWITCH, MFR, HW, SW, SN, CID, DA2);
    new EqualsTester()
        .addEqualityGroup(device1, sameAsDevice1)
        .addEqualityGroup(device2)
        .addEqualityGroup(device3)
        .addEqualityGroup(device4)
        .addEqualityGroup(device5)
        .addEqualityGroup(device6)
        .addEqualityGroup(device7)
        .addEqualityGroup(device8)
        .testEquals();
}
/**
 * Looks up the multipart part registered under {@code key} and exposes its
 * content as a stream; an absent part yields {@code null}.
 */
@Override
protected InputStream readInputStreamParam(String key) {
    final Part part = readPart(key);
    if (part == null) {
        return null;
    }
    return part.getInputStream();
}
/** A request without a content type carries no multipart part, so the stream is null. */
@Test
public void return_no_input_stream_when_content_type_is_null() {
    when(source.getContentType()).thenReturn(null);
    assertThat(underTest.readInputStreamParam("param1")).isNull();
}
/**
 * Handles a schema-transform discovery request: lazily waits for all expansion
 * services on first use, then answers with the aggregated discovery result, or
 * with an error payload (still a normal onNext/onCompleted) if discovery fails.
 */
@Override
public void discoverSchemaTransform(
    ExpansionApi.DiscoverSchemaTransformRequest request,
    StreamObserver<ExpansionApi.DiscoverSchemaTransformResponse> responseObserver) {
  // One-time readiness check, memoized across calls.
  if (!checkedAllServices) {
    try {
      waitForAllServicesToBeReady();
    } catch (TimeoutException e) {
      throw new RuntimeException(e);
    }
    checkedAllServices = true;
  }
  try {
    responseObserver.onNext(processDiscover(request));
    responseObserver.onCompleted();
  } catch (RuntimeException exn) {
    // Failures are reported in-band as an error response, not as a gRPC error.
    responseObserver.onNext(
        ExpansionApi.DiscoverSchemaTransformResponse.newBuilder()
            .setError(Throwables.getStackTraceAsString(exn))
            .build());
    responseObserver.onCompleted();
  }
}
/**
 * With one endpoint returning configs and one returning an error, discovery
 * should still surface the successful endpoint's single config.
 */
@Test
public void testObserverOneEndpointReturns() {
    ExpansionServiceClient expansionServiceClient = Mockito.mock(ExpansionServiceClient.class);
    Mockito.when(clientFactory.getExpansionServiceClient(Mockito.any()))
        .thenReturn(expansionServiceClient);
    // First call succeeds with one config; second call reports an error.
    Mockito.when(expansionServiceClient.discover(Mockito.any()))
        .thenReturn(
            DiscoverSchemaTransformResponse.newBuilder()
                .putSchemaTransformConfigs(
                    "schematransform_key_1", SchemaTransformConfig.newBuilder().build())
                .build())
        .thenReturn(
            DiscoverSchemaTransformResponse.newBuilder().setError("discovery error 1").build());
    DiscoverSchemaTransformRequest request = DiscoverSchemaTransformRequest.newBuilder().build();
    StreamObserver<DiscoverSchemaTransformResponse> responseObserver =
        Mockito.mock(StreamObserver.class);
    expansionService.discoverSchemaTransform(request, responseObserver);
    Mockito.verify(expansionServiceClient, Mockito.times(2)).discover(request);
    ArgumentCaptor<DiscoverSchemaTransformResponse> discoverResponseCapture =
        ArgumentCaptor.forClass(DiscoverSchemaTransformResponse.class);
    Mockito.verify(responseObserver).onNext(discoverResponseCapture.capture());
    assertEquals(1, discoverResponseCapture.getValue().getSchemaTransformConfigsCount());
    assertTrue(
        discoverResponseCapture
            .getValue()
            .getSchemaTransformConfigsMap()
            .containsKey("schematransform_key_1"));
}
/**
 * Splits a buffer of newline-delimited JSON into deserialized rows, applying
 * {@code addHostInfo} to each. Stray newlines (empty segments) are ignored.
 *
 * @param buff        raw response buffer from the server
 * @param addHostInfo decorator applied to every deserialized row
 * @return the deserialized rows in order of appearance
 */
public static List<StreamedRow> toRows(
    final Buffer buff,
    final Function<StreamedRow, StreamedRow> addHostInfo
) {
    final List<StreamedRow> rows = new ArrayList<>();
    int begin = 0;
    // i runs one past the end so a trailing unterminated segment is flushed too.
    for (int i = 0; i <= buff.length(); i++) {
        if ((i == buff.length() && (i - begin > 1))
            || (i < buff.length() && buff.getByte(i) == (byte) '\n')) {
            if (begin != i) { // Ignore random newlines - the server can send these
                final Buffer sliced = buff.slice(begin, i);
                final Buffer tidied = toJsonMsg(sliced, true);
                if (tidied.length() > 0) {
                    final StreamedRow row = deserialize(tidied, StreamedRow.class);
                    rows.add(addHostInfo.apply(row));
                }
            }
            begin = i + 1;
        }
    }
    return rows;
}
/** A malformed row in the middle of the stream must fail deserialization loudly. */
@Test
public void toRows_errorParsingNotAtEnd() {
    // When:
    final Exception e = assertThrows(
        KsqlRestClientException.class,
        () -> KsqlTargetUtil.toRows(Buffer.buffer(
            "[{\"header\":{\"queryId\":\"query_id_10\",\"schema\":\"`col1` STRING\"}},\n"
                + "{\"row\":{\"columns\"\n"
                + "{\"row\":{\"columns\":[\"Row2\"]}},\n"), Functions.identity())
    );
    // Then:
    assertThat(e.getMessage(), is(("Failed to deserialise object")));
}
/**
 * Converts a row into a Pub/Sub message by delegating the payload bytes and
 * the attribute map to the configured extractors.
 */
@Override
public PubsubMessage apply(Row row) {
    return new PubsubMessage(payloadExtractor.apply(row), attributesExtractor.apply(row));
}
/** A serializer failure on the payload row must propagate out of apply(). */
@Test
public void rowPayloadTransformFailure() {
    Schema payloadSchema = Schema.builder().addStringField("fieldName").build();
    Row payload = Row.withSchema(payloadSchema).attachValues("abc");
    Schema schema =
        Schema.builder()
            .addRowField(PAYLOAD_FIELD, payloadSchema)
            .addField(ATTRIBUTES_FIELD, ATTRIBUTE_MAP_FIELD_TYPE)
            .build();
    Row row = Row.withSchema(schema).attachValues(payload, ATTRIBUTES);
    when(SERIALIZER.serialize(payload)).thenThrow(new IllegalArgumentException());
    assertThrows(
        IllegalArgumentException.class, () -> new NestedRowToMessage(SERIALIZER, schema).apply(row));
}
/**
 * Reads a mandatory base-10 integer property.
 *
 * @param props   properties to read from
 * @param keyName property key that must be present
 * @return the parsed integer value
 * @throws RuntimeException if the key is missing or not a base-10 integer
 */
static int loadRequiredIntProp(Properties props, String keyName) {
    final String raw = props.getProperty(keyName);
    if (raw == null) {
        throw new RuntimeException("Failed to find " + keyName);
    }
    try {
        return Integer.parseInt(raw, 10);
    } catch (NumberFormatException cause) {
        throw new RuntimeException("Unable to read " + keyName + " as a base-10 number.", cause);
    }
}
/** A missing required key must raise with the exact "Failed to find" message. */
@Test
public void loadMissingRequiredIntProp() {
    Properties props = new Properties();
    assertEquals("Failed to find foo.bar",
        assertThrows(RuntimeException.class,
            () -> PropertiesUtils.loadRequiredIntProp(props, "foo.bar")).
        getMessage());
}
/**
 * Renders the routed index name, quoted with the dialect's quote character;
 * generated indexes are padded with spaces so they splice cleanly into SQL.
 */
@Override
public String toString(final RouteUnit routeUnit) {
    final String quotedIndexName = identifier.getQuoteCharacter().wrap(getIndexValue(routeUnit));
    if (isGeneratedIndex()) {
        return " " + quotedIndexName + " ";
    }
    return quotedIndexName;
}
/** The token must render the index name suffixed with the actual routed table. */
@Test
void assertToString() {
    IndexToken indexToken = new IndexToken(0, 0, new IdentifierValue("t_order_index"),
        mock(SQLStatementContext.class, withSettings().extraInterfaces(TableAvailable.class).defaultAnswer(RETURNS_DEEP_STUBS)),
        mock(ShardingRule.class), mock(ShardingSphereSchema.class));
    RouteUnit routeUnit = mock(RouteUnit.class);
    when(routeUnit.getTableMappers()).thenReturn(Collections.singletonList(new RouteMapper("t_order", "t_order_0")));
    when(routeUnit.getDataSourceMapper()).thenReturn(new RouteMapper(DefaultDatabase.LOGIC_NAME, "ds_0"));
    assertThat(indexToken.toString(routeUnit), is("t_order_index_t_order_0"));
}
/**
 * Computes numPartitions-1 range-partition boundary keys from a sample array:
 * sorts the samples, then picks evenly spaced candidates, linearly probing
 * past duplicates so boundaries stay distinct.
 *
 * @param numPartitions number of target partitions
 * @param comparator    ordering for the sort keys
 * @param samples       sampled keys; sorted in place as a side effect
 * @return up to numPartitions-1 distinct boundary keys (trailing slots may be null
 *         if too few distinct samples exist — NOTE(review): confirm callers handle that)
 */
static SortKey[] rangeBounds(
    int numPartitions, Comparator<StructLike> comparator, SortKey[] samples) {
  // sort the keys first
  Arrays.sort(samples, comparator);
  int numCandidates = numPartitions - 1;
  SortKey[] candidates = new SortKey[numCandidates];
  int step = (int) Math.ceil((double) samples.length / numPartitions);
  int position = step - 1;
  int numChosen = 0;
  while (position < samples.length && numChosen < numCandidates) {
    SortKey candidate = samples[position];
    // skip duplicate values
    if (numChosen > 0 && candidate.equals(candidates[numChosen - 1])) {
      // linear probe for the next distinct value
      position += 1;
    } else {
      candidates[numChosen] = candidate;
      position += step;
      numChosen += 1;
    }
  }
  return candidates;
}
/** Six evenly sampled keys over three partitions yield boundaries b and d. */
@Test
public void testRangeBoundsDivisible() {
    assertThat(
        SketchUtil.rangeBounds(
            3,
            SORT_ORDER_COMPARTOR,
            new SortKey[] {
                CHAR_KEYS.get("a"), CHAR_KEYS.get("b"), CHAR_KEYS.get("c"),
                CHAR_KEYS.get("d"), CHAR_KEYS.get("e"), CHAR_KEYS.get("f")
            }))
        .containsExactly(CHAR_KEYS.get("b"), CHAR_KEYS.get("d"));
}
/**
 * Persists a login audit record: maps the request DTO onto the persistence
 * object and inserts it via the mapper.
 */
@Override
public void createLoginLog(LoginLogCreateReqDTO reqDTO) {
    LoginLogDO loginLog = BeanUtils.toBean(reqDTO, LoginLogDO.class);
    loginLogMapper.insert(loginLog);
}
/** A random DTO must round-trip into the persisted login-log row. */
@Test
public void testCreateLoginLog() {
    LoginLogCreateReqDTO reqDTO = randomPojo(LoginLogCreateReqDTO.class);
    // invoke the service under test
    loginLogService.createLoginLog(reqDTO);
    // assert the persisted record matches the request
    LoginLogDO loginLogDO = loginLogMapper.selectOne(null);
    assertPojoEquals(reqDTO, loginLogDO);
}
/**
 * Injects additional configuration into CREATE SOURCE / CREATE AS SELECT
 * statements; all other statement types pass through untouched. KsqlExceptions
 * are re-wrapped as KsqlStatementException with the masked statement text.
 */
@SuppressWarnings("unchecked")
public <T extends Statement> ConfiguredStatement<T> inject(
    final ConfiguredStatement<T> statement
) {
  // Only CREATE SOURCE and CREATE AS SELECT statements need injection.
  if (!(statement.getStatement() instanceof CreateSource)
      && !(statement.getStatement() instanceof CreateAsSelect)) {
    return statement;
  }
  try {
    if (statement.getStatement() instanceof CreateAsSelect) {
      return (ConfiguredStatement<T>) injectForCreateAsSelect(
          (ConfiguredStatement<? extends CreateAsSelect>) statement);
    } else {
      return (ConfiguredStatement<T>) injectForCreateSource(
          (ConfiguredStatement<? extends CreateSource>) statement);
    }
  } catch (final KsqlStatementException e) {
    // Already carries statement context — rethrow untouched.
    throw e;
  } catch (final KsqlException e) {
    throw new KsqlStatementException(
        ErrorMessageUtil.buildErrorMessage(e),
        statement.getMaskedStatementText(),
        e.getCause());
  }
}
/** Injecting a CSAS statement must produce the expected unwrapped variant. */
@Test
public void shouldInjectForCreateAsSelect() {
    // When:
    final ConfiguredStatement<CreateAsSelect> configured = injector.inject(csasStatement);
    // Then:
    assertThat(configured.getStatement(), is(csasWithUnwrapping));
}
/**
 * Returns the shared singleton, updating its data-source-log flag first.
 *
 * NOTE(review): every call mutates shared singleton state — concurrent callers
 * passing different flags will race; confirm this is acceptable.
 */
public static MapperManager instance(boolean isDataSourceLogEnable) {
    INSTANCE.dataSourceLogEnable = isDataSourceLogEnable;
    return INSTANCE;
}
/** The factory must always hand back a non-null singleton instance. */
@Test
void testInstance() {
    MapperManager instance = MapperManager.instance(false);
    assertNotNull(instance);
}
/**
 * Reads a 4-byte size prefix from the buffer, then delegates to
 * {@code getNullableArray} to read that many bytes.
 *
 * NOTE(review): the size is not validated against the remaining bytes here —
 * oversized prefixes surface downstream (e.g. as BufferUnderflowException).
 */
public static byte[] getNullableSizePrefixedArray(final ByteBuffer buffer) {
    final int size = buffer.getInt();
    return getNullableArray(buffer, size);
}
/** A huge size prefix with no payload must underflow rather than allocate. */
@Test
public void getNullableSizePrefixedArrayUnderflow() {
    // Integer.MAX_VALUE
    byte[] input = {127, -1, -1, -1};
    final ByteBuffer buffer = ByteBuffer.wrap(input);
    // note, we get a buffer underflow exception instead of an OOME, even though the encoded size
    // would be 2,147,483,647 aka 2.1 GB, probably larger than the available heap
    assertThrows(BufferUnderflowException.class, () -> Utils.getNullableSizePrefixedArray(buffer));
}
/**
 * Asks a config-repo plugin to list its config files. Extension versions 1.0
 * and 2.0 do not support this operation and get an error result instead of a
 * plugin round-trip.
 */
@Override
public ConfigFileList getConfigFiles(String pluginId, final String destinationFolder, final Collection<CRConfigurationProperty> configurations) {
    String resolvedExtensionVersion = pluginManager.resolveExtensionVersion(pluginId, CONFIG_REPO_EXTENSION, goSupportedVersions);
    if (resolvedExtensionVersion.equals("1.0") || resolvedExtensionVersion.equals("2.0")) {
        return ConfigFileList.withError("Unsupported Operation", "This plugin version does not support list config files");
    }
    return pluginRequestHelper.submitRequest(pluginId, REQUEST_CONFIG_FILES, new DefaultPluginInteractionCallback<>() {
        // Serialize the request with the handler matching the negotiated version.
        @Override
        public String requestBody(String resolvedExtensionVersion) {
            return messageHandlerMap.get(resolvedExtensionVersion).requestMessageConfigFiles(destinationFolder, configurations);
        }

        @Override
        public ConfigFileList onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) {
            return messageHandlerMap.get(resolvedExtensionVersion).responseMessageForConfigFiles(responseBody);
        }
    });
}
/** A v3.0 plugin must receive the request and its parsed file list must be returned as-is. */
@Test
public void shouldTalkToPluginToGetConfigFiles() {
    List<String> deserializedResponse = new ArrayList<>();
    deserializedResponse.add("file.yaml");
    ConfigFileList files = new ConfigFileList(deserializedResponse, null);
    when(jsonMessageHandler3.responseMessageForConfigFiles(responseBody)).thenReturn(files);
    when(pluginManager.resolveExtensionVersion(PLUGIN_ID, CONFIG_REPO_EXTENSION, new ArrayList<>(List.of("1.0", "2.0", "3.0")))).thenReturn("3.0");
    ConfigFileList response = extension.getConfigFiles(PLUGIN_ID, "dir", null);
    assertRequest(requestArgumentCaptor.getValue(), CONFIG_REPO_EXTENSION, "3.0", ConfigRepoExtension.REQUEST_CONFIG_FILES, null);
    verify(jsonMessageHandler3).responseMessageForConfigFiles(responseBody);
    assertSame(response, files);
}
/**
 * Parses the given XML document into the menu {@code Entry} structure by
 * delegating to the reader-based overload.
 */
static public Entry buildMenuStructure(String xml) {
    return buildMenuStructure(new StringReader(xml));
}
/** An element with no children must build into an empty menu structure. */
@Test
public void givenXmlWithoutContent_createsEmptyStructure() {
    // Fixes the "Withour" typo in the test name and renames the misleading
    // local (it held an EMPTY structure, not one with a child entry).
    String xmlWithoutContent = "<FreeplaneUIEntries/>";
    Entry builtMenuStructure = XmlEntryStructureBuilder.buildMenuStructure(xmlWithoutContent);
    Entry emptyMenuStructure = new Entry();
    assertThat(builtMenuStructure, equalTo(emptyMenuStructure));
}
/**
 * Builds a JNDI {@code Reference} for this factory so it can be bound into a
 * naming context and re-created later.
 *
 * @throws NamingException if the reference cannot be created
 */
@Override
public Reference getReference() throws NamingException {
    return JNDIReferenceFactory.createReference(this.getClass().getName(), this);
}
/** The pooled factory must produce a non-null JNDI reference. */
@Test(timeout=240000)
public void testGetReference() throws Exception {
    PooledConnectionFactory factory = createPooledConnectionFactory();
    Reference ref = factory.getReference();
    assertNotNull(ref);
}
/**
 * Builds the registry path of a rule item: the database-rule node path of the
 * two-argument overload, extended with the given key segment.
 */
public static String getDatabaseRuleNode(final String databaseName, final String ruleName, final String key) {
    return getDatabaseRuleNode(databaseName, ruleName) + "/" + key;
}
/** The rule-item path must combine metadata root, database, rule and key. */
@Test
void assertGetDatabaseRuleNode() {
    assertThat(DatabaseRuleMetaDataNode.getDatabaseRuleNode("foo_db", "foo_rule", "sharding"), is("/metadata/foo_db/rules/foo_rule/sharding"));
}
/**
 * Computes the attribute map that results from applying {@code newAttributes}
 * to {@code currentAttributes}. Keys prefixed with '+' add (create mode) or
 * add/update (alter mode); '-' deletes an existing key (alter mode only).
 * Each operation set is validated before being applied.
 *
 * @param create            true when creating a topic (only '+' keys allowed)
 * @param all               the catalog of known attributes used for validation
 * @param currentAttributes attributes currently on the topic
 * @param newAttributes     prefixed key/value changes to apply
 * @return the merged, final attribute map
 * @throws RuntimeException on malformed keys, duplicates, or illegal operations
 */
public static Map<String, String> alterCurrentAttributes(boolean create, Map<String, Attribute> all,
    ImmutableMap<String, String> currentAttributes, ImmutableMap<String, String> newAttributes) {
    Map<String, String> init = new HashMap<>();
    Map<String, String> add = new HashMap<>();
    Map<String, String> update = new HashMap<>();
    Map<String, String> delete = new HashMap<>();
    Set<String> keys = new HashSet<>();
    for (Map.Entry<String, String> attribute : newAttributes.entrySet()) {
        String key = attribute.getKey();
        // realKey strips the '+'/'-' operation prefix.
        String realKey = realKey(key);
        String value = attribute.getValue();
        validate(realKey);
        duplicationCheck(keys, realKey);
        if (create) {
            if (key.startsWith("+")) {
                init.put(realKey, value);
            } else {
                throw new RuntimeException("only add attribute is supported while creating topic. key: " + realKey);
            }
        } else {
            if (key.startsWith("+")) {
                // '+' means add when absent, update when already present.
                if (!currentAttributes.containsKey(realKey)) {
                    add.put(realKey, value);
                } else {
                    update.put(realKey, value);
                }
            } else if (key.startsWith("-")) {
                if (!currentAttributes.containsKey(realKey)) {
                    throw new RuntimeException("attempt to delete a nonexistent key: " + realKey);
                }
                delete.put(realKey, value);
            } else {
                throw new RuntimeException("wrong format key: " + realKey);
            }
        }
    }
    // Validate each operation set against the attribute catalog before applying.
    validateAlter(all, init, true, false);
    validateAlter(all, add, false, false);
    validateAlter(all, update, false, false);
    validateAlter(all, delete, false, true);
    log.info("add: {}, update: {}, delete: {}", add, update, delete);
    HashMap<String, String> finalAttributes = new HashMap<>(currentAttributes);
    finalAttributes.putAll(init);
    finalAttributes.putAll(add);
    finalAttributes.putAll(update);
    for (String s : delete.keySet()) {
        finalAttributes.remove(s);
    }
    return finalAttributes;
}
/** In create mode, only the '+'-prefixed attributes should appear, with prefixes stripped. */
@Test
public void alterCurrentAttributes_CreateMode_ShouldReturnOnlyAddedAttributes() {
    ImmutableMap<String, String> newAttributes = ImmutableMap.of("+attr1", "new_value1", "+attr2", "value2", "+attr3", "value3");
    Map<String, String> result = AttributeUtil.alterCurrentAttributes(true, allAttributes, currentAttributes, newAttributes);
    assertEquals(3, result.size());
    assertTrue(result.containsKey("attr1"));
    assertEquals("new_value1", result.get("attr1"));
    assertTrue(result.containsKey("attr3"));
    assertEquals("value3", result.get("attr3"));
    assertTrue(result.containsKey("attr2"));
}
/**
 * Validates the given ZooKeeper-style path and splits it into its
 * slash-delimited components.
 *
 * @param path path to validate and split
 * @return the path components in order
 */
public static List<String> split(String path) {
    PathUtils.validatePath(path);
    return PATH_SPLITTER.splitToList(path);
}
/** Splitting must drop the leading slash and yield one element per segment. */
@Test
public void testSplit() {
    assertEquals(ZKPaths.split("/"), Collections.emptyList());
    assertEquals(ZKPaths.split("/test"), Collections.singletonList("test"));
    assertEquals(ZKPaths.split("/test/one"), Arrays.asList("test", "one"));
    assertEquals(ZKPaths.split("/test/one/two"), Arrays.asList("test", "one", "two"));
}
/**
 * Identity comparison against the current state object — the scheduler is in
 * the expected state only when it holds that very instance.
 */
@Override
public boolean isState(State expectedState) {
    return expectedState == this.state;
}
/** isState must match the scheduler's own state instance and reject any other. */
@Test
void testIsState() throws Exception {
    final AdaptiveScheduler scheduler =
        new AdaptiveSchedulerBuilder(
            createJobGraph(), mainThreadExecutor, EXECUTOR_RESOURCE.getExecutor())
            .build();
    final State state = scheduler.getState();
    assertThat(scheduler.isState(state)).isTrue();
    assertThat(scheduler.isState(new DummyState())).isFalse();
}
/**
 * Densifies a point list so consecutive points are at most roughly
 * {@code maxDistance} apart, filling in elevation for each inserted point.
 * Interpolated points with no elevation data (NaN) are skipped.
 *
 * @param input       points to densify (3D-ness is preserved)
 * @param maxDistance target maximum spacing between consecutive points
 * @param distCalc    distance calculator used for segment lengths/interpolation
 * @param elevation   elevation source for the inserted points
 * @return a new point list containing the original plus interpolated points
 */
public static PointList sample(PointList input, double maxDistance, DistanceCalc distCalc, ElevationProvider elevation) {
    PointList output = new PointList(input.size() * 2, input.is3D());
    if (input.isEmpty())
        return output;

    int nodes = input.size();
    double lastLat = input.getLat(0), lastLon = input.getLon(0), lastEle = input.getEle(0),
            thisLat, thisLon, thisEle;
    for (int i = 0; i < nodes; i++) {
        thisLat = input.getLat(i);
        thisLon = input.getLon(i);
        thisEle = input.getEle(i);
        if (i > 0) {
            double segmentLength = distCalc.calcDist3D(lastLat, lastLon, lastEle, thisLat, thisLon, thisEle);
            int segments = (int) Math.round(segmentLength / maxDistance);
            // for small distances, we use a simple and fast approximation to interpolate between points
            // for longer distances (or when crossing international date line) we use great circle interpolation
            boolean exact = segmentLength > GREAT_CIRCLE_SEGMENT_LENGTH || distCalc.isCrossBoundary(lastLon, thisLon);
            for (int segment = 1; segment < segments; segment++) {
                double ratio = (double) segment / segments;
                double lat, lon;
                if (exact) {
                    GHPoint point = distCalc.intermediatePoint(ratio, lastLat, lastLon, thisLat, thisLon);
                    lat = point.getLat();
                    lon = point.getLon();
                } else {
                    lat = lastLat + (thisLat - lastLat) * ratio;
                    lon = lastLon + (thisLon - lastLon) * ratio;
                }
                double ele = elevation.getEle(lat, lon);
                // NaN elevation means no data — skip the interpolated point entirely.
                if (!Double.isNaN(ele)) {
                    output.add(lat, lon, ele);
                }
            }
        }
        output.add(thisLat, thisLon, thisEle);
        lastLat = thisLat;
        lastLon = thisLon;
        lastEle = thisEle;
    }
    return output;
}
/** A 0.75-degree segment sampled at quarter-degree spacing must gain two interior points. */
@Test
public void addsTwoPointsAboveThreshold() {
    PointList in = new PointList(2, true);
    in.add(0, 0, 0);
    in.add(0.75, 0, 0);
    PointList out = EdgeSampling.sample(
        in,
        DistanceCalcEarth.METERS_PER_DEGREE / 4,
        new DistanceCalcEarth(),
        elevation
    );
    assertEquals("(0.0,0.0,0.0), (0.25,0.0,10.0), (0.5,0.0,10.0), (0.75,0.0,0.0)", round(out).toString());
}
/**
 * Applies the way's conditional-access restriction to the edge: when the
 * conditional tags evaluate to a definite boolean the value is stored,
 * otherwise the edge is left untouched.
 */
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay way, IntsRef relationFlags) {
    // TODO for now the node tag overhead is not worth the effort due to very few data points
    // List<Map<String, Object>> nodeTags = way.getTag("node_tags", null);

    Boolean b = getConditional(way.getTags());
    if (b != null) restrictionSetter.setBoolean(edgeId, edgeIntAccess, b);
}
/**
 * Exercises conditional-access parsing: matching date conditionals set the
 * tagged value, non-matching ranges invert it, and missing tags stay MISSING.
 */
@Test
public void testBasics() {
    String today = "2023 May 17";
    ArrayEdgeIntAccess edgeIntAccess = new ArrayEdgeIntAccess(1);
    int edgeId = 0;
    assertEquals(CarTemporalAccess.MISSING, restricted.getEnum(false, edgeId, edgeIntAccess));

    ReaderWay way = new ReaderWay(0L);
    way.setTag("highway", "road");
    way.setTag("access:conditional", "no @ (" + today + ")");
    parser.handleWayTags(edgeId, edgeIntAccess, way, IntsRef.EMPTY);
    assertEquals(CarTemporalAccess.NO, restricted.getEnum(false, edgeId, edgeIntAccess));

    edgeIntAccess = new ArrayEdgeIntAccess(1);
    way.setTag("access:conditional", "no @ ( 2023 Mar 23 - " + today + " )");
    parser.handleWayTags(edgeId, edgeIntAccess, way, IntsRef.EMPTY);
    assertEquals(CarTemporalAccess.NO, restricted.getEnum(false, edgeId, edgeIntAccess));

    edgeIntAccess = new ArrayEdgeIntAccess(1);
    way.clearTags();
    way.setTag("highway", "road");
    way.setTag("access", "no");
    way.setTag("access:conditional", "yes @ (" + today + ")");
    parser.handleWayTags(edgeId, edgeIntAccess, way, IntsRef.EMPTY);
    assertEquals(CarTemporalAccess.YES, restricted.getEnum(false, edgeId, edgeIntAccess));

    // for now consider if seasonal range
    edgeIntAccess = new ArrayEdgeIntAccess(1);
    way.setTag("access:conditional", "no @ ( Mar 23 - Aug 23 )");
    parser.handleWayTags(edgeId, edgeIntAccess, way, IntsRef.EMPTY);
    assertEquals(CarTemporalAccess.NO, restricted.getEnum(false, edgeId, edgeIntAccess));

    // range does not match => inverse!
    edgeIntAccess = new ArrayEdgeIntAccess(1);
    way.setTag("access:conditional", "no @ ( Jun 23 - Aug 23 )");
    parser.handleWayTags(edgeId, edgeIntAccess, way, IntsRef.EMPTY);
    assertEquals(CarTemporalAccess.YES, restricted.getEnum(false, edgeId, edgeIntAccess));

    edgeIntAccess = new ArrayEdgeIntAccess(1);
    way.setTag("access:conditional", "no @ ( 2023 Mar 23 )");
    parser.handleWayTags(edgeId, edgeIntAccess, way, IntsRef.EMPTY);
    assertEquals(CarTemporalAccess.YES, restricted.getEnum(false, edgeId, edgeIntAccess));

    edgeIntAccess = new ArrayEdgeIntAccess(1);
    way.setTag("access:conditional", "yes @ Apr-Nov");
    parser.handleWayTags(edgeId, edgeIntAccess, way, IntsRef.EMPTY);
    assertEquals(CarTemporalAccess.YES, restricted.getEnum(false, edgeId, edgeIntAccess));
}
@Override public List<Integer> applyTransforms(List<Integer> originalGlyphIds) { List<Integer> intermediateGlyphsFromGsub = originalGlyphIds; for (String feature : FEATURES_IN_ORDER) { if (!gsubData.isFeatureSupported(feature)) { LOG.debug("the feature {} was not found", feature); continue; } LOG.debug("applying the feature {}", feature); ScriptFeature scriptFeature = gsubData.getFeature(feature); intermediateGlyphsFromGsub = applyGsubFeature(scriptFeature, intermediateGlyphsFromGsub); } return Collections.unmodifiableList(repositionGlyphs(intermediateGlyphsFromGsub)); }
@Test void testApplyTransforms_o_kar() { // given List<Integer> glyphsAfterGsub = Arrays.asList(108, 89, 101, 97); // when List<Integer> result = gsubWorkerForBengali.applyTransforms(getGlyphIds("বোস")); // then assertEquals(glyphsAfterGsub, result); }
@Override public Object next() { if (_numberOfValuesPerEntry == 1) { return getNextBooleanAsInteger(); } return MultiValueGeneratorHelper .generateMultiValueEntries(_numberOfValuesPerEntry, _random, this::getNextBooleanAsInteger); }
@Test public void testNextMultiValued() { Random random = mock(Random.class); when(random.nextBoolean()) .thenReturn(false, true, false, false, false, true, true, true, true, false, true, false, false, true, true, false, false, false, true, true, false, true, true, true); when(random.nextDouble()).thenReturn(0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9); // for MV generation double numValuesPerEntry = 2.4; BooleanGenerator generator = new BooleanGenerator(numValuesPerEntry, random); int[][] expectedValues = { {0, 1, 0}, // rnd < 0.4 {0, 0, 1}, // rnd < 0.4 {1, 1, 1}, // rnd < 0.4 {0, 1, 0}, // rnd < 0.4 {0, 1}, // rnd >= 0.4 {1, 0}, // rnd >= 0.4 {0, 0}, // rnd >= 0.4 {1, 1}, // rnd >= 0.4 {0, 1}, // rnd >= 0.4 {1, 1}, // rnd >= 0.4 }; for (int[] expected : expectedValues) { List<Integer> actual = (List<Integer>) generator.next(); assertEquals(actual.toArray(), expected); } }
@Override public boolean isActive() { return isActive; }
@Test(timeOut = 30000) public void testClientClosedProducerThenSendsMessageAndGetsClosed() throws Exception { resetChannel(); setChannelConnected(); setConnectionVersion(ProtocolVersion.v5.getValue()); serverCnx.cancelKeepAliveTask(); String producerName = "my-producer"; ByteBuf clientCommand1 = Commands.newProducer(successTopicName, 1 /* producer id */, 1 /* request id */, producerName, Collections.emptyMap(), false); channel.writeInbound(clientCommand1); assertTrue(getResponse() instanceof CommandProducerSuccess); ByteBuf closeProducer = Commands.newCloseProducer(1,2); channel.writeInbound(closeProducer); assertTrue(getResponse() instanceof CommandSuccess); // Send message and get disconnected sendMessage(); Awaitility.await().atMost(10, TimeUnit.SECONDS).until(() -> !channel.isActive()); channel.finish(); }
public static URI getUri(String path) { try { URI uri = new URI(path); if (uri.getScheme() != null) { return uri; } else { return new URI(CommonConstants.Segment.LOCAL_SEGMENT_SCHEME + ":" + path); } } catch (URISyntaxException e) { throw new IllegalArgumentException("Illegal URI path: " + path, e); } }
@Test public void testGetUri() { assertEquals(URIUtils.getUri("http://foo/bar").toString(), "http://foo/bar"); assertEquals(URIUtils.getUri("http://foo/bar", "table").toString(), "http://foo/bar/table"); assertEquals(URIUtils.getUri("http://foo/bar", "table", "segment+%25").toString(), "http://foo/bar/table/segment+%25"); assertEquals(URIUtils.getUri("/foo/bar", "table", "segment+%25").toString(), "file:/foo/bar/table/segment+%25"); assertEquals(URIUtils.getUri("file:/foo/bar", "table", "segment+%25").toString(), "file:/foo/bar/table/segment+%25"); }
/**
 * Submits a reservation through the router, retrying up to numSubmitRetries
 * times. Each attempt picks a home sub-cluster via the policy facade, records
 * (or updates) the reservation-to-subcluster mapping, and forwards the request
 * to that sub-cluster's RM. Throws YarnException when all attempts fail.
 */
@Override
public ReservationSubmissionResponse submitReservation(
    ReservationSubmissionRequest request) throws YarnException, IOException {
  // Reject requests missing any mandatory field before touching state.
  if (request == null || request.getReservationId() == null
      || request.getReservationDefinition() == null || request.getQueue() == null) {
    routerMetrics.incrSubmitReservationFailedRetrieved();
    String msg = "Missing submitReservation request or reservationId "
        + "or reservation definition or queue.";
    RouterAuditLogger.logFailure(user.getShortUserName(), SUBMIT_RESERVATION,
        UNKNOWN, TARGET_CLIENT_RM_SERVICE, msg);
    RouterServerUtil.logAndThrowException(msg, null);
  }
  long startTime = clock.getTime();
  ReservationId reservationId = request.getReservationId();
  for (int i = 0; i < numSubmitRetries; i++) {
    try {
      // First, Get SubClusterId according to specific strategy.
      SubClusterId subClusterId = policyFacade.getReservationHomeSubCluster(request);
      LOG.info("submitReservation ReservationId {} try #{} on SubCluster {}.",
          reservationId, i, subClusterId);
      ReservationHomeSubCluster reservationHomeSubCluster =
          ReservationHomeSubCluster.newInstance(reservationId, subClusterId);
      // Second, determine whether the current ReservationId has a corresponding subCluster.
      // If it does not exist, add it. If it exists, update it.
      Boolean exists = existsReservationHomeSubCluster(reservationId);
      // We may encounter the situation of repeated submission of Reservation,
      // at this time we should try to use the reservation that has been allocated.
      // !exists indicates that the reservation does not exist and needs to be added;
      // i==0 mainly considers repeated submissions, so on the first attempt we
      // try to (re-)register the original reservation mapping.
      if (!exists || i == 0) {
        addReservationHomeSubCluster(reservationId, reservationHomeSubCluster);
      } else {
        updateReservationHomeSubCluster(subClusterId, reservationId, reservationHomeSubCluster);
      }
      // Third, Submit a Reservation request to the subCluster.
      ApplicationClientProtocol clientRMProxy = getClientRMProxyForSubCluster(subClusterId);
      ReservationSubmissionResponse response = clientRMProxy.submitReservation(request);
      if (response != null) {
        LOG.info("Reservation {} submitted on subCluster {}.", reservationId, subClusterId);
        long stopTime = clock.getTime();
        routerMetrics.succeededSubmitReservationRetrieved(stopTime - startTime);
        RouterAuditLogger.logSuccess(user.getShortUserName(), SUBMIT_RESERVATION,
            TARGET_CLIENT_RM_SERVICE);
        return response;
      }
    } catch (Exception e) {
      // Swallow per-attempt failures; the loop retries with a fresh policy decision.
      LOG.warn("Unable to submit(try #{}) the Reservation {}.", i, reservationId, e);
    }
  }
  // All retries exhausted.
  routerMetrics.incrSubmitReservationFailedRetrieved();
  String msg = String.format("Reservation %s failed to be submitted.", reservationId);
  RouterAuditLogger.logFailure(user.getShortUserName(), SUBMIT_RESERVATION, UNKNOWN,
      TARGET_CLIENT_RM_SERVICE, msg);
  throw new YarnException(msg);
}
@Test public void testSubmitReservationEmptyRequest() throws Exception { LOG.info("Test FederationClientInterceptor : SubmitReservation request empty."); String errorMsg = "Missing submitReservation request or reservationId or reservation definition or queue."; // null request1 LambdaTestUtils.intercept(YarnException.class, errorMsg, () -> interceptor.submitReservation(null)); // null request2 ReservationSubmissionRequest request2 = ReservationSubmissionRequest.newInstance(null, null, null); LambdaTestUtils.intercept(YarnException.class, errorMsg, () -> interceptor.submitReservation(request2)); // null request3 ReservationSubmissionRequest request3 = ReservationSubmissionRequest.newInstance(null, "q1", null); LambdaTestUtils.intercept(YarnException.class, errorMsg, () -> interceptor.submitReservation(request3)); // null request4 ReservationId reservationId = ReservationId.newInstance(Time.now(), 1); ReservationSubmissionRequest request4 = ReservationSubmissionRequest.newInstance(null, null, reservationId); LambdaTestUtils.intercept(YarnException.class, errorMsg, () -> interceptor.submitReservation(request4)); // null request5 long arrival = Time.now(); long deadline = arrival + (int)(DEFAULT_DURATION * 1.1); ReservationRequest rRequest = ReservationRequest.newInstance( Resource.newInstance(1024, 1), 1, 1, DEFAULT_DURATION); ReservationRequest[] rRequests = new ReservationRequest[] {rRequest}; ReservationDefinition rDefinition = createReservationDefinition(arrival, deadline, rRequests, ReservationRequestInterpreter.R_ALL, "u1"); ReservationSubmissionRequest request5 = ReservationSubmissionRequest.newInstance(rDefinition, null, reservationId); LambdaTestUtils.intercept(YarnException.class, errorMsg, () -> interceptor.submitReservation(request5)); }
private static SnapshotDiffReport getSnapshotDiffReport( final FileSystem fs, final Path snapshotDir, final String fromSnapshot, final String toSnapshot) throws IOException { try { return (SnapshotDiffReport) getSnapshotDiffReportMethod(fs).invoke( fs, snapshotDir, fromSnapshot, toSnapshot); } catch (InvocationTargetException e) { throw new IOException(e.getCause()); } catch (NoSuchMethodException|IllegalAccessException e) { throw new IllegalArgumentException( "Failed to invoke getSnapshotDiffReport.", e); } }
@Test public void testSync2() throws Exception { initData2(source); initData2(target); enableAndCreateFirstSnapshot(); // make changes under source changeData2(source); dfs.createSnapshot(source, "s2"); SnapshotDiffReport report = dfs.getSnapshotDiffReport(source, "s1", "s2"); System.out.println(report); syncAndVerify(); }
@Override public void executeUpdate(final AlterStorageUnitStatement sqlStatement, final ContextManager contextManager) { checkBefore(sqlStatement); Map<String, DataSourcePoolProperties> propsMap = DataSourceSegmentsConverter.convert(database.getProtocolType(), sqlStatement.getStorageUnits()); validateHandler.validate(propsMap, getExpectedPrivileges(sqlStatement)); try { contextManager.getPersistServiceFacade().getMetaDataManagerPersistService().alterStorageUnits(database.getName(), propsMap); } catch (final SQLException | ShardingSphereExternalException ex) { throw new StorageUnitsOperateException("alter", propsMap.keySet(), ex); } }
@Test void assertExecuteUpdateWithDuplicateStorageUnitNames() { assertThrows(DuplicateStorageUnitException.class, () -> executor.executeUpdate(createAlterStorageUnitStatementWithDuplicateStorageUnitNames(), mock(ContextManager.class))); }
@Override public EntityExcerpt createExcerpt(CacheDto cacheDto) { return EntityExcerpt.builder() .id(ModelId.of(cacheDto.id())) .type(ModelTypes.LOOKUP_CACHE_V1) .title(cacheDto.title()) .build(); }
@Test public void createExcerpt() { final CacheDto cacheDto = CacheDto.builder() .id("1234567890") .name("cache-name") .title("Cache Title") .description("Cache Description") .config(new FallbackCacheConfig()) .build(); final EntityExcerpt excerpt = facade.createExcerpt(cacheDto); assertThat(excerpt.id()).isEqualTo(ModelId.of("1234567890")); assertThat(excerpt.type()).isEqualTo(ModelTypes.LOOKUP_CACHE_V1); assertThat(excerpt.title()).isEqualTo("Cache Title"); }
@Override public boolean encode( @NonNull Resource<GifDrawable> resource, @NonNull File file, @NonNull Options options) { GifDrawable drawable = resource.get(); Transformation<Bitmap> transformation = drawable.getFrameTransformation(); boolean isTransformed = !(transformation instanceof UnitTransformation); if (isTransformed && options.get(ENCODE_TRANSFORMATION)) { return encodeTransformedToFile(drawable, file); } else { return writeDataDirect(drawable.getBuffer(), file); } }
@Test public void testRecyclesTransformedResourceAfterWritingIfTransformedResourceIsDifferent() { when(decoder.getFrameCount()).thenReturn(1); Bitmap expected = Bitmap.createBitmap(100, 200, Bitmap.Config.RGB_565); when(transformedResource.get()).thenReturn(expected); when(frameTransformation.transform(anyContext(), eq(frameResource), anyInt(), anyInt())) .thenReturn(transformedResource); when(gifEncoder.start(any(OutputStream.class))).thenReturn(true); encoder.encode(resource, file, options); InOrder order = inOrder(transformedResource, gifEncoder); order.verify(gifEncoder).addFrame(eq(expected)); order.verify(transformedResource).recycle(); }
@Override public Optional<BlobDescriptor> handleHttpResponseException(ResponseException responseException) throws ResponseException { if (responseException.getStatusCode() != HttpStatusCodes.STATUS_CODE_NOT_FOUND) { throw responseException; } if (responseException.getContent() == null) { // TODO: The Google HTTP client gives null content for HEAD requests. Make the content never // be null, even for HEAD requests. return Optional.empty(); } // Find a BLOB_UNKNOWN error response code. ErrorCodes errorCode = ErrorResponseUtil.getErrorCode(responseException); if (errorCode == ErrorCodes.BLOB_UNKNOWN) { return Optional.empty(); } // BLOB_UNKNOWN was not found as a error response code. throw responseException; }
@Test public void testHandleHttpResponseException_invalidStatusCode() { ResponseException mockResponseException = Mockito.mock(ResponseException.class); Mockito.when(mockResponseException.getStatusCode()).thenReturn(-1); try { testBlobChecker.handleHttpResponseException(mockResponseException); Assert.fail("Non-404 status codes should not be handled"); } catch (ResponseException ex) { Assert.assertEquals(mockResponseException, ex); } }
@Override @Deprecated public <VR> KStream<K, VR> flatTransformValues(final org.apache.kafka.streams.kstream.ValueTransformerSupplier<? super V, Iterable<VR>> valueTransformerSupplier, final String... stateStoreNames) { Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null"); return doFlatTransformValues( toValueTransformerWithKeySupplier(valueTransformerSupplier), NamedInternal.empty(), stateStoreNames); }
@Test @SuppressWarnings("deprecation") public void shouldNotAllowNullStoreNameOnFlatTransformValuesWithFlatValueWithKeySupplierAndNamed() { final NullPointerException exception = assertThrows( NullPointerException.class, () -> testStream.flatTransformValues( flatValueTransformerWithKeySupplier, Named.as("flatValueWitKeyTransformer"), (String) null)); assertThat(exception.getMessage(), equalTo("stateStoreNames can't contain `null` as store name")); }
static String encodeHex(byte[] bytes) { final char[] hex = new char[bytes.length * 2]; int i = 0; for (byte b : bytes) { hex[i++] = HEX_DIGITS[(b >> 4) & 0x0f]; hex[i++] = HEX_DIGITS[b & 0x0f]; } return String.valueOf(hex); }
@Test public void encodeHex_Null_Failure() throws Exception { try { PubkeyUtils.encodeHex(null); fail("Should throw null pointer exception when argument is null"); } catch (NullPointerException e) { // success } }
@Override public ObjectNode encode(MappingKey key, CodecContext context) { checkNotNull(key, "Mapping key cannot be null"); final ObjectNode result = context.mapper().createObjectNode(); final JsonCodec<MappingAddress> addressCodec = context.codec(MappingAddress.class); result.set(ADDRESS, addressCodec.encode(key.address(), context)); return result; }
@Test public void testMappingKeyEncode() { MappingAddress address = MappingAddresses.ipv4MappingAddress(IPV4_PREFIX); MappingKey key = DefaultMappingKey.builder() .withAddress(address) .build(); ObjectNode keyJson = keyCodec.encode(key, context); assertThat(keyJson, MappingKeyJsonMatcher.matchesMappingKey(key)); }
@PublicAPI(usage = ACCESS) public JavaField getField(String name) { return members.getField(name); }
@Test public void get_all_involved_raw_types_returns_component_type_for_array_type() { class SimpleClass { @SuppressWarnings("unused") SimpleClass[][] field; } JavaType arrayType = new ClassFileImporter().importClass(SimpleClass.class).getField("field").getType(); assertThatTypes(arrayType.getAllInvolvedRawTypes()).matchExactly(SimpleClass.class); }
@Override public void shutdown() { scheduledExecutorService.shutdown(); try { web3jService.close(); } catch (IOException e) { throw new RuntimeException("Failed to close web3j service", e); } }
@Test public void testExceptionOnServiceClosure() throws Exception { assertThrows( RuntimeException.class, () -> { doThrow(new IOException("Failed to close")).when(service).close(); web3j.shutdown(); }); }
/**
 * Parses DistCp command-line arguments into a DistCpOptions instance.
 * Boolean switches map directly onto builder flags; valued switches are
 * validated (numeric parsing, snapshot pairs) and invalid values raise
 * IllegalArgumentException. Deprecated switches are warned about and ignored.
 */
public static DistCpOptions parse(String[] args)
    throws IllegalArgumentException {
  CommandLineParser parser = new CustomParser();
  CommandLine command;
  try {
    command = parser.parse(cliOptions, args, true);
  } catch (ParseException e) {
    throw new IllegalArgumentException("Unable to parse arguments. "
        + Arrays.toString(args), e);
  }
  DistCpOptions.Builder builder = parseSourceAndTargetPaths(command);
  // Simple presence-based boolean flags.
  builder
      .withAtomicCommit(
          command.hasOption(DistCpOptionSwitch.ATOMIC_COMMIT.getSwitch()))
      .withSyncFolder(
          command.hasOption(DistCpOptionSwitch.SYNC_FOLDERS.getSwitch()))
      .withDeleteMissing(
          command.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch()))
      .withIgnoreFailures(
          command.hasOption(DistCpOptionSwitch.IGNORE_FAILURES.getSwitch()))
      .withOverwrite(
          command.hasOption(DistCpOptionSwitch.OVERWRITE.getSwitch()))
      .withAppend(
          command.hasOption(DistCpOptionSwitch.APPEND.getSwitch()))
      .withSkipCRC(
          command.hasOption(DistCpOptionSwitch.SKIP_CRC.getSwitch()))
      // Note the inversion: presence of the switch turns blocking OFF.
      .withBlocking(
          !command.hasOption(DistCpOptionSwitch.BLOCKING.getSwitch()))
      .withVerboseLog(
          command.hasOption(DistCpOptionSwitch.VERBOSE_LOG.getSwitch()))
      .withDirectWrite(
          command.hasOption(DistCpOptionSwitch.DIRECT_WRITE.getSwitch()))
      .withUseIterator(
          command.hasOption(DistCpOptionSwitch.USE_ITERATOR.getSwitch()))
      .withUpdateRoot(
          command.hasOption(DistCpOptionSwitch.UPDATE_ROOT.getSwitch()));
  // Snapshot diff options take exactly two snapshot names.
  if (command.hasOption(DistCpOptionSwitch.DIFF.getSwitch())) {
    String[] snapshots = getVals(command,
        DistCpOptionSwitch.DIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    builder.withUseDiff(snapshots[0], snapshots[1]);
  }
  if (command.hasOption(DistCpOptionSwitch.RDIFF.getSwitch())) {
    String[] snapshots = getVals(command,
        DistCpOptionSwitch.RDIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    builder.withUseRdiff(snapshots[0], snapshots[1]);
  }
  if (command.hasOption(DistCpOptionSwitch.FILTERS.getSwitch())) {
    builder.withFiltersFile(
        getVal(command, DistCpOptionSwitch.FILTERS.getSwitch()));
  }
  if (command.hasOption(DistCpOptionSwitch.LOG_PATH.getSwitch())) {
    builder.withLogPath(
        new Path(getVal(command, DistCpOptionSwitch.LOG_PATH.getSwitch())));
  }
  if (command.hasOption(DistCpOptionSwitch.WORK_PATH.getSwitch())) {
    final String workPath = getVal(command,
        DistCpOptionSwitch.WORK_PATH.getSwitch());
    // Blank work paths are silently ignored.
    if (workPath != null && !workPath.isEmpty()) {
      builder.withAtomicWorkPath(new Path(workPath));
    }
  }
  if (command.hasOption(DistCpOptionSwitch.TRACK_MISSING.getSwitch())) {
    builder.withTrackMissing(
        new Path(getVal(
            command,
            DistCpOptionSwitch.TRACK_MISSING.getSwitch())));
  }
  if (command.hasOption(DistCpOptionSwitch.BANDWIDTH.getSwitch())) {
    try {
      final Float mapBandwidth = Float.parseFloat(
          getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()));
      builder.withMapBandwidth(mapBandwidth);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Bandwidth specified is invalid: " +
          getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()), e);
    }
  }
  if (command.hasOption(
      DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch())) {
    try {
      final Integer numThreads = Integer.parseInt(getVal(command,
          DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()));
      builder.withNumListstatusThreads(numThreads);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException(
          "Number of liststatus threads is invalid: " + getVal(command,
              DistCpOptionSwitch.NUM_LISTSTATUS_THREADS.getSwitch()), e);
    }
  }
  if (command.hasOption(DistCpOptionSwitch.MAX_MAPS.getSwitch())) {
    try {
      final Integer maps = Integer.parseInt(
          getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()));
      builder.maxMaps(maps);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("Number of maps is invalid: " +
          getVal(command, DistCpOptionSwitch.MAX_MAPS.getSwitch()), e);
    }
  }
  if (command.hasOption(DistCpOptionSwitch.COPY_STRATEGY.getSwitch())) {
    builder.withCopyStrategy(
        getVal(command, DistCpOptionSwitch.COPY_STRATEGY.getSwitch()));
  }
  if (command.hasOption(DistCpOptionSwitch.PRESERVE_STATUS.getSwitch())) {
    builder.preserve(
        getVal(command, DistCpOptionSwitch.PRESERVE_STATUS.getSwitch()));
  }
  // Deprecated limits: warn and ignore.
  if (command.hasOption(DistCpOptionSwitch.FILE_LIMIT.getSwitch())) {
    LOG.warn(DistCpOptionSwitch.FILE_LIMIT.getSwitch() + " is a deprecated" +
        " option. Ignoring.");
  }
  if (command.hasOption(DistCpOptionSwitch.SIZE_LIMIT.getSwitch())) {
    LOG.warn(DistCpOptionSwitch.SIZE_LIMIT.getSwitch() + " is a deprecated" +
        " option. Ignoring.");
  }
  if (command.hasOption(DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch())) {
    // NOTE(review): .trim() below is applied to the constant switch name, not the
    // user-supplied value — looks misplaced; presumably getVal(...).trim() was
    // intended. Same pattern for COPY_BUFFER_SIZE below. Confirm before changing.
    final String chunkSizeStr = getVal(command,
        DistCpOptionSwitch.BLOCKS_PER_CHUNK.getSwitch().trim());
    try {
      int csize = Integer.parseInt(chunkSizeStr);
      // Negative values are clamped to 0 (chunking disabled).
      csize = csize > 0 ? csize : 0;
      LOG.info("Set distcp blocksPerChunk to " + csize);
      builder.withBlocksPerChunk(csize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("blocksPerChunk is invalid: "
          + chunkSizeStr, e);
    }
  }
  if (command.hasOption(DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch())) {
    final String copyBufferSizeStr = getVal(command,
        DistCpOptionSwitch.COPY_BUFFER_SIZE.getSwitch().trim());
    try {
      int copyBufferSize = Integer.parseInt(copyBufferSizeStr);
      builder.withCopyBufferSize(copyBufferSize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException("copyBufferSize is invalid: "
          + copyBufferSizeStr, e);
    }
  }
  return builder.build();
}
@Test public void testParseMaps() { DistCpOptions options = OptionsParser.parse(new String[] { "hdfs://localhost:8020/source/first", "hdfs://localhost:8020/target/"}); assertThat(options.getMaxMaps()).isEqualTo(DistCpConstants.DEFAULT_MAPS); options = OptionsParser.parse(new String[] { "-m", "1", "hdfs://localhost:8020/source/first", "hdfs://localhost:8020/target/"}); assertThat(options.getMaxMaps()).isEqualTo(1); options = OptionsParser.parse(new String[] { "-m", "0", "hdfs://localhost:8020/source/first", "hdfs://localhost:8020/target/"}); assertThat(options.getMaxMaps()).isEqualTo(1); try { OptionsParser.parse(new String[] { "-m", "hello", "hdfs://localhost:8020/source/first", "hdfs://localhost:8020/target/"}); Assert.fail("Non numberic map parsed"); } catch (IllegalArgumentException ignore) { } try { OptionsParser.parse(new String[] { "-mapredXslConf", "hdfs://localhost:8020/source/first", "hdfs://localhost:8020/target/"}); Assert.fail("Non numberic map parsed"); } catch (IllegalArgumentException ignore) { } }
public boolean saveToFile( EngineMetaInterface meta ) throws KettleException { return saveToFile( meta, false ); }
@Test public void testNullParamSaveToFile() throws Exception { doCallRealMethod().when( spoon ).saveToFile( any() ); assertFalse( spoon.saveToFile( null ) ); }
@Override public List<Intent> compile(LinkCollectionIntent intent, List<Intent> installable) { SetMultimap<DeviceId, PortNumber> inputPorts = HashMultimap.create(); SetMultimap<DeviceId, PortNumber> outputPorts = HashMultimap.create(); Map<ConnectPoint, Identifier<?>> labels = ImmutableMap.of(); Optional<EncapsulationConstraint> encapConstraint = this.getIntentEncapConstraint(intent); computePorts(intent, inputPorts, outputPorts); if (encapConstraint.isPresent()) { labels = labelAllocator.assignLabelToPorts(intent.links(), intent.key(), encapConstraint.get().encapType(), encapConstraint.get().suggestedIdentifier()); } ImmutableList.Builder<Intent> intentList = ImmutableList.builder(); if (this.isDomainProcessingEnabled(intent)) { intentList.addAll(this.getDomainIntents(intent, domainService)); } List<Objective> objectives = new ArrayList<>(); List<DeviceId> devices = new ArrayList<>(); for (DeviceId deviceId : outputPorts.keySet()) { // add only objectives that are not inside of a domain if (LOCAL.equals(domainService.getDomain(deviceId))) { List<Objective> deviceObjectives = createRules(intent, deviceId, inputPorts.get(deviceId), outputPorts.get(deviceId), labels); deviceObjectives.forEach(objective -> { objectives.add(objective); devices.add(deviceId); }); } } // if any objectives have been created if (!objectives.isEmpty()) { intentList.add(new FlowObjectiveIntent(appId, intent.key(), devices, objectives, intent.resources(), intent.resourceGroup())); } return intentList.build(); }
@Test
public void testFilteredConnectPointForMp() {
    // Topology: of1 and of3 feed of2, which forwards on to of4 (multipoint-to-point).
    Set<Link> testLinks = ImmutableSet.of(
        DefaultLink.builder().providerId(PID).src(of1p2).dst(of2p1).type(DIRECT).build(),
        DefaultLink.builder().providerId(PID).src(of3p2).dst(of2p3).type(DIRECT).build(),
        DefaultLink.builder().providerId(PID).src(of2p2).dst(of4p1).type(DIRECT).build()
    );
    // Two VLAN-100-filtered ingresses, one filtered egress.
    Set<FilteredConnectPoint> ingress = ImmutableSet.of(
        new FilteredConnectPoint(of3p1, vlan100Selector),
        new FilteredConnectPoint(of1p1, vlan100Selector)
    );
    Set<FilteredConnectPoint> egress = ImmutableSet.of(
        new FilteredConnectPoint(of4p2, vlan100Selector)
    );
    LinkCollectionIntent intent = LinkCollectionIntent.builder()
        .appId(appId)
        .selector(ethDstSelector)
        .treatment(treatment)
        .links(testLinks)
        .filteredIngressPoints(ingress)
        .filteredEgressPoints(egress)
        .build();
    List<Intent> result = compiler.compile(intent, Collections.emptyList());
    assertThat(result, hasSize(1));
    assertThat(result.get(0), instanceOf(FlowObjectiveIntent.class));
    FlowObjectiveIntent foIntent = (FlowObjectiveIntent) result.get(0);
    List<Objective> objectives = foIntent.objectives();
    // Five (filtering, forwarding, next) triplets are expected.
    assertThat(objectives, hasSize(15));
    TrafficSelector expectSelector = DefaultTrafficSelector
        .builder(ethDstSelector)
        .matchInPort(PortNumber.portNumber(1))
        .matchVlanId(VLAN_100)
        .build();
    TrafficTreatment expectTreatment = DefaultTrafficTreatment.builder()
        .setOutput(PortNumber.portNumber(2))
        .build();
    /*
     * First set of objective
     */
    filteringObjective = (FilteringObjective) objectives.get(0);
    forwardingObjective = (ForwardingObjective) objectives.get(1);
    nextObjective = (NextObjective) objectives.get(2);
    PortCriterion inPortCriterion =
        (PortCriterion) expectSelector.getCriterion(Criterion.Type.IN_PORT);
    // test case for first filtering objective
    checkFiltering(filteringObjective, inPortCriterion,
        intent.priority(), null, appId, true, vlan100Selector.criteria());
    // test case for first next objective
    checkNext(nextObjective, SIMPLE, expectTreatment, expectSelector, ADD);
    // test case for first forwarding objective
    checkForward(forwardingObjective, ADD, expectSelector, nextObjective.id(), SPECIFIC);
    /*
     * Second set of objective
     */
    filteringObjective = (FilteringObjective) objectives.get(3);
    forwardingObjective = (ForwardingObjective) objectives.get(4);
    nextObjective = (NextObjective) objectives.get(5);
    // test case for second filtering objective
    checkFiltering(filteringObjective, inPortCriterion,
        intent.priority(), null, appId, true, vlan100Selector.criteria());
    // test case for second next objective
    checkNext(nextObjective, SIMPLE, expectTreatment, expectSelector, ADD);
    // test case for second forwarding objective
    checkForward(forwardingObjective, ADD, expectSelector, nextObjective.id(), SPECIFIC);
    /*
     * 3rd set of objective
     */
    filteringObjective = (FilteringObjective) objectives.get(6);
    forwardingObjective = (ForwardingObjective) objectives.get(7);
    nextObjective = (NextObjective) objectives.get(8);
    // test case for third filtering objective
    checkFiltering(filteringObjective, inPortCriterion,
        intent.priority(), null, appId, true, vlan100Selector.criteria());
    // test case for third next objective
    checkNext(nextObjective, SIMPLE, expectTreatment, expectSelector, ADD);
    // test case for third forwarding objective
    checkForward(forwardingObjective, ADD, expectSelector, nextObjective.id(), SPECIFIC);
    /*
     * 4th set of objective
     */
    filteringObjective = (FilteringObjective) objectives.get(9);
    forwardingObjective = (ForwardingObjective) objectives.get(10);
    nextObjective = (NextObjective) objectives.get(11);
    // test case for fourth filtering objective
    checkFiltering(filteringObjective, inPortCriterion,
        intent.priority(), null, appId, true, vlan100Selector.criteria());
    // test case for fourth next objective
    checkNext(nextObjective, SIMPLE, expectTreatment, expectSelector, ADD);
    // test case for fourth forwarding objective
    checkForward(forwardingObjective, ADD, expectSelector, nextObjective.id(), SPECIFIC);
    /*
     * 5th set of objective
     */
    filteringObjective = (FilteringObjective) objectives.get(12);
    forwardingObjective = (ForwardingObjective) objectives.get(13);
    nextObjective = (NextObjective) objectives.get(14);
    // The final set matches traffic arriving on port 3 instead of port 1.
    expectSelector = DefaultTrafficSelector.builder(ethDstSelector)
        .matchVlanId(VLAN_100)
        .matchInPort(PortNumber.portNumber(3))
        .build();
    inPortCriterion =
        (PortCriterion) expectSelector.getCriterion(Criterion.Type.IN_PORT);
    // test case for fifth filtering objective
    checkFiltering(filteringObjective, inPortCriterion,
        intent.priority(), null, appId, true, vlan100Selector.criteria());
    // test case for fifth next objective
    checkNext(nextObjective, SIMPLE, expectTreatment, expectSelector, ADD);
    // test case for fifth forwarding objective
    checkForward(forwardingObjective, ADD, expectSelector, nextObjective.id(), SPECIFIC);
}
@Override public <T> T convert(DataTable dataTable, Type type) { return convert(dataTable, type, false); }
@Test void convert_to_unknown_type__throws_exception__with_table_entry_converter_present__throws_exception() { DataTable table = parse("", "| ♘ |"); CucumberDataTableException exception = assertThrows( CucumberDataTableException.class, () -> converter.convert(table, Piece.class)); assertThat(exception.getMessage(), is("" + "Can't convert DataTable to io.cucumber.datatable.DataTableTypeRegistryTableConverterTest$Piece.\n" + "Please review these problems:\n" + "\n" + " - There was no table entry or table row transformer registered for io.cucumber.datatable.DataTableTypeRegistryTableConverterTest$Piece.\n" + " Please consider registering a table entry or row transformer.\n" + "\n" + " - There was no table cell transformer registered for io.cucumber.datatable.DataTableTypeRegistryTableConverterTest$Piece.\n" + " Please consider registering a table cell transformer.\n" + "\n" + " - There was no default table cell transformer registered to transform io.cucumber.datatable.DataTableTypeRegistryTableConverterTest$Piece.\n" + " Please consider registering a default table cell transformer.\n" + "\n" + "Note: Usually solving one is enough")); }
public List<TimerangePreset> convert(final Map<Period, String> timerangeOptions) { if (timerangeOptions == null) { return List.of(); } return timerangeOptions.entrySet() .stream() .map(entry -> new TimerangePreset( periodConverter.apply(entry.getKey()), entry.getValue()) ) .collect(Collectors.toList()); }
@Test void testConversionOnSomeDefaultRelativeTimerangeOptions() { Map<Period, String> defaults = new LinkedHashMap( Map.of( Period.minutes(15), "15 minutes", Period.hours(8), "8 hours", Period.days(1), "1 day" ) ); doCallRealMethod().when(periodConverter).apply(any(Period.class)); final List<TimerangePreset> result = toTest.convert(defaults); assertThat(result) .isNotNull() .hasSize(3) .extracting(TimerangePreset::timeRange, TimerangePreset::description) .containsExactlyInAnyOrder( tuple(RelativeRange.Builder.builder() .from(15 * 60) .build(), "15 minutes"), tuple(RelativeRange.Builder.builder() .from(8 * 60 * 60) .build(), "8 hours"), tuple(RelativeRange.Builder.builder() .from(24 * 60 * 60) .build(), "1 day") ); }
public HeapRow() { // No-op. }
@Test public void testHeapRow() { Object[] values = new Object[2]; values[0] = new Object(); values[1] = new Object(); HeapRow row = new HeapRow(values); assertEquals(2, row.getColumnCount()); assertSame(values[0], row.get(0)); assertSame(values[1], row.get(1)); row = new HeapRow(2); row.set(0, values[0]); row.set(1, values[1]); assertEquals(2, row.getColumnCount()); assertSame(values[0], row.get(0)); assertSame(values[1], row.get(1)); }
@Override public ObjectNode encode(Instruction instruction, CodecContext context) { checkNotNull(instruction, "Instruction cannot be null"); return new EncodeInstructionCodecHelper(instruction, context).encode(); }
@Test public void modIPv6FlowLabelInstructionTest() { final int flowLabel = 0xfffff; final L3ModificationInstruction.ModIPv6FlowLabelInstruction instruction = (L3ModificationInstruction.ModIPv6FlowLabelInstruction) Instructions.modL3IPv6FlowLabel(flowLabel); final ObjectNode instructionJson = instructionCodec.encode(instruction, context); assertThat(instructionJson, matchesInstruction(instruction)); }
/**
 * Executes this unit: delegates entirely to {@code processor} when one is set,
 * otherwise runs the before-feature hook, processes the remaining scenarios, and
 * invokes {@code afterFeature()}.
 */
@Override
public void run() {
    if (processor != null) {
        // An explicit processor takes over the whole run.
        processor.execute();
    } else {
        if (!beforeHook()) {
            // Hook vetoed the run; scenarios are skipped entirely.
            logger.info("before-feature hook returned [false], aborting: {}", this);
        } else {
            scenarios.forEachRemaining(this::processScenario);
        }
        // NOTE(review): afterFeature() runs even when the hook aborted — confirm intended.
        afterFeature();
    }
}
/** Runs the callonce-bg feature with the given classpath root supplying karate-config utilities. */
@Test
void testCallOnceWithUtilsPresentInKarateConfig() {
    run("callonce-bg.feature", "classpath:com/intuit/karate/core");
}
/**
 * Builds a {@link QueryHeader} for one result-set column, resolving the logic table
 * name and primary-key flag from database metadata when available.
 */
@Override
public QueryHeader build(final QueryResultMetaData queryResultMetaData, final ShardingSphereDatabase database,
                         final String columnName, final String columnLabel, final int columnIndex) throws SQLException {
    // The database name doubles as the schema name here; empty when no database is given.
    String schemaName = null == database ? "" : database.getName();
    String actualTableName = queryResultMetaData.getTableName(columnIndex);
    String tableName;
    boolean primaryKey;
    if (null == actualTableName || null == database) {
        // No table/database context: keep the raw table name and assume not a primary key.
        tableName = actualTableName;
        primaryKey = false;
    } else {
        tableName = getLogicTableName(database, actualTableName);
        ShardingSphereSchema schema = database.getSchema(schemaName);
        // Primary key only when schema, table and column are all resolvable.
        primaryKey = null != schema
                && Optional.ofNullable(schema.getTable(tableName)).map(optional -> optional.getColumn(columnName)).map(ShardingSphereColumn::isPrimaryKey).orElse(false);
    }
    int columnType = queryResultMetaData.getColumnType(columnIndex);
    String columnTypeName = queryResultMetaData.getColumnTypeName(columnIndex);
    int columnLength = queryResultMetaData.getColumnLength(columnIndex);
    int decimals = queryResultMetaData.getDecimals(columnIndex);
    boolean signed = queryResultMetaData.isSigned(columnIndex);
    boolean notNull = queryResultMetaData.isNotNull(columnIndex);
    boolean autoIncrement = queryResultMetaData.isAutoIncrement(columnIndex);
    return new QueryHeader(schemaName, tableName, columnLabel, columnName, columnType, columnTypeName, columnLength, decimals, signed, primaryKey, notNull, autoIncrement);
}
/** A database without data-node rules must yield a non-primary-key header. */
@Test
void assertBuildWithoutDataNodeContainedRule() throws SQLException {
    QueryResultMetaData queryResultMetaData = createQueryResultMetaData();
    QueryHeader actual = new MySQLQueryHeaderBuilder().build(
            queryResultMetaData, mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS), queryResultMetaData.getColumnName(1), queryResultMetaData.getColumnLabel(1), 1);
    assertFalse(actual.isPrimaryKey());
    // NOTE(review): this assertion compares the value with itself and can never fail;
    // it should assert an expected table name instead — confirm the intended value.
    assertThat(actual.getTable(), is(actual.getTable()));
}
/**
 * Returns the number of decimal digits in {@code value} without branching, using a
 * lookup table indexed by bit length. {@code value | 1} guards the {@code value == 0}
 * case so {@code numberOfLeadingZeros} never sees 0. Presumably the table holds
 * 64-bit entries so the {@code >> 32} extracts the digit count — confirm against
 * the {@code INT_DIGITS} definition. Behavior for negative inputs is undefined;
 * TODO confirm callers only pass non-negative values.
 */
public static int digitCount(final int value) {
    return (int)((value + INT_DIGITS[31 - Integer.numberOfLeadingZeros(value | 1)]) >> 32);
}
/**
 * Verifies {@code digitCount(int)} across every decimal digit length, probing the
 * minimum, minimum + 1, midpoint, maximum - 1 and maximum of each digit bucket.
 */
@Test
void digitCountIntValue() {
    for (int i = 0; i < INT_MAX_DIGITS; i++) {
        final int min = 0 == i ? 0 : INT_POW_10[i];
        final int max = INT_MAX_DIGITS - 1 == i ? Integer.MAX_VALUE : INT_POW_10[i + 1] - 1;
        final int expectedDigitCount = i + 1;

        assertEquals(expectedDigitCount, digitCount(min));
        assertEquals(expectedDigitCount, digitCount(min + 1));
        // Overflow-safe midpoint. The original expression 'min + (max - min) >>> 1'
        // parsed as '(min + (max - min)) >>> 1' == 'max >>> 1' because '+' binds
        // tighter than '>>>', so the midpoint was never actually probed.
        assertEquals(expectedDigitCount, digitCount(min + ((max - min) >>> 1)));
        assertEquals(expectedDigitCount, digitCount(max - 1));
        assertEquals(expectedDigitCount, digitCount(max));
    }
}
/**
 * Wraps a {@link SerializableFunction} as an anonymous {@link PTransform} whose
 * {@code expand} simply applies the function to the input.
 */
public static <InputT extends PInput, OutputT extends POutput>
        PTransform<InputT, OutputT> compose(SerializableFunction<InputT, OutputT> fn) {
    return new PTransform<InputT, OutputT>() {
        @Override
        public OutputT expand(InputT input) {
            return fn.apply(input);
        }
    };
}
@Test @Category(NeedsRunner.class) public void testComposeBasicSerializableFunction() throws Exception { PCollection<Integer> output = pipeline .apply(Create.of(1, 2, 3)) .apply( PTransform.compose( (PCollection<Integer> numbers) -> { PCollection<Integer> inverted = numbers.apply(MapElements.into(integers()).via(input -> -input)); return PCollectionList.of(numbers) .and(inverted) .apply(Flatten.pCollections()); })); PAssert.that(output).containsInAnyOrder(-2, -1, -3, 2, 1, 3); pipeline.run(); }
/**
 * Assembles a {@link TemplateResponse} for the review group: one
 * {@link SectionResponse} per section id of the template, in template order.
 */
public TemplateResponse mapToTemplateResponse(ReviewGroup reviewGroup, Template template) {
    List<SectionResponse> sectionResponses = template.getSectionIds()
            .stream()
            .map(templateSection -> mapToSectionResponse(templateSection, reviewGroup))
            .toList();

    return new TemplateResponse(
            template.getId(),
            reviewGroup.getReviewee(),
            reviewGroup.getProjectName(),
            sectionResponses
    );
}
@Test void 템플릿_매핑_시_옵션_그룹에_해당하는_옵션_아이템이_없을_경우_예외가_발생한다() { // given Question question1 = new Question(true, QuestionType.TEXT, "질문", "가이드라인", 1); Question question2 = new Question(true, QuestionType.CHECKBOX, "질문", "가이드라인", 1); questionRepository.saveAll(List.of(question1, question2)); OptionGroup optionGroup = new OptionGroup(question2.getId(), 1, 2); optionGroupRepository.save(optionGroup); Section section1 = new Section(VisibleType.ALWAYS, List.of(question1.getId()), null, "섹션명", "말머리", 1); Section section2 = new Section(VisibleType.ALWAYS, List.of(question2.getId()), null, "섹션명", "말머리", 2); sectionRepository.saveAll(List.of(section1, section2)); Template template = new Template(List.of(section1.getId(), section2.getId())); templateRepository.save(template); ReviewGroup reviewGroup = new ReviewGroup("리뷰이명", "프로젝트명", "reviewRequestCode", "groupAccessCode"); reviewGroupRepository.save(reviewGroup); // when, then assertThatThrownBy(() -> templateMapper.mapToTemplateResponse(reviewGroup, template)) .isInstanceOf(MissingOptionItemsInOptionGroupException.class); }
/**
 * Around-advice for {@code @ControllerFeatureToggle}: throws
 * {@link FlagrApiNotFoundException} when the flag evaluates to FALSE, otherwise
 * injects the flag state into the first parameter annotated with {@code @VariantKey}
 * and proceeds with the (possibly modified) arguments.
 */
@Around("@annotation(com.linecorp.flagship4j.javaflagr.annotations.ControllerFeatureToggle)")
public Object processControllerFeatureToggleAnnotation(ProceedingJoinPoint joinPoint) throws Throwable {
    log.info("start processing controllerFeatureToggle annotation");
    MethodSignature signature = (MethodSignature) joinPoint.getSignature();
    Method method = signature.getMethod();
    Annotation[][] parameterAnnotations = method.getParameterAnnotations();
    Object[] args = joinPoint.getArgs();
    ControllerFeatureToggle featureToggle = method.getAnnotation(ControllerFeatureToggle.class);
    Boolean isFlagOn = flagrService.isFeatureFlagOn(featureToggle.value());
    // Note: a null evaluation result is NOT equal to FALSE and therefore proceeds.
    if (Boolean.FALSE.equals(isFlagOn)) {
        throw new FlagrApiNotFoundException();
    }
    // Only the first @VariantKey-annotated parameter is overwritten; the labeled
    // break exits both loops once it is found.
    outerlabel:
    for (int argIndex = 0; argIndex < args.length; argIndex++) {
        for (Annotation annotation : parameterAnnotations[argIndex]) {
            if (annotation instanceof VariantKey) {
                args[argIndex] = isFlagOn;
                break outerlabel;
            }
        }
    }
    return joinPoint.proceed(args);
}
@Test public void processFlagrMethodWithControllerFeatureToggleTest() throws Throwable { String methodName = "methodWithControllerFeatureToggle"; FlagrAnnotationTest flagrAnnotationTest = new FlagrAnnotationTest(); Method method = Arrays.stream(flagrAnnotationTest.getClass().getMethods()).filter(m -> m.getName().equals(methodName)).findFirst().get(); when(joinPoint.getSignature()).thenReturn(signature); when(signature.getMethod()).thenReturn(method); when(joinPoint.getArgs()).thenReturn(args); when(flagrService.isFeatureFlagOn(any(String.class))) .thenReturn(givenPostEvaluationResponse().getVariantKey().equals(EffectiveVariant.ON.toValue())); when(joinPoint.proceed(any(Object[].class))).thenReturn(args); Object returnArgs = featureToggleAspect.processControllerFeatureToggleAnnotation(joinPoint); assertEquals(args, returnArgs); verify(joinPoint, times(1)).getSignature(); verify(signature, times(1)).getMethod(); verify(joinPoint, times(1)).getArgs(); verify(signature, times(1)).getMethod(); verify(flagrService, times(1)).isFeatureFlagOn(any(String.class)); verify(joinPoint, times(1)).proceed(any(Object[].class)); }
/**
 * Flattens the request into a map from each topic-partition to the log directory
 * path it should be moved to. Later occurrences of the same partition overwrite
 * earlier ones, matching plain {@code Map.put} semantics.
 *
 * @return mapping of topic-partition to target log directory path
 */
public Map<TopicPartition, String> partitionDirs() {
    final Map<TopicPartition, String> dirByPartition = new HashMap<>();
    data.dirs().forEach(dir -> {
        final String path = dir.path();
        dir.topics().forEach(topic -> {
            final String topicName = topic.name();
            topic.partitions().forEach(partitionId ->
                    dirByPartition.put(new TopicPartition(topicName, partitionId), path));
        });
    });
    return dirByPartition;
}
@Test public void testPartitionDir() { AlterReplicaLogDirsRequestData data = new AlterReplicaLogDirsRequestData() .setDirs(new AlterReplicaLogDirCollection( asList(new AlterReplicaLogDir() .setPath("/data0") .setTopics(new AlterReplicaLogDirTopicCollection( asList(new AlterReplicaLogDirTopic() .setName("topic") .setPartitions(asList(0, 1)), new AlterReplicaLogDirTopic() .setName("topic2") .setPartitions(singletonList(7))).iterator())), new AlterReplicaLogDir() .setPath("/data1") .setTopics(new AlterReplicaLogDirTopicCollection( singletonList(new AlterReplicaLogDirTopic() .setName("topic3") .setPartitions(singletonList(12))).iterator()))).iterator())); AlterReplicaLogDirsRequest request = new AlterReplicaLogDirsRequest.Builder(data).build(); Map<TopicPartition, String> expect = new HashMap<>(); expect.put(new TopicPartition("topic", 0), "/data0"); expect.put(new TopicPartition("topic", 1), "/data0"); expect.put(new TopicPartition("topic2", 7), "/data0"); expect.put(new TopicPartition("topic3", 12), "/data1"); assertEquals(expect, request.partitionDirs()); }
/**
 * Parses a stream-management ack answer element positioned at its start tag and
 * returns an {@link AckAnswer} carrying the 'h' (handled count) attribute.
 *
 * @throws XmlPullParserException if the parser is not positioned on a start/end tag as expected
 * @throws IOException on underlying read errors
 */
public static AckAnswer ackAnswer(XmlPullParser parser) throws XmlPullParserException, IOException {
    ParserUtils.assertAtStartTag(parser);
    long h = ParserUtils.getLongAttribute(parser, "h");
    parser.next();
    ParserUtils.assertAtEndTag(parser);
    return new AckAnswer(h);
}
/** Round-trips an SM ack stanza through the parser and checks the handled count. */
@Test
public void testParseAckAnswer() throws Exception {
    long handledPackets = 42 + 42;
    String ackStanza = XMLBuilder.create("a")
            .a("xmlns", "urn:xmpp:sm:3")
            .a("h", String.valueOf(handledPackets))
            .asString(outputProperties);
    StreamManagement.AckAnswer acknowledgementPacket = ParseStreamManagement.ackAnswer(
            PacketParserUtils.getParserFor(ackStanza));
    assertNotNull(acknowledgementPacket);
    assertEquals(handledPackets, acknowledgementPacket.getHandledCount());
}
static CommandLineOptions parse(Iterable<String> options) { CommandLineOptions.Builder optionsBuilder = CommandLineOptions.builder(); List<String> expandedOptions = new ArrayList<>(); expandParamsFiles(options, expandedOptions); Iterator<String> it = expandedOptions.iterator(); while (it.hasNext()) { String option = it.next(); if (!option.startsWith("-")) { optionsBuilder.filesBuilder().add(option).addAll(it); break; } String flag; String value; int idx = option.indexOf('='); if (idx >= 0) { flag = option.substring(0, idx); value = option.substring(idx + 1); } else { flag = option; value = null; } // NOTE: update usage information in UsageException when new flags are added switch (flag) { case "-i": case "-r": case "-replace": case "--replace": optionsBuilder.inPlace(true); break; case "--lines": case "-lines": case "--line": case "-line": parseRangeSet(optionsBuilder.linesBuilder(), getValue(flag, it, value)); break; case "--offset": case "-offset": optionsBuilder.addOffset(parseInteger(it, flag, value)); break; case "--length": case "-length": optionsBuilder.addLength(parseInteger(it, flag, value)); break; case "--aosp": case "-aosp": case "-a": optionsBuilder.aosp(true); break; case "--version": case "-version": case "-v": optionsBuilder.version(true); break; case "--help": case "-help": case "-h": optionsBuilder.help(true); break; case "--fix-imports-only": optionsBuilder.fixImportsOnly(true); break; case "--skip-sorting-imports": optionsBuilder.sortImports(false); break; case "--skip-removing-unused-imports": optionsBuilder.removeUnusedImports(false); break; case "--skip-reflowing-long-strings": optionsBuilder.reflowLongStrings(false); break; case "--skip-javadoc-formatting": optionsBuilder.formatJavadoc(false); break; case "-": optionsBuilder.stdin(true); break; case "-n": case "--dry-run": optionsBuilder.dryRun(true); break; case "--set-exit-if-changed": optionsBuilder.setExitIfChanged(true); break; case "-assume-filename": case "--assume-filename": 
optionsBuilder.assumeFilename(getValue(flag, it, value)); break; default: throw new IllegalArgumentException("unexpected flag: " + flag); } } return optionsBuilder.build(); }
/** --assume-filename should be captured when given and be absent otherwise. */
@Test
public void assumeFilename() {
    assertThat(
            CommandLineOptionsParser.parse(Arrays.asList("--assume-filename", "Foo.java"))
                .assumeFilename())
        .hasValue("Foo.java");
    assertThat(CommandLineOptionsParser.parse(Arrays.asList("Foo.java")).assumeFilename())
        .isEmpty();
}
/**
 * Dispatches each supported callback type to its handler, translating
 * {@code KafkaException} into {@link IOException} as the SASL callback contract expects.
 *
 * @throws IllegalStateException if {@code configure} has not been invoked first
 * @throws UnsupportedCallbackException for callback types this handler does not support
 */
@Override
public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
    if (!configured())
        throw new IllegalStateException("Callback handler not configured");
    for (Callback callback : callbacks) {
        if (callback instanceof OAuthBearerTokenCallback)
            try {
                handleTokenCallback((OAuthBearerTokenCallback) callback);
            } catch (KafkaException e) {
                throw new IOException(e.getMessage(), e);
            }
        else if (callback instanceof SaslExtensionsCallback)
            try {
                handleExtensionsCallback((SaslExtensionsCallback) callback);
            } catch (KafkaException e) {
                throw new IOException(e.getMessage(), e);
            }
        else
            throw new UnsupportedCallbackException(callback);
    }
}
@SuppressWarnings("unchecked") @Test public void validOptionsWithExplicitOptionValues() throws IOException, UnsupportedCallbackException { String explicitScope1 = "scope1"; String explicitScope2 = "scope2"; String explicitScopeClaimName = "putScopeInHere"; String principalClaimName = "principal"; final String[] scopeClaimNameOptionValues = {null, explicitScopeClaimName}; for (String scopeClaimNameOptionValue : scopeClaimNameOptionValues) { Map<String, String> options = new HashMap<>(); String user = "user"; options.put("unsecuredLoginStringClaim_" + principalClaimName, user); options.put("unsecuredLoginListClaim_" + "list", ",1,2,"); options.put("unsecuredLoginListClaim_" + "emptyList1", ""); options.put("unsecuredLoginListClaim_" + "emptyList2", ","); options.put("unsecuredLoginNumberClaim_" + "number", "1"); long lifetimeSeconds = 10000; options.put("unsecuredLoginLifetimeSeconds", String.valueOf(lifetimeSeconds)); options.put("unsecuredLoginPrincipalClaimName", principalClaimName); if (scopeClaimNameOptionValue != null) options.put("unsecuredLoginScopeClaimName", scopeClaimNameOptionValue); String actualScopeClaimName = scopeClaimNameOptionValue == null ? 
"scope" : explicitScopeClaimName; options.put("unsecuredLoginListClaim_" + actualScopeClaimName, String.format("|%s|%s", explicitScope1, explicitScope2)); MockTime mockTime = new MockTime(); OAuthBearerUnsecuredLoginCallbackHandler callbackHandler = createCallbackHandler(options, mockTime); OAuthBearerTokenCallback callback = new OAuthBearerTokenCallback(); callbackHandler.handle(new Callback[] {callback}); OAuthBearerUnsecuredJws jws = (OAuthBearerUnsecuredJws) callback.token(); assertNotNull(jws, "create token failed"); long startMs = mockTime.milliseconds(); confirmCorrectValues(jws, user, startMs, lifetimeSeconds * 1000); Map<String, Object> claims = jws.claims(); assertEquals(new HashSet<>(Arrays.asList(actualScopeClaimName, principalClaimName, "iat", "exp", "number", "list", "emptyList1", "emptyList2")), claims.keySet()); assertEquals(new HashSet<>(Arrays.asList(explicitScope1, explicitScope2)), new HashSet<>((List<String>) claims.get(actualScopeClaimName))); assertEquals(new HashSet<>(Arrays.asList(explicitScope1, explicitScope2)), jws.scope()); assertEquals(1.0, jws.claim("number", Number.class)); assertEquals(Arrays.asList("1", "2", ""), jws.claim("list", List.class)); assertEquals(Collections.emptyList(), jws.claim("emptyList1", List.class)); assertEquals(Collections.emptyList(), jws.claim("emptyList2", List.class)); } }
/**
 * Formats the given {@link Attributes} by expanding them into message-format
 * arguments and delegating to the underlying {@code format}.
 */
@Override
public StringBuffer format(Object obj, StringBuffer result, FieldPosition pos) {
    return format.format(toArgs((Attributes) obj), result, pos);
}
/** The offset pattern should render "101/1" for series number 1, instance number 2. */
@Test
public void testOffset() {
    Attributes attrs = new Attributes();
    attrs.setString(Tag.SeriesNumber, VR.IS, "1");
    attrs.setString(Tag.InstanceNumber, VR.IS, "2");
    assertEquals("101/1", new AttributesFormat(TEST_PATTERN_OFFSET).format(attrs));
}
/**
 * Creates a consumer flyweight from the channel-consumer template.
 * Unchecked: the mapper returns a raw flyweight that is cast to the generic element type.
 */
@SuppressWarnings("unchecked")
private SpscChannelConsumer<E> newConsumer(Object... args) {
    return mapper.newFlyweight(SpscChannelConsumer.class, "ChannelConsumerTemplate.java",
            Template.fromFile(Channel.class, "ChannelConsumerTemplate.java"), args);
}
/** A freshly created consumer must report nothing to read on an empty channel. */
@Test
public void shouldNotReadFromEmptyChannel() {
    ChannelConsumer consumer = newConsumer();
    assertEmpty();
    assertFalse(consumer.read());
}
/**
 * Returns {@code b} as a fixed-width big-endian byte array of exactly
 * {@code numBytes} bytes, left-padded with zeros.
 *
 * @param b        non-negative value to convert
 * @param numBytes exact length of the result; must be positive
 * @return big-endian representation of {@code b}, zero-padded on the left
 * @throws IllegalArgumentException if {@code b} is negative, {@code numBytes} is not
 *         positive, or {@code b} does not fit into {@code numBytes} bytes
 */
public static byte[] bigIntegerToBytes(BigInteger b, int numBytes) {
    if (b.signum() < 0) {
        throw new IllegalArgumentException("b must be positive or zero: " + b);
    }
    if (numBytes <= 0) {
        throw new IllegalArgumentException("numBytes must be positive: " + numBytes);
    }
    byte[] raw = b.toByteArray();
    // toByteArray() may prepend a single 0x00 byte purely to carry the sign bit;
    // that byte does not count toward the magnitude's length.
    boolean hasSignByte = raw[0] == 0;
    int significantLength = hasSignByte ? raw.length - 1 : raw.length;
    if (significantLength > numBytes) {
        throw new IllegalArgumentException("The given number does not fit in " + numBytes);
    }
    byte[] padded = new byte[numBytes];
    System.arraycopy(raw, hasSignByte ? 1 : 0, padded, numBytes - significantLength, significantLength);
    return padded;
}
/** Negative inputs must be rejected with IllegalArgumentException. */
@Test(expected = IllegalArgumentException.class)
public void bigIntegerToBytes_convertNegativeNumber() {
    BigInteger b = BigInteger.valueOf(-1);
    ByteUtils.bigIntegerToBytes(b, 32);
}
/**
 * Builds a future that fetches all entries of the tagged multimap, optionally
 * omitting values (keys only), returned as a paging iterable decoded with
 * {@code elemCoder}.
 */
public <T> Future<Iterable<Map.Entry<ByteString, Iterable<T>>>> multimapFetchAllFuture(
    boolean omitValues, ByteString encodedTag, String stateFamily, Coder<T> elemCoder) {
  StateTag<ByteString> stateTag =
      StateTag.<ByteString>of(Kind.MULTIMAP_ALL, encodedTag, stateFamily)
          .toBuilder()
          .setOmitValues(omitValues)
          .build();
  return valuesToPagingIterableFuture(stateTag, elemCoder, this.stateFuture(stateTag, elemCoder));
}
@Test public void testReadMultimapKeys() throws Exception { Future<Iterable<Map.Entry<ByteString, Iterable<Integer>>>> future = underTest.multimapFetchAllFuture(true, STATE_KEY_1, STATE_FAMILY, INT_CODER); Mockito.verifyNoMoreInteractions(mockWindmill); Windmill.KeyedGetDataRequest.Builder expectedRequest = Windmill.KeyedGetDataRequest.newBuilder() .setKey(DATA_KEY) .setShardingKey(SHARDING_KEY) .setWorkToken(WORK_TOKEN) .setMaxBytes(WindmillStateReader.MAX_KEY_BYTES) .addMultimapsToFetch( Windmill.TagMultimapFetchRequest.newBuilder() .setTag(STATE_KEY_1) .setStateFamily(STATE_FAMILY) .setFetchEntryNamesOnly(true) .setFetchMaxBytes(WindmillStateReader.INITIAL_MAX_MULTIMAP_BYTES)); Windmill.KeyedGetDataResponse.Builder response = Windmill.KeyedGetDataResponse.newBuilder() .setKey(DATA_KEY) .addTagMultimaps( Windmill.TagMultimapFetchResponse.newBuilder() .setTag(STATE_KEY_1) .setStateFamily(STATE_FAMILY) .addEntries( Windmill.TagMultimapEntry.newBuilder().setEntryName(STATE_MULTIMAP_KEY_1)) .addEntries( Windmill.TagMultimapEntry.newBuilder().setEntryName(STATE_MULTIMAP_KEY_2))); Mockito.when(mockWindmill.getStateData(COMPUTATION, expectedRequest.build())) .thenReturn(response.build()); Iterable<Map.Entry<ByteString, Iterable<Integer>>> results = future.get(); Mockito.verify(mockWindmill).getStateData(COMPUTATION, expectedRequest.build()); List<ByteString> keys = Lists.newArrayList(); for (Map.Entry<ByteString, Iterable<Integer>> entry : results) { keys.add(entry.getKey()); assertEquals(0, Iterables.size(entry.getValue())); } Mockito.verifyNoMoreInteractions(mockWindmill); assertThat(keys, Matchers.containsInAnyOrder(STATE_MULTIMAP_KEY_1, STATE_MULTIMAP_KEY_2)); assertNoReader(future); }
/**
 * Evaluates whether the file may contain rows matching the expression, based on the
 * file's column metrics.
 */
public boolean eval(ContentFile<?> file) {
    // TODO: detect the case where a column is missing from the file using file's max field id.
    return new MetricsEvalVisitor().eval(file);
}
/** isNull must read files whose columns contain nulls and skip fully non-null columns. */
@Test
public void testNoNulls() {
    boolean shouldRead = new InclusiveMetricsEvaluator(SCHEMA, isNull("all_nulls")).eval(FILE);
    assertThat(shouldRead).as("Should read: at least one null value in all null column").isTrue();

    shouldRead = new InclusiveMetricsEvaluator(SCHEMA, isNull("some_nulls")).eval(FILE);
    assertThat(shouldRead).as("Should read: column with some nulls contains a null value").isTrue();

    shouldRead = new InclusiveMetricsEvaluator(SCHEMA, isNull("no_nulls")).eval(FILE);
    assertThat(shouldRead).as("Should skip: non-null column contains no null values").isFalse();
}
/**
 * Decodes an HPACK header block for the given stream into {@code headers}.
 * Dynamic table size updates are consumed first (RFC 7541 section 4.2); validation
 * is deferred to {@code sink.finish()} so that a validation failure cannot leave the
 * dynamic table in a corrupted state mid-decode.
 */
void decode(int streamId, ByteBuf in, Http2Headers headers, boolean validateHeaders)
        throws Http2Exception {
    Http2HeadersSink sink = new Http2HeadersSink(
            streamId, headers, maxHeaderListSize, validateHeaders);
    // Check for dynamic table size updates, which must occur at the beginning:
    // https://www.rfc-editor.org/rfc/rfc7541.html#section-4.2
    decodeDynamicTableSizeUpdates(in);
    decode(in, sink);

    // Now that we've read all of our headers we can perform the validation steps. We must
    // delay throwing until this point to prevent dynamic table corruption.
    sink.finish();
}
@Test public void unknownPseudoHeader() throws Exception { final ByteBuf in = Unpooled.buffer(200); try { HpackEncoder hpackEncoder = new HpackEncoder(true); Http2Headers toEncode = new DefaultHttp2Headers(false); toEncode.add(":test", "1"); hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE); final Http2Headers decoded = new DefaultHttp2Headers(true); assertThrows(Http2Exception.StreamException.class, new Executable() { @Override public void execute() throws Throwable { hpackDecoder.decode(1, in, decoded, true); } }); } finally { in.release(); } }
/**
 * Returns whether two instants fall within the same calendar minute — i.e. the same
 * year, month, day-of-month, hour and minute in the JVM's default time zone.
 *
 * @param date1 first instant
 * @param date2 second instant
 * @return {@code true} when both dates share the same calendar minute
 */
public static boolean isSameMinute(Date date1, Date date2) {
    final Calendar first = Calendar.getInstance();
    first.setTime(date1);
    final Calendar second = Calendar.getInstance();
    second.setTime(date2);

    // Guard clauses from the coarsest field down; bail out on the first mismatch.
    if (first.get(YEAR) != second.get(YEAR) || first.get(MONTH) != second.get(MONTH)) {
        return false;
    }
    if (first.get(DAY_OF_MONTH) != second.get(DAY_OF_MONTH)) {
        return false;
    }
    return first.get(HOUR_OF_DAY) == second.get(HOUR_OF_DAY)
            && first.get(MINUTE) == second.get(MINUTE);
}
/**
 * The same instant is in the same minute; exactly one minute later never is.
 * A single captured base time is used so the two compared dates cannot straddle a
 * minute boundary between calls — the original created two separate {@code new Date()}
 * instances, which made the first assertion flaky around minute boundaries.
 */
@Test
public void testSameMinute() {
    Date base = new Date();
    Assert.assertTrue(isSameMinute(base, base));
    Assert.assertFalse(isSameMinute(base, new Date(base.getTime() + 60 * 1000)));
}
/**
 * Encoded size = varint-encoded scale + big-integer-encoded unscaled value.
 *
 * @throws NullPointerException if {@code value} is null
 */
@Override
protected long getEncodedElementByteSize(BigDecimal value) throws Exception {
    checkNotNull(value, String.format("cannot encode a null %s", BigDecimal.class.getSimpleName()));
    return VAR_INT_CODER.getEncodedElementByteSize(value.scale())
        + BIG_INT_CODER.getEncodedElementByteSize(value.unscaledValue());
}
/** The reported byte size must match the actual nested-context encoding length. */
@Test
public void testGetEncodedElementByteSize() throws Exception {
    TestElementByteSizeObserver observer = new TestElementByteSizeObserver();
    for (BigDecimal value : TEST_VALUES) {
        TEST_CODER.registerByteSizeObserver(value, observer);
        observer.advance();
        assertThat(
            observer.getSumAndReset(),
            equalTo(
                (long) CoderUtils.encodeToByteArray(TEST_CODER, value, Coder.Context.NESTED).length));
    }
}
/**
 * For each non-target mining field present in the request, checks the supplied value
 * against the field's constraints and applies the field's invalid-value treatment;
 * parameters collected in {@code toRemove} are dropped from the request.
 */
static void verifyFixInvalidValues(final List<KiePMMLMiningField> notTargetMiningFields,
                                   final PMMLRequestData requestData) {
    logger.debug("verifyInvalidValues {} {}", notTargetMiningFields, requestData);
    final Collection<ParameterInfo> requestParams = requestData.getRequestParams();
    final List<ParameterInfo> toRemove = new ArrayList<>();
    notTargetMiningFields.forEach(miningField -> {
        ParameterInfo parameterInfo = requestParams.stream()
                .filter(paramInfo -> miningField.getName().equals(paramInfo.getName()))
                .findFirst()
                .orElse(null);
        if (parameterInfo != null) {
            boolean match = isMatching(parameterInfo, miningField);
            if (!match) {
                manageInvalidValues(miningField, parameterInfo, toRemove);
            }
            // NOTE(review): 'toRemove' is re-processed on every loop iteration and is
            // never cleared, so earlier removals are replayed; moving this after the
            // outer forEach looks intended — confirm removeRequestParam is idempotent.
            toRemove.forEach(requestData::removeRequestParam);
        }
    });
}
@Test void verifyFixInvalidValuesNotInvalid() { KiePMMLMiningField miningField0 = KiePMMLMiningField.builder("FIELD-0", null) .withDataType(DATA_TYPE.STRING) .withAllowedValues(Arrays.asList("123", "124", "125")) .build(); KiePMMLMiningField miningField1 = KiePMMLMiningField.builder("FIELD-1", null) .withDataType(DATA_TYPE.DOUBLE) .withAllowedValues(Arrays.asList("1.23", "12.4", "1.25")) .build(); List<KiePMMLInterval> intervals = Arrays.asList(new KiePMMLInterval(0.0, 12.4, CLOSURE.CLOSED_CLOSED), new KiePMMLInterval(12.6, 14.5, CLOSURE.OPEN_CLOSED)); KiePMMLMiningField miningField2 = KiePMMLMiningField.builder("FIELD-2", null) .withDataType(DATA_TYPE.DOUBLE) .withIntervals(intervals) .build(); List<KiePMMLMiningField> miningFields = Arrays.asList(miningField0, miningField1, miningField2); PMMLRequestData pmmlRequestData = new PMMLRequestData("123", "modelName"); pmmlRequestData.addRequestParam("FIELD-0", "123"); pmmlRequestData.addRequestParam("FIELD-1", 12.4); pmmlRequestData.addRequestParam("FIELD-2", 9.3); PreProcess.verifyFixInvalidValues(miningFields, pmmlRequestData); pmmlRequestData = new PMMLRequestData("123", "modelName"); pmmlRequestData.addRequestParam("FIELD-0", "125"); pmmlRequestData.addRequestParam("FIELD-1", 1.25); pmmlRequestData.addRequestParam("FIELD-2", 13.9); PreProcess.verifyFixInvalidValues(miningFields, pmmlRequestData); }
/**
 * Returns the statement with sensitive literals masked; falls back to a cruder
 * masking when parsing fails — including parser stack overflow on deeply nested
 * input, which is why {@code StackOverflowError} is caught alongside exceptions.
 */
public static String getMaskedStatement(final String query) {
    try {
        final ParseTree tree = DefaultKsqlParser.getParseTree(query);
        return new Visitor().visit(tree);
    } catch (final Exception | StackOverflowError e) {
        return fallbackMasking(query);
    }
}
@Test public void shouldMaskSourceConnector() { // Given: final String query = "CREATE SOURCE CONNECTOR `test-connector` WITH (" + " \"connector.class\" = 'PostgresSource', \n" + " 'connection.url' = 'jdbc:postgresql://localhost:5432/my.db',\n" + " `mode`='bulk',\n" + " \"topic.prefix\"='jdbc-',\n" + " \"table.whitelist\"='users',\n" + " \"key\"='username');"; // When final String maskedQuery = QueryMask.getMaskedStatement(query); // Then final String expected = "CREATE SOURCE CONNECTOR `test-connector` WITH " + "(\"connector.class\"='PostgresSource', " + "'connection.url'='[string]', " + "`mode`='[string]', " + "\"topic.prefix\"='[string]', " + "\"table.whitelist\"='[string]', " + "\"key\"='[string]');"; assertThat(maskedQuery, is(expected)); }
/** Returns the held credentials instance. */
@Override
public Credentials getCredential() {
    return credentials;
}
/** The singleton service must expose a non-null credential. */
@Test
void testGetCredential() {
    CredentialService credentialService1 = CredentialService.getInstance();
    Credentials credential = credentialService1.getCredential();
    assertNotNull(credential);
}
/**
 * Releases step resources: deletes the temporary files tracked by {@code data.file}
 * and then delegates to the superclass.
 */
public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (JoinRowsMeta) smi;
    data = (JoinRowsData) sdi;

    // Remove the temporary files...
    if ( data.file != null ) {
      // NOTE(review): iteration deliberately starts at index 1 — file[0] is presumably
      // not a deletable temp file owned by this step; confirm against where data.file
      // is populated.
      for ( int i = 1; i < data.file.length; i++ ) {
        if ( data.file[i] != null ) {
          data.file[i].delete();
        }
      }
    }

    super.dispose( meta, data );
}
/** dispose() must complete without error on the prepared meta/data fixtures. */
@Test
public void checkThatMethodPerformedWithoutError() throws Exception {
    getJoinRows().dispose( meta, data );
}
/** Intentionally empty: this lifecycle callback requires no handling. */
@Override
public void onActivitySaveInstanceState(Activity activity, Bundle bundle) {

}
/** Smoke test: the no-op callback must tolerate a null bundle. */
@Test
public void onActivitySaveInstanceState() {
    mActivityLifecycle.onActivitySaveInstanceState(mActivity, null);
}
/**
 * Builds a human-readable message from a throwable chain: the top-level message
 * followed by de-duplicated, word-wrapped "Caused by:" lines, one per cause.
 * Returns an empty string for {@code null}.
 */
public static String buildErrorMessage(final Throwable throwable) {
    if (throwable == null) {
        return "";
    }

    // dedup() collapses repeated messages (presumably also bounding cyclic cause
    // chains — confirm against getErrorMessages); the first entry is the top-level one.
    final List<String> messages = dedup(getErrorMessages(throwable));

    final String msg = messages.remove(0);

    // Wrap each cause at 80 columns with a tab continuation indent; empty messages
    // are dropped entirely.
    final String causeMsg = messages.stream()
        .filter(s -> !s.isEmpty())
        .map(cause -> WordUtils.wrap(PREFIX + cause, 80, "\n\t", true))
        .collect(Collectors.joining(System.lineSeparator()));

    return causeMsg.isEmpty() ? msg : msg + System.lineSeparator() + causeMsg;
}
/** A cyclic cause chain must not loop forever and must render each message once. */
@Test
public void shouldHandleRecursiveExceptionChain() {
    final Exception cause = new TestException("Something went wrong");
    final Throwable e = new TestException("Top level", cause);
    cause.initCause(e);

    assertThat(
        buildErrorMessage(e),
        is("Top level" + System.lineSeparator() + "Caused by: Something went wrong")
    );
}
/**
 * Persists an SMS log entry and returns its generated id. When {@code isSend} is not
 * TRUE the entry is recorded with send status IGNORE instead of INIT.
 */
@Override
public Long createSmsLog(String mobile, Long userId, Integer userType, Boolean isSend,
                         SmsTemplateDO template, String templateContent, Map<String, Object> templateParams) {
    SmsLogDO.SmsLogDOBuilder logBuilder = SmsLogDO.builder();
    // Set the send status depending on whether the message should actually be sent.
    logBuilder.sendStatus(Objects.equals(isSend, true) ? SmsSendStatusEnum.INIT.getStatus()
        : SmsSendStatusEnum.IGNORE.getStatus());
    // Set the mobile/user related fields.
    logBuilder.mobile(mobile).userId(userId).userType(userType);
    // Set the template related fields.
    logBuilder.templateId(template.getId()).templateCode(template.getCode()).templateType(template.getType());
    logBuilder.templateContent(templateContent).templateParams(templateParams)
            .apiTemplateId(template.getApiTemplateId());
    // Set the channel related fields.
    logBuilder.channelId(template.getChannelId()).channelCode(template.getChannelCode());
    // Set the receive status field.
    logBuilder.receiveStatus(SmsReceiveStatusEnum.INIT.getStatus());
    // Insert into the database and return the generated id.
    SmsLogDO logDO = logBuilder.build();
    smsLogMapper.insert(logDO);
    return logDO.getId();
}
@Test public void testCreateSmsLog() { // 准备参数 String mobile = randomString(); Long userId = randomLongId(); Integer userType = randomEle(UserTypeEnum.values()).getValue(); Boolean isSend = randomBoolean(); SmsTemplateDO templateDO = randomPojo(SmsTemplateDO.class, o -> o.setType(randomEle(SmsTemplateTypeEnum.values()).getType())); String templateContent = randomString(); Map<String, Object> templateParams = randomTemplateParams(); // mock 方法 // 调用 Long logId = smsLogService.createSmsLog(mobile, userId, userType, isSend, templateDO, templateContent, templateParams); // 断言 SmsLogDO logDO = smsLogMapper.selectById(logId); assertEquals(isSend ? SmsSendStatusEnum.INIT.getStatus() : SmsSendStatusEnum.IGNORE.getStatus(), logDO.getSendStatus()); assertEquals(mobile, logDO.getMobile()); assertEquals(userType, logDO.getUserType()); assertEquals(userId, logDO.getUserId()); assertEquals(templateDO.getId(), logDO.getTemplateId()); assertEquals(templateDO.getCode(), logDO.getTemplateCode()); assertEquals(templateDO.getType(), logDO.getTemplateType()); assertEquals(templateDO.getChannelId(), logDO.getChannelId()); assertEquals(templateDO.getChannelCode(), logDO.getChannelCode()); assertEquals(templateContent, logDO.getTemplateContent()); assertEquals(templateParams, logDO.getTemplateParams()); assertEquals(SmsReceiveStatusEnum.INIT.getStatus(), logDO.getReceiveStatus()); }
/**
 * Registers {@code interceptor} under {@code id} using copy-on-write updates, so
 * concurrent readers of {@code id2InterceptorMap} and {@code interceptors} never
 * observe a partially updated view. Registering an already-known id is a no-op.
 */
public synchronized void register(String id, MapInterceptor interceptor) {
    // Registration is not allowed from partition operation threads (see assert);
    // presumably to keep partition threads free of this work — confirm.
    assert !(Thread.currentThread() instanceof PartitionOperationThread);

    if (id2InterceptorMap.containsKey(id)) {
        return;
    }

    Map<String, MapInterceptor> updatedById = new HashMap<>(id2InterceptorMap);
    updatedById.put(id, interceptor);
    id2InterceptorMap = unmodifiableMap(updatedById);

    List<MapInterceptor> updatedList = new ArrayList<>(interceptors);
    updatedList.add(interceptor);
    interceptors = unmodifiableList(updatedList);
}
/** Registering the same id twice must keep exactly one registration. */
@Test
public void testRegister_whenRegisteredTwice_doNothing() {
    registry.register(interceptor.id, interceptor);
    registry.register(interceptor.id, interceptor);

    assertInterceptorRegistryContainsInterceptor();
}
/**
 * Runs the FS-to-CS configuration conversion: validates the parameters, captures
 * the option flags, loads conversion rules and the input yarn-site configuration,
 * then performs the conversion (writing to console or the output directory).
 */
public void convert(FSConfigToCSConfigConverterParams params)
    throws Exception {
  validateParams(params);
  this.clusterResource = getClusterResource(params);
  this.convertPlacementRules = params.isConvertPlacementRules();
  this.outputDirectory = params.getOutputDirectory();
  this.rulesToFile = params.isPlacementRulesToFile();
  this.usePercentages = params.isUsePercentages();
  this.preemptionMode = params.getPreemptionMode();

  prepareOutputFiles(params.isConsole());
  loadConversionRules(params.getConversionRulesConfig());
  Configuration inputYarnSiteConfig = getInputYarnSiteConfig(params);
  handleFairSchedulerConfig(params, inputYarnSiteConfig);

  convert(inputYarnSiteConfig);
}
/** With terminal-rule checks disabled, invalid placement rules must be ignored. */
@Test
public void testConversionWhenInvalidPlacementRulesIgnored()
    throws Exception {
  FSConfigToCSConfigConverterParams params = createDefaultParamsBuilder()
      .withClusterResource(CLUSTER_RESOURCE_STRING)
      .withFairSchedulerXmlConfig(FS_INVALID_PLACEMENT_RULES_XML)
      .build();

  ConversionOptions conversionOptions = createDefaultConversionOptions();
  conversionOptions.setNoTerminalRuleCheck(true);

  converter = new FSConfigToCSConfigConverter(ruleHandler,
      conversionOptions);

  converter.convert(params);

  // expected: no exception
}
/**
 * Trains an RBF network with the boolean option of the four-argument overload set
 * to {@code false} (presumably normalization — confirm against that overload).
 */
public static <T> RBFNetwork<T> fit(T[] x, double[] y, RBF<T>[] rbf) {
    return fit(x, y, rbf, false);
}
/** Regression sanity check on the Ailerons dataset with a fixed RNG seed. */
@Test
public void testAilerons() {
    System.out.println("ailerons");

    MathEx.setSeed(19650218); // to get repeatable results.

    double[][] x = MathEx.clone(Ailerons.x);
    MathEx.standardize(x);

    RegressionValidations<RBFNetwork<double[]>> result = CrossValidation.regression(10, x, Ailerons.y,
            (xi, yi) -> RBFNetwork.fit(xi, yi, RBF.fit(xi, 20, 5.0)));

    System.out.println(result);
    assertEquals(0.00025, result.avg.rmse, 1E-5);
}
/**
 * Formats the elapsed time between {@code then} and the reference time as a
 * human-readable phrase; a {@code null} argument is treated as "now".
 */
public String format(Date then) {
    if (then == null)
        then = now();

    Duration d = approximateDuration(then);
    return format(d);
}
/** Three years in the past should render as "3 years ago". */
@Test
public void testYearsAgo() throws Exception {
    PrettyTime t = new PrettyTime(now);
    Assert.assertEquals("3 years ago", t.format(now.minusYears(3)));
}
/** Registers the service record at the given registry path via the add-record command. */
@Override
public void register(String path, ServiceRecord record) throws IOException {
    op(path, record, addRecordCommand);
}
@Test
public void testAppRegistration() throws Exception {
    // Register an application-level service record under the registry path.
    ServiceRecord record = getMarshal().fromBytes("somepath",
        APPLICATION_RECORD.getBytes());
    getRegistryDNS().register(
        "/registry/users/root/services/org-apache-slider/test1/", record);

    // start assessing whether correct records are available
    List<Record> recs = assertDNSQuery("test1.root.dev.test.");
    assertEquals("wrong result", "192.168.1.5",
        ((ARecord) recs.get(0)).getAddress().getHostAddress());

    // API endpoints are exposed as CNAMEs pointing back at the app record.
    recs = assertDNSQuery("management-api.test1.root.dev.test.", 2);
    assertEquals("wrong target name", "test1.root.dev.test.",
        ((CNAMERecord) recs.get(0)).getTarget().toString());
    assertTrue("not an ARecord",
        recs.get(isSecure() ? 2 : 1) instanceof ARecord);

    // The IPC endpoint also has an SRV record carrying its port.
    recs = assertDNSQuery("appmaster-ipc-api.test1.root.dev.test.",
        Type.SRV, 1);
    assertTrue("not an SRV record", recs.get(0) instanceof SRVRecord);
    assertEquals("wrong port", 1026, ((SRVRecord) recs.get(0)).getPort());

    recs = assertDNSQuery("appmaster-ipc-api.test1.root.dev.test.", 2);
    assertEquals("wrong target name", "test1.root.dev.test.",
        ((CNAMERecord) recs.get(0)).getTarget().toString());
    assertTrue("not an ARecord",
        recs.get(isSecure() ? 2 : 1) instanceof ARecord);

    recs = assertDNSQuery("http-api.test1.root.dev.test.", 2);
    assertEquals("wrong target name", "test1.root.dev.test.",
        ((CNAMERecord) recs.get(0)).getTarget().toString());
    assertTrue("not an ARecord",
        recs.get(isSecure() ? 2 : 1) instanceof ARecord);

    recs = assertDNSQuery("http-api.test1.root.dev.test.", Type.SRV, 1);
    assertTrue("not an SRV record", recs.get(0) instanceof SRVRecord);
    assertEquals("wrong port", 1027, ((SRVRecord) recs.get(0)).getPort());

    // TXT records carry the endpoint metadata.
    assertDNSQuery("test1.root.dev.test.", Type.TXT, 3);
    assertDNSQuery("appmaster-ipc-api.test1.root.dev.test.", Type.TXT, 1);
    assertDNSQuery("http-api.test1.root.dev.test.", Type.TXT, 1);
    assertDNSQuery("management-api.test1.root.dev.test.", Type.TXT, 1);
}
/**
 * Returns an instance of the generated mapper implementation for the given
 * mapper type, resolving it through the class loaders associated with that
 * type.
 *
 * @param clazz the mapper interface or abstract class
 * @param <T> the mapper type
 * @return the mapper implementation instance
 * @throws RuntimeException if the implementation cannot be found or created
 */
public static <T> T getMapper(Class<T> clazz) {
    try {
        // Resolve against the loaders that can see the mapper and its impl.
        return getMapper( clazz, collectClassLoaders( clazz.getClassLoader() ) );
    }
    catch ( ClassNotFoundException | NoSuchMethodException e ) {
        // Surface reflective failures as unchecked, preserving the cause.
        throw new RuntimeException( e );
    }
}
@Test
public void shouldReturnImplementationInstance() {
    // The factory must locate and instantiate the generated Foo implementation.
    Foo mapper = Mappers.getMapper( Foo.class );
    assertThat( mapper ).isNotNull();
}
/**
 * Two match field models are equal when they have the same id, bit width
 * and match type. Uses an exact-class check, so subclasses never compare
 * equal to this type.
 */
@Override
public boolean equals(Object obj) {
    // Identity short-circuit.
    if (this == obj) {
        return true;
    }
    // Exact class comparison (not instanceof): symmetric across subclasses.
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    final P4MatchFieldModel that = (P4MatchFieldModel) obj;
    return Objects.equals(id, that.id)
            && Objects.equals(bitWidth, that.bitWidth)
            && Objects.equals(matchType, that.matchType);
}
@Test
public void testEquals() {
    // Each group contains models that must be mutually equal and unequal to
    // every model in the other groups; also verifies hashCode consistency.
    new EqualsTester()
            .addEqualityGroup(P4_MATCH_FIELD_MODEL_1, SAME_AS_P4_MATCH_FIELD_MODEL_1)
            .addEqualityGroup(P4_MATCH_FIELD_MODEL_3, SAME_AS_P4_MATCH_FIELD_MODEL_3)
            .addEqualityGroup(P4_MATCH_FIELD_MODEL_2)
            .addEqualityGroup(P4_MATCH_FIELD_MODEL_4)
            .testEquals();
}
/**
 * Formats a {@link TemporalAccessor} with the given formatter, compensating
 * for common mismatches between the value's available fields and the pattern.
 *
 * @param time the temporal value to format; may be null, in which case null
 *        is returned
 * @param formatter the formatter to use; defaults to ISO_LOCAL_DATE_TIME
 *        when null
 * @return the formatted string, or null if {@code time} is null
 */
public static String format(TemporalAccessor time, DateTimeFormatter formatter) {
    if (null == time) {
        return null;
    }

    // Month exposes no formatter-compatible fields; use its own string form.
    if(time instanceof Month){
        return time.toString();
    }

    if(null == formatter){
        formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
    }

    try {
        return formatter.format(time);
    } catch (UnsupportedTemporalTypeException e){
        // NOTE(review): the dispatch below keys off the exception message
        // text, which is fragile but identifies the missing field.
        if(time instanceof LocalDate && e.getMessage().contains("HourOfDay")){
            // Caller passed a LocalDate but the pattern needs a time part;
            // retry as a LocalDateTime at the start of the day.
            return formatter.format(((LocalDate) time).atStartOfDay());
        }else if(time instanceof LocalTime && e.getMessage().contains("YearOfEra")){
            // Caller passed a LocalTime but the pattern needs a date part;
            // retry combined with today's date.
            return formatter.format(((LocalTime) time).atDate(LocalDate.now()));
        } else if(time instanceof Instant){
            // An Instant carries no zone information; apply the system
            // default zone before formatting.
            return formatter.format(((Instant) time).atZone(ZoneId.systemDefault()));
        }
        throw e;
    }
}
@Test
public void formatCustomTest(){
    // "#sss" is the custom pattern for epoch seconds.
    final String today = TemporalAccessorUtil.format(
            LocalDate.of(2021, 6, 26), "#sss");
    assertEquals("1624636800", today);

    // "#SSS" is the custom pattern for epoch milliseconds.
    final String today2 = TemporalAccessorUtil.format(
            LocalDate.of(2021, 6, 26), "#SSS");
    assertEquals("1624636800000", today2);
}
/**
 * Parses an S3-style URI of the form {@code scheme://bucket/key} into an
 * {@link S3ResourceId}.
 *
 * @param uri the URI to parse
 * @return the parsed resource id
 * @throws IllegalArgumentException if the URI does not match the expected form
 */
static S3ResourceId fromUri(String uri) {
    Matcher matcher = S3_URI.matcher(uri);
    checkArgument(matcher.matches(), "Invalid S3 URI: [%s]", uri);
    String rawKey = Strings.nullToEmpty(matcher.group("KEY"));
    // Keys are always stored with a leading slash; add one when absent.
    String key = rawKey.startsWith("/") ? rawKey : "/" + rawKey;
    return fromComponents(matcher.group("SCHEME"), matcher.group("BUCKET"), key);
}
@Test
public void testInvalidPathNoBucketAndSlash() {
    // A URI with neither a bucket nor a key must be rejected.
    assertThrows(
        "Invalid S3 URI: [s3:///]",
        IllegalArgumentException.class,
        () -> S3ResourceId.fromUri("s3:///"));
}
static Method getGetter(final Class<?> clazz, final String propertyName) { final String getterName = "get" + Character.toUpperCase(propertyName.charAt(0)) + propertyName.substring(1); final String iserName = "is" + Character.toUpperCase(propertyName.charAt(0)) + propertyName.substring(1); try { return clazz.getMethod(getterName, NO_ARGS); } catch (NoSuchMethodException e) { // ignore for now - might be a boolean property } try { return clazz.getMethod(iserName, NO_ARGS); } catch (NoSuchMethodException e) { final String className = clazz.getName(); throw SarLogger.ROOT_LOGGER.propertyMethodNotFound("Get", propertyName, className); } }
@Test
public void findNonBooleanGetter() throws Exception {
    // Non-boolean properties are resolved through the "get" prefix.
    final Method getter = ReflectionUtils.getGetter(Foo.class, "a");
    assertNotNull(getter);
    assertEquals("getA", getter.getName());
}
/**
 * Reads the OpenAPI definition from the given resource class, using the
 * resolved application path and default scanning settings (no parent path,
 * no media types, empty tag/parameter/scanned-class accumulators).
 *
 * @param cls the JAX-RS resource class to scan
 * @return the resulting OpenAPI model
 */
public OpenAPI read(Class<?> cls) {
    return read(cls, resolveApplicationPath(), null, false, null, null,
            new LinkedHashSet<String>(), new ArrayList<Parameter>(), new HashSet<Class<?>>());
}
@Test(description = "Link with Ref")
public void testLinkWithRef() {
    // Pre-register a reusable Link component so the resource can $ref it.
    Components components = new Components();
    components.addLinks("Link", new Link().description("Link Description").operationId("id"));
    OpenAPI oas = new OpenAPI()
            .info(new Info().description("info"))
            .components(components);

    Reader reader = new Reader(oas);
    OpenAPI openAPI = reader.read(RefLinksResource.class);

    // Expected serialized form: the response link carries both inline
    // fields and a $ref to the pre-registered component.
    String yaml = "openapi: 3.0.1\n" +
            "info:\n" +
            "  description: info\n" +
            "paths:\n" +
            "  /links:\n" +
            "    get:\n" +
            "      operationId: getUserWithAddress\n" +
            "      parameters:\n" +
            "      - name: userId\n" +
            "        in: query\n" +
            "        schema:\n" +
            "          type: string\n" +
            "      responses:\n" +
            "        default:\n" +
            "          description: test description\n" +
            "          content:\n" +
            "            '*/*':\n" +
            "              schema:\n" +
            "                $ref: '#/components/schemas/User'\n" +
            "          links:\n" +
            "            address:\n" +
            "              operationId: getAddress\n" +
            "              parameters:\n" +
            "                userId: $request.query.userId\n" +
            "              $ref: '#/components/links/Link'\n" +
            "components:\n" +
            "  links:\n" +
            "    Link:\n" +
            "      operationId: id\n" +
            "      description: Link Description\n";
    SerializationMatchers.assertEqualsToYaml(openAPI, yaml);
}
/**
 * Generates candidate orderings of the given partition-by expression groups.
 *
 * <p>Enumerates permutations (one element chosen from each group, in
 * generator order) and keeps at most a fixed number of candidates to bound
 * planning work.
 *
 * @param partitionByExprs the groups of interchangeable partition-by exprs
 * @return up to {@code MAX_CANDIDATES} candidate expression lists; empty if
 *         the input is empty
 */
public static List<List<Expr>> candidateOfPartitionByExprs(List<List<Expr>> partitionByExprs) {
    // Cap on the number of permutations considered (was an inline magic 8).
    final int maxCandidates = 8;
    if (partitionByExprs.isEmpty()) {
        return Lists.newArrayList();
    }
    // Typed generator (the original used a raw PermutationGenerator).
    PermutationGenerator<Expr> generator = new PermutationGenerator<>(partitionByExprs);
    List<List<Expr>> candidates = Lists.newArrayList();
    // candidates.size() replaces the redundant totalCount counter.
    while (generator.hasNext() && candidates.size() < maxCandidates) {
        candidates.add(generator.next());
    }
    return candidates;
}
@Test
public void testPermutaionsOfPartitionByExprs3() throws Exception {
    // 4 groups of 5 slot refs -> 625 possible permutations;
    // only the first 8 (in generator order) must be returned.
    List<List<Expr>> slotRefs = createSlotRefArray(4, 5);
    for (List<Expr> refs: slotRefs) {
        System.out.println(slotRefsToInt(refs));
    }
    List<List<Expr>> newSlotRefs = PlanNode.candidateOfPartitionByExprs(slotRefs);
    Assert.assertTrue (newSlotRefs.size() == 8);
    for (List<Expr> candidates: newSlotRefs) {
        System.out.println(slotRefsToInt(candidates));
    }
    // The expected candidates vary the last group fastest.
    Assert.assertTrue(slotRefsEqualTo(newSlotRefs.get(0), Arrays.asList(0, 5, 10, 15)));
    Assert.assertTrue(slotRefsEqualTo(newSlotRefs.get(1), Arrays.asList(0, 5, 10, 16)));
    Assert.assertTrue(slotRefsEqualTo(newSlotRefs.get(2), Arrays.asList(0, 5, 10, 17)));
    Assert.assertTrue(slotRefsEqualTo(newSlotRefs.get(3), Arrays.asList(0, 5, 10, 18)));
    Assert.assertTrue(slotRefsEqualTo(newSlotRefs.get(4), Arrays.asList(0, 5, 10, 19)));
    Assert.assertTrue(slotRefsEqualTo(newSlotRefs.get(5), Arrays.asList(0, 5, 11, 15)));
    Assert.assertTrue(slotRefsEqualTo(newSlotRefs.get(6), Arrays.asList(0, 5, 11, 16)));
    Assert.assertTrue(slotRefsEqualTo(newSlotRefs.get(7), Arrays.asList(0, 5, 11, 17)));
}
/**
 * Loads and deserializes an instance of the configured type from a JSON file.
 *
 * @param jsonFile the file to read
 * @return the deserialized instance
 * @throws FileNotFoundException if the path is missing or not a regular file
 * @throws EOFException if the file is empty
 * @throws JsonParseException if the content is not valid JSON
 * @throws JsonMappingException if the JSON does not map to the target type
 * @throws IOException on other read failures
 */
@SuppressWarnings("unchecked")
public synchronized T load(File jsonFile)
    throws IOException, JsonParseException, JsonMappingException {
    // Validate the path up front so callers get precise failure types.
    if (!jsonFile.exists()) {
        throw new FileNotFoundException("No such file: " + jsonFile);
    }
    if (!jsonFile.isFile()) {
        throw new FileNotFoundException("Not a file: " + jsonFile);
    }
    if (jsonFile.length() == 0) {
        throw new EOFException("File is empty: " + jsonFile);
    }
    try {
        return mapper.readValue(jsonFile, classType);
    } catch (IOException e) {
        // Log for diagnosis, then rethrow for the caller to handle.
        LOG.warn("Exception while parsing json file {}", jsonFile, e);
        throw e;
    }
}
@Test
public void testFileSystemEmptyStatus() throws Throwable {
    // Loading a zero-length file must fail with EOFException.
    File tempFile = File.createTempFile("Keyval", ".json");
    Path tempPath = new Path(tempFile.toURI());
    LocalFileSystem fs = FileSystem.getLocal(new Configuration());
    try {
        final FileStatus st = fs.getFileStatus(tempPath);
        LambdaTestUtils.intercept(EOFException.class,
            () -> serDeser.load(fs, tempPath, st));
    } finally {
        // Always clean up the temp file, even when the assertion fails.
        fs.delete(tempPath, false);
    }
}
/**
 * Returns the ids of the plan nodes in the order they should be scheduled,
 * computed by walking the plan tree with a visitor that emits each node id
 * into the ordered builder.
 *
 * @param root the root of the plan tree
 * @return the scheduling order of plan node ids
 */
public static List<PlanNodeId> scheduleOrder(PlanNode root) {
    ImmutableList.Builder<PlanNodeId> schedulingOrder = ImmutableList.builder();
    // The visitor invokes the consumer once per node as it orders them.
    root.accept(new Visitor(), schedulingOrder::add);
    return schedulingOrder.build();
}
@Test
public void testSemiJoinOrder() {
    PlanBuilder planBuilder = new PlanBuilder(TEST_SESSION, new PlanNodeIdAllocator(), METADATA);

    // Probe side of the semi join.
    VariableReferenceExpression sourceJoin = planBuilder.variable("sourceJoin");
    TableScanNode a = planBuilder.tableScan(
            ImmutableList.of(sourceJoin),
            ImmutableMap.of(sourceJoin, new TestingColumnHandle("sourceJoin")));

    // Filtering (build) side of the semi join.
    VariableReferenceExpression filteringSource = planBuilder.variable("filteringSource");
    TableScanNode b = planBuilder.tableScan(
            ImmutableList.of(filteringSource),
            ImmutableMap.of(filteringSource, new TestingColumnHandle("filteringSource")));

    List<PlanNodeId> order = scheduleOrder(planBuilder.semiJoin(
            sourceJoin,
            filteringSource,
            planBuilder.variable("semiJoinOutput"),
            Optional.empty(),
            Optional.empty(),
            a,
            b));
    // The filtering source must be scheduled before the probe source.
    assertEquals(order, ImmutableList.of(b.getId(), a.getId()));
}
/**
 * Releases this portal's resources: unmarks the backend handler as in use
 * and closes it.
 *
 * <p>The close is performed in a finally block so the handler is closed even
 * if unmarking the resource fails (the original leaked the handler in that
 * case).
 *
 * @throws SQLException if closing the backend handler fails
 */
public void close() throws SQLException {
    try {
        databaseConnectionManager.unmarkResourceInUse(proxyBackendHandler);
    } finally {
        proxyBackendHandler.close();
    }
}
@Test
void assertClose() throws SQLException {
    // Closing a portal must release the backend handler and then close it.
    PostgreSQLServerPreparedStatement preparedStatement = new PostgreSQLServerPreparedStatement("",
            new UnknownSQLStatementContext(new PostgreSQLEmptyStatement()),
            new HintValueContext(), Collections.emptyList(), Collections.emptyList());
    new Portal("", preparedStatement, Collections.emptyList(), Collections.emptyList(),
            databaseConnectionManager).close();
    verify(databaseConnectionManager).unmarkResourceInUse(proxyBackendHandler);
    verify(proxyBackendHandler).close();
}
/**
 * Processes a NodeManager heartbeat: validates the node, de-duplicates and
 * orders heartbeats via response ids, handles decommissioning, builds the
 * response (interval, keys, collectors, resources, queuing limits), and
 * forwards status/labels/attributes to the RM-side node state.
 *
 * @param request the heartbeat request from the NM
 * @return the response the NM should act on (NORMAL, RESYNC or SHUTDOWN)
 * @throws YarnException on YARN-level failures
 * @throws IOException on I/O failures
 */
@SuppressWarnings("unchecked")
@Override
public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
    throws YarnException, IOException {
  NodeStatus remoteNodeStatus = request.getNodeStatus();
  /**
   * Here is the node heartbeat sequence...
   * 1. Check if it's a valid (i.e. not excluded) node
   * 2. Check if it's a registered node
   * 3. Check if it's a 'fresh' heartbeat i.e. not duplicate heartbeat
   * 4. Send healthStatus to RMNode
   * 5. Update node's labels if distributed Node Labels configuration is enabled
   */
  NodeId nodeId = remoteNodeStatus.getNodeId();

  // 1. Check if it's a valid (i.e. not excluded) node, if not, see if it is
  // in decommissioning.
  if (!this.nodesListManager.isValidNode(nodeId.getHost())
      && !isNodeInDecommissioning(nodeId)) {
    String message = "Disallowed NodeManager nodeId: " + nodeId
        + " hostname: " + nodeId.getHost();
    LOG.info(message);
    return YarnServerBuilderUtils.newNodeHeartbeatResponse(
        NodeAction.SHUTDOWN, message);
  }

  // 2. Check if it's a registered node
  RMNode rmNode = this.rmContext.getRMNodes().get(nodeId);
  if (rmNode == null) {
    /* node does not exist */
    String message = "Node not found resyncing " + remoteNodeStatus.getNodeId();
    LOG.info(message);
    return YarnServerBuilderUtils.newNodeHeartbeatResponse(NodeAction.RESYNC,
        message);
  }

  // Send ping
  this.nmLivelinessMonitor.receivedPing(nodeId);
  this.decommissioningWatcher.update(rmNode, remoteNodeStatus);

  // 3. Check if it's a 'fresh' heartbeat i.e. not duplicate heartbeat
  NodeHeartbeatResponse lastNodeHeartbeatResponse =
      rmNode.getLastNodeHeartBeatResponse();
  if (getNextResponseId(
      remoteNodeStatus.getResponseId()) == lastNodeHeartbeatResponse
          .getResponseId()) {
    // The NM resent its previous heartbeat (it never saw our response);
    // replay the cached response rather than processing it twice.
    LOG.info("Received duplicate heartbeat from node "
        + rmNode.getNodeAddress() + " responseId="
        + remoteNodeStatus.getResponseId());
    return lastNodeHeartbeatResponse;
  } else if (remoteNodeStatus.getResponseId() != lastNodeHeartbeatResponse
      .getResponseId()) {
    // Response ids out of sync beyond a single step: the NM must resync.
    String message =
        "Too far behind rm response id:"
            + lastNodeHeartbeatResponse.getResponseId() + " nm response id:"
            + remoteNodeStatus.getResponseId();
    LOG.info(message);
    // TODO: Just sending reboot is not enough. Think more.
    this.rmContext.getDispatcher().getEventHandler().handle(
        new RMNodeEvent(nodeId, RMNodeEventType.REBOOTING));
    return YarnServerBuilderUtils.newNodeHeartbeatResponse(NodeAction.RESYNC,
        message);
  }

  // Evaluate whether a DECOMMISSIONING node is ready to be DECOMMISSIONED.
  if (rmNode.getState() == NodeState.DECOMMISSIONING
      && decommissioningWatcher.checkReadyToBeDecommissioned(
          rmNode.getNodeID())) {
    String message = "DECOMMISSIONING " + nodeId
        + " is ready to be decommissioned";
    LOG.info(message);
    this.rmContext.getDispatcher().getEventHandler().handle(
        new RMNodeEvent(nodeId, RMNodeEventType.DECOMMISSION));
    this.nmLivelinessMonitor.unregister(nodeId);
    return YarnServerBuilderUtils.newNodeHeartbeatResponse(
        NodeAction.SHUTDOWN, message);
  }

  if (timelineServiceV2Enabled) {
    // Check & update collectors info from request.
    updateAppCollectorsMap(request);
  }

  // Heartbeat response: optionally scale the interval to node conditions.
  long newInterval = nextHeartBeatInterval;
  if (heartBeatIntervalScalingEnable) {
    newInterval = rmNode.calculateHeartBeatInterval(
        nextHeartBeatInterval, heartBeatIntervalMin,
        heartBeatIntervalMax, heartBeatIntervalSpeedupFactor,
        heartBeatIntervalSlowdownFactor);
  }
  NodeHeartbeatResponse nodeHeartBeatResponse =
      YarnServerBuilderUtils.newNodeHeartbeatResponse(
          getNextResponseId(lastNodeHeartbeatResponse.getResponseId()),
          NodeAction.NORMAL, null, null, null, null, newInterval);
  // Cache the response so duplicate heartbeats can be replayed (step 3).
  rmNode.setAndUpdateNodeHeartbeatResponse(nodeHeartBeatResponse);

  populateKeys(request, nodeHeartBeatResponse);

  populateTokenSequenceNo(request, nodeHeartBeatResponse);

  if (timelineServiceV2Enabled) {
    // Return collectors' map that NM needs to know
    setAppCollectorsMapToResponse(rmNode.getRunningApps(),
        nodeHeartBeatResponse);
  }

  // 4. Send status to RMNode, saving the latest response.
  RMNodeStatusEvent nodeStatusEvent =
      new RMNodeStatusEvent(nodeId, remoteNodeStatus);
  if (request.getLogAggregationReportsForApps() != null
      && !request.getLogAggregationReportsForApps().isEmpty()) {
    nodeStatusEvent.setLogAggregationReportsForApps(request
        .getLogAggregationReportsForApps());
  }
  this.rmContext.getDispatcher().getEventHandler().handle(nodeStatusEvent);

  // 5. Update node's labels to RM's NodeLabelManager.
  if (isDistributedNodeLabelsConf && request.getNodeLabels() != null) {
    try {
      updateNodeLabelsFromNMReport(
          NodeLabelsUtils.convertToStringSet(request.getNodeLabels()),
          nodeId);
      nodeHeartBeatResponse.setAreNodeLabelsAcceptedByRM(true);
    } catch (IOException ex) {
      // ensure the error message is captured and sent across in response
      nodeHeartBeatResponse.setDiagnosticsMessage(ex.getMessage());
      nodeHeartBeatResponse.setAreNodeLabelsAcceptedByRM(false);
    }
  }

  // 6. check if node's capacity is load from dynamic-resources.xml
  // if so, send updated resource back to NM.
  String nid = nodeId.toString();
  Resource capability = loadNodeResourceFromDRConfiguration(nid);
  // sync back with new resource if not null.
  if (capability != null) {
    nodeHeartBeatResponse.setResource(capability);
  }
  // Check if we got an event (AdminService) that updated the resources
  if (rmNode.isUpdatedCapability()) {
    nodeHeartBeatResponse.setResource(rmNode.getTotalCapability());
    rmNode.resetUpdatedCapability();
  }

  // 7. Send Container Queuing Limits back to the Node. This will be used by
  // the node to truncate the number of Containers queued for execution.
  if (this.rmContext.getNodeManagerQueueLimitCalculator() != null) {
    nodeHeartBeatResponse.setContainerQueuingLimit(
        this.rmContext.getNodeManagerQueueLimitCalculator()
            .createContainerQueuingLimit());
  }

  // 8. Get node's attributes and update node-to-attributes mapping
  // in RMNodeAttributeManager.
  if (request.getNodeAttributes() != null) {
    try {
      // update node attributes if necessary then update heartbeat response
      updateNodeAttributesIfNecessary(nodeId, request.getNodeAttributes());
      nodeHeartBeatResponse.setAreNodeAttributesAcceptedByRM(true);
    } catch (IOException ex) {
      // ensure the error message is captured and sent across in response;
      // append to any diagnostics already set by the label update above.
      String errorMsg =
          nodeHeartBeatResponse.getDiagnosticsMessage() == null
              ? ex.getMessage()
              : nodeHeartBeatResponse.getDiagnosticsMessage() + "\n"
                  + ex.getMessage();
      nodeHeartBeatResponse.setDiagnosticsMessage(errorMsg);
      nodeHeartBeatResponse.setAreNodeAttributesAcceptedByRM(false);
    }
  }
  return nodeHeartBeatResponse;
}
@Test
public void testGracefulDecommissionDefaultTimeoutResolution()
    throws Exception {
  Configuration conf = new Configuration();
  conf.set(YarnConfiguration.RM_NODES_EXCLUDE_FILE_PATH, excludeHostXmlFile
      .getAbsolutePath());
  // Start with an effectively empty exclude list.
  writeToHostsXmlFile(excludeHostXmlFile, Pair.of("", null));

  rm = new MockRM(conf);
  rm.start();

  int nodeMemory = 1024;
  MockNM nm1 = rm.registerNode("host1:1234", nodeMemory);
  MockNM nm2 = rm.registerNode("host2:5678", nodeMemory);
  MockNM nm3 = rm.registerNode("host3:9101", nodeMemory);

  NodeHeartbeatResponse nodeHeartbeat1 = nm1.nodeHeartbeat(true);
  NodeHeartbeatResponse nodeHeartbeat2 = nm2.nodeHeartbeat(true);
  NodeHeartbeatResponse nodeHeartbeat3 = nm3.nodeHeartbeat(true);

  // All three nodes should be healthy and running before decommissioning.
  Assert.assertTrue(
      NodeAction.NORMAL.equals(nodeHeartbeat1.getNodeAction()));
  Assert.assertTrue(
      NodeAction.NORMAL.equals(nodeHeartbeat2.getNodeAction()));
  Assert.assertTrue(
      NodeAction.NORMAL.equals(nodeHeartbeat3.getNodeAction()));

  rm.waitForState(nm1.getNodeId(), NodeState.RUNNING);
  rm.waitForState(nm2.getNodeId(), NodeState.RUNNING);
  rm.waitForState(nm3.getNodeId(), NodeState.RUNNING);

  // Graceful decommission both host1 and host2, with
  // non default timeout for host1
  final Integer nm1DecommissionTimeout = 20;
  writeToHostsXmlFile(
      excludeHostXmlFile,
      Pair.of(nm1.getNodeId().getHost(), nm1DecommissionTimeout),
      Pair.of(nm2.getNodeId().getHost(), null));
  rm.getNodesListManager().refreshNodes(conf, true);
  rm.waitForState(nm1.getNodeId(), NodeState.DECOMMISSIONING);
  rm.waitForState(nm2.getNodeId(), NodeState.DECOMMISSIONING);

  // host1 uses its per-node timeout; host2 falls back to the config default.
  Assert.assertEquals(
      nm1DecommissionTimeout, rm.getDecommissioningTimeout(nm1.getNodeId()));
  Integer defaultDecTimeout =
      conf.getInt(YarnConfiguration.RM_NODE_GRACEFUL_DECOMMISSION_TIMEOUT,
          YarnConfiguration.DEFAULT_RM_NODE_GRACEFUL_DECOMMISSION_TIMEOUT);
  Assert.assertEquals(
      defaultDecTimeout, rm.getDecommissioningTimeout(nm2.getNodeId()));

  // Graceful decommission host3 with a new default timeout
  final Integer newDefaultDecTimeout = defaultDecTimeout + 10;
  writeToHostsXmlFile(
      excludeHostXmlFile, Pair.of(nm3.getNodeId().getHost(), null));
  conf.setInt(YarnConfiguration.RM_NODE_GRACEFUL_DECOMMISSION_TIMEOUT,
      newDefaultDecTimeout);
  rm.getNodesListManager().refreshNodes(conf, true);
  rm.waitForState(nm3.getNodeId(), NodeState.DECOMMISSIONING);
  // host3 must pick up the updated default timeout.
  Assert.assertEquals(
      newDefaultDecTimeout, rm.getDecommissioningTimeout(nm3.getNodeId()));
}
/**
 * Runs the health check, converting any thrown exception into an unhealthy
 * result, and records the elapsed wall-clock time on the returned result.
 *
 * @return the (possibly unhealthy) result with its duration populated
 */
public Result execute() {
    final long startTick = clock().getTick();
    Result outcome;
    try {
        outcome = check();
    } catch (Exception e) {
        // A throwing check is reported as unhealthy rather than propagated.
        outcome = Result.unhealthy(e);
    }
    final long elapsedNanos = clock().getTick() - startTick;
    outcome.setDuration(TimeUnit.MILLISECONDS.convert(elapsedNanos, TimeUnit.NANOSECONDS));
    return outcome;
}
@Test
public void returnsResultsWhenExecuted() {
    final HealthCheck.Result result = mock(HealthCheck.Result.class);
    when(underlying.execute()).thenReturn(result);

    assertThat(healthCheck.execute())
        .isEqualTo(result);
    // execute() must stamp a duration on the returned result.
    verify(result).setDuration(anyLong());
}
/**
 * Lists all files under the given directory (following symbolic links) that
 * match the supplied filter.
 *
 * @param directory the directory to scan; must exist and be a directory
 * @param fileFilter predicate selecting which files to return
 * @return the matching files found in the tree
 * @throws IOException if walking the file tree fails
 * @throws IllegalArgumentException if the path does not exist or is not a
 *         directory
 */
public static Collection<java.nio.file.Path> listFilesInDirectory(
        final java.nio.file.Path directory,
        final Predicate<java.nio.file.Path> fileFilter)
        throws IOException {
    checkNotNull(directory, "directory");
    checkNotNull(fileFilter, "fileFilter");

    if (!Files.exists(directory)) {
        // Fixed typo in the user-facing message: "dose" -> "does".
        throw new IllegalArgumentException(
                String.format("The directory %s does not exist.", directory));
    }
    if (!Files.isDirectory(directory)) {
        throw new IllegalArgumentException(
                String.format("The %s is not a directory.", directory));
    }

    final FilterFileVisitor filterFileVisitor = new FilterFileVisitor(fileFilter);

    // FOLLOW_LINKS + unbounded depth: traverse the full tree, including
    // directories reached through symlinks.
    Files.walkFileTree(
            directory,
            EnumSet.of(FileVisitOption.FOLLOW_LINKS),
            Integer.MAX_VALUE,
            filterFileVisitor);

    return filterFileVisitor.getFiles();
}
@Test
void testListDirFailsIfDirectoryDoesNotExist() {
    final String fileName = "_does_not_exists_file";
    // Listing a non-existent directory must fail with IllegalArgumentException.
    assertThatThrownBy(
            () -> FileUtils.listFilesInDirectory(
                    temporaryFolder.getRoot().resolve(fileName),
                    FileUtils::isJarFile))
            .isInstanceOf(IllegalArgumentException.class);
}
/**
 * Renders a Redis command and its parameters for logging, masking the
 * credential argument of AUTH commands so passwords never reach the log.
 *
 * @param command the command being logged
 * @param params the command parameters
 * @return a log-safe description of the command invocation
 */
public static String toString(RedisCommand<?> command, Object... params) {
    // Never log AUTH credentials.
    if (RedisCommands.AUTH.equals(command)) {
        return "command: " + command + ", params: (password masked)";
    }
    StringBuilder description = new StringBuilder("command: ");
    description.append(command).append(", params: ").append(LogHelper.toString(params));
    return description.toString();
}
@Test
public void toStringWithBigArrays() {
    // Arrays longer than 10 elements are rendered truncated with "...".
    String[] strings = new String[15];
    Arrays.fill(strings, "0");
    int[] ints = new int[15];
    Arrays.fill(ints, 1);
    long[] longs = new long[15];
    Arrays.fill(longs, 2L);
    double[] doubles = new double[15];
    Arrays.fill(doubles, 3.1D);
    float[] floats = new float[15];
    Arrays.fill(floats, 4.2F);
    byte[] bytes = new byte[15];
    Arrays.fill(bytes, (byte) 5);
    char[] chars = new char[15];
    Arrays.fill(chars, '6');

    assertThat(LogHelper.toString(strings)).isEqualTo("[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...]");
    assertThat(LogHelper.toString(ints)).isEqualTo("[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, ...]");
    assertThat(LogHelper.toString(longs)).isEqualTo("[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, ...]");
    assertThat(LogHelper.toString(doubles)).isEqualTo("[3.1, 3.1, 3.1, 3.1, 3.1, 3.1, 3.1, 3.1, 3.1, 3.1, ...]");
    assertThat(LogHelper.toString(floats)).isEqualTo("[4.2, 4.2, 4.2, 4.2, 4.2, 4.2, 4.2, 4.2, 4.2, 4.2, ...]");
    assertThat(LogHelper.toString(bytes)).isEqualTo("[5, 5, 5, 5, 5, 5, 5, 5, 5, 5, ...]");
    assertThat(LogHelper.toString(chars)).isEqualTo("[6, 6, 6, 6, 6, 6, 6, 6, 6, 6, ...]");
}
@Override public boolean filterPath(Path filePath) { if (getIncludeMatchers().isEmpty() && getExcludeMatchers().isEmpty()) { return false; } // compensate for the fact that Flink paths are slashed final String path = filePath.hasWindowsDrive() ? filePath.getPath().substring(1) : filePath.getPath(); final java.nio.file.Path nioPath = Paths.get(path); for (PathMatcher matcher : getIncludeMatchers()) { if (matcher.matches(nioPath)) { return shouldExclude(nioPath); } } return true; }
@Test
void testExcludeFilesNotInIncludePatterns() {
    GlobFilePathFilter matcher = new GlobFilePathFilter(
            Collections.singletonList("dir/*"),
            Collections.emptyList());

    // Matches the include pattern -> kept (not filtered).
    assertThat(matcher.filterPath(new Path("dir/file.txt"))).isFalse();
    // Does not match any include pattern -> filtered out.
    assertThat(matcher.filterPath(new Path("dir1/file.txt"))).isTrue();
}
/**
 * Queries the list of all projects available for use in dependent task
 * definitions.
 *
 * @param loginUser the logged-in user injected from the session attribute;
 *        required by the endpoint contract but not used by the service call
 * @return result wrapping the project list
 */
@Operation(summary = "queryAllProjectListForDependent", description = "QUERY_ALL_PROJECT_LIST_FOR_DEPENDENT_NOTES")
@GetMapping(value = "/list-dependent")
@ResponseStatus(HttpStatus.OK)
@ApiException(LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR)
public Result queryAllProjectListForDependent(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
    return projectService.queryAllProjectListForDependent();
}
@Test
public void testQueryAllProjectListForDependent() {
    User user = new User();
    user.setId(0);

    // Stub the service to return a SUCCESS result.
    Result result = new Result();
    putMsg(result, Status.SUCCESS);
    Mockito.when(projectService.queryAllProjectListForDependent()).thenReturn(result);

    // The controller must pass the service result through unchanged.
    Result response = projectController.queryAllProjectListForDependent(user);
    Assertions.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
/**
 * Executes all items of the SET statement.
 *
 * <p>When the statement assigns user-defined variables, the assignments are
 * applied atomically: all modifications are made on a copy of the session's
 * user-variable map, which replaces the live map only if every item
 * succeeds.
 *
 * @throws DdlException if setting any variable fails
 */
public void execute() throws DdlException {
    Map<String, UserVariable> clonedUserVars = new ConcurrentHashMap<>();
    boolean hasUserVar = stmt.getSetListItems().stream().anyMatch(var -> var instanceof UserVariable);
    boolean executeSuccess = true;

    if (hasUserVar) {
        // Work on a copy-in-write of the session's user variables so a
        // partial failure never leaves a half-applied multi-assignment
        // visible to the session.
        clonedUserVars.putAll(ctx.getUserVariables());
        ctx.modifyUserVariablesCopyInWrite(clonedUserVars);
    }

    try {
        for (SetListItem var : stmt.getSetListItems()) {
            setVariablesOfAllType(var);
        }
    } catch (Throwable e) {
        // Mark failure so the finally block discards the copied variables.
        if (hasUserVar) {
            executeSuccess = false;
        }
        throw e;
    } finally {
        // If the set sql contains more than one user variable,
        // the atomicity of the modification of this set of variables must be ensured.
        if (hasUserVar) {
            ctx.resetUserVariableCopyInWrite();
            if (executeSuccess) {
                ctx.modifyUserVariables(clonedUserVars);
            }
        }
    }
}
@Test
public void testUserDefineVariable3() throws Exception {
    ConnectContext ctx = starRocksAssert.getCtx();

    // Variables defined in one statement may reference earlier ones;
    // literal types widen as expressions grow (TINYINT -> SMALLINT -> INT).
    String sql = "set @aVar = 5, @bVar = @aVar + 1, @cVar = @bVar + 1";
    SetStmt stmt = (SetStmt) UtFrameUtils.parseStmtWithNewParser(sql, ctx);
    SetExecutor executor = new SetExecutor(ctx, stmt);
    executor.execute();
    UserVariable userVariableA = ctx.getUserVariable("aVar");
    UserVariable userVariableB = ctx.getUserVariable("bVar");
    UserVariable userVariableC = ctx.getUserVariable("cVar");
    Assert.assertTrue(userVariableA.getEvaluatedExpression().getType().matchesType(Type.TINYINT));
    Assert.assertTrue(userVariableB.getEvaluatedExpression().getType().matchesType(Type.SMALLINT));
    Assert.assertTrue(userVariableC.getEvaluatedExpression().getType().matchesType(Type.INT));
    LiteralExpr literalExprA = (LiteralExpr) userVariableA.getEvaluatedExpression();
    Assert.assertEquals("5", literalExprA.getStringValue());
    LiteralExpr literalExprB = (LiteralExpr) userVariableB.getEvaluatedExpression();
    Assert.assertEquals("6", literalExprB.getStringValue());
    LiteralExpr literalExprC = (LiteralExpr) userVariableC.getEvaluatedExpression();
    Assert.assertEquals("7", literalExprC.getStringValue());

    // Re-assignment overwrites the previous values.
    sql = "set @aVar = 6, @bVar = @aVar + 1, @cVar = @bVar + 1";
    stmt = (SetStmt) UtFrameUtils.parseStmtWithNewParser(sql, ctx);
    executor = new SetExecutor(ctx, stmt);
    executor.execute();
    userVariableA = ctx.getUserVariable("aVar");
    userVariableB = ctx.getUserVariable("bVar");
    userVariableC = ctx.getUserVariable("cVar");
    Assert.assertTrue(userVariableA.getEvaluatedExpression().getType().matchesType(Type.TINYINT));
    Assert.assertTrue(userVariableB.getEvaluatedExpression().getType().matchesType(Type.SMALLINT));
    Assert.assertTrue(userVariableC.getEvaluatedExpression().getType().matchesType(Type.INT));
    literalExprA = (LiteralExpr) userVariableA.getEvaluatedExpression();
    Assert.assertEquals("6", literalExprA.getStringValue());
    literalExprB = (LiteralExpr) userVariableB.getEvaluatedExpression();
    Assert.assertEquals("7", literalExprB.getStringValue());
    literalExprC = (LiteralExpr) userVariableC.getEvaluatedExpression();
    Assert.assertEquals("8", literalExprC.getStringValue());

    // Referencing an undefined variable (@eVar) evaluates to NULL.
    sql = "set @aVar = 5, @bVar = @aVar + 1, @cVar = @eVar + 1";
    stmt = (SetStmt) UtFrameUtils.parseStmtWithNewParser(sql, ctx);
    executor = new SetExecutor(ctx, stmt);
    executor.execute();
    userVariableA = ctx.getUserVariable("aVar");
    userVariableB = ctx.getUserVariable("bVar");
    userVariableC = ctx.getUserVariable("cVar");
    Assert.assertTrue(userVariableA.getEvaluatedExpression().getType().matchesType(Type.TINYINT));
    Assert.assertTrue(userVariableB.getEvaluatedExpression().getType().matchesType(Type.SMALLINT));
    Assert.assertTrue(userVariableC.getEvaluatedExpression() instanceof NullLiteral);
    literalExprA = (LiteralExpr) userVariableA.getEvaluatedExpression();
    Assert.assertEquals("5", literalExprA.getStringValue());
    literalExprB = (LiteralExpr) userVariableB.getEvaluatedExpression();
    Assert.assertEquals("6", literalExprB.getStringValue());
    literalExprC = (LiteralExpr) userVariableC.getEvaluatedExpression();
    Assert.assertEquals("NULL", literalExprC.getStringValue());

    // An over-long variable name fails, and atomicity means no variable
    // from the same statement (including the valid @fVar) is kept.
    try {
        sql = "set @fVar = 1, " +
                "@abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz=2";
        stmt = (SetStmt) UtFrameUtils.parseStmtWithNewParser(sql, ctx);
        executor = new SetExecutor(ctx, stmt);
        executor.execute();
    } catch (AnalysisException e) {
        Assert.assertTrue(e.getMessage().contains("User variable name " +
                "'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz' is illegal"));
    }
    Assert.assertTrue(ctx.getUserVariable("fVar") == null);

    // Fill the session up to 1023 variables, then attempt to exceed the
    // 1024 limit: none of the new variables may be applied.
    ctx.getUserVariables().clear();
    for (int i = 0; i < 1023; ++i) {
        ctx.getUserVariables().put(String.valueOf(i), new UserVariable(null, null, null));
    }
    System.out.println(ctx.getUserVariables().keySet().size());
    try {
        sql = "set @aVar = 6, @bVar = @aVar + 1, @cVar = @bVar + 1";
        stmt = (SetStmt) UtFrameUtils.parseStmtWithNewParser(sql, ctx);
        executor = new SetExecutor(ctx, stmt);
        executor.execute();
    } catch (SemanticException e) {
        Assert.assertTrue(e.getMessage().contains("User variable exceeds the maximum limit of 1024"));
    }
    Assert.assertFalse(ctx.getUserVariables().containsKey("aVar"));
    Assert.assertFalse(ctx.getUserVariables().containsKey("bVar"));
    Assert.assertFalse(ctx.getUserVariables().containsKey("cVar"));
    Assert.assertTrue(ctx.getUserVariables().size() == 1023);
}
/**
 * Registers the client-based Distro components (data processor, transport
 * agent and failed-task handler) with the component holder after bean
 * construction.
 */
@PostConstruct
public void doRegister() {
    // Build the client-based Distro components.
    final DistroClientDataProcessor processor =
            new DistroClientDataProcessor(clientManager, distroProtocol);
    final DistroTransportAgent agent =
            new DistroClientTransportAgent(clusterRpcClientProxy, serverMemberManager);
    final DistroClientTaskFailedHandler failedHandler =
            new DistroClientTaskFailedHandler(taskEngineHolder);

    // The data processor doubles as the data storage for the client type.
    componentHolder.registerDataStorage(DistroClientDataProcessor.TYPE, processor);
    componentHolder.registerDataProcessor(processor);
    componentHolder.registerTransportAgent(DistroClientDataProcessor.TYPE, agent);
    componentHolder.registerFailedTaskHandler(DistroClientDataProcessor.TYPE, failedHandler);
}
@Test
void testDoRegister() {
    distroClientComponentRegistry.doRegister();

    // After registration, every component kind must be resolvable for the
    // client data type.
    DistroDataStorage dataStorage = componentHolder.findDataStorage(DistroClientDataProcessor.TYPE);
    assertNotNull(dataStorage);

    DistroDataProcessor dataProcessor = componentHolder.findDataProcessor(DistroClientDataProcessor.TYPE);
    assertNotNull(dataProcessor);

    DistroFailedTaskHandler failedTaskHandler = componentHolder.findFailedTaskHandler(DistroClientDataProcessor.TYPE);
    assertNotNull(failedTaskHandler);

    DistroTransportAgent transportAgent = componentHolder.findTransportAgent(DistroClientDataProcessor.TYPE);
    assertNotNull(transportAgent);
}