focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Returns the table name of the given identifier, validating it against Glue
 * naming rules unless validation was explicitly skipped by the caller.
 *
 * @param tableIdentifier identifier whose name is returned
 * @param skipNameValidation when true, the name is returned without validation
 * @return the identifier's table name
 */
static String getTableName(TableIdentifier tableIdentifier, boolean skipNameValidation) {
    // Guard clause: callers that opted out of validation get the raw name back.
    if (skipNameValidation) {
        return tableIdentifier.name();
    }
    validateTableName(tableIdentifier.name());
    return tableIdentifier.name();
}
// Names that would normally fail Glue validation must be returned verbatim
// when validation is skipped.
@Test public void testSkipTableNameValidation() { List<TableIdentifier> acceptableIdentifiers = Lists.newArrayList( TableIdentifier.parse("db.a-1"), TableIdentifier.parse("db.a-1-1"), TableIdentifier.parse("db.a#1")); for (TableIdentifier identifier : acceptableIdentifiers) { assertThat(IcebergToGlueConverter.getTableName(identifier, true)) .isEqualTo(identifier.name()); } }
/**
 * Determines whether the given user may view the template, either through an
 * explicit view-user grant on the template's authorization block, or via
 * group-admin access when the template opts in to allowing group admins.
 *
 * @param template the template being checked
 * @param username the user requesting access
 * @param roles roles held by the user (consulted by the view-user check)
 * @param isGroupAdministrator whether the user administers any pipeline group
 * @return true when the user may view the template
 */
public boolean hasViewAccessToTemplate(PipelineTemplateConfig template, CaseInsensitiveString username, List<Role> roles, boolean isGroupAdministrator) {
    // Direct view permission on the template wins outright.
    if (template.getAuthorization().isViewUser(username, roles)) {
        return true;
    }
    // Otherwise group admins may view templates that allow them.
    return template.isAllowGroupAdmins() && isGroupAdministrator;
}
// A user listed in the template's view config has view access even with no
// roles and without group-admin rights.
@Test public void shouldReturnTrueIfUserCanViewTemplate() { CaseInsensitiveString templateViewUser = new CaseInsensitiveString("view"); String templateName = "template"; PipelineTemplateConfig template = PipelineTemplateConfigMother.createTemplate(templateName, StageConfigMother.manualStage("stage")); template.setAuthorization(new Authorization(new ViewConfig(new AdminUser(templateViewUser)))); TemplatesConfig templates = new TemplatesConfig(template); assertThat(templates.hasViewAccessToTemplate(template, templateViewUser, null, false), is(true)); }
// Parses the file-distribution status JSON and renders a human-readable
// summary line. Malformed JSON or an unrecognized status value is surfaced
// as a RuntimeException.
String parseAndGenerateOutput(String json) {
    JsonNode jsonNode;
    try {
        jsonNode = Jackson.mapper().readTree(json);
    } catch (IOException e) {
        // Unparseable input is a transport/programming error here, not a user condition.
        throw new RuntimeException(e);
    }
    String status = jsonNode.get("status").asText();
    return switch (status) {
        // NOTE(review): assumes "message" (resp. "hosts") is present for its
        // status — confirm with the producer of this JSON.
        case statusUnknown -> "File distribution status unknown: " + jsonNode.get("message").asText();
        case statusInProgress -> "File distribution in progress:\n" + inProgressOutput(jsonNode.get("hosts"));
        case statusFinished -> "File distribution finished";
        default -> throw new RuntimeException("Unknown status " + status);
    };
}
// UNKNOWN status must render the embedded message.
@Test public void unknownForAllHosts() { String output = client.parseAndGenerateOutput("{\"status\":\"UNKNOWN\", \"message\":\"Something went wrong\"}"); assertEquals("File distribution status unknown: Something went wrong", output); }
// Reports whether the Elasticsearch child process is operational. A positive
// result is latched in nodeOperational, so the (potentially blocking) health
// check runs only until the first success.
@Override public boolean isOperational() {
    if (nodeOperational) {
        return true;
    }
    boolean flag = false;
    try {
        flag = checkOperational();
    } catch (InterruptedException e) {
        LOG.trace("Interrupted while checking ES node is operational", e);
        // Restore the interrupt flag for callers further up the stack.
        Thread.currentThread().interrupt();
    } finally {
        if (flag) {
            // Once the node answered healthy the probe connector is no longer needed.
            esConnector.stop();
            nodeOperational = true;
        }
    }
    return nodeOperational;
}
// A YELLOW cluster health status is sufficient for the node to count as operational.
@Test public void isOperational_should_return_true_if_Elasticsearch_is_YELLOW() { EsConnector esConnector = mock(EsConnector.class); when(esConnector.getClusterHealthStatus()).thenReturn(Optional.of(ClusterHealthStatus.YELLOW)); EsManagedProcess underTest = new EsManagedProcess(mock(Process.class), ProcessId.ELASTICSEARCH, esConnector, WAIT_FOR_UP_TIMEOUT); assertThat(underTest.isOperational()).isTrue(); }
// Convenience overload: resolves the inter-node listener URL using the
// class-level default LOGGER.
public URL getInterNodeListener( final Function<URL, Integer> portResolver ) { return getInterNodeListener(portResolver, LOGGER); }
// An explicitly configured internal listener on the IPv6 loopback is used as-is,
// logged, and accompanied by a loopback warning.
@Test public void shouldResolveInterNodeListenerToInternalListenerSetToIpv6Loopback() { // Given: final URL expected = url("https://[::1]:12345"); final KsqlRestConfig config = new KsqlRestConfig(ImmutableMap.<String, Object>builder() .put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092") .put(INTERNAL_LISTENER_CONFIG, expected.toString()) .build() ); // When: final URL actual = config.getInterNodeListener(portResolver, logger); // Then: assertThat(actual, is(expected)); verifyLogsInterNodeListener(expected, QUOTED_INTERNAL_LISTENER_CONFIG); verifyLogsLoopBackWarning(expected, QUOTED_INTERNAL_LISTENER_CONFIG); verifyNoMoreInteractions(logger); }
ClassicGroup getOrMaybeCreateClassicGroup( String groupId, boolean createIfNotExists ) throws GroupIdNotFoundException { Group group = groups.get(groupId); if (group == null && !createIfNotExists) { throw new GroupIdNotFoundException(String.format("Classic group %s not found.", groupId)); } if (group == null) { ClassicGroup classicGroup = new ClassicGroup(logContext, groupId, ClassicGroupState.EMPTY, time, metrics); groups.put(groupId, classicGroup); metrics.onClassicGroupStateTransition(null, classicGroup.currentState()); return classicGroup; } else { if (group.type() == CLASSIC) { return (ClassicGroup) group; } else { // We don't support upgrading/downgrading between protocols at the moment so // we throw an exception if a group exists with the wrong type. throw new GroupIdNotFoundException(String.format("Group %s is not a classic group.", groupId)); } } }
// A known static leader rejoining with its member id triggers a rebalance; the
// follower's heartbeat expires and the generation is bumped by one.
@Test public void testStaticMemberRejoinWithLeaderIdAndKnownMemberId() throws Exception { GroupMetadataManagerTestContext context = new GroupMetadataManagerTestContext.Builder() .build(); GroupMetadataManagerTestContext.RebalanceResult rebalanceResult = context.staticMembersJoinAndRebalance( "group-id", "leader-instance-id", "follower-instance-id" ); ClassicGroup group = context.groupMetadataManager.getOrMaybeCreateClassicGroup("group-id", false); // Known static leader rejoin will trigger rebalance. JoinGroupRequestData request = new GroupMetadataManagerTestContext.JoinGroupRequestBuilder() .withGroupId("group-id") .withGroupInstanceId("leader-instance-id") .withMemberId(rebalanceResult.leaderId) .withDefaultProtocolTypeAndProtocols() .withRebalanceTimeoutMs(10000) .build(); JoinGroupResponseData joinResponse = context.joinClassicGroupAndCompleteJoin(request, true, true, 10000); // Follower's heartbeat expires as the leader rejoins. assertFalse(group.hasMember(rebalanceResult.followerId)); JoinGroupResponseData expectedResponse = new JoinGroupResponseData() .setErrorCode(Errors.NONE.code()) .setGenerationId(rebalanceResult.generationId + 1) .setMemberId(rebalanceResult.leaderId) .setLeader(rebalanceResult.leaderId) .setProtocolName("range") .setProtocolType("consumer") .setMembers(toJoinResponseMembers(group)); checkJoinGroupResponse( expectedResponse, joinResponse, group, COMPLETING_REBALANCE, Collections.singleton("leader-instance-id") ); }
// True when no layers are registered. Synchronized to stay consistent with
// other synchronized mutations of layersList.
public synchronized boolean isEmpty() { return this.layersList.isEmpty(); }
// isEmpty must track the empty -> non-empty -> cleared transitions.
@Test public void isEmptyTest() { Layers layers = new Layers(new DummyRedrawer(), new DisplayModel()); Assert.assertTrue(layers.isEmpty()); layers.add(new DummyLayer()); Assert.assertFalse(layers.isEmpty()); layers.clear(); Assert.assertTrue(layers.isEmpty()); }
// Returns the transpose of this matrix; delegates to transpose(boolean) with
// 'true' — see that overload for the flag's exact meaning (presumably whether
// the underlying storage is shared; TODO confirm against the overload's docs).
public Matrix transpose() { return transpose(true); }
// Transposing flips the layout (COL_MAJOR -> ROW_MAJOR) while leaving the
// original matrix's element values intact.
@Test public void testTranspose() { Matrix t = matrix.transpose(); assertEquals(Layout.COL_MAJOR, matrix.layout()); assertEquals(Layout.ROW_MAJOR, t.layout()); assertEquals(3, t.nrow()); assertEquals(3, t.ncol()); assertEquals(0.9f, matrix.get(0, 0), 1E-6f); assertEquals(0.8f, matrix.get(2, 2), 1E-6f); assertEquals(0.5f, matrix.get(1, 1), 1E-6f); assertEquals(0.0f, matrix.get(0, 2), 1E-6f); assertEquals(0.0f, matrix.get(2, 0), 1E-6f); assertEquals(0.4f, matrix.get(1, 0), 1E-6f); }
/**
 * Looks up a method by name and parameter types on the target's class and
 * invokes it, returning empty when the target or method name is null or the
 * method cannot be found.
 *
 * @param target object to invoke on
 * @param methodName name of the method to find
 * @param paramsType parameter types used to resolve the overload
 * @param params arguments passed to the invocation
 * @return the invocation result, or empty when resolution fails
 */
public static Optional<Object> invokeMethod(Object target, String methodName, Class<?>[] paramsType, Object[] params) {
    if (target == null || methodName == null) {
        return Optional.empty();
    }
    // flatMap collapses "method not found" and the invocation result into one Optional.
    return findMethod(target.getClass(), methodName, paramsType)
            .flatMap(method -> invokeMethod(target, method, params));
}
// Invoking a resolved Method directly must yield the method's return value
// (Integer.MAX_VALUE for the fixture method).
@Test public void testInvokeMethod2() throws NoSuchMethodException { final Method invokeMethod = TestReflect.class.getDeclaredMethod("invokeMethod"); final TestReflect testReflect = new TestReflect(); final Optional<Object> result = ReflectUtils.invokeMethod(testReflect, invokeMethod, null); Assert.assertTrue(result.isPresent() && result.get() instanceof Integer); Assert.assertEquals(result.get(), Integer.MAX_VALUE); }
// Loads the raw bytes of the file at 'path' through the file client configured
// under configId. The Assert message is a user-facing Chinese template
// ("client ({}) must not be null") and must not be altered.
@Override public byte[] getFileContent(Long configId, String path) throws Exception { FileClient client = fileConfigService.getFileClient(configId); Assert.notNull(client, "客户端({}) 不能为空", configId); return client.getContent(path); }
// The service must return exactly the byte array produced by the mocked client.
@Test public void testGetFileContent() throws Exception { // prepare arguments Long configId = 10L; String path = "tudou.jpg"; // mock collaborators FileClient client = mock(FileClient.class); when(fileConfigService.getFileClient(eq(10L))).thenReturn(client); byte[] content = new byte[]{}; when(client.getContent(eq("tudou.jpg"))).thenReturn(content); // invoke byte[] result = fileService.getFileContent(configId, path); // assert: same array instance is passed through untouched assertSame(result, content); }
/**
 * Reorders the given fields so that fields referenced by grouping sorts come
 * first (in sort-spec order); returns the input untouched when no reordering
 * is required.
 *
 * @param fields fields to order
 * @param sorts sort specs that may reference grouping fields
 * @return the (possibly re-ordered) field list
 */
public static List<String> orderFields(List<String> fields, List<SortSpec> sorts) {
    if (needsReorderingFields(fields, sorts)) {
        // Fields referenced by grouping sorts, preserving sort-spec order.
        final List<String> groupingSortFields = sorts.stream()
                .filter(ValuesBucketOrdering::isGroupingSort)
                .map(SortSpec::field)
                .collect(Collectors.toList());
        return fields.stream()
                .sorted(new FieldsSortingComparator(groupingSortFields))
                .collect(Collectors.toList());
    }
    // Fast path: nothing refers to a grouping field, keep caller's order.
    return fields;
}
// With no sorts at all, the original field order is preserved.
@Test void staysInSameOrderIfNoPivotIsUsedForSort() { final List<String> orderedBuckets = ValuesBucketOrdering.orderFields(List.of("foo", "bar", "baz"), Collections.emptyList()); assertThat(orderedBuckets).containsExactly("foo", "bar", "baz"); }
// Deserializes a hessian-encoded SofaResponse from 'data' into the supplied
// template instance: the template's fields are populated rather than the
// object being replaced, so the caller keeps its reference.
@Override public void decodeObjectByTemplate(AbstractByteBuf data, Map<String, String> context, SofaResponse template) throws SofaRpcException {
    try {
        UnsafeByteArrayInputStream inputStream = new UnsafeByteArrayInputStream(data.array());
        Hessian2Input input = new Hessian2Input(inputStream);
        // Choose the serializer factory based on the generic-type header.
        boolean genericSerialize = context != null && isGenericResponse(
            context.get(RemotingConstants.HEAD_GENERIC_TYPE));
        if (genericSerialize) {
            input.setSerializerFactory(genericSerializerFactory);
            GenericObject genericObject = (GenericObject) input.readObject();
            template.setErrorMsg((String) genericObject.getField("errorMsg"));
            // Custom throwables may reference classes unavailable locally;
            // this helper maps them to something representable.
            template.setAppResponse(judgeCustomThrowableForGenericObject(genericObject.getField("appResponse")));
            template.setResponseProps((Map<String, String>) genericObject.getField("responseProps"));
        } else {
            input.setSerializerFactory(serializerFactory);
            SofaResponse tmp = (SofaResponse) input.readObject();
            // copy values to template
            template.setErrorMsg(tmp.getErrorMsg());
            template.setAppResponse(tmp.getAppResponse());
            template.setResponseProps(tmp.getResponseProps());
        }
        // NOTE(review): close is skipped on exception paths — presumably
        // acceptable for a byte-array-backed stream; confirm.
        input.close();
    } catch (IOException e) {
        throw buildDeserializeError(e.getMessage(), e);
    }
}
// When the generic-throwable flag is on and the payload references a throwable
// class that no longer exists locally (DeprecatedMockError was renamed to
// MockError), decoding must substitute a RuntimeException whose message names
// the missing class instead of failing.
@Test public void testCustomThrowableDeserializerEnabledForIncompatible() throws Exception { setGenericThrowException(true); try { GenericMultipleClassLoaderSofaSerializerFactory factory = new GenericMultipleClassLoaderSofaSerializerFactory(); SofaResponseHessianSerializer serializer = new SofaResponseHessianSerializer(null, factory); // com.alipay.sofa.rpc.codec.sofahessian.mock.DeprecatedMockError was renamed to com.alipay.sofa.rpc.codec.sofahessian.mock.MockError // SofaResponse sofaResponse = new SofaResponse(); // DeprecatedMockError mockError = new DeprecatedMockError(); // sofaResponse.setAppResponse(mockError); // hex string of the hessian-serialized sofaResponse captured before the rename String encodeMsg = "4fbe636f6d2e616c697061792e736f66612e7270632e636f72652e726573706f6e73652e536f6661526573706f6e7365940769734572726f72086572726f724d73670b617070526573706f6e73650d726573706f6e736550726f70736f90464e4fc83e636f6d2e616c697061792e736f66612e7270632e636f6465632e736f66616865737369616e2e6d6f636b2e446570726563617465644d6f636b4572726f72940d64657461696c4d6573736167650563617573650a737461636b54726163651473757070726573736564457863657074696f6e736f914e4a015674001c5b6a6176612e6c616e672e537461636b5472616365456c656d656e746e1c4fab6a6176612e6c616e672e537461636b5472616365456c656d656e74940e6465636c6172696e67436c6173730a6d6574686f644e616d650866696c654e616d650a6c696e654e756d6265726f92530051636f6d2e616c697061792e736f66612e7270632e636f6465632e736f66616865737369616e2e73657269616c697a652e536f6661526573706f6e73654865737369616e53657269616c697a65725465737453003574657374437573746f6d5468726f7761626c65446573657269616c697a6572456e61626c6564466f72496e636f6d70617469626c65530026536f6661526573706f6e73654865737369616e53657269616c697a6572546573742e6a617661c86c6f9253002473756e2e7265666c6563742e4e61746976654d6574686f644163636573736f72496d706c07696e766f6b65301d4e61746976654d6574686f644163636573736f72496d706c2e6a6176618e6f9253002473756e2e7265666c6563742e4e61746976654d6574686f644163636573736f72496d706c06696e766f6b651d4e61746976654d6574686f644163636573736f72496d706c2e6a617661c83e6f9253002873756e2e7265666c6563742e44656c65676174696e674d6574686f644163636573736f72496d706c06696e766f6b6553002144656c65676174696e674d6574686f644163636573736f72496d706c2e6a617661bb6f92186a6176612e6c616e672e7265666c6563742e4d6574686f6406696e766f6b650b4d6574686f642e6a617661c9f26f925300296f72672e6a756e69742e72756e6e6572732e6d6f64656c2e4672616d65776f726b4d6574686f6424311172756e5265666c65637469766543616c6c144672616d65776f726b4d6574686f642e6a617661c83b6f925300336f72672e6a756e69742e696e7465726e616c2e72756e6e6572732e6d6f64656c2e5265666c65637469766543616c6c61626c650372756e175265666c65637469766543616c6c61626c652e6a6176619c6f925300276f72672e6a756e69742e72756e6e6572732e6d6f64656c2e4672616d65776f726b4d6574686f6411696e766f6b654578706c6f736976656c79144672616d65776f726b4d6574686f642e6a617661c8386f925300326f72672e6a756e69742e696e7465726e616c2e72756e6e6572732e73746174656d656e74732e496e766f6b654d6574686f64086576616c7561746511496e766f6b654d6574686f642e6a617661a16f925300206f72672e6a756e69742e72756e6e6572732e506172656e7452756e6e65722433086576616c7561746511506172656e7452756e6e65722e6a617661c9326f9253002a6f72672e6a756e69742e72756e6e6572732e426c6f636b4a556e697434436c61737352756e6e65722431086576616c756174651b426c6f636b4a556e697434436c61737352756e6e65722e6a617661c8646f921e6f72672e6a756e69742e72756e6e6572732e506172656e7452756e6e65720772756e4c65616611506172656e7452756e6e65722e6a617661c96e6f925300286f72672e6a756e69742e72756e6e6572732e426c6f636b4a556e697434436c61737352756e6e65720872756e4368696c641b426c6f636b4a556e697434436c61737352756e6e65722e6a617661c8676f925300286f72672e6a756e69742e72756e6e6572732e426c6f636b4a556e697434436c61737352756e6e65720872756e4368696c641b426c6f636b4a556e697434436c61737352756e6e65722e6a617661c83f6f925300206f72672e6a756e69742e72756e6e6572732e506172656e7452756e6e657224340372756e11506172656e7452756e6e65722e6a617661c94b6f925300206f72672e6a756e69742e72756e6e6572732e506172656e7452756e6e65722431087363686564756c6511506172656e7452756e6e65722e6a617661c84f6f921e6f72672e6a756e69742e72756e6e6572732e506172656e7452756e6e65720b72756e4368696c6472656e11506172656e7452756e6e65722e6a617661c9496f921e6f72672e6a756e69742e72756e6e6572732e506172656e7452756e6e65720a6163636573732431303011506172656e7452756e6e65722e6a617661c8426f925300206f72672e6a756e69742e72756e6e6572732e506172656e7452756e6e65722432086576616c7561746511506172656e7452756e6e65722e6a617661c9256f925300206f72672e6a756e69742e72756e6e6572732e506172656e7452756e6e65722433086576616c7561746511506172656e7452756e6e65722e6a617661c9326f921e6f72672e6a756e69742e72756e6e6572732e506172656e7452756e6e65720372756e11506172656e7452756e6e65722e6a617661c99d6f921a6f72672e6a756e69742e72756e6e65722e4a556e6974436f72650372756e0e4a556e6974436f72652e6a617661c8896f92530028636f6d2e696e74656c6c696a2e6a756e6974342e4a556e697434496465615465737452756e6e657213737461727452756e6e65725769746841726773194a556e697434496465615465737452756e6e65722e6a617661c8456f9253002f636f6d2e696e74656c6c696a2e72742e6a756e69742e496465615465737452756e6e65722452657065617465722431076578656375746513496465615465737452756e6e65722e6a617661b66f9253002d636f6d2e696e74656c6c696a2e72742e657865637574696f6e2e6a756e69742e546573747352657065617465720672657065617412546573747352657065617465722e6a6176619b6f9253002d636f6d2e696e74656c6c696a2e72742e6a756e69742e496465615465737452756e6e657224526570656174657213737461727452756e6e6572576974684172677313496465615465737452756e6e65722e6a617661b36f92530022636f6d2e696e74656c6c696a2e72742e6a756e69742e4a556e697453746172746572167072657061726553747265616d73416e645374617274114a556e6974537461727465722e6a617661c8eb6f92530022636f6d2e696e74656c6c696a2e72742e6a756e69742e4a556e697453746172746572046d61696e114a556e6974537461727465722e6a617661c8367a567400326a6176612e7574696c2e436f6c6c656374696f6e7324556e6d6f6469666961626c6552616e646f6d4163636573734c6973746e007a4e"; ByteArrayWrapperByteBuf bsIn = new ByteArrayWrapperByteBuf(hexToByte(encodeMsg)); Map<String, String> ctx = new HashMap<>(); ctx.put(RemotingConstants.HEAD_GENERIC_TYPE, "2"); SofaResponse sofaResponse2 = new SofaResponse(); serializer.decodeObjectByTemplate(bsIn, ctx, sofaResponse2); Assert.assertTrue(sofaResponse2.getAppResponse() instanceof RuntimeException); Assert.assertTrue(((RuntimeException) sofaResponse2.getAppResponse()).getMessage().startsWith( "occur business exception, but type=com.alipay.sofa.rpc.codec.sofahessian.mock.DeprecatedMockError class is not " + "found, error: ")); } finally { setGenericThrowException(false); clearCacheDeserializerMap(); } }
// Wraps the parent producer in a simple config producer built from the spec
// element, then delegates model construction with that producer as the parent
// so user configs attach to the inserted producer.
@Override public MODEL build(ConfigModelInstanceFactory<MODEL> factory, Element spec, ConfigModelContext context) { VespaDomBuilder.DomSimpleConfigProducerBuilder builder = new VespaDomBuilder.DomSimpleConfigProducerBuilder(context.getProducerId()); TreeConfigProducer<AnyConfigProducer> producer = builder.build(context.getDeployState(), context.getParentProducer(), spec); return super.build(factory, spec, context.withParent(producer)); }
// The <config> element from services XML must land as a user config on the
// producer the builder inserted under the parent.
@Test void testThatProducerIsInserted() { String services = "<foo><config name=\"bar.foo\"><key>value</key></config></foo>"; ModelBuilder builder = new ModelBuilder(); Model model = builder.build(DeployState.createTestState(new MockApplicationPackage.Builder().withServices(services).build()), null, null, new MockRoot(), XML.getDocument(services).getDocumentElement()); assertEquals(1, model.getContext().getParentProducer().getUserConfigs().size()); }
// Blocking variant: delegates to setAsync and waits for the multi-bucket SET
// to complete.
@Override public void set(Map<String, ?> buckets) { commandExecutor.get(setAsync(buckets)); }
// Values written through the multi-bucket set must be readable via individual buckets.
@Test public void testSet() { Map<String, Integer> buckets = new HashMap<String, Integer>(); buckets.put("12", 1); buckets.put("41", 2); redisson.getBuckets().set(buckets); RBucket<Object> r1 = redisson.getBucket("12"); assertThat(r1.get()).isEqualTo(1); RBucket<Object> r2 = redisson.getBucket("41"); assertThat(r2.get()).isEqualTo(2); }
// Updates an existing view after permission and integrity checks; the path id
// always overrides any id embedded in the payload.
@PUT @Path("{id}") @ApiOperation("Update view") @AuditEvent(type = ViewsAuditEventTypes.VIEW_UPDATE) public ViewDTO update(@ApiParam(name = "id") @PathParam("id") @NotEmpty String id, @ApiParam @Valid ViewDTO dto, @Context SearchUser searchUser) {
    final ViewDTO updatedDTO = dto.toBuilder().id(id).build();
    if (!searchUser.canUpdateView(updatedDTO)) {
        throw new ForbiddenException("Not allowed to edit " + summarize(updatedDTO) + ".");
    }
    // Validates e.g. referenced search filters the user may not see.
    validateIntegrity(updatedDTO, searchUser, false);
    var result = dbService.update(updatedDTO);
    // Record the update in the recent-activity stream under the matching GRN type.
    recentActivityService.update(result.id(), result.type().equals(ViewDTO.Type.DASHBOARD) ? GRNTypes.DASHBOARD : GRNTypes.SEARCH, searchUser);
    return result;
}
// Updating a view that references a search filter invisible to the user must
// fail integrity validation with a BadRequestException naming the filter.
@Test public void throwsExceptionWhenUpdatingSearchWithFilterThatUserIsNotAllowedToSee() { final ViewsResource viewsResource = createViewsResource( mockViewService(TEST_DASHBOARD_VIEW), mock(StartPageService.class), mock(RecentActivityService.class), mock(ClusterEventBus.class), new ReferencedSearchFiltersHelper(), searchFilterVisibilityChecker(Collections.singletonList("<<You cannot see this filter>>")), EMPTY_VIEW_RESOLVERS, SEARCH ); Assertions.assertThatThrownBy(() -> viewsResource.update(VIEW_ID, TEST_DASHBOARD_VIEW, SEARCH_USER)) .isInstanceOf(BadRequestException.class) .hasMessageContaining("View cannot be saved, as it contains Search Filters which you are not privileged to view : [<<You cannot see this filter>>]"); }
/**
 * Creates a {@link ParameterizedType} for the given raw class and type
 * arguments, using the raw class's enclosing class (possibly null) as owner.
 *
 * @param raw the raw class to parameterize
 * @param typeArguments the type arguments; validated up front
 * @return the parameterized type
 */
public static ParameterizedType parameterize(final Class<?> raw, final Type... typeArguments) {
    // Rejects null/invalid inputs before any construction happens.
    checkParameterizeMethodParameter(raw, typeArguments);
    final Class<?> owner = raw.getEnclosingClass();
    return new ParameterizedTypeImpl(raw, owner, typeArguments);
}
// A null type argument must be rejected with IllegalArgumentException.
@Test void testParameterizeForNullTypeArray() { assertThrows(IllegalArgumentException.class, () -> { TypeUtils.parameterize(List.class, (Type) null); }); }
// Shortens a class's name for display by delegating to the String-based
// overload; anonymous classes have no meaningful simple name and are
// rejected up front.
private static String approximateSimpleName(Class<?> clazz, boolean dropOuterClassNames) { checkArgument(!clazz.isAnonymousClass(), "Attempted to get simple name of anonymous class"); return approximateSimpleName(clazz.getName(), dropOuterClassNames); }
// A nested class's simple name drops the outer-class prefix.
@Test public void testNestedSimpleName() { EmbeddedDoFn fn = new EmbeddedDoFn(); EmbeddedDoFn inner = fn.getEmbedded(); assertEquals("DeeperEmbedded", NameUtils.approximateSimpleName(inner)); }
// Handles a <property> (or deprecated <substitutionProperty>) element. The
// property value may come from one of three mutually exclusive sources — a
// file path, a classpath resource, or an inline name/value pair — and is
// stored under the requested scope. Errors are reported through the context
// rather than thrown.
public void begin(InterpretationContext ec, String localName, Attributes attributes) {
    if ("substitutionProperty".equals(localName)) {
        addWarn("[substitutionProperty] element has been deprecated. Please use the [property] element instead.");
    }
    String name = attributes.getValue(NAME_ATTRIBUTE);
    String value = attributes.getValue(VALUE_ATTRIBUTE);
    String scopeStr = attributes.getValue(SCOPE_ATTRIBUTE);
    Scope scope = ActionUtil.stringToScope(scopeStr);
    if (checkFileAttributeSanity(attributes)) {
        // Source 1: properties file on disk (path may contain ${} variables).
        String file = attributes.getValue(FILE_ATTRIBUTE);
        file = ec.subst(file);
        try {
            FileInputStream istream = new FileInputStream(file);
            // NOTE(review): istream is not closed here — presumably
            // loadAndSetProperties closes it; confirm.
            loadAndSetProperties(ec, istream, scope);
        } catch (FileNotFoundException e) {
            addError("Could not find properties file [" + file + "].", e);
        } catch (IOException e1) {
            addError("Could not read properties file [" + file + "].", e1);
        }
    } else if (checkResourceAttributeSanity(attributes)) {
        // Source 2: properties resource on the classpath.
        String resource = attributes.getValue(RESOURCE_ATTRIBUTE);
        resource = ec.subst(resource);
        URL resourceURL = Loader.getResourceBySelfClassLoader(resource);
        if (resourceURL == null) {
            addError("Could not find resource [" + resource + "].");
        } else {
            try {
                InputStream istream = resourceURL.openStream();
                loadAndSetProperties(ec, istream, scope);
            } catch (IOException e) {
                addError("Could not read resource file [" + resource + "].", e);
            }
        }
    } else if (checkValueNameAttributesSanity(attributes)) {
        // Source 3: inline name/value pair.
        value = RegularEscapeUtil.basicEscape(value);
        // now remove both leading and trailing spaces
        value = value.trim();
        value = ec.subst(value);
        ActionUtil.setProperty(ec, name, value, scope);
    } else {
        addError(INVALID_ATTRIBUTES);
    }
}
// The "file" attribute must undergo ${} substitution before the properties
// file is loaded.
@Test public void testLoadFileWithPrerequisiteSubsitution() { context.putProperty("STEM", CoreTestConstants.TEST_DIR_PREFIX + "input/joran"); atts.setValue("file", "${STEM}/propertyActionTest.properties"); propertyAction.begin(ec, null, atts); assertEquals("tata", ec.getProperty("v1")); assertEquals("toto", ec.getProperty("v2")); }
// Locates the single public method annotated with @ScalarFn.ApplyMethod on the
// given ScalarFn implementation. Overrides from parent classes are tolerated
// as long as name and parameter types match; anything else is rejected.
public static Method getApplyMethod(ScalarFn scalarFn) {
    Class<? extends ScalarFn> clazz = scalarFn.getClass();
    Collection<Method> matches = ReflectHelpers.declaredMethodsWithAnnotation(
        ScalarFn.ApplyMethod.class, clazz, ScalarFn.class);
    if (matches.isEmpty()) {
        throw new IllegalArgumentException(
            String.format(
                "No method annotated with @%s found in class %s.",
                ScalarFn.ApplyMethod.class.getSimpleName(), clazz.getName()));
    }
    // If we have at least one match, then either it should be the only match
    // or it should be an extension of the other matches (which came from parent
    // classes).
    Method first = matches.iterator().next();
    for (Method other : matches) {
        if (!first.getName().equals(other.getName())
            || !Arrays.equals(first.getParameterTypes(), other.getParameterTypes())) {
            throw new IllegalArgumentException(
                String.format(
                    "Found multiple methods annotated with @%s. [%s] and [%s]",
                    ScalarFn.ApplyMethod.class.getSimpleName(),
                    ReflectHelpers.formatMethod(first),
                    ReflectHelpers.formatMethod(other)));
        }
    }
    // Method must be public.
    if ((first.getModifiers() & Modifier.PUBLIC) == 0) {
        throw new IllegalArgumentException(
            String.format("Method %s is not public.", ReflectHelpers.formatMethod(first)));
    }
    return first;
}
// When a subclass overrides the annotated apply method, the override (the
// subclass's behavior, +2 here) must be the one resolved and invoked.
@Test @SuppressWarnings("nullness") // If result is null, test will fail as expected. public void testGetApplyMethodOverride() throws InvocationTargetException, IllegalAccessException { IncrementFnChild incrementFn = new IncrementFnChild(); Method method = ScalarFnReflector.getApplyMethod(incrementFn); @Nullable Object result = method.invoke(incrementFn, Long.valueOf(24L)); assertEquals(Long.valueOf(26L), result); }
/**
 * Looks up the binary protocol value handler registered for the given column
 * type.
 *
 * @param binaryColumnType column type to resolve
 * @return the registered binary protocol value handler
 * @throws IllegalArgumentException when no handler is registered for the type
 */
public static PostgreSQLBinaryProtocolValue getBinaryProtocolValue(final BinaryColumnType binaryColumnType) {
    boolean registered = BINARY_PROTOCOL_VALUES.containsKey(binaryColumnType);
    Preconditions.checkArgument(registered, "Cannot find PostgreSQL type '%s' in column type when process binary protocol value", binaryColumnType);
    return BINARY_PROTOCOL_VALUES.get(binaryColumnType);
}
// Requesting a column type with no registered handler (XML) must throw.
@Test void assertGetBinaryProtocolValueExThrown() { assertThrows(IllegalArgumentException.class, () -> PostgreSQLBinaryProtocolValueFactory.getBinaryProtocolValue(PostgreSQLColumnType.XML)); }
// Renders the endpoint as a URI authority string via HostAndPort's toString
// (which brackets IPv6 literals).
public static String toUriAuthority(NetworkEndpoint networkEndpoint) { return toHostAndPort(networkEndpoint).toString(); }
// IPv6 addresses must come back bracketed per URI authority syntax.
@Test public void toUriString_withIpV6Endpoint_returnsIpAddressWithBracket() { NetworkEndpoint ipV6Endpoint = NetworkEndpoint.newBuilder() .setType(NetworkEndpoint.Type.IP) .setIpAddress( IpAddress.newBuilder().setAddress("3ffe::1").setAddressFamily(AddressFamily.IPV6)) .build(); assertThat(NetworkEndpointUtils.toUriAuthority(ipV6Endpoint)).isEqualTo("[3ffe::1]"); }
// Verifies an ES256 JWS signature against the keys in the given JWKS.
// Returns false when the key set is empty or the referenced key id is absent;
// throws for a null/missing key set, unsupported algorithms, or
// signature-engine failures. The runtime null check is kept on purpose
// despite @NonNull (hence the S2583 suppression).
@SuppressWarnings("java:S2583") public static boolean verify(@NonNull JWKSet jwks, @NonNull JWSObject jws) {
    if (jwks == null) {
        throw new IllegalArgumentException("no JWKS provided to verify JWS");
    }
    if (jwks.getKeys() == null || jwks.getKeys().isEmpty()) {
        return false;
    }
    var header = jws.getHeader();
    if (!JWSAlgorithm.ES256.equals(header.getAlgorithm())) {
        throw new UnsupportedOperationException(
            "only supports ES256, found: " + header.getAlgorithm());
    }
    // Key lookup by the kid advertised in the JWS header.
    var key = jwks.getKeyByKeyId(header.getKeyID());
    if (key == null) {
        return false;
    }
    try {
        var processor = new DefaultJWSVerifierFactory();
        var verifier = processor.createJWSVerifier(jws.getHeader(), key.toECKey().toPublicKey());
        return jws.verify(verifier);
    } catch (JOSEException e) {
        throw FederationExceptions.badSignature(e);
    }
}
// An empty key set short-circuits to false before the JWS is even inspected
// (hence the null JWS argument is tolerated here).
@Test void verifyEmptyJwks() { var jwks = new JWKSet(); assertFalse(JwsVerifier.verify(jwks, null)); }
// Exposes the per-file sets of changed line numbers collected by the tracker.
Map<Path, Set<Integer>> changedLines() { return tracker.changedLines(); }
// A malformed hunk header ("+x1,3" is not a valid start line) must make
// changedLines() fail with IllegalStateException.
@Test(expected = IllegalStateException.class) public void crash_on_invalid_start_line_format() throws IOException { String example = "Index: sample1\n" + "===================================================================\n" + "--- a/sample1\n" + "+++ b/sample1\n" + "@@ -1 +x1,3 @@\n" + " same line\n" + "+added line 1\n" + "+added line 2\n"; printDiff(example); underTest.changedLines(); }
// Creates a CsvReader driven by the given read configuration — presumably a
// null config falls back to CsvReader's defaults; confirm against CsvReader.
public static CsvReader getReader(CsvReadConfig config) { return new CsvReader(config); }
// Manual smoke test (disabled): streams rows of a local CSV file to the console.
@Test @Disabled public void readTest3() { CsvReader reader = CsvUtil.getReader(); String path = FileUtil.isWindows() ? "d:/test/test.csv" : "~/test/test.csv"; reader.read(FileUtil.getUtf8Reader(path), Console::log); }
// Reads the column at columnIndex from the merged result set and coerces the
// value to a primitive long via the shared conversion utility.
@Override public long getLong(final int columnIndex) throws SQLException { return (long) ResultSetUtils.convertValue(mergeResultSet.getValue(columnIndex, long.class), long.class); }
// Column-label access must resolve to the underlying index-based read.
@Test void assertGetLongWithColumnLabel() throws SQLException { when(mergeResultSet.getValue(1, long.class)).thenReturn(1L); assertThat(shardingSphereResultSet.getLong("label"), is(1L)); }
public DirectoryEntry lookUp( File workingDirectory, JimfsPath path, Set<? super LinkOption> options) throws IOException { checkNotNull(path); checkNotNull(options); DirectoryEntry result = lookUp(workingDirectory, path, options, 0); if (result == null) { // an intermediate file in the path did not exist or was not a directory throw new NoSuchFileException(path.toString()); } return result; }
// A symlink loop must surface as an IOException, both when the loop is the
// final element and when resolution would continue past it.
@Test public void testLookup_absolute_symlinkLoop() { try { lookup("/work/four/loop"); fail(); } catch (IOException expected) { } try { lookup("/work/four/loop/whatever"); fail(); } catch (IOException expected) { } }
// Sets a bean property by name from its String representation. Unknown
// properties and conversion failures are reported as warnings rather than
// thrown; a null value is silently ignored.
public void setProperty(String name, String value) {
    if (value == null) {
        return;
    }
    // JavaBeans convention: lower-case the leading character unless it is part of an acronym.
    name = Introspector.decapitalize(name);
    PropertyDescriptor prop = getPropertyDescriptor(name);
    if (prop == null) {
        addWarn("No such property [" + name + "] in " + objClass.getName() + ".");
    } else {
        try {
            setProperty(prop, name, value);
        } catch (PropertySetterException ex) {
            addWarn("Failed to set property [" + name + "] to value \"" + value + "\". ", ex);
        }
    }
}
// A "2 kb" string must be converted to 2048 bytes by the property setter.
@Test public void testFileSize() { setter.setProperty("fs", "2 kb"); assertEquals(2 * 1024, house.getFs().getSize()); }
// Generates the next snowflake id. Thread-safe via synchronization; the id
// packs (timestamp - twepoch), data-center id, worker id and an
// in-millisecond sequence into one long.
public synchronized long nextId() {
    long timestamp = genTime();
    if (timestamp < this.lastTimestamp) {
        if (this.lastTimestamp - timestamp < timeOffset) {
            // Tolerate a small clock rollback (e.g. caused by NTP adjustment)
            // by reusing the last timestamp.
            timestamp = lastTimestamp;
        } else {
            // The server clock moved backwards too far; refuse to hand out ids.
            throw new IllegalStateException(StrUtil.format("Clock moved backwards. Refusing to generate id for {}ms", lastTimestamp - timestamp));
        }
    }
    if (timestamp == this.lastTimestamp) {
        final long sequence = (this.sequence + 1) & SEQUENCE_MASK;
        if (sequence == 0) {
            // Sequence space for this millisecond exhausted: spin to the next one.
            timestamp = tilNextMillis(lastTimestamp);
        }
        this.sequence = sequence;
    } else {
        // issue#I51EJY
        if (randomSequenceLimit > 1) {
            // Randomize the starting sequence so low-bit sharding of ids stays balanced.
            sequence = RandomUtil.randomLong(randomSequenceLimit);
        } else {
            sequence = 0L;
        }
    }
    lastTimestamp = timestamp;
    return ((timestamp - twepoch) << TIMESTAMP_LEFT_SHIFT) | (dataCenterId << DATA_CENTER_ID_SHIFT) | (workerId << WORKER_ID_SHIFT) | sequence;
}
// Stress test (disabled): 100 threads x 50000 ids must never produce a duplicate.
@Test @Disabled public void uniqueTest(){ final Snowflake snowflake = IdUtil.getSnowflake(0, 0); final Set<Long> ids = new ConcurrentHashSet<>(); ThreadUtil.concurrencyTest(100, () -> { for (int i = 0; i < 50000; i++) { if(false == ids.add(snowflake.nextId())){ throw new UtilException("重复ID!"); } } }); }
/**
 * Returns the initial HashMap capacity needed to hold {@code numMappings}
 * entries without resizing, given the default load factor.
 *
 * @param numMappings expected number of mappings
 * @return the minimum capacity, rounded up
 */
static int calculateHashMapCapacity(int numMappings) {
    double minimumCapacity = numMappings / DEFAULT_LOAD_FACTOR;
    return (int) Math.ceil(minimumCapacity);
}
// A non-Collection iterable falls back to the default capacity (16), while a
// sized collection gets ceil(size / loadFactor) = 4 for three entries.
@Test public void calculateHashMapCapacity() { @SuppressWarnings("UnnecessaryMethodReference") Iterable<Integer> iterable = List.of(1, 2, 3)::iterator; assertThat(Caffeine.calculateHashMapCapacity(iterable)).isEqualTo(16); assertThat(Caffeine.calculateHashMapCapacity(List.of(1, 2, 3))).isEqualTo(4); }
/**
 * Returns the value mapped to {@code key}, lazily constructing and inserting
 * one when absent. Safe under concurrency: if another thread wins the
 * insertion race, that thread's value is returned (the locally constructed
 * value is discarded).
 *
 * @param map the concurrent map
 * @param key the lookup key
 * @param func factory invoked only when no mapping exists
 * @return the existing or newly inserted value
 */
public static <K, V> V getOrPutIfAbsent(ConcurrentMap<K, V> map, K key, ConstructorFunction<K, V> func) {
    V existing = map.get(key);
    if (existing != null) {
        return existing;
    }
    V created = func.createNew(key);
    // putIfAbsent returns the racing thread's value when we lost the race.
    V raced = map.putIfAbsent(key, created);
    return raced != null ? raced : created;
}
// The constructor function must run exactly once: the second lookup hits the
// cached value.
@Test public void testGetOrPutIfAbsent() { int result = ConcurrencyUtil.getOrPutIfAbsent(map, 5, constructorFunction); assertEquals(1005, result); result = ConcurrencyUtil.getOrPutIfAbsent(map, 5, constructorFunction); assertEquals(1005, result); assertEquals(1, constructorFunction.getConstructions()); }
// Deletes a data-source config after confirming the id exists.
@Override public void deleteDataSourceConfig(Long id) { // validate existence (throws when the id is unknown) validateDataSourceConfigExists(id); // delete the row dataSourceConfigMapper.deleteById(id); }
// Deleting an existing config must remove the row from the database.
@Test public void testDeleteDataSourceConfig_success() { // mock data DataSourceConfigDO dbDataSourceConfig = randomPojo(DataSourceConfigDO.class); dataSourceConfigMapper.insert(dbDataSourceConfig);// @Sql: insert an existing row first // prepare arguments Long id = dbDataSourceConfig.getId(); // invoke dataSourceConfigService.deleteDataSourceConfig(id); // assert the row is gone assertNull(dataSourceConfigMapper.selectById(id)); }
/**
 * Inserts a new node holding {@code value} immediately after {@code node}.
 *
 * @param node the node to insert after
 * @param value the value to wrap in the new node
 * @return the newly created node
 */
public ListNode2<T> addAfter(ListNode2<T> node, T value) {
    final ListNode2<T> inserted = new ListNode2<T>(value);
    // Node-based overload performs the actual pointer splicing.
    addAfter(node, inserted);
    return inserted;
}
// addAfter must splice in the middle and at the tail, updating size, order,
// last() and head() accordingly.
@Test public void testAddAfter() { DoublyLinkedList<Integer> list = new DoublyLinkedList<Integer>(); list.add(1); ListNode2<Integer> node2 = list.add(2); ListNode2<Integer> node4 = list.add(4); list.addAfter(node2, 3); assertEquals(4, list.size()); assertArrayEquals(new Integer[]{1, 2, 3, 4}, list.toArray()); ListNode2<Integer> node5 = list.addAfter(node4, 5); assertEquals(5, list.size()); assertArrayEquals(new Integer[]{1, 2, 3, 4, 5}, list.toArray()); assertEquals(new Integer(5), list.last()); assertEquals(node5, list.head()); }
// Builds the StateSerdes for a store, resolving the key serde via the shared
// WrappingNullableUtils helper and the value serde via the supplied prepare
// function; failures are wrapped with store-specific error messages by
// prepareSerde.
static <K, V> StateSerdes<K, V> prepareStoreSerde(final StateStoreContext context, final String storeName, final String changelogTopic, final Serde<K> keySerde, final Serde<V> valueSerde, final PrepareFunc<V> prepareValueSerdeFunc) {
    return new StateSerdes<>(
        changelogTopic,
        prepareSerde(WrappingNullableUtils::prepareKeySerde, storeName, keySerde, new SerdeGetter(context), true, context.taskId()),
        prepareSerde(prepareValueSerdeFunc, storeName, valueSerde, new SerdeGetter(context), false, context.taskId())
    );
}
// Verifies that a ConfigException from prepareKeySerde is wrapped into a StreamsException whose message names the store and whose cause carries the original config hint.
@Test public void shouldThrowStreamsExceptionOnUndefinedKeySerdeForStateStoreContext() { final MockInternalNewProcessorContext<String, String> context = new MockInternalNewProcessorContext<>(); utilsMock.when(() -> WrappingNullableUtils.prepareKeySerde(any(), any())) .thenThrow(new ConfigException("Please set StreamsConfig#DEFAULT_KEY_SERDE_CLASS_CONFIG")); final Throwable exception = assertThrows(StreamsException.class, () -> StoreSerdeInitializer.prepareStoreSerde((StateStoreContext) context, "myStore", "topic", new Serdes.StringSerde(), new Serdes.StringSerde(), WrappingNullableUtils::prepareValueSerde)); assertThat(exception.getMessage(), equalTo("Failed to initialize key serdes for store myStore")); assertThat(exception.getCause().getMessage(), equalTo("Please set StreamsConfig#DEFAULT_KEY_SERDE_CLASS_CONFIG")); }
public static synchronized <I> IconCache<I> get() {
    // Lazily build the shared cache on first access; the synchronized
    // modifier keeps the one-time initialization race-free.
    if (singleton == null) {
        singleton = new IconCacheFactory().create();
    }
    return singleton;
}
// Singleton check: repeated calls to get() must return the same cache instance.
@Test public void testGet() { assertSame(IconCacheFactory.get(), IconCacheFactory.get()); }
// Parses an on-disk indexer data directory: rejects missing/non-directory/unreadable paths, returns an empty result when no "nodes" subdirectory exists, otherwise collects per-node info from numeric node directories, dropping empty nodes. Wraps listing failures in IndexerInformationParserException.
public IndexerDirectoryInformation parse(Path path) { if (!Files.exists(path)) { throw new IndexerInformationParserException("Path " + path + " does not exist."); } if (!Files.isDirectory(path)) { throw new IndexerInformationParserException("Path " + path + " is not a directory"); } if (!Files.isReadable(path)) { throw new IndexerInformationParserException("Path " + path + " is not readable"); } final Path nodesPath = path.resolve("nodes"); if (!Files.exists(nodesPath)) { return IndexerDirectoryInformation.empty(path); } try (final Stream<Path> nodes = Files.list(nodesPath)) { final List<NodeInformation> nodeInformation = nodes.filter(Files::isDirectory) .filter(p -> p.getFileName().toString().matches("\\d+")) .map(this::parseNode) .filter(node -> !node.isEmpty()) .toList(); return new IndexerDirectoryInformation(path, nodeInformation); } catch (IOException e) { throw new IndexerInformationParserException("Failed to list nodes", e); } }
// Parses the bundled "opensearch2" fixture directory and asserts node version, expected index names, and the graylog_0 index's creation version plus its single primary shard metadata.
@Test void testOpensearch2() throws URISyntaxException { final URI uri = getClass().getResource("/indices/opensearch2").toURI(); final IndexerDirectoryInformation result = parser.parse(Path.of(uri)); Assertions.assertThat(result.nodes()) .hasSize(1) .allSatisfy(node -> { Assertions.assertThat(node.nodeVersion()).isEqualTo("2.10.0"); Assertions.assertThat(node.indices()) .hasSize(6) .extracting(IndexInformation::indexName) .contains(".opensearch-sap-log-types-config", ".plugins-ml-config", "graylog_0", ".opensearch-observability", ".opendistro_security", "security-auditlog-2023.11.24"); final IndexInformation graylog_0 = node.indices().stream().filter(i -> i.indexName().equals("graylog_0")).findFirst().orElseThrow(() -> new RuntimeException("Failed to detect graylog_0 index")); Assertions.assertThat(graylog_0.indexVersionCreated()).isEqualTo("2.10.0"); Assertions.assertThat(graylog_0.shards()) .hasSize(1) .allSatisfy(shard -> { Assertions.assertThat(shard.documentsCount()).isEqualTo(1); Assertions.assertThat(shard.name()).isEqualTo("S0"); Assertions.assertThat(shard.primary()).isEqualTo(true); Assertions.assertThat(shard.minLuceneVersion()).isEqualTo("9.7.0"); }); }); }
// Searches B2 for files matching the filter: from the root it scans every bucket, otherwise only the workdir's bucket (using the workdir key as a listing prefix). Pages through listFileNames until nextFileName is null, mapping B2 API errors and IOExceptions to BackgroundExceptions.
@Override public AttributedList<Path> search(final Path workdir, final Filter<Path> regex, final ListProgressListener listener) throws BackgroundException { try { final AttributedList<Path> list = new AttributedList<>(); String prefix = null; final AttributedList<Path> containers; if(workdir.isRoot()) { containers = new B2BucketListService(session, fileid).list(new Path(String.valueOf(Path.DELIMITER), EnumSet.of(Path.Type.volume, Path.Type.directory)), listener); } else { containers = new AttributedList<>(Collections.singletonList(containerService.getContainer(workdir))); if(!containerService.isContainer(workdir)) { prefix = containerService.getKey(workdir) + Path.DELIMITER; } } for(Path container : containers) { String startFilename = prefix; do { final B2ListFilesResponse response = session.getClient().listFileNames( fileid.getVersionId(container), startFilename, new HostPreferences(session.getHost()).getInteger("b2.listing.chunksize"), prefix, null); for(B2FileInfoResponse info : response.getFiles()) { final Path f = new Path(String.format("%s%s%s", container.getAbsolute(), Path.DELIMITER, info.getFileName()), EnumSet.of(Path.Type.file)); if(regex.accept(f)) { list.add(f.withAttributes(new B2AttributesFinderFeature(session, fileid).toAttributes(info))); } } startFilename = response.getNextFileName(); } while(startFilename != null); } return list; } catch(B2ApiException e) { throw new B2ExceptionMappingService(fileid).map(e); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map(e); } }
// Integration test: a touched file is found by full name, upper-cased name, and substrings of the name within its bucket, but not when searching from an unrelated subdirectory; cleans up by deleting the file.
@Test public void testSearchInBucket() throws Exception { final String name = new AlphanumericRandomStringService().random(); final Path bucket = new Path("test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); final B2VersionIdProvider fileid = new B2VersionIdProvider(session); final Path file = new B2TouchFeature(session, fileid).touch(new Path(bucket, name, EnumSet.of(Path.Type.file)), new TransferStatus()); final B2SearchFeature feature = new B2SearchFeature(session, fileid); assertNotNull(feature.search(bucket, new SearchFilter(name), new DisabledListProgressListener()).find(new SimplePathPredicate(file))); assertNotNull(feature.search(bucket, new SearchFilter(StringUtils.upperCase(name)), new DisabledListProgressListener()).find(new SimplePathPredicate(file))); assertNotNull(feature.search(bucket, new SearchFilter(StringUtils.substring(name, 2)), new DisabledListProgressListener()).find(new SimplePathPredicate(file))); assertNotNull(feature.search(bucket, new SearchFilter(StringUtils.substring(name, 0, name.length() - 2)), new DisabledListProgressListener()).find(new SimplePathPredicate(file))); final Path subdir = new Path(bucket, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)); assertNull(feature.search(subdir, new SearchFilter(name), new DisabledListProgressListener()).find(new SimplePathPredicate(file))); new B2DeleteFeature(session, fileid).delete(Collections.singletonList(file), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Sets the level of one of the named config-module loggers.
 * <p>
 * The nine-fold repeated cast-and-set from the original is factored into a
 * single private helper; unknown logger names are silently ignored, matching
 * the original default branch.
 *
 * @param logName logical logger name, e.g. {@code "config-server"}
 * @param level   level name understood by Logback's {@code Level.valueOf}
 */
public static void setLogLevel(String logName, String level) {
    switch (logName) {
        case "config-server":
            applyLevel(DEFAULT_LOG, level);
            break;
        case "config-fatal":
            applyLevel(FATAL_LOG, level);
            break;
        case "config-pull":
            applyLevel(PULL_LOG, level);
            break;
        case "config-pull-check":
            applyLevel(PULL_CHECK_LOG, level);
            break;
        case "config-dump":
            applyLevel(DUMP_LOG, level);
            break;
        case "config-memory":
            applyLevel(MEMORY_LOG, level);
            break;
        case "config-client-request":
            applyLevel(CLIENT_LOG, level);
            break;
        case "config-trace":
            applyLevel(TRACE_LOG, level);
            break;
        case "config-notify":
            applyLevel(NOTIFY_LOG, level);
            break;
        default:
            // Unknown logger name: keep the original behavior of doing nothing.
            break;
    }
}

/**
 * Casts the SLF4J facade to the Logback implementation and applies the level.
 * Level.valueOf is only evaluated when a known logger name matched, as before.
 */
private static void applyLevel(Object logger, String level) {
    ((ch.qos.logback.classic.Logger) logger).setLevel(Level.valueOf(level));
}
// Iterates every supported logical logger name, sets it to INFO, and asserts the underlying Logback logger reflects the new level.
@Test void testSetLogLevel() { LogUtil.setLogLevel("config-server", "INFO"); ch.qos.logback.classic.Logger defaultLog = (Logger) LogUtil.DEFAULT_LOG; assertEquals("INFO", defaultLog.getLevel().levelStr); LogUtil.setLogLevel("config-fatal", "INFO"); ch.qos.logback.classic.Logger fatalLog = (Logger) LogUtil.FATAL_LOG; assertEquals("INFO", fatalLog.getLevel().levelStr); LogUtil.setLogLevel("config-pull", "INFO"); ch.qos.logback.classic.Logger pullLog = (Logger) LogUtil.PULL_LOG; assertEquals("INFO", pullLog.getLevel().levelStr); LogUtil.setLogLevel("config-pull-check", "INFO"); ch.qos.logback.classic.Logger pullCheckLog = (Logger) LogUtil.PULL_CHECK_LOG; assertEquals("INFO", pullCheckLog.getLevel().levelStr); LogUtil.setLogLevel("config-dump", "INFO"); ch.qos.logback.classic.Logger dumpLog = (Logger) LogUtil.DUMP_LOG; assertEquals("INFO", dumpLog.getLevel().levelStr); LogUtil.setLogLevel("config-memory", "INFO"); ch.qos.logback.classic.Logger memoryLog = (Logger) LogUtil.MEMORY_LOG; assertEquals("INFO", memoryLog.getLevel().levelStr); LogUtil.setLogLevel("config-client-request", "INFO"); ch.qos.logback.classic.Logger clientRequestLog = (Logger) LogUtil.CLIENT_LOG; assertEquals("INFO", clientRequestLog.getLevel().levelStr); LogUtil.setLogLevel("config-trace", "INFO"); ch.qos.logback.classic.Logger traceLog = (Logger) LogUtil.TRACE_LOG; assertEquals("INFO", traceLog.getLevel().levelStr); LogUtil.setLogLevel("config-notify", "INFO"); ch.qos.logback.classic.Logger notifyLog = (Logger) LogUtil.NOTIFY_LOG; assertEquals("INFO", notifyLog.getLevel().levelStr); }
// OkHttp async completion path: converts the raw response, applies the optional converter (or passes the Response through when converter is null), then delivers the result — or a conversion IOException/RuntimeException — to both the future and the optional callback. finish() always runs so waiters on the future's latch are released.
@Override public void onResponse(Call call, okhttp3.Response okHttpResponse) { try { final Response response = OkHttpHttpClient.convertResponse(okHttpResponse); try { @SuppressWarnings("unchecked") final T t = converter == null ? (T) response : converter.convert(response); okHttpFuture.setResult(t); if (callback != null) { callback.onCompleted(t); } } catch (IOException | RuntimeException e) { okHttpFuture.setException(e); if (callback != null) { callback.onThrowable(e); } } } finally { okHttpFuture.finish(); } }
// Uses a converter that throws IOException to assert the callback receives the throwable (not a response) and that the future's latch is released with the failure (future.get throws ExecutionException).
@Test public void shouldReleaseLatchOnIOException() { handler = new OAuthAsyncCompletionHandler<>(callback, EXCEPTION_RESPONSE_CONVERTER, future); call.enqueue(handler); final Request request = new Request.Builder().url("http://localhost/").build(); final okhttp3.Response response = new okhttp3.Response.Builder() .request(request) .protocol(Protocol.HTTP_1_1) .code(200) .message("ok") .body(ResponseBody.create(new byte[0], MediaType.get("text/plain"))) .build(); handler.onResponse(call, response); assertNull(callback.getResponse()); assertNotNull(callback.getThrowable()); assertTrue(callback.getThrowable() instanceof IOException); /* verify latch is released */ assertThrows(ExecutionException.class, new ThrowingRunnable() { @Override public void run() throws Throwable { future.get(); } }); }
// Hystrix fallback hook: bridges the Reactive Streams publisher returned by doFallback() into the rx.Observable type Hystrix expects.
@Override protected Observable<Void> resumeWithFallback() { return RxReactiveStreams.toObservable(doFallback()); }
// Builds a full HystrixObservableCommand.Setter from a HystrixHandle and asserts that constructing/subscribing the command with a null argument surfaces a NullPointerException.
@Test public void testResumeWithFallback() { MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("http://localhost:8080/http") .remoteAddress(new InetSocketAddress(8092)) .header("MetaDataCache", "Hello") .build()); HystrixHandle hystrixHandle = new HystrixHandle(); hystrixHandle.setGroupKey("groupKey"); hystrixHandle.setCommandKey("commandKey"); final HystrixCommandProperties.Setter propertiesSetter = HystrixCommandProperties.Setter() .withExecutionTimeoutInMilliseconds((int) hystrixHandle.getTimeout()) .withCircuitBreakerEnabled(true) .withExecutionIsolationStrategy(HystrixCommandProperties.ExecutionIsolationStrategy.SEMAPHORE) .withExecutionIsolationSemaphoreMaxConcurrentRequests(hystrixHandle.getMaxConcurrentRequests()) .withCircuitBreakerErrorThresholdPercentage(hystrixHandle.getErrorThresholdPercentage()) .withCircuitBreakerRequestVolumeThreshold(hystrixHandle.getRequestVolumeThreshold()) .withCircuitBreakerSleepWindowInMilliseconds(hystrixHandle.getSleepWindowInMilliseconds()); HystrixObservableCommand.Setter setter = HystrixObservableCommand.Setter .withGroupKey(HystrixCommandGroupKey.Factory.asKey(hystrixHandle.getGroupKey())) .andCommandKey(HystrixCommandKey.Factory.asKey(hystrixHandle.getCommandKey())) .andCommandPropertiesDefaults(propertiesSetter); assertThrows(NullPointerException.class, () -> { HystrixCommand hystrixCommand = new HystrixCommand(setter, exchange, mock(ShenyuPluginChain.class), null); TestSubscriber<Void> testSubscriberWithNull = new TestSubscriber<>(); when(hystrixCommand.resumeWithFallback().subscribe(testSubscriberWithNull)).thenThrow(NullPointerException.class); }); }
// Prints all registered PipelineOptions class names, sorted by ClassNameComparator, followed by usage hints for the --help=<OptionsName> flag. Rejects a null stream via checkNotNull.
public static void printHelp(PrintStream out) { checkNotNull(out); out.println("The set of registered options are:"); Set<Class<? extends PipelineOptions>> sortedOptions = new TreeSet<>(ClassNameComparator.INSTANCE); sortedOptions.addAll(CACHE.get().registeredOptions); for (Class<? extends PipelineOptions> kls : sortedOptions) { out.format("  %s%n", kls.getName()); } out.format( "%nUse --help=<OptionsName> for detailed help. For example:%n" + "  --help=DataflowPipelineOptions <short names valid for registered options>%n" + "  --help=org.apache.beam.runners.dataflow.options.DataflowPipelineOptions%n"); }
// Captures printHelp output for PipelineOptions into a byte stream and asserts it mentions the class name, the --runner flag, its default, and its description.
@Test public void testProgrammaticPrintHelpForSpecificType() { ByteArrayOutputStream baos = new ByteArrayOutputStream(); PipelineOptionsFactory.printHelp(new PrintStream(baos), PipelineOptions.class); String output = new String(baos.toByteArray(), StandardCharsets.UTF_8); assertThat(output, containsString("org.apache.beam.sdk.options.PipelineOptions")); assertThat(output, containsString("--runner")); assertThat(output, containsString("Default: " + DEFAULT_RUNNER_NAME)); assertThat( output, containsString("The pipeline runner that will be used to execute the pipeline.")); }
// On Spring context refresh: wipes any existing agent plugins folder, unzips the downloaded plugins archive into it, then bootstraps the plugin jar monitor and plugin infrastructure. IO and runtime failures are logged as warnings rather than propagated.
@Override public void onApplicationEvent(ContextRefreshedEvent contextRefreshedEvent) { try { File pluginsFolder = new File(systemEnvironment.get(SystemEnvironment.AGENT_PLUGINS_PATH)); if (pluginsFolder.exists()) { FileUtils.forceDelete(pluginsFolder); } zipUtil.unzip(DownloadableFile.AGENT_PLUGINS.getLocalFile(), pluginsFolder); defaultPluginJarLocationMonitor.initialize(); pluginManager.startInfrastructure(false); } catch (IOException e) { LOG.warn("could not extract plugin zip", e); } catch (RuntimeException e) { LOG.warn("error while initializing agent plugins", e); } }
// Verifies the refresh handler unzips the agent plugins archive into the configured plugins path.
@Test void shouldExtractPluginZip() throws Exception { agentPluginsInitializer.onApplicationEvent(null); verify(zipUtil).unzip(DownloadableFile.AGENT_PLUGINS.getLocalFile(), new File(SystemEnvironment.PLUGINS_PATH)); }
// Scans the term buffer between the rebuild position and the high-water mark for a gap (missing data). When the scan stops short of the limit a gap exists; a gap at a new offset/term activates NAK timing, and any gap run checks the retransmit timer. Returns the rebuild offset packed with a loss-found flag. NOTE(review): relies on scannedTermOffset/scannedTermId/activeTermOffset/activeTermId side-channel fields updated by scanForGap's callback — confirm against the enclosing class.
public long scan( final UnsafeBuffer termBuffer, final long rebuildPosition, final long hwmPosition, final long nowNs, final int termLengthMask, final int positionBitsToShift, final int initialTermId) { boolean lossFound = false; int rebuildOffset = (int)rebuildPosition & termLengthMask; if (rebuildPosition < hwmPosition) { final int rebuildTermCount = (int)(rebuildPosition >>> positionBitsToShift); final int hwmTermCount = (int)(hwmPosition >>> positionBitsToShift); final int rebuildTermId = initialTermId + rebuildTermCount; final int hwmTermOffset = (int)hwmPosition & termLengthMask; final int limitOffset = rebuildTermCount == hwmTermCount ? hwmTermOffset : termLengthMask + 1; rebuildOffset = scanForGap(termBuffer, rebuildTermId, rebuildOffset, limitOffset, this); if (rebuildOffset < limitOffset) { if (scannedTermOffset != activeTermOffset || scannedTermId != activeTermId) { activateGap(nowNs); lossFound = true; } checkTimerExpiry(nowNs); } } return pack(rebuildOffset, lossFound); }
// With the high-water mark one full term plus a frame beyond the rebuild position, two scans (second after the NAK delay) must report a gap covering the remainder of the term buffer.
@Test void shouldHandleHwmGreaterThanCompletedBuffer() { lossDetector = getLossHandlerWithLongRetry(); long rebuildPosition = ACTIVE_TERM_POSITION; final long hwmPosition = ACTIVE_TERM_POSITION + TERM_BUFFER_LENGTH + ALIGNED_FRAME_LENGTH; insertDataFrame(offsetOfMessage(0)); rebuildPosition += ALIGNED_FRAME_LENGTH; lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID); currentTime = TimeUnit.MILLISECONDS.toNanos(40); lossDetector.scan(termBuffer, rebuildPosition, hwmPosition, currentTime, MASK, POSITION_BITS_TO_SHIFT, TERM_ID); verify(lossHandler).onGapDetected(TERM_ID, offsetOfMessage(1), TERM_BUFFER_LENGTH - (int)rebuildPosition); }
// Raw Redis GET: reads the value for the key using the byte-array codec via the generic read(...) helper.
@Override public byte[] get(byte[] key) { return read(key, ByteArrayCodec.INSTANCE, RedisCommands.GET, key); }
// Spring Data geo-commands smoke test on the Redisson connection factory: adds three geo points, runs a radius query around the first, and expects only member "a" inside the 80km circle.
@Test public void testGeo() { RedisTemplate<String, String> redisTemplate = new RedisTemplate<>(); redisTemplate.setConnectionFactory(new RedissonConnectionFactory(redisson)); redisTemplate.afterPropertiesSet(); String key = "test_geo_key"; Point point = new Point(116.401001, 40.119499); redisTemplate.opsForGeo().add(key, point, "a"); point = new Point(111.545998, 36.133499); redisTemplate.opsForGeo().add(key, point, "b"); point = new Point(111.483002, 36.030998); redisTemplate.opsForGeo().add(key, point, "c"); Circle within = new Circle(116.401001, 40.119499, 80000); RedisGeoCommands.GeoRadiusCommandArgs args = RedisGeoCommands.GeoRadiusCommandArgs.newGeoRadiusArgs().includeCoordinates(); GeoResults<RedisGeoCommands.GeoLocation<String>> res = redisTemplate.opsForGeo().radius(key, within, args); assertThat(res.getContent().get(0).getContent().getName()).isEqualTo("a"); }
// Convenience overload: selects instances for the service with no cluster filter (empty list), delegating to the cluster-aware variant.
@Override public List<Instance> selectInstances(String serviceName, boolean healthy) throws NacosException { return selectInstances(serviceName, new ArrayList<>(), healthy); }
// Verifies selectInstances(serviceName, healthy, subscribe=false) queries the proxy once with the default group, empty clusters string, and no subscription.
@Test void testSelectInstances3() throws NacosException { /* given */ String serviceName = "service1"; /* when */ client.selectInstances(serviceName, true, false); /* then */ verify(proxy, times(1)).queryInstancesOfService(serviceName, Constants.DEFAULT_GROUP, "", false); }
// Persists a device to LDAP as a new entry: optionally registers it, creates the device subcontext plus children, and updates certificates unless PRESERVE_CERTIFICATE is set. On any failure after the subcontext was created (rollback flag), the partially written subtree is destroyed and registered DNs are unregistered. NameAlreadyBound maps to ConfigurationAlreadyExistsException; other naming/certificate errors to ConfigurationException.
@Override public synchronized ConfigurationChanges persist(Device device, EnumSet<Option> options) throws ConfigurationException { ensureConfigurationExists(); String deviceName = device.getDeviceName(); String deviceDN = deviceRef(deviceName); boolean rollback = false; ArrayList<String> destroyDNs = new ArrayList<>(); try { if (options != null && options.contains(Option.REGISTER)) register(device, destroyDNs); ConfigurationChanges diffs = configurationChangesOf(options); ConfigurationChanges.ModifiedObject ldapObj = ConfigurationChanges.addModifiedObject(diffs, deviceDN, ConfigurationChanges.ChangeType.C); createSubcontext(deviceDN, storeTo(ConfigurationChanges.nullifyIfNotVerbose(diffs, ldapObj), device, new BasicAttributes(true))); rollback = true; storeChilds(ConfigurationChanges.nullifyIfNotVerbose(diffs, diffs), deviceDN, device); if (options == null || !options.contains(Option.PRESERVE_CERTIFICATE)) updateCertificates(device); rollback = false; destroyDNs.clear(); return diffs; } catch (NameAlreadyBoundException e) { throw new ConfigurationAlreadyExistsException(deviceName); } catch (NamingException e) { throw new ConfigurationException(e); } catch (CertificateException e) { throw new ConfigurationException(e); } finally { if (rollback) { try { destroySubcontextWithChilds(deviceDN); } catch (NamingException e) { LOG.warn("Rollback failed:", e); } } unregister(destroyDNs); } }
// End-to-end persist test: creates a device, verifies its AE role flags, connection, and transfer capabilities (echo/CT/FIND with expected transfer syntaxes and query options), confirms it is listed, asserts persisting a duplicate throws ConfigurationAlreadyExistsException, then removes the device.
@Test public void testPersist() throws Exception { try { config.removeDevice("Test-Device-1", null); } catch (ConfigurationNotFoundException e) {} Device device = createDevice("Test-Device-1", "TEST-AET1"); config.persist(device, null); ApplicationEntity ae = config.findApplicationEntity("TEST-AET1"); assertFalse(ae.isAssociationInitiator()); assertTrue(ae.isAssociationAcceptor()); assertTrue(ae.getConnections().get(0).isServer()); TransferCapability echoSCP = ae.getTransferCapabilityFor( UID.Verification, TransferCapability.Role.SCP); assertNotNull(echoSCP); assertArrayEquals(new String[] { UID.ImplicitVRLittleEndian }, echoSCP.getTransferSyntaxes()); TransferCapability ctSCP = ae.getTransferCapabilityFor( UID.CTImageStorage, TransferCapability.Role.SCP); assertNotNull(ctSCP); assertArrayEquals(new String[] { UID.ImplicitVRLittleEndian, UID.ExplicitVRLittleEndian }, sort(ctSCP.getTransferSyntaxes())); assertNull(ctSCP.getStorageOptions()); TransferCapability findSCP = ae.getTransferCapabilityFor( UID.StudyRootQueryRetrieveInformationModelFind, TransferCapability.Role.SCP); assertNotNull(findSCP); assertArrayEquals(new String[] { UID.ImplicitVRLittleEndian }, findSCP.getTransferSyntaxes()); assertEquals(EnumSet.of(QueryOption.RELATIONAL), findSCP.getQueryOptions()); assertEquals(1, config.listDeviceInfos(deviceInfo("Test-Device-1")).length); try { config.persist(createDevice("Test-Device-1", "TEST-AET1"), null); fail("ConfigurationAlreadyExistsException expected"); } catch (ConfigurationAlreadyExistsException e) {} config.removeDevice("Test-Device-1", null); }
/**
 * Best-effort conversion of an arbitrary object to a Long.
 * Numbers are truncated via longValue(); strings are parsed as base-10.
 * Returns null for null input, unparsable strings, and unsupported types.
 */
public static Long toLong(Object o) {
    if (o instanceof Number) {
        return ((Number) o).longValue();
    }
    if (o instanceof String) {
        try {
            return Long.valueOf((String) o);
        } catch (NumberFormatException ignored) {
            // Not a valid long literal — fall through to the null result.
            return null;
        }
    }
    // Covers null input and every other unsupported type.
    return null;
}
// Covers every toLong branch: null input, integer and floating-point Numbers (truncation), a parsable string, an unparsable string, and an unsupported type (byte[]).
@Test public void testToLong() { Assert.assertNull(Util.toLong(null)); Assert.assertEquals(Util.toLong(100).longValue(), 100L); Assert.assertEquals(Util.toLong(100.0).longValue(), 100L); Assert.assertEquals(Util.toLong("100").longValue(), 100L); Assert.assertNull(Util.toLong("ABC")); Assert.assertNull(Util.toLong(new byte[] {'a', 'b', 'c'})); }
// Returns the fully-qualified class name of each wrapped transformer, in chain order.
public List<String> getTransformersNames() { return transformers.stream().map(t -> t.getTransformer().getClass().getName()).collect(Collectors.toList()); }
// Builds a two-element ChainedTransformer from lambdas and asserts the reported class names match the lambdas' runtime classes, preserving chain order.
@Test public void testGetTransformersNames() { Transformer t1 = (jsc, sparkSession, dataset, properties) -> dataset.withColumnRenamed("foo", "bar"); Transformer t2 = (jsc, sparkSession, dataset, properties) -> dataset.withColumn("bar", dataset.col("bar").cast(IntegerType)); ChainedTransformer transformer = new ChainedTransformer(Arrays.asList(t1, t2)); List<String> classNames = transformer.getTransformersNames(); assertEquals(t1.getClass().getName(), classNames.get(0)); assertEquals(t2.getClass().getName(), classNames.get(1)); }
// Commits offsets for a group: validates the request, refreshes the classic-protocol member heartbeat when the group is Stable/PreparingRebalance, then for each topic-partition either rejects oversized metadata (OFFSET_METADATA_TOO_LARGE) or appends an offset-commit record. Records the commit-count metric and returns the records plus the per-partition response.
public CoordinatorResult<OffsetCommitResponseData, CoordinatorRecord> commitOffset( RequestContext context, OffsetCommitRequestData request ) throws ApiException { Group group = validateOffsetCommit(context, request); /* In the old consumer group protocol, the offset commits maintain the session if the group is in Stable or PreparingRebalance state. */ if (group.type() == Group.GroupType.CLASSIC) { ClassicGroup classicGroup = (ClassicGroup) group; if (classicGroup.isInState(ClassicGroupState.STABLE) || classicGroup.isInState(ClassicGroupState.PREPARING_REBALANCE)) { groupMetadataManager.rescheduleClassicGroupMemberHeartbeat( classicGroup, classicGroup.member(request.memberId()) ); } } final OffsetCommitResponseData response = new OffsetCommitResponseData(); final List<CoordinatorRecord> records = new ArrayList<>(); final long currentTimeMs = time.milliseconds(); final OptionalLong expireTimestampMs = expireTimestampMs(request.retentionTimeMs(), currentTimeMs); request.topics().forEach(topic -> { final OffsetCommitResponseTopic topicResponse = new OffsetCommitResponseTopic().setName(topic.name()); response.topics().add(topicResponse); topic.partitions().forEach(partition -> { if (isMetadataInvalid(partition.committedMetadata())) { topicResponse.partitions().add(new OffsetCommitResponsePartition() .setPartitionIndex(partition.partitionIndex()) .setErrorCode(Errors.OFFSET_METADATA_TOO_LARGE.code())); } else { log.debug("[GroupId {}] Committing offsets {} for partition {}-{} from member {} with leader epoch {}.", request.groupId(), partition.committedOffset(), topic.name(), partition.partitionIndex(), request.memberId(), partition.committedLeaderEpoch()); topicResponse.partitions().add(new OffsetCommitResponsePartition() .setPartitionIndex(partition.partitionIndex()) .setErrorCode(Errors.NONE.code())); final OffsetAndMetadata offsetAndMetadata = OffsetAndMetadata.fromRequest( partition, currentTimeMs, expireTimestampMs );
records.add(GroupCoordinatorRecordHelpers.newOffsetCommitRecord( request.groupId(), topic.name(), partition.partitionIndex(), offsetAndMetadata, metadataImage.features().metadataVersion() )); } }); }); if (!records.isEmpty()) { metrics.record(GroupCoordinatorMetrics.OFFSET_COMMITS_SENSOR_NAME, records.size()); } return new CoordinatorResult<>(records, response); }
// Committing with a member id unknown to an (empty) classic group must be rejected with UnknownMemberIdException.
@Test public void testGenericGroupOffsetCommitWithUnknownMemberId() { OffsetMetadataManagerTestContext context = new OffsetMetadataManagerTestContext.Builder().build(); /* Create an empty group. */ context.groupMetadataManager.getOrMaybeCreateClassicGroup( "foo", true ); /* Verify that the request is rejected with the correct exception. */ assertThrows(UnknownMemberIdException.class, () -> context.commitOffset( new OffsetCommitRequestData() .setGroupId("foo") .setMemberId("member") .setGenerationIdOrMemberEpoch(10) .setTopics(Collections.singletonList( new OffsetCommitRequestData.OffsetCommitRequestTopic() .setName("bar") .setPartitions(Collections.singletonList( new OffsetCommitRequestData.OffsetCommitRequestPartition() .setPartitionIndex(0) .setCommittedOffset(100L) )) )) ) ); }
// Delegates the current gauge value to the underlying gauge source.
@Override public long read() { return gaugeSource.read(); }
// Ensures a LongGauge bound to a dynamic metric re-binds after the original provider is deregistered and a new provider supplies the same metric name with a different value (42 -> 142). Each collect() pass refreshes the gauge binding.
@Test public void whenCacheDynamicMetricSourceReplacedWithConcreteValue() { SomeObject someObject = new SomeObject(); someObject.longField = 42; metricsRegistry.registerDynamicMetricsProvider(someObject); LongGaugeImpl longGauge = metricsRegistry.newLongGauge("foo.longField"); /* needed to collect dynamic metrics and update the gauge created from them */ metricsRegistry.collect(mock(MetricsCollector.class)); assertEquals(42, longGauge.read()); metricsRegistry.deregisterDynamicMetricsProvider(someObject); metricsRegistry.registerDynamicMetricsProvider((descriptor, context) -> context.collect(descriptor.withPrefix("foo"), "longField", INFO, COUNT, 142)); /* needed to collect dynamic metrics and update the gauge created from them */ metricsRegistry.collect(mock(MetricsCollector.class)); assertEquals(142, longGauge.read()); }
// Resolves an application's icon at the requested size: cache first, then the workspace icon for the app bundle (converted to size and cached), falling back to the generic notfound image. NOTE(review): when the bundle path resolves, the converted icon is returned directly even if conversion yields null, bypassing the notfound fallback — confirm this is intentional.
@Override public NSImage applicationIcon(final Application app, final Integer size) { NSImage icon = this.load(app.getIdentifier(), size); if(null == icon) { final String path = workspace.absolutePathForAppBundleWithIdentifier(app.getIdentifier()); /* Null if the bundle cannot be found */ if(StringUtils.isNotBlank(path)) { return this.cache(app.getIdentifier(), this.convert(app.getIdentifier(), workspace.iconForFile(path), size), size); } } if(null == icon) { return this.iconNamed("notfound.tiff", size); } return icon; }
// Requests the TextEdit icon at 32, 16 and 64 pixels and asserts each returned image is non-null with the requested square dimensions.
@Test public void testCacheApplicationIcon() { final NSImageIconCache cache = new NSImageIconCache(); final NSImage icon32 = cache.applicationIcon(new Application("com.apple.TextEdit"), 32); assertNotNull(icon32); assertEquals(32, icon32.size().width.intValue()); assertEquals(32, icon32.size().height.intValue()); final NSImage icon16 = cache.applicationIcon(new Application("com.apple.TextEdit"), 16); assertNotNull(icon16); assertEquals(16, icon16.size().width.intValue()); assertEquals(16, icon16.size().height.intValue()); final NSImage icon64 = cache.applicationIcon(new Application("com.apple.TextEdit"), 64); assertNotNull(icon64); assertEquals(64, icon64.size().width.intValue()); assertEquals(64, icon64.size().height.intValue()); }
// Fluent setter: stores the message and returns this for call chaining.
public ModelApiResponse message(String message) { this.message = message; return this; }
// Placeholder generated test for the message() fluent setter — not yet implemented.
@Test public void messageTest() { /* TODO: test message */ }
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
// A single ARRAY<STRING> argument must resolve to the registered varargs-String function.
@Test public void shouldFindNonVarargWithList() { /* Given: */ givenFunctions( function(EXPECTED, -1, STRING_VARARGS) ); /* When: */ final KsqlScalarFunction fun = udfIndex .getFunction(ImmutableList.of(SqlArgument.of(SqlArray.of(SqlTypes.STRING)))); /* Then: */ assertThat(fun.name(), equalTo(EXPECTED)); }
// Plugin ordering: uses the WEB_CLIENT code from the shared PluginEnum.
@Override public int getOrder() { return PluginEnum.WEB_CLIENT.getCode(); }
// The plugin's order must equal the WEB_CLIENT enum code.
@Test public void testGetOrder() { assertEquals(PluginEnum.WEB_CLIENT.getCode(), webClientPlugin.getOrder()); }
// Renames a directory on the filesystem backing 'source': fails fast if the target already exists, creates the target's parent when missing, runs the caller's pre-rename hook, then performs the rename, translating a false return or IOException into StarRocksConnectorException. NOTE(review): the exists-check followed by rename is not atomic — a concurrent writer could create the target in between.
public void renameDirectory(Path source, Path target, Runnable runWhenPathNotExist) { if (pathExists(target)) { throw new StarRocksConnectorException("Unable to rename from %s to %s. msg: target directory already exists", source, target); } if (!pathExists(target.getParent())) { createDirectory(target.getParent(), conf); } runWhenPathNotExist.run(); try { if (!FileSystem.get(source.toUri(), conf).rename(source, target)) { throw new StarRocksConnectorException("Failed to rename %s to %s: rename returned false", source, target); } } catch (IOException e) { throw new StarRocksConnectorException("Failed to rename %s to %s, msg: %s", source, target, e.getMessage()); } }
// Mocks a filesystem whose rename path reports only the target's parent as existing, and asserts renameDirectory surfaces a StarRocksConnectorException containing "Failed to rename".
@Test public void testRenameDirFailed() { HiveRemoteFileIO hiveRemoteFileIO = new HiveRemoteFileIO(new Configuration()); FileSystem fs = new MockedRemoteFileSystem(HDFS_HIVE_TABLE); hiveRemoteFileIO.setFileSystem(fs); FeConstants.runningUnitTest = true; ExecutorService executorToRefresh = Executors.newSingleThreadExecutor(); ExecutorService executorToLoad = Executors.newSingleThreadExecutor(); CachingRemoteFileIO cachingFileIO = new CachingRemoteFileIO(hiveRemoteFileIO, executorToRefresh, 10, 10, 10); RemoteFileOperations ops = new RemoteFileOperations(cachingFileIO, executorToLoad, executorToLoad, false, true, new Configuration()); Path writePath = new Path("hdfs://hadoop01:9000/tmp/starrocks/queryid"); Path targetPath = new Path("hdfs://hadoop01:9000/user/hive/warehouse/test.db/t1"); FileSystem mockedFs = new MockedRemoteFileSystem(HDFS_HIVE_TABLE) { @Override public boolean exists(Path path) { if (path.equals(targetPath.getParent())) { return true; } else { return false; } } }; new MockUp<FileSystem>() { @Mock public FileSystem get(URI uri, Configuration conf) throws IOException { return mockedFs; } }; ExceptionChecker.expectThrowsWithMsg( StarRocksConnectorException.class, "Failed to rename", () -> ops.renameDirectory(writePath, targetPath, () -> { })); }
// Registers a WebSocket client method: uses the method-level @ShenyuSpringWebSocketClient annotation when present, otherwise the bean-level one; when either exists, builds the metadata DTO, publishes the register event, and caches the metadata by method.
@Override protected void handleMethod(final Object bean, final Class<?> clazz, @Nullable final ShenyuSpringWebSocketClient beanShenyuClient, final Method method, final String superPath) { ShenyuSpringWebSocketClient methodShenyuClient = AnnotatedElementUtils.findMergedAnnotation(method, getAnnotationType()); methodShenyuClient = Objects.isNull(methodShenyuClient) ? beanShenyuClient : methodShenyuClient; if (Objects.nonNull(methodShenyuClient)) { final MetaDataRegisterDTO metaData = buildMetaDataDTO(bean, methodShenyuClient, buildApiPath(method, superPath, methodShenyuClient), clazz, method); getPublisher().publishEvent(metaData); getMetaDataMap().put(method, metaData); } }
// Smoke test: handleMethod must process a mock bean's annotated method without throwing.
@Test public void testHandleMethod() throws NoSuchMethodException { Method method = mockClass.getClass().getMethod("mockMethod"); eventListener.handleMethod(mockClass, MockClass.class, annotation, method, SUPER_PATH); }
// Fetches the target schema from the schema registry, preferring the dedicated target registry URL and falling back to the source registry URL. Failures are wrapped in HoodieSchemaFetchException with the relevant config keys and a truncated URL (to avoid leaking credentials embedded in it).
@Override public Schema getTargetSchema() { String registryUrl = getStringWithAltKeys(config, HoodieSchemaProviderConfig.SRC_SCHEMA_REGISTRY_URL); String targetRegistryUrl = getStringWithAltKeys(config, HoodieSchemaProviderConfig.TARGET_SCHEMA_REGISTRY_URL, registryUrl); try { return parseSchemaFromRegistry(targetRegistryUrl); } catch (Exception e) { throw new HoodieSchemaFetchException(String.format( "Error reading target schema from registry. Please check %s is configured correctly. If that is not configured then check %s. Truncated URL: %s", Config.SRC_SCHEMA_REGISTRY_URL_PROP, Config.TARGET_SCHEMA_REGISTRY_URL_PROP, StringUtils.truncate(targetRegistryUrl, 10, 10)), e); } }
// With only the source registry URL configured, getTargetSchema must fall back to it, return the expected schema, and never set custom HTTP headers on the rest service.
@Test public void testGetTargetSchemaWithoutConverter() throws Exception { TypedProperties props = getProps(); props.put("hoodie.deltastreamer.schemaprovider.registry.url", "http://localhost/subjects/test/versions/latest"); SchemaRegistryProvider underTest = getUnderTest(props, -1, false); Schema actual = underTest.getTargetSchema(); assertNotNull(actual); assertEquals(getExpectedSchema(), actual); verify(mockRestService, never()).setHttpHeaders(any()); }
// Router-side update of an application's timeout: rejects a null payload, routes the call to the interceptor owning the app, records success metrics/audit on a non-null response, and converts IllegalArgument/Yarn errors — or a null response — into audited failures and RuntimeExceptions.
@Override public Response updateApplicationTimeout(AppTimeoutInfo appTimeout, HttpServletRequest hsr, String appId) throws AuthorizationException, YarnException, InterruptedException, IOException { if (appTimeout == null) { routerMetrics.incrUpdateApplicationTimeoutsRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_APPLICATIONTIMEOUTS, UNKNOWN, TARGET_WEB_SERVICE, "Parameter error, the appTimeout is null."); throw new IllegalArgumentException("Parameter error, the appTimeout is null."); } try { long startTime = Time.now(); DefaultRequestInterceptorREST interceptor = getOrCreateInterceptorByAppId(appId); Response response = interceptor.updateApplicationTimeout(appTimeout, hsr, appId); if (response != null) { long stopTime = clock.getTime(); RouterAuditLogger.logSuccess(getUser().getShortUserName(), UPDATE_APPLICATIONTIMEOUTS, TARGET_WEB_SERVICE); routerMetrics.succeededUpdateAppTimeoutsRetrieved((stopTime - startTime)); return response; } } catch (IllegalArgumentException e) { routerMetrics.incrUpdateApplicationTimeoutsRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_APPLICATIONTIMEOUTS, UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage()); RouterServerUtil.logAndThrowRunTimeException(e, "Unable to get the updateApplicationTimeout appId: %s.", appId); } catch (YarnException e) { routerMetrics.incrUpdateApplicationTimeoutsRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_APPLICATIONTIMEOUTS, UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage()); RouterServerUtil.logAndThrowRunTimeException("updateApplicationTimeout error.", e); } routerMetrics.incrUpdateApplicationTimeoutsRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_APPLICATIONTIMEOUTS, UNKNOWN, TARGET_WEB_SERVICE, "updateApplicationTimeout Failed."); throw new RuntimeException("updateApplicationTimeout Failed."); }
// Submits an application, then pushes its LIFETIME timeout forward by 10 seconds and
// verifies the returned AppTimeoutInfo echoes the requested values.
@Test
public void testUpdateApplicationTimeout() throws IOException, InterruptedException, YarnException {
    // Generate ApplicationId information
    ApplicationId appId = ApplicationId.newInstance(Time.now(), 1);
    ApplicationSubmissionContextInfo context = new ApplicationSubmissionContextInfo();
    context.setApplicationId(appId.toString());
    // Generate ApplicationAttemptId information
    Assert.assertNotNull(interceptor.submitApplication(context, null));
    long newLifetime = 10L;
    // update 10L seconds more to timeout
    String timeout = Times.formatISO8601(Time.now() + newLifetime * 1000);
    AppTimeoutInfo paramAppTimeOut = new AppTimeoutInfo();
    paramAppTimeOut.setExpiryTime(timeout);
    // RemainingTime = Math.max((timeoutInMillis - System.currentTimeMillis()) / 1000, 0))
    paramAppTimeOut.setRemainingTime(newLifetime);
    paramAppTimeOut.setTimeoutType(ApplicationTimeoutType.LIFETIME);
    Response response = interceptor.updateApplicationTimeout(paramAppTimeOut, null, appId.toString());
    Assert.assertNotNull(response);
    AppTimeoutInfo entity = (AppTimeoutInfo) response.getEntity();
    Assert.assertNotNull(entity);
    // The echoed entity must carry the same expiry, type and remaining seconds.
    Assert.assertEquals(paramAppTimeOut.getExpireTime(), entity.getExpireTime());
    Assert.assertEquals(paramAppTimeOut.getTimeoutType(), entity.getTimeoutType());
    Assert.assertEquals(paramAppTimeOut.getRemainingTimeInSec(), entity.getRemainingTimeInSec());
}
/**
 * Validates a WriteTxnMarkers response for the single abort marker this handler requested.
 * The response must contain exactly one marker entry for the expected producerId, with
 * exactly one topic entry and exactly one partition entry matching the abort spec; any
 * other shape is surfaced as a failed ApiResult wrapping a KafkaException, and a
 * partition-level error code is mapped through handleError.
 */
@Override
public ApiResult<TopicPartition, Void> handleResponse(
    Node broker,
    Set<TopicPartition> topicPartitions,
    AbstractResponse abstractResponse
) {
    validateTopicPartitions(topicPartitions);
    WriteTxnMarkersResponse response = (WriteTxnMarkersResponse) abstractResponse;
    List<WriteTxnMarkersResponseData.WritableTxnMarkerResult> markerResponses = response.data().markers();
    // Exactly one marker entry, and it must be for the producerId we asked to abort.
    if (markerResponses.size() != 1 || markerResponses.get(0).producerId() != abortSpec.producerId()) {
        return ApiResult.failed(abortSpec.topicPartition(), new KafkaException("WriteTxnMarkers response " +
            "included unexpected marker entries: " + markerResponses + "(expected to find exactly one " +
            "entry with producerId " + abortSpec.producerId() + ")"));
    }
    WriteTxnMarkersResponseData.WritableTxnMarkerResult markerResponse = markerResponses.get(0);
    List<WriteTxnMarkersResponseData.WritableTxnMarkerTopicResult> topicResponses = markerResponse.topics();
    // Exactly one topic entry, and it must be the topic of the abort spec.
    if (topicResponses.size() != 1 || !topicResponses.get(0).name().equals(abortSpec.topicPartition().topic())) {
        return ApiResult.failed(abortSpec.topicPartition(), new KafkaException("WriteTxnMarkers response " +
            "included unexpected topic entries: " + markerResponses + "(expected to find exactly one " +
            "entry with topic partition " + abortSpec.topicPartition() + ")"));
    }
    WriteTxnMarkersResponseData.WritableTxnMarkerTopicResult topicResponse = topicResponses.get(0);
    List<WriteTxnMarkersResponseData.WritableTxnMarkerPartitionResult> partitionResponses = topicResponse.partitions();
    // Exactly one partition entry, and it must be the partition of the abort spec.
    if (partitionResponses.size() != 1 || partitionResponses.get(0).partitionIndex() != abortSpec.topicPartition().partition()) {
        return ApiResult.failed(abortSpec.topicPartition(), new KafkaException("WriteTxnMarkers response " +
            "included unexpected partition entries for topic " +
            abortSpec.topicPartition().topic() + ": " + markerResponses +
            "(expected to find exactly one entry with partition " +
            abortSpec.topicPartition().partition() + ")"));
    }
    WriteTxnMarkersResponseData.WritableTxnMarkerPartitionResult partitionResponse = partitionResponses.get(0);
    Errors error = Errors.forCode(partitionResponse.errorCode());
    if (error != Errors.NONE) {
        // Map broker-side errors (retriable or fatal) through the common error handler.
        return handleError(error);
    } else {
        return ApiResult.completed(abortSpec.topicPartition(), null);
    }
}
// Progressively builds a WriteTxnMarkers response, asserting that every incomplete or
// mismatched stage (missing marker, missing topic, missing partition, wrong topic name,
// wrong producerId) is rejected with a KafkaException. Statement order is significant:
// each mutation builds on the previous response state.
@Test
public void testInvalidResponse() {
    AbortTransactionHandler handler = new AbortTransactionHandler(abortSpec, logContext);
    WriteTxnMarkersResponseData response = new WriteTxnMarkersResponseData();
    // Empty response: no marker entries at all.
    assertFailed(KafkaException.class, topicPartition, handler.handleResponse(node,
        singleton(topicPartition), new WriteTxnMarkersResponse(response)));
    WriteTxnMarkersResponseData.WritableTxnMarkerResult markerResponse =
        new WriteTxnMarkersResponseData.WritableTxnMarkerResult();
    response.markers().add(markerResponse);
    // Marker present but with the wrong (default) producerId.
    assertFailed(KafkaException.class, topicPartition, handler.handleResponse(node,
        singleton(topicPartition), new WriteTxnMarkersResponse(response)));
    markerResponse.setProducerId(abortSpec.producerId());
    // Correct producerId but no topic entries yet.
    assertFailed(KafkaException.class, topicPartition, handler.handleResponse(node,
        singleton(topicPartition), new WriteTxnMarkersResponse(response)));
    WriteTxnMarkersResponseData.WritableTxnMarkerTopicResult topicResponse =
        new WriteTxnMarkersResponseData.WritableTxnMarkerTopicResult();
    markerResponse.topics().add(topicResponse);
    // Topic entry present but with the wrong (default) name.
    assertFailed(KafkaException.class, topicPartition, handler.handleResponse(node,
        singleton(topicPartition), new WriteTxnMarkersResponse(response)));
    topicResponse.setName(abortSpec.topicPartition().topic());
    // Correct topic name but no partition entries yet.
    assertFailed(KafkaException.class, topicPartition, handler.handleResponse(node,
        singleton(topicPartition), new WriteTxnMarkersResponse(response)));
    WriteTxnMarkersResponseData.WritableTxnMarkerPartitionResult partitionResponse =
        new WriteTxnMarkersResponseData.WritableTxnMarkerPartitionResult();
    topicResponse.partitions().add(partitionResponse);
    // Partition entry present but with the wrong (default) index.
    assertFailed(KafkaException.class, topicPartition, handler.handleResponse(node,
        singleton(topicPartition), new WriteTxnMarkersResponse(response)));
    partitionResponse.setPartitionIndex(abortSpec.topicPartition().partition());
    topicResponse.setName(abortSpec.topicPartition().topic() + "random");
    // Everything correct except a mismatched topic name.
    assertFailed(KafkaException.class, topicPartition, handler.handleResponse(node,
        singleton(topicPartition), new WriteTxnMarkersResponse(response)));
    topicResponse.setName(abortSpec.topicPartition().topic());
    markerResponse.setProducerId(abortSpec.producerId() + 1);
    // Everything correct except a mismatched producerId.
    assertFailed(KafkaException.class, topicPartition, handler.handleResponse(node,
        singleton(topicPartition), new WriteTxnMarkersResponse(response)));
}
static <T> T getWildcardMappedObject(final Map<String, T> mapping, final String query) { T value = mapping.get(query); if (value == null) { for (String key : mapping.keySet()) { // Turn the search key into a regex, using all characters but the * as a literal. String regex = Arrays.stream(key.split("\\*")) // split in parts that do not have a wildcard in them .map(Pattern::quote) // each part should be used as a literal (not as a regex or partial regex) .collect(Collectors.joining(".*")); // join all literal parts with a regex representation on the wildcard. if (key.endsWith("*")) { // the 'split' will have removed any trailing wildcard characters. Correct for that. regex += ".*"; } if (query.matches(regex)) { value = mapping.get(key); break; } } } return value; }
// A wildcard-free key must not match a longer query that merely starts with it.
@Test
public void testExactConcat() throws Exception {
    // Setup test fixture.
    final Map<String, Object> haystack = Map.of("myplugin/foo", new Object());
    // Execute system under test.
    final Object result = PluginServlet.getWildcardMappedObject(haystack, "myplugin/foobar");
    // Verify results.
    assertNull(result);
}
/**
 * Rejects any document type whose name collides with an internally reserved name.
 *
 * @param documentDefinitions document types keyed by name
 * @throws IllegalArgumentException if any name is reserved
 */
public void validate(Map<String, NewDocumentType> documentDefinitions) {
    List<String> conflicts = documentDefinitions.keySet().stream()
            .filter(name -> isReservedName(name))
            .toList();
    if (conflicts.isEmpty()) {
        return;
    }
    throw new IllegalArgumentException(makeReservedNameMessage(conflicts));
}
// A non-reserved document type name must pass validation without throwing.
@Test
void exception_is_not_thrown_on_unreserved_name() {
    ReservedDocumentTypeNameValidator validator = new ReservedDocumentTypeNameValidator();
    validator.validate(asDocTypeMapping(List.of("foo")));
}
/**
 * Validates that the given property exists and has a non-empty value.
 *
 * @param props properties to check
 * @param propKey key that must be present with at least one character
 * @param algorithm algorithm providing error context for thrown exceptions
 */
public static void checkAtLeastOneChar(final Properties props, final String propKey, final MaskAlgorithm<?, ?> algorithm) {
    // Presence is checked first so a missing key fails with the "required" error.
    checkRequired(props, propKey, algorithm);
    ShardingSpherePreconditions.checkNotEmpty(props.getProperty(propKey),
        () -> new AlgorithmInitializationException(algorithm, "%s's length must be at least one", propKey));
}
// A missing property key must be rejected (via the required-property check) before
// the at-least-one-char length check is reached.
@Test
void assertCheckAtLeastOneCharFailedWithoutKey() {
    Properties props = new Properties();
    assertThrows(AlgorithmInitializationException.class,
        () -> MaskAlgorithmPropertiesChecker.checkAtLeastOneChar(props, "key", mock(MaskAlgorithm.class)));
}
/**
 * Aggregates session values with the given initializer and session merger, using an
 * anonymous materialized store.
 */
@Override
public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer,
                                        final Merger<? super K, V> sessionMerger) {
    // Delegate to the materialized overload; Materialized.with(null, null) lets the
    // key/value serdes fall back to the configured defaults.
    return aggregate(initializer, sessionMerger, Materialized.with(null, null));
}
// A null session merger must be rejected eagerly with a NullPointerException.
@Test
public void shouldNotHaveNullSessionMergerOnAggregate() {
    assertThrows(NullPointerException.class,
        () -> windowedCogroupedStream.aggregate(MockInitializer.STRING_INIT, null));
}
/**
 * Heuristically decides whether two feed items refer to the same episode.
 * Identical non-empty GUIDs or stream URLs are treated as authoritative matches;
 * otherwise all fuzzy signals (title, date, duration, MIME type) must agree.
 *
 * @return true if the items look like duplicates of each other
 */
public static boolean seemDuplicates(FeedItem item1, FeedItem item2) {
    // Matching non-empty item identifiers are conclusive.
    if (sameAndNotEmpty(item1.getItemIdentifier(), item2.getItemIdentifier())) {
        return true;
    }
    FeedMedia media1 = item1.getMedia();
    FeedMedia media2 = item2.getMedia();
    // Without media on both sides there is nothing further to compare.
    if (media1 == null || media2 == null) {
        return false;
    }
    // Matching non-empty stream URLs are conclusive too.
    if (sameAndNotEmpty(media1.getStreamUrl(), media2.getStreamUrl())) {
        return true;
    }
    // Fall back to requiring every fuzzy signal to agree.
    return titlesLookSimilar(item1, item2)
            && datesLookSimilar(item1, item2)
            && durationsLookSimilar(media1, media2)
            && mimeTypeLooksSimilar(media1, media2);
}
// Items sharing the same download/stream URL are duplicates even with different ids and
// titles; different URLs (with otherwise differing metadata) are not.
@Test
public void testDuplicateDownloadUrl() {
    assertTrue(FeedItemDuplicateGuesser.seemDuplicates(
        item("id1", "Title1", "example.com/episode", 0, 5 * MINUTES, "audio/*"),
        item("id2", "Title2", "example.com/episode", 0, 5 * MINUTES, "audio/*")));
    assertFalse(FeedItemDuplicateGuesser.seemDuplicates(
        item("id1", "Title1", "example.com/episode1", 0, 5 * MINUTES, "audio/*"),
        item("id2", "Title2", "example.com/episode2", 0, 5 * MINUTES, "audio/*")));
}
/**
 * Returns the native binary-large-object column type for the given dialect.
 *
 * @throws IllegalArgumentException for dialects without a known mapping
 */
@Override
public String generateSqlType(Dialect dialect) {
    final String dialectId = dialect.getId();
    switch (dialectId) {
        case MsSql.ID:
            return "VARBINARY(MAX)";
        case Oracle.ID:
        case H2.ID:
            return "BLOB";
        case PostgreSql.ID:
            return "BYTEA";
        default:
            throw new IllegalArgumentException("Unsupported dialect id " + dialectId);
    }
}
// PostgreSQL maps the binary column type to BYTEA.
@Test
public void generateSqlType_for_PostgreSql() {
    assertThat(underTest.generateSqlType(new PostgreSql())).isEqualTo("BYTEA");
}
/**
 * Appends a driver event to the event ring buffer if logging for the given code is
 * enabled. The payload may be truncated to the maximum capture length.
 *
 * @param code event code used both as filter and record type id
 * @param buffer source buffer containing the event payload
 * @param offset payload offset within the source buffer
 * @param length payload length in bytes
 */
public void log(final DriverEventCode code, final DirectBuffer buffer, final int offset, final int length)
{
    if (DriverComponentLogger.ENABLED_EVENTS.contains(code))
    {
        final int captureLength = captureLength(length);
        final int encodedLength = encodedLength(captureLength);
        final ManyToOneRingBuffer ringBuffer = this.ringBuffer;
        final int index = ringBuffer.tryClaim(toEventCodeId(code), encodedLength);
        // tryClaim returns a non-positive index when no space is available: drop the event.
        if (index > 0)
        {
            try
            {
                encode((UnsafeBuffer)ringBuffer.buffer(), index, captureLength, length, buffer, offset);
            }
            finally
            {
                // Always commit so a failed encode cannot wedge the claimed slot.
                ringBuffer.commit(index);
            }
        }
    }
}
// Logs an enabled event and verifies the ring-buffer record header plus every payload byte.
@Test
void log() {
    final DriverEventCode eventCode = CMD_IN_TERMINATE_DRIVER;
    DriverComponentLogger.ENABLED_EVENTS.add(eventCode);
    // Pre-position the ring-buffer tail at an aligned record offset.
    final int recordOffset = align(13, ALIGNMENT);
    logBuffer.putLong(CAPACITY + TAIL_POSITION_OFFSET, recordOffset);
    final int length = 100;
    final int srcOffset = 20;
    // Fill the source payload with a recognizable byte value.
    buffer.setMemory(srcOffset, length, (byte)5);
    logger.log(eventCode, buffer, srcOffset, length);
    verifyLogHeader(logBuffer, recordOffset, toEventCodeId(eventCode), length, length);
    // Every payload byte must have been copied verbatim after the log header.
    for (int i = 0; i < length; i++) {
        assertEquals(5, logBuffer.getByte(encodedMsgOffset(recordOffset + LOG_HEADER_LENGTH + i)));
    }
}
/**
 * Creates the sharding route engine appropriate for the SQL statement type.
 * Dispatch order matters: TCL statements broadcast to all databases; DDL (cursor-aware
 * statements first) uses the DDL engines; then DAL, then DCL; everything else is DQL.
 *
 * @return the route engine to use for this query
 */
public static ShardingRouteEngine newInstance(final ShardingRule shardingRule, final ShardingSphereDatabase database, final QueryContext queryContext,
                                              final ShardingConditions shardingConditions, final ConfigurationProperties props, final ConnectionContext connectionContext) {
    SQLStatementContext sqlStatementContext = queryContext.getSqlStatementContext();
    SQLStatement sqlStatement = sqlStatementContext.getSqlStatement();
    if (sqlStatement instanceof TCLStatement) {
        // Transaction control must reach every database shard.
        return new ShardingDatabaseBroadcastRoutingEngine();
    }
    if (sqlStatement instanceof DDLStatement) {
        if (sqlStatementContext instanceof CursorAvailable) {
            return getCursorRouteEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props);
        }
        return getDDLRoutingEngine(shardingRule, database, sqlStatementContext);
    }
    if (sqlStatement instanceof DALStatement) {
        return getDALRoutingEngine(shardingRule, database, sqlStatementContext, connectionContext);
    }
    if (sqlStatement instanceof DCLStatement) {
        return getDCLRoutingEngine(shardingRule, database, sqlStatementContext);
    }
    return getDQLRoutingEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props, connectionContext);
}
// An OPTIMIZE TABLE statement touching only non-sharded tables must be routed with the
// ignore engine rather than a sharding engine.
@Test
void assertNewInstanceForOptimizeTableWithSingleTable() {
    MySQLOptimizeTableStatement optimizeTableStatement = mock(MySQLOptimizeTableStatement.class);
    when(sqlStatementContext.getSqlStatement()).thenReturn(optimizeTableStatement);
    tableNames.add("table_1");
    // No sharding table names => the statement falls outside sharding routing.
    when(shardingRule.getShardingRuleTableNames(tableNames)).thenReturn(Collections.emptyList());
    QueryContext queryContext = new QueryContext(sqlStatementContext, "", Collections.emptyList(),
        new HintValueContext(), mockConnectionContext(), mock(ShardingSphereMetaData.class));
    ShardingRouteEngine actual = ShardingRouteEngineFactory.newInstance(shardingRule, database,
        queryContext, shardingConditions, props, new ConnectionContext(Collections::emptySet));
    assertThat(actual, instanceOf(ShardingIgnoreRoutingEngine.class));
}
/**
 * Reads the entire contents of the given file as a UTF-8 string.
 *
 * @param fileName path of the file to read
 * @return the file contents decoded as UTF-8
 * @throws RuntimeException wrapping the IOException if the file cannot be read
 */
static String readFileContents(String fileName) {
    try {
        // Paths.get avoids the needless java.io.File intermediary of the old code.
        return Files.readString(Paths.get(fileName), StandardCharsets.UTF_8);
    } catch (IOException e) {
        throw new RuntimeException("Could not get " + fileName, e);
    }
}
// Round-trip: a file written with Unicode content is read back verbatim.
@Test
public void readFileContents() throws IOException {
    // given
    String expectedContents = "Hello, world!\nThis is a test with Unicode ✓.";
    String testFile = createTestFile(expectedContents);
    // when
    String actualContents = AwsDiscoveryStrategyFactory.readFileContents(testFile);
    // then
    assertEquals(expectedContents, actualContents);
}
public boolean canProcessTask(final Task task, final long now) { final String topologyName = task.id().topologyName(); if (!hasNamedTopologies) { // TODO implement error handling/backoff for non-named topologies (needs KIP) return !pausedTopologies.contains(UNNAMED_TOPOLOGY); } else { if (pausedTopologies.contains(topologyName)) { return false; } else { final NamedTopologyMetadata metadata = topologyNameToErrorMetadata.get(topologyName); return metadata == null || (metadata.canProcess() && metadata.canProcessTask(task, now)); } } }
// Without named topologies, processing is allowed until the unnamed topology is paused.
@Test
public void testCanProcessWithoutNamedTopologies() {
    final Set<String> topologies = Collections.singleton(UNNAMED_TOPOLOGY);
    final Set<String> pausedTopologies = new HashSet<>();
    final TaskExecutionMetadata metadata = new TaskExecutionMetadata(topologies, pausedTopologies,
        ProcessingMode.AT_LEAST_ONCE);
    final Task mockTask = createMockTask(UNNAMED_TOPOLOGY);
    assertTrue(metadata.canProcessTask(mockTask, TIME_ZERO));
    // This pauses an UNNAMED_TOPOLOGY / a KafkaStreams instance without named/modular
    // topologies.
    pausedTopologies.add(UNNAMED_TOPOLOGY);
    assertFalse(metadata.canProcessTask(mockTask, TIME_ZERO));
}
/**
 * Creates a new Pinot statement bound to this connection.
 *
 * @return a new {@link PinotStatement}
 * @throws SQLException if the connection is in an invalid state (e.g. already closed)
 */
@Override
public Statement createStatement() throws SQLException {
    validateState();
    return new PinotStatement(this);
}
// createStatement() on a freshly opened connection must return a non-null statement.
@Test
public void createStatementTest() throws Exception {
    PinotConnection pinotConnection =
        new PinotConnection("dummy", _dummyPinotClientTransport, "dummy", _dummyPinotControllerTransport);
    Statement statement = pinotConnection.createStatement();
    Assert.assertNotNull(statement);
}
/**
 * Extracts the {@code max-age} duration (in seconds) from an HTTP Cache-Control header.
 *
 * @param pHttpCacheControlHeader raw Cache-Control header value; may be null
 * @return the max-age value in seconds, or null if absent or unparseable
 */
public Long getHttpCacheControlDuration(final String pHttpCacheControlHeader) {
    // Idiom fixes: isEmpty() instead of length() > 0, startsWith() instead of
    // indexOf(...) == 0, and the catch narrowed to the only exception Long.valueOf
    // can actually throw (substring cannot fail once startsWith succeeded).
    if (pHttpCacheControlHeader != null && !pHttpCacheControlHeader.isEmpty()) {
        try {
            // NOTE(review): assumes directives are separated by ", " exactly — a header
            // like "max-age=3600,no-cache" would not split; confirm against real servers.
            final String[] parts = pHttpCacheControlHeader.split(", ");
            final String maxAge = "max-age=";
            for (final String part : parts) {
                if (part.startsWith(maxAge)) {
                    return Long.valueOf(part.substring(maxAge.length()));
                }
            }
        } catch (final NumberFormatException ex) {
            if (Configuration.getInstance().isDebugMapTileDownloader())
                Log.d(IMapView.LOGTAG,
                        "Unable to parse cache control tag for tile, server returned " + pHttpCacheControlHeader, ex);
        }
    }
    return null;
}
// Valid Cache-Control strings must yield the expected max-age; malformed ones yield null.
@Test
public void testGetHttpCacheControlDuration() {
    final TileSourcePolicy tileSourcePolicy = new TileSourcePolicy();
    for (final String string : mCacheControlStringOK) {
        Assert.assertEquals(mCacheControlValue, (long) tileSourcePolicy.getHttpCacheControlDuration(string));
    }
    for (final String string : mCacheControlStringKO) {
        Assert.assertNull(tileSourcePolicy.getHttpCacheControlDuration(string));
    }
}
/**
 * Returns the next run instant: the start of the first whole interval after
 * {@code currentInstant}, counted from {@code createdAtInstant}. The zone id is unused
 * because a fixed duration is independent of wall-clock time zones.
 */
@Override
public Instant next(Instant createdAtInstant, Instant currentInstant, ZoneId zoneId) {
    final long intervalNanos = duration.toNanos();
    // Whole intervals that have fully elapsed since creation.
    final long completedIntervals = Duration.between(createdAtInstant, currentInstant).toNanos() / intervalNanos;
    return createdAtInstant.plusNanos(intervalNanos * (completedIntervals + 1));
}
// The next instant of a fixed-duration interval must not depend on the supplied zone id.
@Test
void intervalsAreScheduledIndependentlyOfZoneId() {
    int hour = 8;
    Instant now = Instant.now();
    Instant actualNextInstant1 = new Interval(Duration.ofHours(hour)).next(now, ZoneId.of("+02:00"));
    Instant actualNextInstant2 = new Interval(Duration.ofHours(hour)).next(now, UTC);
    assertThat(actualNextInstant1).isEqualTo(actualNextInstant2);
}
/**
 * Parses XML config content into an editable config, optionally hands it to the callback,
 * then preprocesses and validates it into the runtime config.
 *
 * @param content raw XML configuration
 * @param callback optional hook invoked with the parsed config-for-edit; may be null
 * @return holder pairing the validated runtime config with the config-for-edit
 */
public GoConfigHolder loadConfigHolder(final String content, Callback callback) throws Exception {
    LOGGER.debug("[Config Save] Loading config holder");
    final CruiseConfig configForEdit = deserializeConfig(content);
    if (callback != null) {
        callback.call(configForEdit);
    }
    final CruiseConfig config = preprocessAndValidate(configForEdit);
    return new GoConfigHolder(config, configForEdit);
}
// An empty <pipeline> element inside <templates> must be rejected by schema validation
// with the "content not complete" error.
@Test
void shouldNotAllowEmptyPipelineTemplates() {
    String content = configWithTemplates(
            """
                    <templates>
                      <pipeline name='erbshe'>
                      </pipeline>
                    </templates>""");
    assertThatThrownBy(() -> xmlLoader.loadConfigHolder(content))
            .as("should NotAllowEmptyPipelineTemplates")
            .hasMessageContaining("The content of element 'pipeline' is not complete. One of '{authorization, stage}' is expected");
}
/**
 * Fails if the subject is not strictly greater than the given int.
 * Widens to long and delegates to {@code isGreaterThan(long)}.
 */
public final void isGreaterThan(int other) {
    isGreaterThan((long) other);
}
// 2 is not strictly greater than 3, so the assertion must report a failure.
@Test
public void isGreaterThan_int_strictly() {
    expectFailureWhenTestingThat(2L).isGreaterThan(3);
}
/**
 * Returns true when the line contains a quote that is opened but never closed, accounting
 * for doubled quotes (which escape each other), backslash-escaped quotes, and trailing
 * comments (a comment marker inside an open quote does not end the quote).
 */
public static boolean isUnclosedQuote(final String line) {
    // CHECKSTYLE_RULES.ON: CyclomaticComplexity
    // quoteStart tracks the index of the currently open quote, or -1 when none is open.
    int quoteStart = -1;
    for (int i = 0; i < line.length(); ++i) {
        if (quoteStart < 0 && isQuoteChar(line, i)) {
            quoteStart = i;
        } else if (quoteStart >= 0 && isTwoQuoteStart(line, i) && !isEscaped(line, i)) {
            // Together, two quotes are effectively an escaped quote and don't act as a quote character.
            // Skip the next quote char, since it's coupled with the first.
            i++;
        } else if (quoteStart >= 0 && isQuoteChar(line, i) && !isEscaped(line, i)) {
            quoteStart = -1;
        }
    }
    final int commentInd = line.indexOf(COMMENT);
    if (commentInd < 0) {
        // No comment marker: unclosed iff a quote is still open.
        return quoteStart >= 0;
    } else if (quoteStart < 0) {
        return false;
    } else {
        // Only unclosed if the quote was opened before the comment marker.
        return commentInd > quoteStart;
    }
}
// A comment marker inside a properly closed quote must not make the quote look unclosed.
@Test
public void shouldNotFindUnclosedQuote_commentCharsInside() {
    // Given:
    final String line = "some line 'this is in a quote -- not a comment'";
    // Then:
    assertThat(UnclosedQuoteChecker.isUnclosedQuote(line), is(false));
}
/**
 * Returns the single data source group rule of this readwrite-splitting rule.
 * NOTE(review): assumes at least one group is configured — an empty map would throw
 * NoSuchElementException here; confirm callers guarantee non-emptiness.
 */
public ReadwriteSplittingDataSourceGroupRule getSingleDataSourceGroupRule() {
    return dataSourceRuleGroups.values().iterator().next();
}
// Disabling a read data source must add it to the disabled set; re-enabling must clear it.
@Test
void assertUpdateRuleStatusWithEnable() {
    ReadwriteSplittingRule readwriteSplittingRule = createReadwriteSplittingRule();
    readwriteSplittingRule.getAttributes().getAttribute(StaticDataSourceRuleAttribute.class).updateStatus(
        new QualifiedDataSource("readwrite_splitting_db.readwrite.read_ds_0"), DataSourceState.DISABLED);
    assertThat(readwriteSplittingRule.getSingleDataSourceGroupRule().getDisabledDataSourceNames(),
        is(Collections.singleton("read_ds_0")));
    readwriteSplittingRule.getAttributes().getAttribute(StaticDataSourceRuleAttribute.class).updateStatus(
        new QualifiedDataSource("readwrite_splitting_db.readwrite.read_ds_0"), DataSourceState.ENABLED);
    assertThat(readwriteSplittingRule.getSingleDataSourceGroupRule().getDisabledDataSourceNames(),
        is(Collections.emptySet()));
}
/**
 * Generates code for the given code-gen table, returning a map of output file path to
 * generated content. For master tables, the related sub-tables (and their join columns)
 * are validated and loaded first.
 *
 * @param tableId id of the code-gen table definition
 * @return generated sources keyed by file path
 */
@Override
public Map<String, String> generationCodes(Long tableId) {
    // Verify the code-gen table definition exists.
    CodegenTableDO table = codegenTableMapper.selectById(tableId);
    if (table == null) {
        throw exception(CODEGEN_TABLE_NOT_EXISTS);
    }
    List<CodegenColumnDO> columns = codegenColumnMapper.selectListByTableId(tableId);
    if (CollUtil.isEmpty(columns)) {
        throw exception(CODEGEN_COLUMN_NOT_EXISTS);
    }
    // For a master table, load the corresponding sub-table information.
    List<CodegenTableDO> subTables = null;
    List<List<CodegenColumnDO>> subColumnsList = null;
    if (CodegenTemplateTypeEnum.isMaster(table.getTemplateType())) {
        // Verify at least one sub-table exists.
        subTables = codegenTableMapper.selectListByTemplateTypeAndMasterTableId(
            CodegenTemplateTypeEnum.SUB.getType(), tableId);
        if (CollUtil.isEmpty(subTables)) {
            throw exception(CODEGEN_MASTER_GENERATION_FAIL_NO_SUB_TABLE);
        }
        // Verify each sub-table's join column exists among its columns.
        subColumnsList = new ArrayList<>();
        for (CodegenTableDO subTable : subTables) {
            List<CodegenColumnDO> subColumns = codegenColumnMapper.selectListByTableId(subTable.getId());
            if (CollUtil.findOne(subColumns,
                    column -> column.getId().equals(subTable.getSubJoinColumnId())) == null) {
                throw exception(CODEGEN_SUB_COLUMN_NOT_EXISTS, subTable.getId());
            }
            subColumnsList.add(subColumns);
        }
    }
    // Perform the code generation.
    return codegenEngine.execute(table, columns, subTables, subColumnsList);
}
// A code-gen table without any column definitions must fail with CODEGEN_COLUMN_NOT_EXISTS.
@Test
public void testGenerationCodes_columnNotExists() {
    // mock data (CodegenTableDO)
    CodegenTableDO table = randomPojo(CodegenTableDO.class,
        o -> o.setScene(CodegenSceneEnum.ADMIN.getScene())
            .setTemplateType(CodegenTemplateTypeEnum.MASTER_NORMAL.getType()));
    codegenTableMapper.insert(table);
    // prepare the argument
    Long tableId = table.getId();
    // invoke and assert the expected service exception
    assertServiceException(() -> codegenService.generationCodes(tableId), CODEGEN_COLUMN_NOT_EXISTS);
}
/**
 * Returns true when the previous hashing duration is strictly closer to the per-password
 * duration goal (GOAL_MILLISECONDS_PER_PASSWORD) than the current duration.
 * A tie favors the current duration (returns false).
 */
boolean isPreviousDurationCloserToGoal(long previousDuration, long currentDuration) {
    return Math.abs(GOAL_MILLISECONDS_PER_PASSWORD - previousDuration)
        < Math.abs(GOAL_MILLISECONDS_PER_PASSWORD - currentDuration);
}
// When the current duration equals the goal, the previous duration cannot be strictly
// closer, so the result is false. NOTE(review): this relies on the goal constant being
// 1000ms — confirm against GOAL_MILLISECONDS_PER_PASSWORD in the service.
@Test
void findCloserToShouldReturnGoalIfNumber2IsEqualGoal() {
    // given
    int number1 = 999;
    int number2 = 1000;
    // when
    boolean actual = bcCryptWorkFactorService.isPreviousDurationCloserToGoal(number1, number2);
    // then
    assertThat(actual).isFalse();
}
/**
 * Handles server-push requests. Only {@code NotifySubscriberRequest} is processed:
 * its service info is fed into the holder and an ack response is returned. Any other
 * request type yields null so other handlers may take it.
 */
@Override
public Response requestReply(Request request, Connection connection) {
    if (!(request instanceof NotifySubscriberRequest)) {
        return null;
    }
    NotifySubscriberRequest notifyRequest = (NotifySubscriberRequest) request;
    serviceInfoHolder.processServiceInfo(notifyRequest.getServiceInfo());
    return new NotifySubscriberResponse();
}
// Requests other than NotifySubscriberRequest must be ignored, returning null.
@Test
void testRequestReplyOtherType() {
    ServiceInfoHolder holder = mock(ServiceInfoHolder.class);
    NamingPushRequestHandler handler = new NamingPushRequestHandler(holder);
    assertNull(handler.requestReply(new HealthCheckRequest(), new TestConnection(new RpcClient.ServerInfo())));
}
/**
 * Allocates (possibly shared) slots for the given execution attempts. Each execution
 * vertex may have at most one concurrent attempt; the per-vertex allocations from
 * allocateSlotsForVertices are keyed back by execution attempt id.
 *
 * @param executionAttemptIds attempts to allocate slots for
 * @return slot assignment per execution attempt
 */
@Override
public Map<ExecutionAttemptID, ExecutionSlotAssignment> allocateSlotsFor(
        List<ExecutionAttemptID> executionAttemptIds) {
    final Map<ExecutionVertexID, ExecutionAttemptID> vertexIdToExecutionId = new HashMap<>();
    executionAttemptIds.forEach(
            executionId ->
                    vertexIdToExecutionId.put(executionId.getExecutionVertexId(), executionId));
    // Duplicate vertex ids would collapse map entries, so size equality proves uniqueness.
    checkState(
            vertexIdToExecutionId.size() == executionAttemptIds.size(),
            "SlotSharingExecutionSlotAllocator does not support one execution vertex to have multiple concurrent executions");
    final List<ExecutionVertexID> vertexIds =
            executionAttemptIds.stream()
                    .map(ExecutionAttemptID::getExecutionVertexId)
                    .collect(Collectors.toList());
    // Re-key each per-vertex assignment by its owning execution attempt.
    return allocateSlotsForVertices(vertexIds).stream()
            .collect(
                    Collectors.toMap(
                            vertexAssignment ->
                                    vertexIdToExecutionId.get(
                                            vertexAssignment.getExecutionVertexId()),
                            vertexAssignment ->
                                    new ExecutionSlotAssignment(
                                            vertexIdToExecutionId.get(
                                                    vertexAssignment.getExecutionVertexId()),
                                            vertexAssignment.getLogicalSlotFuture())));
}
// A failed physical slot request must fail the dependent logical slot future, and a
// subsequent allocation must create a fresh shared slot (second physical request).
@Test
void testFailedPhysicalSlotRequestFailsLogicalSlotFuturesAndRemovesSharedSlot() {
    AllocationContext context =
        AllocationContext.newBuilder()
            .addGroup(EV1)
            .withPhysicalSlotProvider(
                TestingPhysicalSlotProvider.createWithoutImmediatePhysicalSlotCreation())
            .build();
    CompletableFuture<LogicalSlot> logicalSlotFuture =
        getAssignmentByExecutionVertexId(context.allocateSlotsFor(EV1), EV1).getLogicalSlotFuture();
    SlotRequestId slotRequestId = context.getSlotProvider().getFirstRequestOrFail().getSlotRequestId();
    assertThat(logicalSlotFuture).isNotDone();
    // Fail the underlying physical slot request.
    context.getSlotProvider().getResponses().get(slotRequestId).completeExceptionally(new Throwable());
    assertThat(logicalSlotFuture).isCompletedExceptionally();
    // next allocation allocates new shared slot
    context.allocateSlotsFor(EV1);
    assertThat(context.getSlotProvider().getRequests()).hasSize(2);
}
/**
 * Loads a class, sandbox-instrumenting it when the configuration says the class should be
 * acquired, otherwise delegating to the parent loader. Follows the parallel-capable
 * ClassLoader protocol by synchronizing on the per-name class-loading lock.
 *
 * @throws ClassNotFoundException if this loader has been closed or the class is missing
 */
@Override
public Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
    synchronized (getClassLoadingLock(name)) {
        Class<?> loadedClass = findLoadedClass(name);
        if (loadedClass != null) {
            return loadedClass;
        }
        if (isClosed) {
            throw new ClassNotFoundException("This ClassLoader is closed");
        }
        if (config.shouldAcquire(name)) {
            // Measure instrumentation cost; maybeInstrumentClass performs the sandbox rewrite.
            loadedClass =
                PerfStatsCollector.getInstance()
                    .measure("load sandboxed class", () -> maybeInstrumentClass(name));
        } else {
            loadedClass = getParent().loadClass(name);
        }
        if (resolve) {
            resolveClass(loadedClass);
        }
        return loadedClass;
    }
}
// Instantiating a child must invoke the shadow constructor at every level of the
// hierarchy, in order from grandparent down to child.
@Test
public void shouldInvokeShadowForEachConstructorInInheritanceTree() throws Exception {
    loadClass(AChild.class).getDeclaredConstructor().newInstance();
    assertThat(transcript)
        .containsExactly(
            "methodInvoked: AGrandparent.__constructor__()",
            "methodInvoked: AParent.__constructor__()",
            "methodInvoked: AChild.__constructor__()");
}
/**
 * @return an unmodifiable view of the generated sources map. Note: this is a view, not a
 *         copy — later changes to the backing map remain visible to callers.
 */
@Override
public Map<String, String> getSourcesMap() {
    return Collections.unmodifiableMap(sourcesMap);
}
// The exposed sources map must equal the map the model was constructed with.
@Test
void getSourcesMap() {
    assertThat(kiePMMLModelWithSources.getSourcesMap()).isEqualTo(SOURCES_MAP);
}
/**
 * Scans every classpath element for resources whose name ends with the given extension.
 *
 * @param extension file extension without the leading dot; treated literally
 * @return distinct resource paths matching the extension
 */
public static Collection<String> getResourcesByExtension(String extension) {
    // Quote the extension so regex metacharacters in it (e.g. "c++") match literally,
    // and compile the pattern once instead of once per classpath element.
    final Pattern pattern = Pattern.compile(".*\\." + Pattern.quote(extension) + "$");
    return Arrays.stream(getClassPathElements())
            .flatMap(elem -> internalGetResources(elem, pattern).stream())
            .collect(Collectors.toSet());
}
// Both known .txt fixtures must be found, and every hit must end with the test file name.
@Test
public void getResourcesByExtensionTest() {
    Collection<String> resources = getResourcesByExtension("txt");
    assertThat(resources)
        .hasSize(2)
        .allMatch(elem -> elem.endsWith(TEST_FILE));
}
/**
 * Starts a JSON response: sets the JSON media type on the underlying stream and returns
 * a writer that encodes UTF-8 into the stream's output.
 */
@Override
public JsonWriter newJsonWriter() {
    stream.setMediaType(JSON);
    return JsonWriter.of(new CacheWriter(new OutputStreamWriter(stream.output(), StandardCharsets.UTF_8)));
}
// newJsonWriter() must set the JSON content type and open the response output stream.
@Test
public void test_newJsonWriter() throws Exception {
    underTest.newJsonWriter();
    verify(response).setContentType(JSON);
    verify(response).getOutputStream();
}
/**
 * Function column metadata is not supported by this driver.
 *
 * @return always {@code null}. NOTE(review): JDBC conventionally expects an empty
 *         ResultSet rather than null here; this stub deliberately returns null.
 */
@Override
public ResultSet getFunctionColumns(final String catalog, final String schemaPattern, final String functionNamePattern, final String columnNamePattern) {
    return null;
}
// The driver stubs function-column metadata as null.
@Test
void assertGetFunctionColumns() {
    assertNull(metaData.getFunctionColumns("", "", "", ""));
}
/**
 * Creates a {@link ParameterizedType} representing {@code Set<elementType>}.
 *
 * @param elementType the element type of the set
 * @return a parameterized {@code Set} type
 */
public static ParameterizedType setOf(Type elementType) {
    return parameterizedType(Set.class, elementType);
}
// Types.setOf must produce Set as the raw type with the element type as its sole argument.
@Test
public void createSetType() {
    ParameterizedType type = Types.setOf(Person.class);
    assertThat(type.getRawType()).isEqualTo(Set.class);
    assertThat(type.getActualTypeArguments()).isEqualTo(new Type[] {Person.class});
}
/**
 * Builds (via a cloned code template) the statements declaring a KiePMMLRow variable for
 * the given PMML Row: a column-values map variable plus the row object constructed from
 * it. The template's placeholder variable names are rewritten to the requested name, and
 * the map-literal initializer is filled with the row's column/value pairs.
 *
 * @param variableName name for the generated row variable
 * @param row the PMML Row whose data populates the initializer
 * @return the block of statements declaring the row variable
 */
static BlockStmt getRowVariableDeclaration(final String variableName, final Row row) {
    // Clone the method template so mutations below never corrupt the shared template.
    final MethodDeclaration methodDeclaration = ROW_TEMPLATE.getMethodsByName(GETKIEPMMLROW).get(0).clone();
    final BlockStmt toReturn = methodDeclaration.getBody()
        .orElseThrow(() -> new KiePMMLException(String.format(MISSING_BODY_TEMPLATE, methodDeclaration)));
    final String columnValuesVariableName = String.format(VARIABLE_NAME_TEMPLATE, variableName, COLUMN_VALUES);
    // Rename the template's column-values variable to a name unique to this row.
    final VariableDeclarator columnValuesVariableDeclarator = getVariableDeclarator(toReturn, COLUMN_VALUES)
        .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, ROW, toReturn)));
    columnValuesVariableDeclarator.setName(columnValuesVariableName);
    // Walk into the initializer expression down to the array literal holding the pairs.
    final MethodCallExpr columnValuesVariableInit = columnValuesVariableDeclarator.getInitializer()
        .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, COLUMN_VALUES, toReturn)))
        .asMethodCallExpr();
    final MethodCallExpr columnValuesVariableScope = columnValuesVariableInit.getScope()
        .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, COLUMN_VALUES, toReturn)))
        .asMethodCallExpr();
    final ArrayCreationExpr columnValuesVariableArray = columnValuesVariableScope.getArguments().get(0).asArrayCreationExpr();
    final ArrayInitializerExpr columnValuesVariableArrayInit = columnValuesVariableArray.getInitializer()
        .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, COLUMN_VALUES, toReturn)))
        .asArrayInitializerExpr();
    // Emit one {key, value} array literal per row entry.
    Map<String, Object> rowDataMap = getRowDataMap(row);
    NodeList<Expression> arguments = new NodeList<>();
    rowDataMap.entrySet().forEach(entry -> {
        ArrayInitializerExpr argument = new ArrayInitializerExpr();
        NodeList<Expression> values = NodeList.nodeList(new StringLiteralExpr(entry.getKey()),
            getExpressionForObject(entry.getValue()));
        argument.setValues(values);
        arguments.add(argument);
    });
    columnValuesVariableArrayInit.setValues(arguments);
    // Rename the row variable itself and point its constructor at the renamed map variable.
    final VariableDeclarator variableDeclarator = getVariableDeclarator(toReturn, ROW)
        .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_IN_BODY, ROW, toReturn)));
    variableDeclarator.setName(variableName);
    final ObjectCreationExpr objectCreationExpr = variableDeclarator.getInitializer()
        .orElseThrow(() -> new KiePMMLException(String.format(MISSING_VARIABLE_INITIALIZER_TEMPLATE, ROW, toReturn)))
        .asObjectCreationExpr();
    final NameExpr nameExpr = new NameExpr(columnValuesVariableName);
    objectCreationExpr.getArguments().set(0, nameExpr);
    return toReturn;
}
// The generated row declaration for a map-valued Row must match the expected source
// fixture and compile with the required imports.
@Test
void getMappedValueRowVariableDeclaration() throws IOException {
    String variableName = "variableName";
    BlockStmt retrieved = org.kie.pmml.compiler.commons.codegenfactories.KiePMMLRowFactory
        .getRowVariableDeclaration(variableName, MAPVALUED_ROW);
    String text = getFileContent(TEST_01_SOURCE);
    Statement expected = JavaParserUtils.parseBlock(String.format(text, variableName));
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
    List<Class<?>> imports = Arrays.asList(Collectors.class, KiePMMLRow.class, Map.class, Stream.class);
    commonValidateCompilationWithImports(retrieved, imports);
}
/**
 * Converts raw YAML content into an instance of the given class.
 *
 * @param rawContent YAML text; blank input yields empty
 * @param clazz target type
 * @return the parsed object, or empty when input is blank or not valid YAML
 */
@Override
public <T> Optional<T> convert(String rawContent, Class<T> clazz) {
    // Blank input can never produce a rule object.
    if (StringUtils.isBlank(rawContent)) {
        return Optional.empty();
    }
    try {
        final T loaded = yaml.loadAs(rawContent, clazz);
        return Optional.of(loaded);
    } catch (YAMLException ex) {
        // Malformed YAML is logged and treated as "no rule".
        LOGGER.log(Level.WARNING, String.format(Locale.ENGLISH,
                "Can not convert content [%s] to LoadbalancerRule", rawContent), ex);
        return Optional.empty();
    }
}
// A simple rule document parses into LoadbalancerRule; a match-group document parses into
// a generic Map with the expected nested structure.
@Test
public void test() {
    final RuleConverter yamlRuleConverter = new YamlRuleConverter();
    final Optional<LoadbalancerRule> convert = yamlRuleConverter
        .convert("rule: Random\nserviceName: test", LoadbalancerRule.class);
    Assert.assertTrue(convert.isPresent());
    assertEquals("Random", convert.get().getRule());
    assertEquals("test", convert.get().getServiceName());
    // Convert an arbitrary match-group document into an untyped map.
    final Optional<Map> foo = yamlRuleConverter.convert(getMatchGroup("foo"), Map.class);
    Assert.assertTrue(foo.isPresent());
    Map<String, Object> map = foo.get();
    final Object matches = map.get("matches");
    Assert.assertTrue(matches instanceof List);
    Assert.assertTrue(((List<?>) matches).size() > 0);
    final Object content = ((List<?>) matches).get(0);
    Assert.assertTrue(content instanceof Map);
    final Object serviceName = ((Map<?, ?>) content).get("serviceName");
    Assert.assertEquals(serviceName, "foo");
}
/**
 * Deploys a compressed application package: decompresses it into a temp directory,
 * then prepares and activates it. Lock-wait statistics are recorded around the whole
 * deployment, and the temp directory is always cleaned up.
 *
 * @param in the compressed application package
 * @param prepareParams parameters controlling prepare/activate
 * @return the prepare-and-activate result
 */
public PrepareAndActivateResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams) {
    DeployHandlerLogger logger = DeployHandlerLogger.forPrepareParams(prepareParams);
    File tempDir = uncheck(() -> Files.createTempDirectory("deploy")).toFile();
    ThreadLockStats threadLockStats = LockStats.getForCurrentThread();
    PrepareAndActivateResult result;
    try {
        threadLockStats.startRecording("deploy of " + prepareParams.getApplicationId().serializedForm());
        result = deploy(decompressApplication(in, tempDir), prepareParams, logger);
    } finally {
        // Stop recording and remove the temp dir even when deployment throws.
        threadLockStats.stopRecording();
        cleanupTempDirectory(tempDir, logger);
    }
    return result;
}
/** Deploys an application at the test Vespa version and asserts the resolved {@code SimpletypesConfig} carries the expected int value. */
@Test public void testResolveForAppId() { Version vespaVersion = VespaModelFactory.createTestFactory().version(); applicationRepository.deploy(app1, new PrepareParams.Builder() .applicationId(applicationId()) .vespaVersion(vespaVersion) .build()); SimpletypesConfig config = resolve(applicationId(), vespaVersion); assertEquals(1337, config.intval()); }
/**
 * Compares the records at logical indices {@code i} and {@code j} by translating
 * each index into its (segment number, byte offset) coordinates and delegating to
 * the segment-level comparison.
 *
 * @param i logical index of the first record's index entry
 * @param j logical index of the second record's index entry
 * @return negative, zero, or positive per the usual comparator contract
 */
@Override
public int compare(int i, int j) {
    final int segI = i / this.indexEntriesPerSegment;
    final int offI = (i % this.indexEntriesPerSegment) * this.indexEntrySize;
    final int segJ = j / this.indexEntriesPerSegment;
    final int offJ = (j % this.indexEntriesPerSegment) * this.indexEntrySize;
    return compare(segI, offI, segJ, offJ);
}
/** Fills the sort buffer with generated sorted-key records, then asserts that comparing random write positions respects the insertion (key) order: earlier position compares &lt;= later position and vice versa. Memory is released at the end. */
@Test void testCompare() throws Exception { final int numSegments = MEMORY_SIZE / MEMORY_PAGE_SIZE; final List<MemorySegment> memory = this.memoryManager.allocatePages(new DummyInvokable(), numSegments); NormalizedKeySorter<Tuple2<Integer, String>> sorter = newSortBuffer(memory); TestData.TupleGenerator generator = new TestData.TupleGenerator( SEED, KEY_MAX, VALUE_LENGTH, KeyMode.SORTED, ValueMode.RANDOM_LENGTH); // write the records Tuple2<Integer, String> record = new Tuple2<>(); int num = -1; do { generator.next(record); num++; } while (sorter.write(record)); // compare random elements Random rnd = new Random(SEED << 1); for (int i = 0; i < 2 * num; i++) { int pos1 = rnd.nextInt(num); int pos2 = rnd.nextInt(num); int cmp = sorter.compare(pos1, pos2); if (pos1 < pos2) { assertThat(cmp).isLessThanOrEqualTo(0); } else { assertThat(cmp).isGreaterThanOrEqualTo(0); } } // release the memory occupied by the buffers sorter.dispose(); this.memoryManager.release(memory); }
/**
 * Post-processes a successful GET_ROUTEINFO_BY_TOPIC response: when the request
 * was made in zone mode with a zone name, the route data in the response body is
 * replaced by the subset filtered to that zone. All other requests/responses are
 * left untouched.
 */
@Override
public void doAfterResponse(String remoteAddr, RemotingCommand request, RemotingCommand response) {
    if (RequestCode.GET_ROUTEINFO_BY_TOPIC != request.getCode()) {
        return;
    }
    // Only rewrite successful responses that actually carry a body.
    boolean usable = response != null && response.getBody() != null && ResponseCode.SUCCESS == response.getCode();
    if (!usable) {
        return;
    }
    if (!Boolean.parseBoolean(request.getExtFields().get(MixAll.ZONE_MODE))) {
        return;
    }
    String zoneName = request.getExtFields().get(MixAll.ZONE_NAME);
    if (StringUtils.isBlank(zoneName)) {
        return;
    }
    TopicRouteData route = RemotingSerializable.decode(response.getBody(), TopicRouteData.class);
    response.setBody(filterByZoneName(route, zoneName).encode());
}
/** Exercises the hook with zone mode enabled but no zone name set; the call must complete without modifying or rejecting the response. */
@Test public void testDoAfterResponseWithNoZoneName() { HashMap<String, String> extFields = new HashMap<>(); extFields.put(MixAll.ZONE_MODE, "true"); RemotingCommand request = RemotingCommand.createRequestCommand(105,null); request.setExtFields(extFields); RemotingCommand response = RemotingCommand.createResponseCommand(null); response.setCode(ResponseCode.SUCCESS); response.setBody(RemotingSerializable.encode(createSampleTopicRouteData())); zoneRouteRPCHook.doAfterResponse("", request, response); }
/**
 * Converts an {@link ActiveMQMessage} into an AMQP {@code EncodedMessage}.
 *
 * Maps JMS headers to the AMQP Header/Properties sections (durability, priority,
 * type/subject, message id, destination, reply-to, correlation id, expiration/TTL,
 * timestamp, redelivery count, user id, group id/sequence), then walks the JMS
 * properties map: keys under the {@code JMS_AMQP_PREFIX} are routed into the
 * matching AMQP section (header flags, content type/encoding, message/delivery
 * annotations, footer, reply-to-group-id) or skipped when they are
 * transformer-internal; scheduled-message properties are stripped; everything else
 * lands in the Application Properties section. Finally the populated sections and
 * the converted body are encoded in AMQP section order.
 *
 * NOTE(review): the Application-Properties branch at the end of the loop also
 * re-checks the (loop-invariant) data-structure type on every iteration and
 * re-puts "ActiveMqDataStructureType"; the repeated put is idempotent but looks
 * like it was meant to run once — confirm against upstream before changing.
 *
 * @param message the ActiveMQ message to convert; {@code null} yields {@code null}
 * @return the AMQP-encoded message with its message format code
 * @throws Exception on property extraction or conversion failure
 */
@Override public EncodedMessage transform(ActiveMQMessage message) throws Exception { if (message == null) { return null; } long messageFormat = 0; Header header = null; Properties properties = null; Map<Symbol, Object> daMap = null; Map<Symbol, Object> maMap = null; Map<String,Object> apMap = null; Map<Object, Object> footerMap = null; Section body = convertBody(message); if (message.isPersistent()) { if (header == null) { header = new Header(); } header.setDurable(true); } byte priority = message.getPriority(); if (priority != Message.DEFAULT_PRIORITY) { if (header == null) { header = new Header(); } header.setPriority(UnsignedByte.valueOf(priority)); } String type = message.getType(); if (type != null) { if (properties == null) { properties = new Properties(); } properties.setSubject(type); } MessageId messageId = message.getMessageId(); if (messageId != null) { if (properties == null) { properties = new Properties(); } properties.setMessageId(getOriginalMessageId(message)); } ActiveMQDestination destination = message.getDestination(); if (destination != null) { if (properties == null) { properties = new Properties(); } properties.setTo(destination.getQualifiedName()); if (maMap == null) { maMap = new HashMap<>(); } maMap.put(JMS_DEST_TYPE_MSG_ANNOTATION, destinationType(destination)); } ActiveMQDestination replyTo = message.getReplyTo(); if (replyTo != null) { if (properties == null) { properties = new Properties(); } properties.setReplyTo(replyTo.getQualifiedName()); if (maMap == null) { maMap = new HashMap<>(); } maMap.put(JMS_REPLY_TO_TYPE_MSG_ANNOTATION, destinationType(replyTo)); } String correlationId = message.getCorrelationId(); if (correlationId != null) { if (properties == null) { properties = new Properties(); } try { properties.setCorrelationId(AMQPMessageIdHelper.INSTANCE.toIdObject(correlationId)); } catch (AmqpProtocolException e) { properties.setCorrelationId(correlationId); } } long expiration = message.getExpiration(); if (expiration != 0) {
long ttl = expiration - System.currentTimeMillis(); if (ttl < 0) { ttl = 1; } if (header == null) { header = new Header(); } header.setTtl(new UnsignedInteger((int) ttl)); if (properties == null) { properties = new Properties(); } properties.setAbsoluteExpiryTime(new Date(expiration)); } long timeStamp = message.getTimestamp(); if (timeStamp != 0) { if (properties == null) { properties = new Properties(); } properties.setCreationTime(new Date(timeStamp)); } // JMSX Message Properties int deliveryCount = message.getRedeliveryCounter(); if (deliveryCount > 0) { if (header == null) { header = new Header(); } header.setDeliveryCount(UnsignedInteger.valueOf(deliveryCount)); } String userId = message.getUserID(); if (userId != null) { if (properties == null) { properties = new Properties(); } properties.setUserId(new Binary(userId.getBytes(StandardCharsets.UTF_8))); } String groupId = message.getGroupID(); if (groupId != null) { if (properties == null) { properties = new Properties(); } properties.setGroupId(groupId); } int groupSequence = message.getGroupSequence(); if (groupSequence > 0) { if (properties == null) { properties = new Properties(); } properties.setGroupSequence(UnsignedInteger.valueOf(groupSequence)); } final Map<String, Object> entries; try { entries = message.getProperties(); } catch (IOException e) { throw JMSExceptionSupport.create(e); } for (Map.Entry<String, Object> entry : entries.entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); if (key.startsWith(JMS_AMQP_PREFIX)) { if (key.startsWith(NATIVE, JMS_AMQP_PREFIX_LENGTH)) { // skip transformer appended properties continue; } else if (key.startsWith(ORIGINAL_ENCODING, JMS_AMQP_PREFIX_LENGTH)) { // skip transformer appended properties continue; } else if (key.startsWith(MESSAGE_FORMAT, JMS_AMQP_PREFIX_LENGTH)) { messageFormat = (long) TypeConversionSupport.convert(entry.getValue(), Long.class); continue; } else if (key.startsWith(HEADER, JMS_AMQP_PREFIX_LENGTH)) { if (header ==
null) { header = new Header(); } continue; } else if (key.startsWith(PROPERTIES, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } continue; } else if (key.startsWith(MESSAGE_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (maMap == null) { maMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_MESSAGE_ANNOTATION_PREFIX.length()); maMap.put(Symbol.valueOf(name), value); continue; } else if (key.startsWith(FIRST_ACQUIRER, JMS_AMQP_PREFIX_LENGTH)) { if (header == null) { header = new Header(); } header.setFirstAcquirer((boolean) TypeConversionSupport.convert(value, Boolean.class)); continue; } else if (key.startsWith(CONTENT_TYPE, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setContentType(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class))); continue; } else if (key.startsWith(CONTENT_ENCODING, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setContentEncoding(Symbol.getSymbol((String) TypeConversionSupport.convert(value, String.class))); continue; } else if (key.startsWith(REPLYTO_GROUP_ID, JMS_AMQP_PREFIX_LENGTH)) { if (properties == null) { properties = new Properties(); } properties.setReplyToGroupId((String) TypeConversionSupport.convert(value, String.class)); continue; } else if (key.startsWith(DELIVERY_ANNOTATION_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (daMap == null) { daMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_DELIVERY_ANNOTATION_PREFIX.length()); daMap.put(Symbol.valueOf(name), value); continue; } else if (key.startsWith(FOOTER_PREFIX, JMS_AMQP_PREFIX_LENGTH)) { if (footerMap == null) { footerMap = new HashMap<>(); } String name = key.substring(JMS_AMQP_FOOTER_PREFIX.length()); footerMap.put(Symbol.valueOf(name), value); continue; } } else if (key.startsWith(AMQ_SCHEDULED_MESSAGE_PREFIX )) { // strip off the scheduled message properties continue; }
// The property didn't map into any other slot so we store it in the
// Application Properties section of the message.
if (apMap == null) { apMap = new HashMap<>(); } apMap.put(key, value); int messageType = message.getDataStructureType(); if (messageType == CommandTypes.ACTIVEMQ_MESSAGE) { // Type of command to recognize advisory message Object data = message.getDataStructure(); if(data != null) { apMap.put("ActiveMqDataStructureType", data.getClass().getSimpleName()); } } } final AmqpWritableBuffer buffer = new AmqpWritableBuffer(); encoder.setByteBuffer(buffer); if (header != null) { encoder.writeObject(header); } if (daMap != null) { encoder.writeObject(new DeliveryAnnotations(daMap)); } if (maMap != null) { encoder.writeObject(new MessageAnnotations(maMap)); } if (properties != null) { encoder.writeObject(properties); } if (apMap != null) { encoder.writeObject(new ApplicationProperties(apMap)); } if (body != null) { encoder.writeObject(body); } if (footerMap != null) { encoder.writeObject(new Footer(footerMap)); } return new EncodedMessage(messageFormat, buffer.getArray(), 0, buffer.getArrayLength()); }
/** Transforms a JMS TextMessage and asserts the AMQP body is an {@code AmqpValue} carrying the original string content. */
@Test public void testConvertTextMessageCreatesAmqpValueStringBody() throws Exception { String contentString = "myTextMessageContent"; ActiveMQTextMessage outbound = createTextMessage(contentString); outbound.onSend(); outbound.storeContent(); JMSMappingOutboundTransformer transformer = new JMSMappingOutboundTransformer(); EncodedMessage encoded = transformer.transform(outbound); assertNotNull(encoded); Message amqp = encoded.decode(); assertNotNull(amqp.getBody()); assertTrue(amqp.getBody() instanceof AmqpValue); assertEquals(contentString, ((AmqpValue) amqp.getBody()).getValue()); }
public static void extractZipFile(ZipFile zipFile, File toDir, String prefix) throws IOException { ensureDirectory(toDir); final String base = toDir.getCanonicalPath(); Enumeration<? extends ZipEntry> entries = zipFile.entries(); while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); if (!entry.isDirectory()) { if (prefix != null && !entry.getName().startsWith(prefix)) { //No need to extract it, it is not what we are looking for. continue; } String entryName; if (prefix != null) { entryName = entry.getName().substring(prefix.length()); LOG.debug("Extracting {} shortened to {} into {}", entry.getName(), entryName, toDir); } else { entryName = entry.getName(); } File file = new File(toDir, entryName); String found = file.getCanonicalPath(); if (!found.startsWith(base)) { LOG.error("Invalid location {} is outside of {}", found, base); continue; } try (InputStream in = zipFile.getInputStream(entry)) { ensureDirectory(file.getParentFile()); try (OutputStream out = new FileOutputStream(file)) { IOUtils.copy(in, out); } } } } }
/** Extracts a crafted archive containing good.txt and ../evil.txt; asserts the in-tree file is written while the traversal entry is not extracted outside the destination. */
@Test public void testExtractZipFileDisallowsPathTraversal() throws Exception { try (TmpPath path = new TmpPath()) { Path testRoot = Paths.get(path.getPath()); Path extractionDest = testRoot.resolve("dest"); Files.createDirectories(extractionDest); /* * Contains good.txt and ../evil.txt. Evil.txt will path outside the target dir, and should not be extracted. */ try (ZipFile zip = new ZipFile(Paths.get("src/test/resources/evil-path-traversal.jar").toFile())) { ServerUtils.extractZipFile(zip, extractionDest.toFile(), null); } assertThat(Files.exists(extractionDest.resolve("good.txt")), is(true)); assertThat(Files.exists(testRoot.resolve("evil.txt")), is(false)); } }
/**
 * Ensures the local copy of {@code downloadableFile} matches the server's MD5,
 * downloading it when missing or stale. Retries forever on failure, sleeping
 * between attempts for the millisecond period in the "sleep.for.download" system
 * property (falling back to the default); the retry loop exits early only if the
 * thread is interrupted during the sleep, in which case the interrupt flag is
 * restored and the loop re-runs its check.
 *
 * NOTE(review): an unparseable "sleep.for.download" value would make
 * Integer.parseInt throw inside the catch block and escape this method — confirm
 * whether that is intended.
 *
 * @param downloadableFile the file to check and possibly fetch
 * @return true only if a download was actually performed in this call
 */
public boolean downloadIfNecessary(final DownloadableFile downloadableFile) { boolean updated = false; boolean downloaded = false; while (!updated) try { fetchUpdateCheckHeaders(downloadableFile); if (downloadableFile.doesNotExist() || !downloadableFile.isChecksumEquals(getMd5())) { PerfTimer timer = PerfTimer.start("Downloading new " + downloadableFile + " with md5 signature: " + md5); downloaded = download(downloadableFile); timer.stop(); } updated = true; } catch (Exception e) { try { int period = Integer.parseInt(System.getProperty("sleep.for.download", DEFAULT_FAILED_DOWNLOAD_SLEEP_MS)); LOG.error("Couldn't update {}. Sleeping for {}s. Error: ", downloadableFile, TimeUnit.SECONDS.convert(period, TimeUnit.MILLISECONDS), e); Thread.sleep(period); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); } } return downloaded; }
/** Verifies the agent jar is absent before the call and present on disk after {@code downloadIfNecessary} runs against the test server. */
@Test public void shouldDownloadAgentJarFile() { ServerBinaryDownloader downloader = new ServerBinaryDownloader(new GoAgentServerHttpClientBuilder(null, SslVerificationMode.NONE, null, null, null), ServerUrlGeneratorMother.generatorFor("localhost", server.getPort())); assertThat(DownloadableFile.AGENT.doesNotExist(), is(true)); downloader.downloadIfNecessary(DownloadableFile.AGENT); assertThat(DownloadableFile.AGENT.getLocalFile().exists(), is(true)); }
/**
 * Server-side copies {@code source} to {@code target} on S3.
 *
 * Fills in transfer defaults from the source object when the caller did not set
 * them: storage class, encryption, and (when the target bucket supports editable
 * ACLs) the source's non-standard ACL — ACL lookup failures are logged and
 * ignored. Source metadata is carried over, the bucket name is resolved from the
 * target path, and the returned path carries the source's attributes plus the
 * version id produced by the copy.
 *
 * @return the target path with updated attributes (including new version id)
 * @throws BackgroundException on copy failure
 */
@Override public Path copy(final Path source, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException { if(null == status.getStorageClass()) { // Keep same storage class status.setStorageClass(new S3StorageClassFeature(session, acl).getClass(source)); } if(Encryption.Algorithm.NONE == status.getEncryption()) { // Keep encryption setting status.setEncryption(new S3EncryptionFeature(session, acl).getEncryption(source)); } if(Acl.EMPTY == status.getAcl()) { // Apply non-standard ACL try { // Verify target bucket allows ACLs if(acl.getPermission(containerService.getContainer(target)).isEditable()) { status.setAcl(acl.getPermission(source)); } } catch(AccessDeniedException | InteroperabilityException e) { log.warn(String.format("Ignore failure %s", e)); } } final S3Object destination = new S3WriteFeature(session, acl).getDetails(target, status); destination.setAcl(acl.toAcl(status.getAcl())); final Path bucket = containerService.getContainer(target); destination.setBucketName(bucket.isRoot() ? StringUtils.EMPTY : bucket.getName()); destination.replaceAllMetadata(new HashMap<>(new S3MetadataFeature(session, acl).getMetadata(source))); final String versionId = this.copy(source, destination, status, listener); return target.withAttributes(new PathAttributes(source.attributes()).withVersionId(versionId)); }
/** Copies a freshly-created zero-byte object and asserts both source and copy exist (copy has no version id), then deletes both. */
@Test public void testCopyFileZeroLength() throws Exception { final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path test = new Path(container, new AsciiRandomStringService().random(), EnumSet.of(Path.Type.file)); test.attributes().setSize(0L); new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch(test, new TransferStatus()); final Path copy = new Path(container, new AsciiRandomStringService().random(), EnumSet.of(Path.Type.file)); new S3CopyFeature(session, new S3AccessControlListFeature(session)).copy(test, copy, new TransferStatus(), new DisabledConnectionCallback(), new DisabledStreamListener()); assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(test)); assertNull(copy.attributes().getVersionId()); new S3DefaultDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback()); assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(copy)); ; new S3DefaultDeleteFeature(session).delete(Collections.singletonList(copy), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Creates a read-only HTTP/2 trailers header set from the given name/value pairs.
 *
 * @param validateHeaders whether header names/values are validated
 * @param otherHeaders    alternating name, value entries
 * @return an immutable headers view with no pseudo-headers
 */
public static ReadOnlyHttp2Headers trailers(boolean validateHeaders, AsciiString... otherHeaders) {
    // Trailers never contain pseudo-headers, hence the shared empty array.
    final AsciiString[] pseudoHeaders = EMPTY_ASCII_STRINGS;
    return new ReadOnlyHttp2Headers(validateHeaders, pseudoHeaders, otherHeaders);
}
/** Asserts that constructing read-only trailers with a null header value throws {@code IllegalArgumentException}. */
@Test public void nullValuesAreNotAllowed() { assertThrows(IllegalArgumentException.class, new Executable() { @Override public void execute() { ReadOnlyHttp2Headers.trailers(true, new AsciiString("foo"), null); } }); }
/**
 * Synchronously sends {@code request} through the underlying API client.
 *
 * @param request the Telegram Bot API request to send
 * @return the typed response produced by the API client
 */
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) {
    final R response = api.send(request);
    return response;
}
/** Sends a Markdown message to a channel and asserts the first entity URL and the sender chat id match expectations. */
@Test public void sendMessageToChannel() { String url = "https://google.com/"; SendMessage request = new SendMessage(channelName, "channel message [GG](" + url + ")").parseMode(ParseMode.Markdown); SendResponse sendResponse = bot.execute(request); Message message = sendResponse.message(); MessageTest.checkTextMessage(message); assertEquals(url, message.entities()[0].url()); assertEquals(channelId, message.senderChat().id()); }
/**
 * Spearman rank correlation coefficient of two integer samples.
 *
 * Both samples are copied to doubles, ranked (co-sorting keeps the pairs
 * aligned), and the correlation of the two rank vectors is returned.
 *
 * @param x first sample
 * @param y second sample, same length as {@code x}
 * @return the rank correlation of {@code x} and {@code y}
 * @throws IllegalArgumentException if the samples differ in length
 */
public static double spearman(int[] x, int[] y) {
    if (x.length != y.length) {
        throw new IllegalArgumentException("Input vector sizes are different.");
    }
    final int n = x.length;
    double[] xs = new double[n];
    double[] ys = new double[n];
    for (int i = 0; i < n; i++) {
        xs[i] = x[i];
        ys[i] = y[i];
    }
    // Rank each sample in turn; sorting one array drags the other along so the
    // pairing survives both ranking passes.
    QuickSort.sort(xs, ys);
    crank(xs);
    QuickSort.sort(ys, xs);
    crank(ys);
    return cor(xs, ys);
}
/** Checks the Spearman correlation of two fixed double samples against the expected value within 1E-7. */
@Test public void testSpearman_doubleArr_doubleArr() { System.out.println("spearman"); double[] x = {-2.1968219, -0.9559913, -0.0431738, 1.0567679, 0.3853515}; double[] y = {-1.7781325, -0.6659839, 0.9526148, -0.9460919, -0.3925300}; assertEquals(0.3, MathEx.spearman(x, y), 1E-7); }
/**
 * Sets the strictness level used by this writer.
 *
 * @param strictness the strictness to apply; must not be null
 * @throws NullPointerException if {@code strictness} is null
 */
public final void setStrictness(Strictness strictness) {
    Objects.requireNonNull(strictness);
    this.strictness = strictness;
}
/** Configures a JsonWriter with STRICT strictness and asserts non-finite numbers raise exceptions. */
@Test public void testNonFiniteNumbersWhenStrict() throws IOException { StringWriter stringWriter = new StringWriter(); JsonWriter jsonWriter = new JsonWriter(stringWriter); jsonWriter.setStrictness(Strictness.STRICT); assertNonFiniteNumbersExceptions(jsonWriter); }
/**
 * Validates every column's control vs. test checksum and returns only the
 * results that did not match.
 *
 * @param columns         columns to validate
 * @param controlChecksum checksums computed over the control run
 * @param testChecksum    checksums computed over the test run
 * @return the mismatched column results, in column order
 */
public List<ColumnMatchResult<?>> getMismatchedColumns(List<Column> columns, ChecksumResult controlChecksum, ChecksumResult testChecksum) {
    return columns.stream()
            .flatMap(col -> columnValidators.get(col.getCategory()).get()
                    .validate(col, controlChecksum, testChecksum)
                    .stream())
            .filter(result -> !result.isMatched())
            .collect(toImmutableList());
}
/** Exercises map-column checksum validation: a matching checksum set yields no mismatches, while altering any single component (map checksum, keys checksum, cardinality checksum, cardinality sum, or — for the float/non-float map — values checksum) flags the column as mismatched. */
@Test public void testMap() { List<Column> columns = ImmutableList.of(MAP_COLUMN); ChecksumResult controlChecksum = new ChecksumResult( 5, ImmutableMap.<String, Object>builder() .put("map$checksum", new SqlVarbinary(new byte[] {0xa})) .put("map$keys_checksum", new SqlVarbinary(new byte[] {0xb})) .put("map$values_checksum", new SqlVarbinary(new byte[] {0xc})) .put("map$cardinality_sum", 3L) .put("map$cardinality_checksum", new SqlVarbinary(new byte[] {0xd})) .build()); // Matched assertTrue(checksumValidator.getMismatchedColumns(columns, controlChecksum, controlChecksum).isEmpty()); // Mismatched map checksum ChecksumResult testChecksum = new ChecksumResult( 5, ImmutableMap.<String, Object>builder() .put("map$checksum", new SqlVarbinary(new byte[] {0x1a})) .put("map$keys_checksum", new SqlVarbinary(new byte[] {0xb})) .put("map$values_checksum", new SqlVarbinary(new byte[] {0xc})) .put("map$cardinality_sum", 3L) .put("map$cardinality_checksum", new SqlVarbinary(new byte[] {0xd})) .build()); assertMismatchedColumns(columns, controlChecksum, testChecksum, MAP_COLUMN); // Mismatched keys checksum testChecksum = new ChecksumResult( 5, ImmutableMap.<String, Object>builder() .put("map$checksum", new SqlVarbinary(new byte[] {0xa})) .put("map$keys_checksum", new SqlVarbinary(new byte[] {0x1b})) .put("map$values_checksum", new SqlVarbinary(new byte[] {0xc})) .put("map$cardinality_checksum", new SqlVarbinary(new byte[] {0xd})) .put("map$cardinality_sum", 3L) .build()); assertMismatchedColumns(columns, controlChecksum, testChecksum, MAP_COLUMN); // Mismatched cardinality checksum testChecksum = new ChecksumResult( 5, ImmutableMap.<String, Object>builder() .put("map$checksum", new SqlVarbinary(new byte[] {0xa})) .put("map$keys_checksum", new SqlVarbinary(new byte[] {0xb})) .put("map$values_checksum", new SqlVarbinary(new byte[] {0xc})) .put("map$cardinality_checksum", new SqlVarbinary(new byte[] {0x1d})) .put("map$cardinality_sum", 3L) .build()); assertMismatchedColumns(columns,
controlChecksum, testChecksum, MAP_COLUMN); // Mismatched cardinality sum testChecksum = new ChecksumResult( 5, ImmutableMap.<String, Object>builder() .put("map$checksum", new SqlVarbinary(new byte[] {0xa})) .put("map$keys_checksum", new SqlVarbinary(new byte[] {0xb})) .put("map$values_checksum", new SqlVarbinary(new byte[] {0xc})) .put("map$cardinality_checksum", new SqlVarbinary(new byte[] {0xd})) .put("map$cardinality_sum", 4L) .build()); assertMismatchedColumns(columns, controlChecksum, testChecksum, MAP_COLUMN); columns = ImmutableList.of(MAP_FLOAT_NON_FLOAT_COLUMN); controlChecksum = new ChecksumResult( 5, ImmutableMap.<String, Object>builder() .put("map_float_non_float$checksum", new SqlVarbinary(new byte[] {0xa})) .put("map_float_non_float$keys_checksum", new SqlVarbinary(new byte[] {0xb})) .put("map_float_non_float$values_checksum", new SqlVarbinary(new byte[] {0xc})) .put("map_float_non_float$cardinality_sum", 3L) .put("map_float_non_float$cardinality_checksum", new SqlVarbinary(new byte[] {0xd})) .build()); // Mismatched values checksum testChecksum = new ChecksumResult( 5, ImmutableMap.<String, Object>builder() .put("map_float_non_float$checksum", new SqlVarbinary(new byte[] {0xa})) .put("map_float_non_float$keys_checksum", new SqlVarbinary(new byte[] {0xb})) .put("map_float_non_float$values_checksum", new SqlVarbinary(new byte[] {0x1c})) .put("map_float_non_float$cardinality_checksum", new SqlVarbinary(new byte[] {0xd})) .put("map_float_non_float$cardinality_sum", 3L) .build()); assertMismatchedColumns(columns, controlChecksum, testChecksum, MAP_FLOAT_NON_FLOAT_COLUMN); }
/**
 * Returns a stage that runs {@code action} once either this stage or
 * {@code other} completes; the action's stage carries no value.
 *
 * @param other  the other stage raced against this one
 * @param action the runnable executed after the first completion
 * @return a Void-typed stage completing after the action runs
 */
@Override
public ParSeqBasedCompletionStage<Void> runAfterEither(CompletionStage<?> other, Runnable action) {
    // Erase the other stage's value to Void so both branches share a type,
    // then run the action and yield null as the stage result.
    return produceEitherStage("runAfterEither", cast(other, ignored -> null), firstResult -> {
        action.run();
        return null;
    });
}
/** With both input stages succeeding, verifies {@code runAfterEither} invokes the runnable exactly once. */
@Test public void testRunAfterEither_Success_Success() throws Exception { Runnable runnable = mock(Runnable.class); CompletionStage<String> completionStage = createTestStage(TESTVALUE1); CompletionStage<String> completionStage2 = createTestStage(TESTVALUE2); finish(completionStage.runAfterEither(completionStage2, runnable)); verify(runnable, times(1)).run(); }