focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@Override
public final void calculate() {
    // Derive the per-label average (summation / count) for every counted label.
    for (String key : count.keys()) {
        final Long total = summation.get(key);
        final Long occurrences = count.get(key);
        if (total == null || occurrences == null) {
            // Incomplete data for this label; leave it out of the result.
            continue;
        }
        long avg = total / occurrences;
        // Integer division reports 0 for any positive total smaller than the
        // count; clamp to 1 so small-but-present values are not hidden.
        if (avg == 0 && total > 0) {
            avg = 1;
        }
        value.put(key, avg);
    }
}
// Verifies calculate() averages per-label sums after two accepted samples:
// "200" -> (10+2)/2 = 6, "404" -> 2/1 = 2, "500" -> 3/1 = 3.
@Test
public void testCalculate() {
    function.accept(
        MeterEntity.newService("request_count", Layer.GENERAL),
        build(asList("200", "404"), asList(10L, 2L)));
    function.accept(
        MeterEntity.newService("request_count", Layer.GENERAL),
        build(asList("200", "500"), asList(2L, 3L)));
    function.calculate();
    assertThat(function.getValue().sortedKeys(Comparator.naturalOrder())).isEqualTo(asList("200", "404", "500"));
    assertThat(function.getValue().sortedValues(Comparator.naturalOrder())).isEqualTo(asList(6L, 2L, 3L));
}
/**
 * Builds the HTTP {@code User-Agent} value: {@code "jib <toolVersion> <toolName> [upstream]"},
 * or the empty string when user-agent reporting is disabled.
 */
@VisibleForTesting
String makeUserAgent() {
  // Honor the opt-out: an empty UA string means "send nothing identifying".
  if (!JibSystemProperties.isUserAgentEnabled()) {
    return "";
  }
  StringBuilder userAgentBuilder = new StringBuilder("jib");
  userAgentBuilder.append(' ').append(toolVersion);
  userAgentBuilder.append(' ').append(toolName);
  // Read the property once so the null/empty check and the append cannot
  // observe different values if the property is changed concurrently.
  String upstreamClient = System.getProperty(JibSystemProperties.UPSTREAM_CLIENT);
  if (!Strings.isNullOrEmpty(upstreamClient)) {
    userAgentBuilder.append(' ').append(upstreamClient);
  }
  return userAgentBuilder.toString();
}
// The generated User-Agent must include tool version, tool name, and the
// upstream client announced via the UPSTREAM_CLIENT system property.
@Test
public void testGetUserAgentWithUpstreamClient() throws CacheDirectoryCreationException {
    System.setProperty(JibSystemProperties.UPSTREAM_CLIENT, "skaffold/0.34.0");
    BuildContext buildContext =
        createBasicTestBuilder().setToolName("test-name").setToolVersion("test-version").build();
    String generatedUserAgent = buildContext.makeUserAgent();
    Assert.assertEquals("jib test-version test-name skaffold/0.34.0", generatedUserAgent);
}
/**
 * Checks whether the given container-like object is empty.
 *
 * <p>Supported types: {@link Collection}, {@link Map}, object arrays, primitive
 * arrays (via reflection), {@link Iterator} and {@link Enumeration}.
 *
 * @param object the object to inspect, must not be null
 * @return true if the object contains no elements
 * @throws IllegalArgumentException if the object is null or of an unsupported type
 */
public static boolean sizeIsEmpty(Object object) {
    // Fail fast on null: none of the instanceof checks below would match it,
    // so check it up front instead of at the end of the chain.
    if (object == null) {
        throw new IllegalArgumentException("Unsupported object type: null");
    }
    if (object instanceof Collection) {
        return ((Collection<?>) object).isEmpty();
    }
    if (object instanceof Map) {
        return ((Map<?, ?>) object).isEmpty();
    }
    if (object instanceof Object[]) {
        return ((Object[]) object).length == 0;
    }
    if (object instanceof Iterator) {
        return !((Iterator<?>) object).hasNext();
    }
    if (object instanceof Enumeration) {
        return !((Enumeration<?>) object).hasMoreElements();
    }
    try {
        // Reflection covers primitive arrays (int[], long[], ...).
        return Array.getLength(object) == 0;
    } catch (IllegalArgumentException ex) {
        throw new IllegalArgumentException("Unsupported object type: " + object.getClass().getName());
    }
}
// sizeIsEmpty(null) must reject null with an IllegalArgumentException.
@Test
void testSizeIsEmpty1() {
    assertThrows(IllegalArgumentException.class, () -> {
        CollectionUtils.sizeIsEmpty(null);
    });
}
// Deprecated overload kept so that comparing an Iterable subject against
// non-Iterable values still compiles; it simply delegates to the superclass,
// which produces the "did you mean containsNoneOf?" failure message.
@Override
@Deprecated
public void isNoneOf(
    @Nullable Object first, @Nullable Object second, @Nullable Object @Nullable ... rest) {
  super.isNoneOf(first, second, rest);
}
@Test @SuppressWarnings({"IncompatibleArgumentType", "deprecation"}) // test of a mistaken call public void isNoneOf() { ImmutableList<String> actual = ImmutableList.of("a"); assertThat(actual).isNoneOf(ImmutableList.of("b"), ImmutableList.of("c")); expectFailureWhenTestingThat(actual).isNoneOf("a", "b"); assertThat(expectFailure.getFailure()) .hasMessageThat() .isEqualTo( "The actual value is an Iterable, and you've written a test that compares it to some " + "objects that are not Iterables. Did you instead mean to check whether its " + "*contents* match any of the *contents* of the given values? If so, call " + "containsNoneOf(...)/containsNoneIn(...) instead. Non-iterables: [a, b]"); }
/**
 * Scans for resources addressed by the given URI.
 *
 * <p>A {@code classpath:} URI is resolved through the classpath scanner; any
 * other scheme falls through to the generic URI-based lookup with no filter.
 *
 * @param classpathResourceUri the URI to scan, must not be null
 * @return the resources found under the URI
 */
public List<R> scanForResourcesUri(URI classpathResourceUri) {
    requireNonNull(classpathResourceUri, "classpathResourceUri must not be null");
    if (CLASSPATH_SCHEME.equals(classpathResourceUri.getScheme())) {
        return scanForClasspathResource(resourceName(classpathResourceUri), NULL_FILTER);
    }
    return findResourcesForUri(classpathResourceUri, DEFAULT_PACKAGE_NAME, NULL_FILTER, createUriResource());
}
// A file: URI must be handled by the generic (non-classpath) path and the
// scan must yield the file's own URI.
@Test
void scanForResourcesFileUri() {
    File file = new File("src/test/resources/io/cucumber/core/resource/test/resource.txt");
    List<URI> resources = resourceScanner.scanForResourcesUri(file.toURI());
    assertThat(resources, contains(file.toURI()));
}
// Pure pass-through: forward the write-checksum flag to the wrapped FileSystem.
@Override
public void setWriteChecksum(boolean writeChecksum) {
    fs.setWriteChecksum(writeChecksum);
}
// setWriteChecksum must be forwarded verbatim (both false and true) to the
// wrapped FileSystem.
@Test
public void testWriteChecksumPassthru() {
    FileSystem mockFs = mock(FileSystem.class);
    FileSystem fs = new FilterFileSystem(mockFs);
    fs.setWriteChecksum(false);
    verify(mockFs).setWriteChecksum(eq(false));
    reset(mockFs);
    fs.setWriteChecksum(true);
    verify(mockFs).setWriteChecksum(eq(true));
}
/**
 * Serializes a non-negative BigInteger into a fixed-width, big-endian,
 * zero-padded byte array.
 *
 * @param b        the value to serialize; must be zero or positive
 * @param numBytes width of the result array; must be positive and large
 *                 enough to hold the magnitude of {@code b}
 * @return a byte array of exactly {@code numBytes} bytes
 */
public static byte[] bigIntegerToBytes(BigInteger b, int numBytes) {
    checkArgument(b.signum() >= 0, () -> "b must be positive or zero: " + b);
    checkArgument(numBytes > 0, () -> "numBytes must be positive: " + numBytes);
    final byte[] raw = b.toByteArray();
    // toByteArray() may prepend a single 0x00 sign byte; it is not part of
    // the magnitude and must not count against numBytes.
    final boolean hasSignByte = raw[0] == 0;
    final int magnitudeLength = hasSignByte ? raw.length - 1 : raw.length;
    checkArgument(magnitudeLength <= numBytes, () -> "The given number does not fit in " + numBytes);
    final byte[] result = new byte[numBytes];
    // Right-align the magnitude so the leading bytes stay zero-padded.
    System.arraycopy(raw, hasSignByte ? 1 : 0, result, numBytes - magnitudeLength, magnitudeLength);
    return result;
}
// A value that needs more than one byte must not fit into numBytes == 1.
@Test(expected = IllegalArgumentException.class)
public void bigIntegerToBytes_insufficientLength() {
    BigInteger b = BigInteger.valueOf(0b1000__0000_0000); // base 2
    ByteUtils.bigIntegerToBytes(b, 1);
}
// Fires when either sub-trigger would fire. Note the ACTUAL sub-trigger is
// evaluated first and may short-circuit the UNTIL check.
@Override
public boolean shouldFire(TriggerStateMachine.TriggerContext context) throws Exception {
    return context.trigger().subTrigger(ACTUAL).invokeShouldFire(context)
        || context.trigger().subTrigger(UNTIL).invokeShouldFire(context);
}
// After merging windows, the trigger's readiness must be re-evaluated against
// the merged window's state rather than either pre-merge window's state.
@Test
public void testShouldFireAfterMerge() throws Exception {
    tester = TriggerStateMachineTester.forTrigger(
        AfterEachStateMachine.inOrder(
            AfterPaneStateMachine.elementCountAtLeast(5)
                .orFinally(AfterWatermarkStateMachine.pastEndOfWindow()),
            RepeatedlyStateMachine.forever(AfterPaneStateMachine.elementCountAtLeast(1))),
        Sessions.withGapDuration(Duration.millis(10)));
    // Finished the orFinally in the first window
    tester.injectElements(1);
    IntervalWindow firstWindow = new IntervalWindow(new Instant(1), new Instant(11));
    assertFalse(tester.shouldFire(firstWindow));
    tester.advanceInputWatermark(new Instant(11));
    assertTrue(tester.shouldFire(firstWindow));
    tester.fireIfShouldFire(firstWindow);
    // Set up second window where it is not done
    tester.injectElements(5);
    IntervalWindow secondWindow = new IntervalWindow(new Instant(5), new Instant(15));
    assertFalse(tester.shouldFire(secondWindow));
    // Merge them, if the merged window were on the second trigger, it would be ready
    tester.mergeWindows();
    IntervalWindow mergedWindow = new IntervalWindow(new Instant(1), new Instant(15));
    assertFalse(tester.shouldFire(mergedWindow));
    // Now adding 3 more makes the main trigger ready to fire
    tester.injectElements(1, 2, 3, 4, 5);
    tester.mergeWindows();
    assertTrue(tester.shouldFire(mergedWindow));
}
// Convenience overload: remove using the channel's own ByteBuf allocator.
public ByteBuf remove(int bytes, ChannelPromise aggregatePromise) {
    return remove(channel.alloc(), bytes, aggregatePromise);
}
// Removing exactly an added buffer's size must return the original buffer
// instance, with the per-buffer promise/listener completing only when the
// aggregate promise succeeds.
@Test
public void testReadExactAddedBufferSizeReturnsOriginal() {
    writeQueue.add(cat, catPromise);
    writeQueue.add(mouse, mouseListener);
    ChannelPromise aggregatePromise = newPromise();
    assertSame(cat, writeQueue.remove(3, aggregatePromise));
    assertFalse(catPromise.isSuccess());
    aggregatePromise.setSuccess();
    assertTrue(catPromise.isSuccess());
    assertEquals(1, cat.refCnt());
    cat.release();
    aggregatePromise = newPromise();
    assertSame(mouse, writeQueue.remove(5, aggregatePromise));
    assertFalse(mouseDone);
    aggregatePromise.setSuccess();
    assertTrue(mouseSuccess);
    assertEquals(1, mouse.refCnt());
    mouse.release();
}
/**
 * Inflates one pager page: a scrollable quick-keys keyboard view for the add-on
 * at {@code position}. Keyboards are cached per position, except position 0
 * (history), which is always rebuilt. Keyboards wider than the view are
 * re-flowed so every key row fits within the available width.
 */
@NonNull
@Override
public Object instantiateItem(@NonNull ViewGroup container, int position) {
    View root = mLayoutInflater.inflate(R.layout.quick_text_popup_autorowkeyboard_view, container, false);
    ScrollViewWithDisable scrollViewWithDisable = root.findViewById(R.id.scroll_root_for_quick_test_keyboard);
    // Extra bottom padding keeps the last key row above any system inset.
    scrollViewWithDisable.setPadding(
        scrollViewWithDisable.getPaddingLeft(),
        scrollViewWithDisable.getPaddingTop(),
        scrollViewWithDisable.getPaddingRight(),
        scrollViewWithDisable.getPaddingBottom() + mBottomPadding);
    container.addView(root);
    final QuickKeysKeyboardView keyboardView = root.findViewById(R.id.keys_container);
    keyboardView.setKeyboardTheme(mKeyboardTheme);
    keyboardView.setOnPopupShownListener(
        new PopupKeyboardShownHandler(mViewPager, scrollViewWithDisable));
    keyboardView.setOnKeyboardActionListener(mKeyboardActionListener);
    QuickTextKey addOn = mAddOns[position];
    AnyPopupKeyboard keyboard = mPopupKeyboards[position];
    if (keyboard == null || position == 0 /*ALWAYS re-create history, in case it has changed*/) {
        if (addOn.isPopupKeyboardUsed()) {
            keyboard = new AnyPopupKeyboard(
                addOn,
                mContext,
                addOn.getPopupKeyboardResId(),
                keyboardView.getThemedKeyboardDimens(),
                addOn.getName(),
                mDefaultSkinTonePrefTracker.getDefaultSkinTone(),
                mDefaultGenderPrefTracker.getDefaultGender());
        } else {
            keyboard = new PopupListKeyboard(
                mDefaultLocalAddOn,
                mContext,
                keyboardView.getThemedKeyboardDimens(),
                addOn.getPopupListNames(),
                addOn.getPopupListValues(),
                addOn.getName());
        }
        mPopupKeyboards[position] = keyboard;
        final int keyboardViewMaxWidth = keyboardView.getThemedKeyboardDimens().getKeyboardMaxWidth();
        mIsAutoFitKeyboards[position] =
            keyboard.getMinWidth() > keyboardViewMaxWidth || addOn instanceof HistoryQuickTextKey;
        if (mIsAutoFitKeyboards[position]) {
            // fixing up the keyboard, so it will fit nicely in the width
            int currentY = 0;
            int xSub = 0;
            for (Keyboard.Key key : keyboard.getKeys()) {
                key.y = currentY;
                key.x -= xSub;
                // Key would overflow the row: wrap it to the start of a new row.
                if (Keyboard.Key.getEndX(key) > keyboardViewMaxWidth) {
                    currentY += key.height;
                    xSub += key.x;
                    key.y = currentY;
                    key.x = 0;
                }
            }
            keyboard.resetDimensions();
        }
    }
    keyboardView.setKeyboard(keyboard);
    return root;
}
// Verifies pager page creation: views are always new; the history keyboard
// (position 0) is rebuilt on every instantiation, while other positions reuse
// their cached keyboard; the keyboard view itself carries no background; and
// the configured bottom padding is applied.
// NOTE(review): the original file had these statements on separate lines; the
// flattened form accidentally commented out the Mockito.verify calls behind
// "//noinspection" — line breaks restored here.
@Test
public void testInstantiateItem() throws Exception {
    ViewGroup container = new LinearLayout(getApplicationContext());
    Object instance0 = mUnderTest.instantiateItem(container, 0);
    Assert.assertNotNull(instance0);
    Assert.assertTrue(instance0 instanceof ScrollViewWithDisable);
    Assert.assertEquals(1, container.getChildCount());
    Assert.assertSame(instance0, container.getChildAt(0));
    //noinspection
    Mockito.verify(mSkinTonePrefTracker).getDefaultSkinTone();
    Mockito.verify(mGenderTracker).getDefaultGender();
    final QuickKeysKeyboardView keyboardView0 = ((View) instance0).findViewById(R.id.keys_container);
    Assert.assertNotNull(keyboardView0);
    Object instance1 = mUnderTest.instantiateItem(container, 1);
    Assert.assertNotNull(instance1);
    Assert.assertNotSame(instance0, instance1);
    final QuickKeysKeyboardView keyboardView1 = ((View) instance1).findViewById(R.id.keys_container);
    Assert.assertNotNull(keyboardView1);
    Assert.assertNotEquals(
        keyboardView0.getKeyboard().getKeyboardAddOn().getId(),
        keyboardView1.getKeyboard().getKeyboardAddOn().getId());
    Object instance0Again = mUnderTest.instantiateItem(container, 0);
    Assert.assertNotNull(instance0Again);
    Assert.assertNotSame(instance0, instance0Again);
    final QuickKeysKeyboardView keyboardView0Again = ((View) instance0Again).findViewById(R.id.keys_container);
    Assert.assertNotNull(keyboardView0Again);
    // the history is always recreated!
    Assert.assertNotSame(keyboardView0.getKeyboard(), keyboardView0Again.getKeyboard());
    // making sure the keyboard DOES NOT have a background - this is because we want the
    // background to be used in the pager container.
    Assert.assertNull(keyboardView0.getBackground());
    Assert.assertNull(null, keyboardView1.getBackground());
    // adds padding
    Assert.assertEquals(11, ((View) instance0).getPaddingBottom());
    // the other views (not history) ARE NOT RECREATED!
    Object instance1Again = mUnderTest.instantiateItem(container, 1);
    Assert.assertNotNull(instance1Again);
    Assert.assertNotSame(instance1, instance1Again);
    final QuickKeysKeyboardView keyboardView1Again = ((View) instance1Again).findViewById(R.id.keys_container);
    Assert.assertNotNull(keyboardView1Again);
    // non history is not recreated!
    Assert.assertSame(keyboardView1.getKeyboard(), keyboardView1Again.getKeyboard());
}
/**
 * Registers the given variable as known.
 *
 * @param variable the variable name to register
 * @throws YarnException if the variable is declared immutable
 */
public void addVariable(String variable) throws YarnException {
    final boolean immutable = immutableVariables.contains(variable);
    if (!immutable) {
        knownVariables.add(variable);
        return;
    }
    throw new YarnException(
        "Variable '" + variable + "' is immutable cannot add to the modified variable list.");
}
// Dynamic (%-prefixed) path parts must validate only under managed parents or
// parents that can take children; static unmanaged leaves reject them.
@Test
public void testManagedQueueValidation() {
    //Setting up queue manager and emulated queue hierarchy
    CapacitySchedulerQueueManager qm = mock(CapacitySchedulerQueueManager.class);
    MockQueueHierarchyBuilder.create()
        .withQueueManager(qm)
        .withQueue("root.unmanaged")
        .withManagedParentQueue("root.managed")
        .withQueue("root.unmanagedwithchild.child")
        .withQueue("root.leaf")
        .build();
    when(qm.getQueue(isNull())).thenReturn(null);
    MappingRuleValidationContextImpl ctx = new MappingRuleValidationContextImpl(qm);
    try {
        ctx.addVariable("%dynamic");
        ctx.addVariable("%user");
    } catch (YarnException e) {
        fail("We don't expect the add variable to fail: " + e.getMessage());
    }
    assertValidPath(ctx, "%dynamic");
    assertValidPath(ctx, "root.%dynamic");
    assertValidPath(ctx, "%user.%dynamic");
    assertValidPath(ctx, "root.managed.%dynamic");
    assertValidPath(ctx, "managed.%dynamic");
    assertInvalidPath(ctx, "root.invalid.%dynamic");
    assertInvalidPath(ctx, "root.unmanaged.%dynamic");
    assertValidPath(ctx, "root.unmanagedwithchild.%user");
    assertValidPath(ctx, "unmanagedwithchild.%user");
}
/**
 * Static factory: wraps the given name in a {@code Named} instance.
 *
 * @param name the name to wrap; must not be null
 * @throws NullPointerException if {@code name} is null
 */
public static Named as(final String name) {
    return new Named(Objects.requireNonNull(name, "name can't be null"));
}
// Named.as(null) must throw NullPointerException.
@Test
public void shouldThrowExceptionGivenNullName() {
    assertThrows(NullPointerException.class, () -> Named.as(null));
}
public static void convert(File srcImageFile, File destImageFile) { Assert.notNull(srcImageFile); Assert.notNull(destImageFile); Assert.isFalse(srcImageFile.equals(destImageFile), "Src file is equals to dest file!"); final String srcExtName = FileUtil.extName(srcImageFile); final String destExtName = FileUtil.extName(destImageFile); if (StrUtil.equalsIgnoreCase(srcExtName, destExtName)) { // 扩展名相同直接复制文件 FileUtil.copy(srcImageFile, destImageFile, true); } Img img = null; try { img = Img.from(srcImageFile); img.write(destImageFile); } finally { IoUtil.flush(img); } }
// Manual (disabled) smoke check: converts a PNG on local disk to JPG.
@Test
@Disabled
public void convertTest() {
    ImgUtil.convert(FileUtil.file("e:/test2.png"), FileUtil.file("e:/test2Convert.jpg"));
}
/**
 * Escapes special characters, normalizes whitespace and backticks each id part
 * when required, then joins the parts with dots.
 */
static String escapeAndJoin(List<String> parts) {
    return parts.stream()
        .map(part -> backtickIfNeeded(replaceWhitespaces(escapeSpecialChars(part))))
        .collect(joining("."));
}
// Simple identifiers require no escaping or backticks and are joined with dots.
@Test
public void testHandlesSimpleIds() {
    List<String> id = Arrays.asList("aaa", "BbB", "zAzzz00");
    assertEquals("aaa.BbB.zAzzz00", ZetaSqlIdUtils.escapeAndJoin(id));
}
/**
 * Parses a separator-delimited list of address URIs into their authority
 * ("host[:port]") parts.
 *
 * @param addressInfo separator-delimited address string; blank yields an empty list
 * @return the list of authorities, in input order
 */
public static List<String> parseAddressList(String addressInfo) {
    if (StringUtils.isBlank(addressInfo)) {
        return Collections.emptyList();
    }
    final List<String> authorities = new ArrayList<>();
    for (String rawAddress : addressInfo.split(ADDRESS_SEPARATOR)) {
        authorities.add(URI.create(rawAddress.trim()).getAuthority());
    }
    return authorities;
}
// A single http URL parses to exactly one authority entry ("localhost").
@Test
public void testOneStr() {
    String host1 = "http://localhost";
    List<String> result = AddressUtils.parseAddressList(host1);
    assertThat(result.size()).isEqualTo(1);
    assertThat(result).contains("localhost");
}
// Tears the detector down: cancels in-flight generation work, publishes a
// terminal NOT_LOADED state, then completes the state stream.
public void destroy() {
    mGeneratingDisposable.dispose();
    mGenerateStateSubject.onNext(LoadingState.NOT_LOADED);
    mGenerateStateSubject.onComplete();
}
// Loading progresses LOADING -> LOADED across background jobs, and destroy()
// must leave the observed state at NOT_LOADED once all tasks drain.
@Test
@Ignore("I'm not sure how this is two dictionaries")
public void testCalculatesCornersInBackgroundWithTwoDictionaries() {
    TestRxSchedulers.backgroundRunOneJob();
    Assert.assertEquals(GestureTypingDetector.LoadingState.LOADING, mCurrentState.get());
    TestRxSchedulers.backgroundRunOneJob();
    Assert.assertEquals(GestureTypingDetector.LoadingState.LOADED, mCurrentState.get());
    mDetectorUnderTest.destroy();
    TestRxSchedulers.drainAllTasks();
    Assert.assertEquals(GestureTypingDetector.LoadingState.NOT_LOADED, mCurrentState.get());
}
/** Renders the AST as SQL text with any trailing newlines stripped. */
public static String formatSql(final AstNode root) {
    final StringBuilder sql = new StringBuilder();
    new Formatter(sql).process(root, 0);
    return StringUtils.stripEnd(sql.toString(), "\n");
}
// ListTables with no source and hidden excluded must format as "SHOW TABLES".
@Test
public void shouldFormatShowTables() {
    // Given:
    final ListTables listTables = new ListTables(Optional.empty(), false);
    // When:
    final String formatted = SqlFormatter.formatSql(listTables);
    // Then:
    assertThat(formatted, is("SHOW TABLES"));
}
/**
 * Converts a Spanner {@code Struct} into a Beam {@code Row} following the given schema.
 *
 * <p>A mutable {@code HashMap} collector is used (rather than {@code Collectors.toMap})
 * because field values extracted from the struct may be null.
 */
@SuppressWarnings({
  "nullness" // TODO(https://github.com/apache/beam/issues/20497)
})
public static Row structToBeamRow(Struct struct, Schema schema) {
    Map<String, @Nullable Object> structValues =
        schema.getFields().stream()
            .collect(
                HashMap::new,
                (map, field) -> map.put(field.getName(), getStructValue(struct, field)),
                Map::putAll);
    return Row.withSchema(schema).withFieldValues(structValues).build();
}
// A struct field whose value type contradicts the schema (String vs int64)
// must surface as a ClassCastException naming both types.
@Test
public void testStructToBeamRowFailsTypesDontMatch() {
    Schema schema = Schema.builder().addInt64Field("f_int64").build();
    Struct struct = Struct.newBuilder().set("f_int64").to("string_value").build();
    Exception exception =
        assertThrows(ClassCastException.class, () -> StructUtils.structToBeamRow(struct, schema));
    checkMessage("java.lang.String cannot be cast to", exception.getMessage());
    checkMessage("java.lang.Long", exception.getMessage());
}
/**
 * Signs the given claims into a compact JWT (JWS) using the RSA private key
 * configured in the security config.
 *
 * @param claims the JWT claims to sign
 * @return the compact-serialized, signed JWT
 * @throws JoseException if signing fails
 * @deprecated use the overload that accepts an explicit kid and private key
 */
@Deprecated
public static String getJwt(JwtClaims claims) throws JoseException {
    String jwt;
    RSAPrivateKey privateKey = (RSAPrivateKey) getPrivateKey(
        jwtConfig.getKey().getFilename(),jwtConfig.getKey().getPassword(), jwtConfig.getKey().getKeyName());
    // A JWT is a JWS and/or a JWE with JSON claims as the payload.
    // In this example it is a JWS nested inside a JWE
    // So we first create a JsonWebSignature object.
    JsonWebSignature jws = new JsonWebSignature();
    // The payload of the JWS is JSON content of the JWT Claims
    jws.setPayload(claims.toJson());
    // The JWT is signed using the sender's private key
    jws.setKey(privateKey);
    // Get provider from security config file, it should be two digit
    // And the provider id will set as prefix for keyid in the token header, for example: 05100
    // if there is no provider id, we use "00" for the default value
    String provider_id = "";
    if (jwtConfig.getProviderId() != null) {
        provider_id = jwtConfig.getProviderId();
        if (provider_id.length() == 1) {
            // left-pad a single digit to the required two-digit width
            provider_id = "0" + provider_id;
        } else if (provider_id.length() > 2) {
            logger.error("provider_id defined in the security.yml file is invalid; the length should be 2");
            provider_id = provider_id.substring(0, 2);
        }
    }
    jws.setKeyIdHeaderValue(provider_id + jwtConfig.getKey().getKid());
    // Set the signature algorithm on the JWT/JWS that will integrity protect the claims
    jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256);
    // Sign the JWS and produce the compact serialization, which will be the inner JWT/JWS
    // representation, which is a string consisting of three dot ('.') separated
    // base64url-encoded parts in the form Header.Payload.Signature
    jwt = jws.getCompactSerialization();
    return jwt;
}
// Generates (and prints) a long-lived token for tokenization use; exercises the
// explicit-key getJwt overload with a ~10-year expiry.
@Test
public void longlivedTokenizationJwt() throws Exception {
    JwtClaims claims = ClaimsUtil.getTestClaims("steve", "EMPLOYEE", "f7d42348-c647-4efb-a52d-4c5787421e72",
        Arrays.asList("token.r", "token.w", "scheme.r"), "user");
    claims.setExpirationTimeMinutesInTheFuture(5256000);
    String jwt = JwtIssuer.getJwt(claims, long_kid, KeyUtil.deserializePrivateKey(long_key, KeyUtil.RSA));
    System.out.println("***Long lived token for tokenizaiton***: " + jwt);
}
/**
 * Updates a job's status in the database and synchronizes the change to Quartz
 * (resume for NORMAL, pause for STOP).
 *
 * @param id     the job id
 * @param status the target status; must be NORMAL or STOP
 * @throws SchedulerException if the Quartz resume/pause fails
 */
@Override
@Transactional(rollbackFor = Exception.class)
public void updateJobStatus(Long id, Integer status) throws SchedulerException {
    // Validate the target status: only NORMAL or STOP are acceptable
    if (!containsAny(status, JobStatusEnum.NORMAL.getStatus(), JobStatusEnum.STOP.getStatus())) {
        throw exception(JOB_CHANGE_STATUS_INVALID);
    }
    // Validate the job exists
    JobDO job = validateJobExists(id);
    // Reject a no-op: the job is already in the requested status
    if (job.getStatus().equals(status)) {
        throw exception(JOB_CHANGE_STATUS_EQUALS);
    }
    // Persist the new job status
    JobDO updateObj = JobDO.builder().id(id).status(status).build();
    jobMapper.updateById(updateObj);
    // Propagate the status change to Quartz
    if (JobStatusEnum.NORMAL.getStatus().equals(status)) { // resume
        schedulerManager.resumeJob(job.getHandlerName());
    } else { // pause
        schedulerManager.pauseJob(job.getHandlerName());
    }
}
// Stopping a NORMAL job must persist the STOP status and pause it in Quartz.
@Test
public void testUpdateJobStatus_stopSuccess() throws SchedulerException {
    // Arrange: insert a job currently in NORMAL status
    JobDO job = randomPojo(JobDO.class, o -> o.setStatus(JobStatusEnum.NORMAL.getStatus()));
    jobMapper.insert(job);
    // Act
    jobService.updateJobStatus(job.getId(), JobStatusEnum.STOP.getStatus());
    // Assert: the persisted status was updated
    JobDO dbJob = jobMapper.selectById(job.getId());
    assertEquals(JobStatusEnum.STOP.getStatus(), dbJob.getStatus());
    // Assert: the scheduler was told to pause the job
    verify(schedulerManager).pauseJob(eq(job.getHandlerName()));
}
/**
 * Pre-interception hook: if the intercepted object is a HandlerExecutionChain,
 * appends the routing interceptor to it. The context passes through unchanged.
 */
@Override
public ExecuteContext before(ExecuteContext context) {
    final Object target = context.getObject();
    if (target instanceof HandlerExecutionChain) {
        ((HandlerExecutionChain) target).addInterceptor(getInterceptor());
    }
    return context;
}
// before() must append exactly one RouteHandlerInterceptor to the chain.
@Test
public void testBefore() {
    interceptor.before(context);
    HandlerExecutionChain chain = (HandlerExecutionChain) context.getObject();
    Assert.assertNotNull(chain.getInterceptors());
    Assert.assertEquals(1, chain.getInterceptors().length);
    Assert.assertEquals(RouteHandlerInterceptor.class, chain.getInterceptors()[0].getClass());
}
/**
 * Decodes a content-pack entity into a native pipeline, rejecting any entity
 * version other than V1.
 *
 * @throws IllegalArgumentException if the entity is not an {@code EntityV1}
 */
@Override
public NativeEntity<PipelineDao> createNativeEntity(Entity entity,
                                                    Map<String, ValueReference> parameters,
                                                    Map<EntityDescriptor, Object> nativeEntities,
                                                    String username) {
    if (!(entity instanceof EntityV1)) {
        throw new IllegalArgumentException("Unsupported entity version: " + entity.getClass());
    }
    return decode((EntityV1) entity, parameters, nativeEntities);
}
// A pipeline entity referencing the default stream must be decoded and wired
// to that stream: the resulting connection lists the new pipeline's id.
@Test
public void createNativeEntityWithDefaultStream() throws NotFoundException {
    final Entity entity = EntityV1.builder()
        .id(ModelId.of("1"))
        .type(ModelTypes.PIPELINE_V1)
        .data(objectMapper.convertValue(PipelineEntity.create(
            ValueReference.of("Title"),
            ValueReference.of("Description"),
            ValueReference.of("pipeline \"Title\"\nstage 0 match either\nrule \"debug\"\nrule \"no-op\"\nend"),
            Collections.singleton(ValueReference.of(Stream.DEFAULT_STREAM_ID))), JsonNode.class))
        .build();
    final FakeStream fakeDefaultStream = new FakeStream("All message Fake") {
        @Override
        protected ObjectId getObjectId() {
            return new ObjectId(Stream.DEFAULT_STREAM_ID);
        }
    };
    when(streamService.load(Stream.DEFAULT_STREAM_ID)).thenReturn(fakeDefaultStream);
    final Map<EntityDescriptor, Object> nativeEntities = Collections.emptyMap();
    final NativeEntity<PipelineDao> nativeEntity =
        facade.createNativeEntity(entity, Collections.emptyMap(), nativeEntities, "username");
    assertThat(connectionsService.load(fakeDefaultStream.getId()).pipelineIds())
        .containsOnly(nativeEntity.entity().id());
}
// Convenience overload: wraps the builder and registers the store as
// non-global, connected to the given processors (if any).
public final void addStateStore(final StoreBuilder<?> storeBuilder, final String... processorNames) {
    addStateStore(new StoreBuilderWrapper(storeBuilder), false, processorNames);
}
// Registering a second, different store under an already-used name must fail
// with a descriptive TopologyException.
@Test
public void shouldNotAllowToAddStoresWithSameName() {
    final StoreBuilder<KeyValueStore<Object, Object>> otherBuilder =
        new MockKeyValueStoreBuilder("testStore", false);
    builder.addStateStore(storeBuilder);
    final TopologyException exception = assertThrows(
        TopologyException.class,
        () -> builder.addStateStore(otherBuilder)
    );
    assertThat(
        exception.getMessage(),
        equalTo("Invalid topology: A different StateStore has already been added with the name testStore")
    );
}
/**
 * Records a newly created subscription on the user's DynamoDB item: sets the
 * subscription id, creation time and level, and stamps both the access time
 * and the level-changed time with the creation instant.
 *
 * @return a future that completes when the update has been applied
 */
public CompletableFuture<Void> subscriptionCreated(
    byte[] user, String subscriptionId, Instant subscriptionCreatedAt, long level) {
    checkUserLength(user);
    UpdateItemRequest request = UpdateItemRequest.builder()
        .tableName(table)
        .key(Map.of(KEY_USER, b(user)))
        .returnValues(ReturnValue.NONE)
        .updateExpression("SET "
            + "#accessed_at = :accessed_at, "
            + "#subscription_id = :subscription_id, "
            + "#subscription_created_at = :subscription_created_at, "
            + "#subscription_level = :subscription_level, "
            + "#subscription_level_changed_at = :subscription_level_changed_at")
        .expressionAttributeNames(Map.of(
            "#accessed_at", KEY_ACCESSED_AT,
            "#subscription_id", KEY_SUBSCRIPTION_ID,
            "#subscription_created_at", KEY_SUBSCRIPTION_CREATED_AT,
            "#subscription_level", KEY_SUBSCRIPTION_LEVEL,
            "#subscription_level_changed_at", KEY_SUBSCRIPTION_LEVEL_CHANGED_AT))
        .expressionAttributeValues(Map.of(
            ":accessed_at", n(subscriptionCreatedAt.getEpochSecond()),
            ":subscription_id", s(subscriptionId),
            ":subscription_created_at", n(subscriptionCreatedAt.getEpochSecond()),
            ":subscription_level", n(level),
            ":subscription_level_changed_at", n(subscriptionCreatedAt.getEpochSecond())))
        .build();
    return client.updateItem(request).thenApply(updateItemResponse -> null);
}
// After subscriptionCreated, the stored record must carry the subscription id
// and level, with accessedAt, subscriptionCreatedAt and
// subscriptionLevelChangedAt all equal to the creation instant.
@Test
void testSubscriptionCreated() {
    String subscriptionId = Base64.getEncoder().encodeToString(TestRandomUtil.nextBytes(16));
    Instant subscriptionCreated = Instant.ofEpochSecond(NOW_EPOCH_SECONDS + 1);
    long level = 42;
    assertThat(subscriptions.create(user, password, created)).succeedsWithin(DEFAULT_TIMEOUT);
    assertThat(subscriptions.subscriptionCreated(user, subscriptionId, subscriptionCreated, level)).
        succeedsWithin(DEFAULT_TIMEOUT);
    assertThat(subscriptions.get(user, password)).succeedsWithin(DEFAULT_TIMEOUT).satisfies(getResult -> {
        assertThat(getResult).isNotNull();
        assertThat(getResult.type).isEqualTo(FOUND);
        assertThat(getResult.record).isNotNull().satisfies(record -> {
            assertThat(record.accessedAt).isEqualTo(subscriptionCreated);
            assertThat(record.subscriptionId).isEqualTo(subscriptionId);
            assertThat(record.subscriptionCreatedAt).isEqualTo(subscriptionCreated);
            assertThat(record.subscriptionLevel).isEqualTo(level);
            assertThat(record.subscriptionLevelChangedAt).isEqualTo(subscriptionCreated);
        });
    });
}
// Evaluates the configured formulas for every component by crawling the report
// tree from its root.
public void execute() {
    new PathAwareCrawler<>(
        FormulaExecutorComponentVisitor.newBuilder(metricRepository, measureRepository).buildFor(formulas))
        .visit(treeRootHolder.getReportTreeRoot());
}
// File-level duplicated-block counts (10 + 40 + 0 + 5) must aggregate upward:
// 50 at the directory (files 1, 2 under it) and 55 at the root.
@Test
public void compute_and_aggregate_duplicated_blocks_from_single_duplication() {
    addDuplicatedBlock(FILE_1_REF, 10);
    addDuplicatedBlock(FILE_2_REF, 40);
    addDuplicatedBlock(FILE_4_REF, 5);
    underTest.execute();
    assertRawMeasureValue(FILE_1_REF, DUPLICATED_BLOCKS_KEY, 10);
    assertRawMeasureValue(FILE_2_REF, DUPLICATED_BLOCKS_KEY, 40);
    assertRawMeasureValue(FILE_3_REF, DUPLICATED_BLOCKS_KEY, 0);
    assertRawMeasureValue(FILE_4_REF, DUPLICATED_BLOCKS_KEY, 5);
    assertRawMeasureValue(DIRECTORY_REF, DUPLICATED_BLOCKS_KEY, 50);
    assertRawMeasureValue(ROOT_REF, DUPLICATED_BLOCKS_KEY, 55);
}
/**
 * Emits the given metadata list under the given key by wrapping it in a single
 * {@link EmitData} entry and delegating to the batch emit. A null or empty
 * metadata list is a no-op.
 *
 * @param emitKey      key to emit the metadata under
 * @param metadataList metadata to emit; may be null or empty
 * @param parseContext parse context (unused here)
 */
@Override
public void emit(String emitKey, List<Metadata> metadataList, ParseContext parseContext)
        throws IOException, TikaEmitterException {
    // Nothing to do for a null or empty list.
    if (metadataList == null || metadataList.isEmpty()) {
        return;
    }
    List<EmitData> emitDataList = new ArrayList<>();
    emitDataList.add(new EmitData(new EmitKey("", emitKey), metadataList));
    emit(emitDataList);
}
// When the target table already exists, the JDBC emitter must insert into it
// rather than recreating it; all typed columns (boolean, varchar, int, long)
// round-trip correctly for each emitted row.
@Test
public void testTableExists(@TempDir Path tmpDir) throws Exception {
    String createTable = "create table test (path varchar(512) primary key,"
        + "k1 boolean,k2 varchar(512),k3 integer,k4 long);";
    Files.createDirectories(tmpDir.resolve("db"));
    Path dbDir = tmpDir.resolve("db/h2");
    Path config = tmpDir.resolve("tika-config.xml");
    String connectionString = "jdbc:h2:file:" + dbDir.toAbsolutePath();
    writeConfig("/configs/tika-config-jdbc-emitter-existing-table.xml", connectionString, config);
    try (Connection connection = DriverManager.getConnection(connectionString)) {
        connection.createStatement().execute(createTable);
    }
    EmitterManager emitterManager = EmitterManager.load(config);
    Emitter emitter = emitterManager.getEmitter();
    List<String[]> data = new ArrayList<>();
    data.add(new String[]{"k1", "true", "k2", "some string1", "k3", "4", "k4", "100"});
    data.add(new String[]{"k1", "false", "k2", "some string2", "k3", "5", "k4", "101"});
    data.add(new String[]{"k1", "true", "k2", "some string3", "k3", "6", "k4", "102"});
    int id = 0;
    for (String[] d : data) {
        emitter.emit("id" + id++, Collections.singletonList(m(d)), new ParseContext());
    }
    try (Connection connection = DriverManager.getConnection(connectionString)) {
        try (Statement st = connection.createStatement()) {
            try (ResultSet rs = st.executeQuery("select * from test")) {
                int rows = 0;
                while (rs.next()) {
                    assertEquals("id" + rows, rs.getString(1));
                    assertEquals(rows % 2 == 0, rs.getBoolean(2));
                    assertEquals("some string" + (rows + 1), rs.getString(3));
                    assertEquals(rows + 4, rs.getInt(4));
                    assertEquals(100 + rows, rs.getLong(5));
                    rows++;
                }
            }
        }
    }
}
/**
 * Scans broker load data and selects namespace bundles to unload from overloaded
 * brokers (and, when transfer mode is enabled, the destination brokers to move them to).
 *
 * <p>The method repeatedly pairs the most-loaded broker with the least-loaded one and
 * marks bundles for unloading until the target throughput offload is reached or no
 * transferable brokers remain. Decisions are accumulated in {@code decisionCache},
 * which is also the return value.
 *
 * <p>NOTE(review): several log strings were reconstructed from a garbled source —
 * confirm the exact message text against the original file.
 *
 * @param context load manager context providing broker registry and load data stores
 * @param recentlyUnloadedBundles bundles unloaded recently (timestamp by bundle), skipped here
 * @param recentlyUnloadedBrokers brokers unloaded recently, fed into the stats update
 * @return the set of unload decisions made in this pass (possibly empty)
 */
@Override
public Set<UnloadDecision> findBundlesForUnloading(LoadManagerContext context,
                                                   Map<String, Long> recentlyUnloadedBundles,
                                                   Map<String, Long> recentlyUnloadedBrokers) {
    final var conf = context.brokerConfiguration();
    decisionCache.clear();
    stats.clear();
    Map<String, BrokerLookupData> availableBrokers;
    try {
        availableBrokers = context.brokerRegistry().getAvailableBrokerLookupDataAsync()
                .get(context.brokerConfiguration().getMetadataStoreOperationTimeoutSeconds(),
                        TimeUnit.SECONDS);
    } catch (ExecutionException | InterruptedException | TimeoutException e) {
        // Without the broker list no unload decision can be made.
        counter.update(Failure, Unknown);
        log.warn("Failed to fetch available brokers. Stop unloading.", e);
        return decisionCache;
    }
    try {
        final var loadStore = context.brokerLoadDataStore();
        stats.setLoadDataStore(loadStore);
        boolean debugMode = ExtensibleLoadManagerImpl.debug(conf, log);
        var skipReason = stats.update(
                context.brokerLoadDataStore(), availableBrokers, recentlyUnloadedBrokers, conf);
        if (skipReason.isPresent()) {
            if (debugMode) {
                log.warn(CANNOT_CONTINUE_UNLOAD_MSG
                        + " Skipped the load stat update. Reason:{}.",
                        skipReason.get());
            }
            counter.update(Skip, skipReason.get());
            return decisionCache;
        }
        counter.updateLoadData(stats.avg, stats.std);
        if (debugMode) {
            log.info("brokers' load stats:{}", stats);
        }
        // skip metrics
        int numOfBrokersWithEmptyLoadData = 0;
        int numOfBrokersWithFewBundles = 0;
        final double targetStd = conf.getLoadBalancerBrokerLoadTargetStd();
        boolean transfer = conf.isLoadBalancerTransferEnabled();
        // Count consecutive rounds in which an unload condition held; unloading only
        // starts after the hit count exceeds the configured threshold (debouncing).
        if (stats.std() > targetStd
                || isUnderLoaded(context, stats.peekMinBroker(), stats)
                || isOverLoaded(context, stats.peekMaxBroker(), stats.avg)) {
            unloadConditionHitCount++;
        } else {
            unloadConditionHitCount = 0;
        }
        if (unloadConditionHitCount <= conf.getLoadBalancerSheddingConditionHitCountThreshold()) {
            if (debugMode) {
                log.info(CANNOT_CONTINUE_UNLOAD_MSG
                        + " Shedding condition hit count:{} is less than or equal to the threshold:{}.",
                        unloadConditionHitCount,
                        conf.getLoadBalancerSheddingConditionHitCountThreshold());
            }
            counter.update(Skip, HitCount);
            return decisionCache;
        }
        while (true) {
            if (!stats.hasTransferableBrokers()) {
                if (debugMode) {
                    log.info(CANNOT_CONTINUE_UNLOAD_MSG + " Exhausted target transfer brokers.");
                }
                break;
            }
            // Decide why (and whether) this round should shed load.
            UnloadDecision.Reason reason;
            if (stats.std() > targetStd) {
                reason = Overloaded;
            } else if (isUnderLoaded(context, stats.peekMinBroker(), stats)) {
                reason = Underloaded;
                if (debugMode) {
                    log.info(String.format("broker:%s is underloaded:%s although "
                                    + "load std:%.2f <= targetStd:%.2f. "
                                    + "Continuing unload for this underloaded broker.",
                            stats.peekMinBroker(),
                            context.brokerLoadDataStore().get(stats.peekMinBroker()).get(),
                            stats.std(), targetStd));
                }
            } else if (isOverLoaded(context, stats.peekMaxBroker(), stats.avg)) {
                reason = Overloaded;
                if (debugMode) {
                    log.info(String.format("broker:%s is overloaded:%s although "
                                    + "load std:%.2f <= targetStd:%.2f. "
                                    + "Continuing unload for this overloaded broker.",
                            stats.peekMaxBroker(),
                            context.brokerLoadDataStore().get(stats.peekMaxBroker()).get(),
                            stats.std(), targetStd));
                }
            } else {
                if (debugMode) {
                    log.info(CANNOT_CONTINUE_UNLOAD_MSG
                            + "The overall cluster load meets the target, std:{} <= targetStd:{}."
                            + "minBroker:{} is not underloaded. maxBroker:{} is not overloaded.",
                            stats.std(), targetStd, stats.peekMinBroker(), stats.peekMaxBroker());
                }
                break;
            }
            // Pair the most-loaded broker (polled) with the least-loaded one (peeked).
            String maxBroker = stats.pollMaxBroker();
            String minBroker = stats.peekMinBroker();
            Optional<BrokerLoadData> maxBrokerLoadData = context.brokerLoadDataStore().get(maxBroker);
            Optional<BrokerLoadData> minBrokerLoadData = context.brokerLoadDataStore().get(minBroker);
            if (maxBrokerLoadData.isEmpty()) {
                log.error(String.format(CANNOT_UNLOAD_BROKER_MSG
                        + " MaxBrokerLoadData is empty.", maxBroker));
                numOfBrokersWithEmptyLoadData++;
                continue;
            }
            if (minBrokerLoadData.isEmpty()) {
                log.error("Can't transfer load to broker:{}. MinBrokerLoadData is empty.", minBroker);
                numOfBrokersWithEmptyLoadData++;
                continue;
            }
            // Target offload: half of the load gap, converted to throughput.
            double maxLoad = maxBrokerLoadData.get().getWeightedMaxEMA();
            double minLoad = minBrokerLoadData.get().getWeightedMaxEMA();
            double offload = (maxLoad - minLoad) / 2;
            BrokerLoadData brokerLoadData = maxBrokerLoadData.get();
            double maxBrokerThroughput = brokerLoadData.getMsgThroughputIn()
                    + brokerLoadData.getMsgThroughputOut();
            double minBrokerThroughput = minBrokerLoadData.get().getMsgThroughputIn()
                    + minBrokerLoadData.get().getMsgThroughputOut();
            double offloadThroughput = maxBrokerThroughput * offload / maxLoad;
            if (debugMode) {
                log.info(String.format(
                        "Attempting to shed load from broker:%s%s, which has the max resource "
                                + "usage:%.2f%%, targetStd:%.2f,"
                                + " -- Trying to offload %.2f%%, %.2f KByte/s of traffic.",
                        maxBroker, transfer ? " to broker:" + minBroker : "",
                        maxLoad * 100, targetStd,
                        offload * 100,
                        offloadThroughput / KB
                ));
            }
            double trafficMarkedToOffload = 0;
            double trafficMarkedToGain = 0;
            Optional<TopBundlesLoadData> bundlesLoadData =
                    context.topBundleLoadDataStore().get(maxBroker);
            if (bundlesLoadData.isEmpty() || bundlesLoadData.get().getTopBundlesLoadData().isEmpty()) {
                log.error(String.format(CANNOT_UNLOAD_BROKER_MSG
                        + " TopBundlesLoadData is empty.", maxBroker));
                numOfBrokersWithEmptyLoadData++;
                continue;
            }
            var maxBrokerTopBundlesLoadData = bundlesLoadData.get().getTopBundlesLoadData();
            if (maxBrokerTopBundlesLoadData.size() == 1) {
                // A single bundle cannot be split off without emptying the broker.
                numOfBrokersWithFewBundles++;
                log.warn(String.format(CANNOT_UNLOAD_BROKER_MSG
                                + " Sole namespace bundle:%s is overloading the broker. ",
                        maxBroker, maxBrokerTopBundlesLoadData.iterator().next()));
                continue;
            }
            Optional<TopBundlesLoadData> minBundlesLoadData =
                    context.topBundleLoadDataStore().get(minBroker);
            var minBrokerTopBundlesLoadDataIter = minBundlesLoadData.isPresent()
                    ? minBundlesLoadData.get().getTopBundlesLoadData().iterator() : null;
            if (maxBrokerTopBundlesLoadData.isEmpty()) {
                numOfBrokersWithFewBundles++;
                log.warn(String.format(CANNOT_UNLOAD_BROKER_MSG
                        + " Broker overloaded despite having no bundles", maxBroker));
                continue;
            }
            int remainingTopBundles = maxBrokerTopBundlesLoadData.size();
            for (var e : maxBrokerTopBundlesLoadData) {
                String bundle = e.bundleName();
                // Skip bundles this broker does not own, recently unloaded bundles,
                // and bundles whose move would violate (anti-)affinity policies.
                if (channel != null && !channel.isOwner(bundle, maxBroker)) {
                    if (debugMode) {
                        log.warn(String.format(CANNOT_UNLOAD_BUNDLE_MSG
                                + " MaxBroker:%s is not the owner.", bundle, maxBroker));
                    }
                    continue;
                }
                if (recentlyUnloadedBundles.containsKey(bundle)) {
                    if (debugMode) {
                        log.info(String.format(CANNOT_UNLOAD_BUNDLE_MSG
                                        + " Bundle has been recently unloaded at ts:%d.",
                                bundle, recentlyUnloadedBundles.get(bundle)));
                    }
                    continue;
                }
                if (!isTransferable(context, availableBrokers, bundle, maxBroker,
                        Optional.of(minBroker))) {
                    if (debugMode) {
                        log.info(String.format(CANNOT_UNLOAD_BUNDLE_MSG
                                + " This unload can't meet "
                                + "affinity(isolation) or anti-affinity group policies.", bundle));
                    }
                    continue;
                }
                if (remainingTopBundles <= 1) {
                    if (debugMode) {
                        log.info(String.format(CANNOT_UNLOAD_BUNDLE_MSG
                                + " The remaining bundles in TopBundlesLoadData from the maxBroker:%s is"
                                + " less than or equal to 1.", bundle, maxBroker));
                    }
                    break;
                }
                var bundleData = e.stats();
                double maxBrokerBundleThroughput =
                        bundleData.msgThroughputIn + bundleData.msgThroughputOut;
                boolean swap = false;
                List<Unload> minToMaxUnloads = new ArrayList<>();
                double minBrokerBundleSwapThroughput = 0.0;
                if (trafficMarkedToOffload - trafficMarkedToGain + maxBrokerBundleThroughput
                        > offloadThroughput) {
                    // see if we can swap bundles from min to max broker to balance better.
                    if (transfer && minBrokerTopBundlesLoadDataIter != null) {
                        var maxBrokerNewThroughput =
                                maxBrokerThroughput - trafficMarkedToOffload + trafficMarkedToGain
                                        - maxBrokerBundleThroughput;
                        var minBrokerNewThroughput =
                                minBrokerThroughput + trafficMarkedToOffload - trafficMarkedToGain
                                        + maxBrokerBundleThroughput;
                        while (minBrokerTopBundlesLoadDataIter.hasNext()) {
                            var minBrokerBundleData = minBrokerTopBundlesLoadDataIter.next();
                            if (!isTransferable(context, availableBrokers,
                                    minBrokerBundleData.bundleName(), minBroker,
                                    Optional.of(maxBroker))) {
                                continue;
                            }
                            var minBrokerBundleThroughput =
                                    minBrokerBundleData.stats().msgThroughputIn
                                            + minBrokerBundleData.stats().msgThroughputOut;
                            var maxBrokerNewThroughputTmp =
                                    maxBrokerNewThroughput + minBrokerBundleThroughput;
                            var minBrokerNewThroughputTmp =
                                    minBrokerNewThroughput - minBrokerBundleThroughput;
                            if (maxBrokerNewThroughputTmp < maxBrokerThroughput
                                    && minBrokerNewThroughputTmp < maxBrokerThroughput) {
                                minToMaxUnloads.add(new Unload(minBroker,
                                        minBrokerBundleData.bundleName(), Optional.of(maxBroker)));
                                maxBrokerNewThroughput = maxBrokerNewThroughputTmp;
                                minBrokerNewThroughput = minBrokerNewThroughputTmp;
                                minBrokerBundleSwapThroughput += minBrokerBundleThroughput;
                                if (minBrokerNewThroughput <= maxBrokerNewThroughput
                                        && maxBrokerNewThroughput < maxBrokerThroughput * 0.75) {
                                    swap = true;
                                    break;
                                }
                            }
                        }
                    }
                    if (!swap) {
                        if (debugMode) {
                            log.info(String.format(CANNOT_UNLOAD_BUNDLE_MSG
                                            + " The traffic to unload:%.2f - gain:%.2f = %.2f KByte/s is "
                                            + "greater than the target :%.2f KByte/s.",
                                    bundle,
                                    (trafficMarkedToOffload + maxBrokerBundleThroughput) / KB,
                                    trafficMarkedToGain / KB,
                                    (trafficMarkedToOffload - trafficMarkedToGain
                                            + maxBrokerBundleThroughput) / KB,
                                    offloadThroughput / KB));
                        }
                        break;
                    }
                }
                Unload unload;
                if (transfer) {
                    if (swap) {
                        minToMaxUnloads.forEach(minToMaxUnload -> {
                            if (debugMode) {
                                log.info("Decided to gain bundle:{} from min broker:{}",
                                        minToMaxUnload.serviceUnit(), minToMaxUnload.sourceBroker());
                            }
                            var decision = new UnloadDecision();
                            decision.setUnload(minToMaxUnload);
                            decision.succeed(reason);
                            decisionCache.add(decision);
                        });
                        if (debugMode) {
                            log.info(String.format(
                                    "Total traffic %.2f KByte/s to transfer from min broker:%s to max broker:%s.",
                                    minBrokerBundleSwapThroughput / KB, minBroker, maxBroker));
                            // NOTE(review): trafficMarkedToGain is only accumulated when
                            // debugMode is on — this looks unintentional; confirm.
                            trafficMarkedToGain += minBrokerBundleSwapThroughput;
                        }
                    }
                    unload = new Unload(maxBroker, bundle, Optional.of(minBroker));
                } else {
                    unload = new Unload(maxBroker, bundle);
                }
                var decision = new UnloadDecision();
                decision.setUnload(unload);
                decision.succeed(reason);
                decisionCache.add(decision);
                trafficMarkedToOffload += maxBrokerBundleThroughput;
                remainingTopBundles--;
                if (debugMode) {
                    log.info(String.format("Decided to unload bundle:%s, throughput:%.2f KByte/s."
                                    + " The traffic marked to unload:%.2f - gain:%.2f = %.2f KByte/s."
                                    + " Target:%.2f KByte/s.",
                            bundle, maxBrokerBundleThroughput / KB,
                            trafficMarkedToOffload / KB, trafficMarkedToGain / KB,
                            (trafficMarkedToOffload - trafficMarkedToGain) / KB,
                            offloadThroughput / KB));
                }
            }
            if (trafficMarkedToOffload > 0) {
                // Feed the planned offload back into the stats so the next round
                // sees the projected (not current) load distribution.
                var adjustedOffload =
                        (trafficMarkedToOffload - trafficMarkedToGain) * maxLoad / maxBrokerThroughput;
                stats.offload(maxLoad, minLoad, adjustedOffload);
                if (debugMode) {
                    log.info(
                            String.format("brokers' load stats:%s, after offload{max:%.2f, min:%.2f, offload:%.2f}",
                                    stats, maxLoad, minLoad, adjustedOffload));
                }
            } else {
                numOfBrokersWithFewBundles++;
                log.warn(String.format(CANNOT_UNLOAD_BROKER_MSG
                        + " There is no bundle that can be unloaded in top bundles load data. "
                        + "Consider splitting bundles owned by the broker "
                        + "to make each bundle serve less traffic "
                        + "or increasing loadBalancerMaxNumberOfBundlesInBundleLoadReport"
                        + " to report more bundles in the top bundles load data.", maxBroker));
            }
        } // while end
        if (debugMode) {
            log.info("decisionCache:{}", decisionCache);
        }
        if (decisionCache.isEmpty()) {
            // Record why nothing was decided, preferring the most specific reason.
            UnloadDecision.Reason reason;
            if (numOfBrokersWithEmptyLoadData > 0) {
                reason = NoLoadData;
            } else if (numOfBrokersWithFewBundles > 0) {
                reason = NoBundles;
            } else {
                reason = HitCount;
            }
            counter.update(Skip, reason);
        } else {
            unloadConditionHitCount = 0;
        }
    } catch (Throwable e) {
        log.error("Failed to process unloading. ", e);
        this.counter.update(Failure, Unknown);
    }
    return decisionCache;
}
/** Verifies overloaded brokers transfer their top bundles to the least-loaded brokers. */
@Test
public void testRemainingTopBundles() {
    UnloadCounter counter = new UnloadCounter();
    TransferShedder transferShedder = new TransferShedder(counter);
    var ctx = setupContext();
    // Give broker5 heavy top-bundle load so it becomes an unload candidate.
    var topBundlesLoadDataStore = ctx.topBundleLoadDataStore();
    topBundlesLoadDataStore.pushAsync("broker5:8080",
            getTopBundlesLoad("my-tenant/my-namespaceE", 2000000, 3000000));
    var res = transferShedder.findBundlesForUnloading(ctx, Map.of(), Map.of());
    // Expect transfers from the overloaded brokers to the least-loaded ones.
    var expected = new HashSet<UnloadDecision>();
    expected.add(new UnloadDecision(new Unload("broker5:8080", bundleE1,
            Optional.of("broker1:8080")), Success, Overloaded));
    expected.add(new UnloadDecision(new Unload("broker4:8080", bundleD1,
            Optional.of("broker2:8080")), Success, Overloaded));
    assertEquals(res, expected);
    assertEquals(counter.getLoadAvg(), setupLoadAvg);
    assertEquals(counter.getLoadStd(), setupLoadStd);
}
/**
 * Asks the elastic-agent extension whether the given agent should be assigned
 * the job, logging the request and the plugin's answer.
 *
 * @return {@code true} if the plugin decides the agent should take the work
 */
public boolean shouldAssignWork(PluginDescriptor plugin, AgentMetadata agent, String environment,
                                Map<String, String> configuration,
                                Map<String, String> clusterProfileProperties,
                                JobIdentifier identifier) {
    final String pluginId = plugin.id();
    LOGGER.debug("Processing should assign work for plugin: {} with agent: {} with environment: {} with configuration: {} in cluster: {}",
            pluginId, agent, environment, configuration, clusterProfileProperties);
    final boolean result = extension.shouldAssignWork(
            pluginId, agent, environment, configuration, clusterProfileProperties, identifier);
    LOGGER.debug("Done processing should assign work (result: {}) for plugin: {} with agent: {} with environment: {} with configuration {} in cluster: {}",
            result, pluginId, agent, environment, configuration, clusterProfileProperties);
    return result;
}
/** The registry must delegate should-assign-work calls verbatim to the extension. */
@Test
public void shouldTalkToExtensionToExecuteShouldAssignWorkCall() {
    // Given: an idle agent plus the job/cluster details the plugin needs.
    final AgentMetadata agentMetadata = new AgentMetadata("som-id", "Idle", "Idle", "Enabled");
    final JobIdentifier jobIdentifier = new JobIdentifier();
    final String environment = "test-env";
    final Map<String, String> configuration = Map.of("Image", "alpine:latest");
    final Map<String, String> clusterProfileProperties = Map.of("GoServerURL", "foo");

    // When: the registry is asked whether work should be assigned.
    elasticAgentPluginRegistry.shouldAssignWork(pluginDescriptor, agentMetadata, environment,
            configuration, clusterProfileProperties, jobIdentifier);

    // Then: the call reaches the extension exactly once, with all arguments intact.
    verify(elasticAgentExtension, times(1)).shouldAssignWork(PLUGIN_ID, agentMetadata, environment,
            configuration, clusterProfileProperties, jobIdentifier);
    verifyNoMoreInteractions(elasticAgentExtension);
}
/**
 * Builds a handler spec whose ACL action is resolved per-request
 * from the HTTP method and URI.
 */
@Override
public RequestHandlerSpec requestHandlerSpec() {
    return RequestHandlerSpec.builder()
            .withAclMapping(requestView -> getAclMapping(requestView.method(), requestView.uri()))
            .build();
}
/** Custom ACL actions must be visible both to handlers and to the handler spec's mapping. */
@Test
void resolves_correct_acl_action() {
    AclMapping.Action customAclAction = AclMapping.Action.custom("custom-action");
    // /api1 overrides the default action; /api2 falls back to the default for POST.
    RestApi restApi = RestApi.builder()
            .addRoute(route("/api1")
                    .get(ctx -> new MessageResponse(ctx.aclAction().name()),
                            handlerConfig().withCustomAclAction(customAclAction)))
            .addRoute(route("/api2")
                    .post(ctx -> new MessageResponse(ctx.aclAction().name())))
            .build();
    // The handler observes the resolved action...
    verifyJsonResponse(restApi, Method.GET, "/api1", null, 200, "{\"message\":\"custom-action\"}");
    verifyJsonResponse(restApi, Method.POST, "/api2", "ignored", 200, "{\"message\":\"write\"}");
    // ...and so does the request-handler spec's ACL mapping.
    RequestHandlerSpec spec = restApi.requestHandlerSpec();
    assertRequestHandlerSpecAclMapping(spec, customAclAction, Method.GET, "/api1");
    assertRequestHandlerSpecAclMapping(spec, AclMapping.Action.WRITE, Method.POST, "/api2");
}
/**
 * Percent-decodes the given string. Runs of {@code %XX} octets are handed to
 * {@code decodeConsecutiveOctets}; all other characters are copied through.
 *
 * @param s the (possibly percent-encoded) input
 * @return the decoded string
 */
public static String decode(String s) {
    final int length = s.length();
    StringBuilder decoded = new StringBuilder(length);
    int i = 0;
    while (i < length) {
        char ch = s.charAt(i);
        if (ch == '%') {
            // Consumes one or more consecutive %XX octets starting at i and
            // reports how many source characters were used.
            i += decodeConsecutiveOctets(decoded, s, i);
        } else {
            decoded.append(ch);
            i++;
        }
    }
    return decoded.toString();
}
/** Malformed percent-encodings must raise IllegalArgumentException with the expected message. */
@Test(dataProvider = "invalidEncodedText")
public void testDecodeInvalidStrings(String encoded, String expectedErrorMessage) {
    // Capture the exception manually so both its presence and message can be asserted.
    IllegalArgumentException exception = null;
    try {
        URIDecoderUtils.decode(encoded);
    } catch (IllegalArgumentException e) {
        exception = e;
    }
    Assert.assertNotNull(exception, "Expected exception when decoding string \"" + encoded + "\".");
    Assert.assertEquals(exception.getMessage(), expectedErrorMessage,
            "Unexpected error message during decoding.");
}
/**
 * Returns the welcome message for the given user: an enthusiastic one for
 * paid users, a lukewarm one otherwise.
 */
@Override
public String getWelcomeMessage(User user) {
    if (!UserGroup.isPaid(user)) {
        return "I suppose you can use this software.";
    }
    return "You're amazing " + user + ". Thanks for paying for this awesome software.";
}
/** Free users get the unenthusiastic welcome message. */
@Test
void testGetWelcomeMessageForFreeUser() {
    assertEquals("I suppose you can use this software.", service.getWelcomeMessage(freeUser));
}
/**
 * Generates the Java source for the KiePMML tree model class from its template.
 *
 * @param compilationDTO compilation data for the {@code TreeModel}
 * @return a map with a single entry: fully-qualified class name to generated source
 * @throws KiePMMLException if the template does not contain the expected class
 */
public static Map<String, String> getKiePMMLTreeModelSourcesMap(final DroolsCompilationDTO<TreeModel> compilationDTO) {
    logger.trace("getKiePMMLTreeModelSourcesMap {} {} {}",
            compilationDTO.getFields(),
            compilationDTO.getModel(),
            compilationDTO.getPackageName());
    final CompilationUnit cloneCU = getKiePMMLModelCompilationUnit(compilationDTO,
            KIE_PMML_TREE_MODEL_TEMPLATE_JAVA,
            KIE_PMML_TREE_MODEL_TEMPLATE);
    final String className = compilationDTO.getSimpleClassName();
    final ClassOrInterfaceDeclaration modelTemplate = cloneCU.getClassByName(className)
            .orElseThrow(() -> new KiePMMLException(MAIN_CLASS_NOT_FOUND + ": " + className));
    // Wire the model's constructor into the cloned template before serializing it.
    setConstructor(compilationDTO, modelTemplate);
    final Map<String, String> sourcesMap = new HashMap<>();
    sourcesMap.put(compilationDTO.getPackageCanonicalClassName(), cloneCU.toString());
    return sourcesMap;
}
/**
 * The tree model factory must produce exactly one generated source.
 * NOTE(review): the test name says "Scorecard" but it exercises the Tree factory — confirm.
 */
@Test
void getKiePMMLScorecardModelSourcesMap() {
    final Map<String, KiePMMLOriginalTypeGeneratedType> fieldTypeMap =
            getFieldTypeMap(pmml.getDataDictionary(), pmml.getTransformationDictionary(),
                    treeModel.getLocalTransformations());
    final CommonCompilationDTO<TreeModel> compilationDTO =
            CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME, pmml, treeModel,
                    new PMMLCompilationContextMock(), "FILENAME");
    final DroolsCompilationDTO<TreeModel> droolsCompilationDTO =
            DroolsCompilationDTO.fromCompilationDTO(compilationDTO, fieldTypeMap);
    Map<String, String> retrieved =
            KiePMMLTreeModelFactory.getKiePMMLTreeModelSourcesMap(droolsCompilationDTO);
    assertThat(retrieved).isNotNull();
    assertThat(retrieved).hasSize(1);
}
/**
 * Fetches the topics belonging to the given cluster from the name server.
 *
 * @param cluster cluster name to query
 * @param timeoutMillis request timeout in milliseconds
 * @return the decoded topic list on success
 * @throws MQClientException if the response is not SUCCESS or carries no body
 */
public TopicList getTopicsByCluster(final String cluster, final long timeoutMillis)
        throws RemotingException, MQClientException, InterruptedException {
    GetTopicsByClusterRequestHeader requestHeader = new GetTopicsByClusterRequestHeader();
    requestHeader.setCluster(cluster);
    RemotingCommand request =
            RemotingCommand.createRequestCommand(RequestCode.GET_TOPICS_BY_CLUSTER, requestHeader);
    RemotingCommand response = this.remotingClient.invokeSync(null, request, timeoutMillis);
    assert response != null;
    if (response.getCode() == ResponseCode.SUCCESS) {
        byte[] body = response.getBody();
        if (body != null) {
            return TopicList.decode(body, TopicList.class);
        }
    }
    // Any non-success code — or a SUCCESS response without a body — is an error.
    throw new MQClientException(response.getCode(), response.getRemark());
}
/** A SUCCESS response with a body must decode into the expected topic list. */
@Test
public void assertGetTopicsByCluster() throws RemotingException, InterruptedException, MQClientException {
    // Stub the remoting layer with a SUCCESS response carrying one topic.
    mockInvokeSync();
    TopicList responseBody = new TopicList();
    responseBody.setBrokerAddr(defaultBrokerAddr);
    responseBody.setTopicList(Collections.singleton(defaultTopic));
    setResponseBody(responseBody);
    TopicList actual = mqClientAPI.getTopicsByCluster(clusterName, defaultTimeout);
    assertNotNull(actual);
    assertEquals(defaultBrokerAddr, actual.getBrokerAddr());
    assertEquals(1, actual.getTopicList().size());
    assertTrue(actual.getTopicList().contains(defaultTopic));
}
/**
 * Recursively merges {@code paramsToMerge} into {@code params} in place.
 *
 * <p>Literal MAP params merge key-by-key (recursively); literal STRING_MAP params
 * merge shallowly with incoming entries winning; all other params are replaced by
 * the incoming value. Keys present only in {@code params} are left untouched.
 *
 * @param params base param definitions, mutated by this call
 * @param paramsToMerge incoming definitions to fold in; {@code null} is a no-op
 * @param context merge context carrying source/mode information
 */
public static void mergeParams(
    Map<String, ParamDefinition> params,
    Map<String, ParamDefinition> paramsToMerge,
    MergeContext context) {
  if (paramsToMerge == null) {
    return;
  }
  // Iterate the union of keys; keys absent from paramsToMerge are skipped below.
  Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream())
      .forEach(
          name -> {
            ParamDefinition paramToMerge = paramsToMerge.get(name);
            if (paramToMerge == null) {
              return;
            }
            if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) {
              // Literal MAP: recurse entry-by-entry, propagating the parent's mode.
              Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name);
              Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name);
              mergeParams(
                  baseMap,
                  toMergeMap,
                  MergeContext.copyWithParentMode(
                      context, params.getOrDefault(name, paramToMerge).getMode()));
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, baseMap));
            } else if (paramToMerge.getType() == ParamType.STRING_MAP && paramToMerge.isLiteral()) {
              // Literal STRING_MAP: shallow merge, incoming entries overwrite base.
              Map<String, String> baseMap = stringMapValueOrEmpty(params, name);
              Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name);
              baseMap.putAll(toMergeMap);
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, baseMap));
            } else {
              // All other types: the incoming value replaces the base value.
              params.put(
                  name,
                  buildMergedParamDefinition(
                      name, paramToMerge, params.get(name), context, paramToMerge.getValue()));
            }
          });
}
/**
 * A MUTABLE_ON_START param may be overwritten when merging LAUNCH params for a new run.
 * No assertions: the test passes if the merge completes without throwing.
 */
@Test
public void testMergeAllowImmutableNewRun() throws JsonProcessingException {
  ParamMode mode = ParamMode.MUTABLE_ON_START;
  Map<String, ParamDefinition> allParams =
      parseParamDefMap(
          String.format("{'tomerge': {'type': 'STRING','value': 'hello', 'mode': '%s'}}", mode));
  Map<String, ParamDefinition> paramsToMerge =
      parseParamDefMap("{'tomerge': {'type': 'STRING', 'value': 'goodbye'}}");
  ParamsMergeHelper.mergeParams(
      allParams,
      paramsToMerge,
      new ParamsMergeHelper.MergeContext(ParamSource.LAUNCH, false, false, false));
}
/**
 * Two {@code Credentials} are equal when both username and password match
 * (null-safely). Consistent with {@code hashCode} per the {@link Object#equals} contract.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    // Renamed the local from "Credentials" — shadowing the type name is confusing.
    Credentials other = (Credentials) o;
    return Objects.equals(username, other.username)
            && Objects.equals(password, other.password);
}
/** Two instances built from identical username/password must be equal. */
@Test
public void testEquals() {
    String username = "Prometheus";
    String password = "secret";
    Credentials credentials1 = new Credentials(username, password);
    Credentials credentials2 = new Credentials(username, password);
    assertTrue(credentials1.equals(credentials2));
}
/**
 * Creates a target for the given server URI with no additional request properties.
 * Convenience overload of {@code target(URI, Map)}.
 */
public KsqlTarget target(final URI server) {
    return target(server, Collections.emptyMap());
}
/** getServerHealth must issue a bodiless GET /healthcheck with a JSON Accept header. */
@Test
public void shouldRequestServerHealthcheck() {
    // Given: the fake server is primed with a healthy response.
    Map<String, HealthCheckResponseDetail> map = new HashMap<>();
    map.put("foo", new HealthCheckResponseDetail(true));
    HealthCheckResponse healthCheckResponse = new HealthCheckResponse(true, map, Optional.empty());
    server.setResponseObject(healthCheckResponse);
    // When:
    KsqlTarget target = ksqlClient.target(serverUri);
    RestResponse<HealthCheckResponse> response = target.getServerHealth();
    // Then: request shape and decoded response both match.
    assertThat(server.getHttpMethod(), is(HttpMethod.GET));
    assertThat(server.getBody(), nullValue());
    assertThat(server.getPath(), is("/healthcheck"));
    assertThat(server.getHeaders().get("Accept"), is("application/json"));
    assertThat(response.get(), is(healthCheckResponse));
}
public static boolean isImage( final InputStream stream ) { try { // This attempts to read the bytes as an image, returning null if it cannot parse the bytes as an image. return null != ImageIO.read( stream ); } catch ( IOException e ) { Log.debug( "An exception occurred while determining if data represents an image.", e ); return false; } }
/** A known-good GIF from the test resources must be recognized as an image. */
@Test
public void testIsImageWithImage() throws Exception
{
    // Setup test fixture.
    final InputStream input = getClass().getResourceAsStream( "/check.gif" );

    // Execute system under test.
    final boolean result = GraphicsUtils.isImage( input );

    // Verify result.
    assertTrue( result );
}
/**
 * Creates (or replaces) a BigQuery external Parquet table backed by a
 * newline-delimited manifest file, optionally with hive-style partition columns
 * and a BigLake connection.
 *
 * @param tableName name of the external table to create or replace
 * @param bqManifestFileUri URI of the manifest file listing the data files
 * @param sourceUriPrefix hive partition URI prefix; empty/null means non-partitioned
 * @param schema table schema used to build the column definitions
 * @throws HoodieBigQuerySyncException if job submission is interrupted or BigQuery errors
 */
public void createOrUpdateTableUsingBqManifestFile(String tableName, String bqManifestFileUri,
        String sourceUriPrefix, Schema schema) {
    try {
        String withClauses = String.format("( %s )", BigQuerySchemaResolver.schemaToSqlString(schema));
        String extraOptions = "enable_list_inference=true,";
        if (!StringUtils.isNullOrEmpty(sourceUriPrefix)) {
            // Hive-style partitioning: derive partition columns from the URI prefix.
            withClauses += " WITH PARTITION COLUMNS";
            extraOptions += String.format(
                    " hive_partition_uri_prefix=\"%s\", require_hive_partition_filter=%s,",
                    sourceUriPrefix, requirePartitionFilter);
        }
        if (!StringUtils.isNullOrEmpty(bigLakeConnectionId)) {
            withClauses += String.format(" WITH CONNECTION `%s`", bigLakeConnectionId);
        }
        String query = String.format(
                "CREATE OR REPLACE EXTERNAL TABLE `%s.%s.%s` %s OPTIONS (%s "
                        + "uris=[\"%s\"], format=\"PARQUET\", file_set_spec_type=\"NEW_LINE_DELIMITED_MANIFEST\")",
                projectId, datasetName, tableName, withClauses, extraOptions, bqManifestFileUri);
        QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query)
                .setUseLegacySql(false)
                .build();
        JobId jobId = JobId.newBuilder().setProject(projectId).setRandomJob().build();
        Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build());
        queryJob = queryJob.waitFor();
        if (queryJob == null) {
            // NOTE(review): job failures here are only logged, not thrown — the method
            // returns normally even when the DDL did not run. Confirm callers expect that.
            LOG.error("Job for table creation no longer exists");
        } else if (queryJob.getStatus().getError() != null) {
            LOG.error("Job for table creation failed: {}", queryJob.getStatus().getError().toString());
        } else {
            LOG.info("External table created using manifest file.");
        }
    } catch (InterruptedException | BigQueryException e) {
        throw new HoodieBigQuerySyncException("Failed to create external table using manifest file. ", e);
    }
}
/** With an empty sourceUriPrefix, the generated DDL must contain no partition clauses. */
@Test
void createTableWithManifestFile_nonPartitioned() throws Exception {
    BigQuerySyncConfig config = new BigQuerySyncConfig(properties);
    client = new HoodieBigQuerySyncClient(config, mockBigQuery);
    Schema schema = Schema.of(Field.of("field", StandardSQLTypeName.STRING));
    // Capture the job submitted to BigQuery and stub a successful completion.
    ArgumentCaptor<JobInfo> jobInfoCaptor = ArgumentCaptor.forClass(JobInfo.class);
    Job mockJob = mock(Job.class);
    when(mockBigQuery.create(jobInfoCaptor.capture())).thenReturn(mockJob);
    Job mockJobFinished = mock(Job.class);
    when(mockJob.waitFor()).thenReturn(mockJobFinished);
    JobStatus mockJobStatus = mock(JobStatus.class);
    when(mockJobFinished.getStatus()).thenReturn(mockJobStatus);
    when(mockJobStatus.getError()).thenReturn(null);
    client.createOrUpdateTableUsingBqManifestFile(TEST_TABLE, MANIFEST_FILE_URI, "", schema);
    // Verify the exact DDL text that was submitted.
    QueryJobConfiguration configuration = jobInfoCaptor.getValue().getConfiguration();
    assertEquals(configuration.getQuery(),
            String.format("CREATE OR REPLACE EXTERNAL TABLE `%s.%s.%s` ( `field` STRING ) OPTIONS (enable_list_inference=true, uris=[\"%s\"], format=\"PARQUET\", "
                    + "file_set_spec_type=\"NEW_LINE_DELIMITED_MANIFEST\")",
                    PROJECT_ID, TEST_DATASET, TEST_TABLE, MANIFEST_FILE_URI));
}
public boolean hasOobLog(String secretString) { // making a blocking call to get result Optional<PollingResult> result = sendPollingRequest(secretString); if (result.isPresent()) { // In the future we may refactor hasOobLog() to return finer grained info about what kind // of oob is logged return result.get().getHasDnsInteraction() || result.get().getHasHttpInteraction(); } else { // we may choose to retry sendPollingRequest() if oob interactions do arrive late. return false; } }
/** When the callback server is unreachable, hasOobLog must report false. */
@Test
public void isVulnerable_requestFailed_returnsFalse() {
    // Point the client at a host that cannot be resolved.
    client = new TcsClient(VALID_DOMAIN, VALID_PORT, "http://unknownhost/path", httpClient);

    assertThat(client.hasOobLog(SECRET)).isFalse();
}
/**
 * Renders the node's value in double quotes, or {@code (null)} when no value is set.
 */
@Override
public String toString() {
    if (value == null) {
        return "(null)";
    }
    return '"' + getValue() + '"';
}
/** A freshly-constructed node with no value renders as "(null)". */
@Test
void testEmpty() {
    ModelNode emptyNode = new ModelNode();
    assertEquals("(null)", emptyNode.toString());
}
/**
 * Records a successful load: samples the duration (nanoseconds) into the
 * success timer and adds it to the running total of load time.
 */
@Override
public void recordLoadSuccess(long loadTime) {
    loadSuccess.update(loadTime, TimeUnit.NANOSECONDS);
    totalLoadTime.add(loadTime);
}
/** Recording one successful load must bump the success timer's count to 1. */
@Test
public void loadSuccess() {
    stats.recordLoadSuccess(256);
    long successCount = registry.timer(PREFIX + ".loads-success").getCount();
    assertThat(successCount).isEqualTo(1);
}
/**
 * Forwards every rule in the batch to the plugin data subscriber.
 *
 * @param dataList the rules to publish
 */
@Override
protected void doUpdate(final List<RuleData> dataList) {
    for (RuleData ruleData : dataList) {
        pluginDataSubscriber.onRuleSubscribe(ruleData);
    }
}
/** Every rule passed to doUpdate must be forwarded to the subscriber. */
@Test
public void testDoUpdate() {
    List<RuleData> ruleDataList = createFakeRuleDateObjects(4);
    ruleDataHandler.doUpdate(ruleDataList);
    // Verify each rule triggered exactly one onRuleSubscribe call.
    ruleDataList.forEach(verify(subscriber)::onRuleSubscribe);
}
/**
 * Flips the sign of the decimal in place.
 *
 * @param decimal the unscaled decimal slice to negate (mutated)
 */
public static void negate(Slice decimal) {
    boolean currentlyNegative = isNegative(decimal);
    setNegative(decimal, !currentlyNegative);
}
/** Negation must be an involution, map MIN to MAX, and flip unit values. */
@Test
public void testNegate() {
    assertEquals(negate(negate(MIN_DECIMAL)), MIN_DECIMAL);
    assertEquals(negate(MIN_DECIMAL), MAX_DECIMAL);
    // NOTE(review): duplicate of the previous assertion — possibly guarding against
    // negate() corrupting its argument across calls; confirm intent.
    assertEquals(negate(MIN_DECIMAL), MAX_DECIMAL);
    assertEquals(negate(unscaledDecimal(1)), unscaledDecimal(-1));
    assertEquals(negate(unscaledDecimal(-1)), unscaledDecimal(1));
    assertEquals(negate(negate(unscaledDecimal(0))), unscaledDecimal(0));
}
/**
 * Returns an immutable snapshot of this path's current edges and cost.
 */
@Override
public Path<V, E> toImmutable() {
    return new DefaultPath<>(edges, cost);
}
/** Immutable snapshots of the same mutable path must be equal and preserve its shape. */
@Test
public void toImmutable() {
    // Build a mutable two-hop path A -> B -> C.
    MutablePath<TestVertex, TestEdge> p = new DefaultMutablePath<>();
    p.appendEdge(new TestEdge(A, B));
    p.appendEdge(new TestEdge(B, C));
    validatePath(p, A, C, 2);
    // Two snapshots taken from the same state compare equal...
    assertEquals("immutables should equal", p.toImmutable(), p.toImmutable());
    // ...and keep the endpoints and length.
    validatePath(p.toImmutable(), A, C, 2);
}
/**
 * Detects currently-running executions that exceed the per-vertex baseline
 * execution time computed for their job vertex.
 *
 * @param executionGraph graph whose job vertices are examined
 * @return map from execution vertex id to the attempt ids deemed slow (empty if none)
 */
@VisibleForTesting
Map<ExecutionVertexID, Collection<ExecutionAttemptID>> findSlowTasks(
        final ExecutionGraph executionGraph) {
    final long currentTimeMillis = System.currentTimeMillis();
    final Map<ExecutionVertexID, Collection<ExecutionAttemptID>> slowTasks = new HashMap<>();
    final List<ExecutionJobVertex> jobVerticesToCheck = getJobVerticesToCheck(executionGraph);
    for (ExecutionJobVertex ejv : jobVerticesToCheck) {
        final ExecutionTimeWithInputBytes baseline = getBaseline(ejv, currentTimeMillis);
        for (ExecutionVertex ev : ejv.getTaskVertices()) {
            // Vertices in a terminal state can no longer be "slow".
            if (ev.getExecutionState().isTerminal()) {
                continue;
            }
            final List<ExecutionAttemptID> slowExecutions =
                    findExecutionsExceedingBaseline(
                            ev.getCurrentExecutions(), baseline, currentTimeMillis);
            if (!slowExecutions.isEmpty()) {
                slowTasks.put(ev.getID(), slowExecutions);
            }
        }
    }
    return slowTasks;
}
/**
 * With a finished-task ratio of 0, detection needs no completed executions as a
 * baseline, so every running vertex is reported as slow.
 */
@Test
void testNoFinishedTaskButRatioIsZero() throws Exception {
    final int parallelism = 3;
    final JobVertex jobVertex = createNoOpVertex(parallelism);
    final ExecutionGraph executionGraph = createExecutionGraph(jobVertex);
    final ExecutionTimeBasedSlowTaskDetector slowTaskDetector = createSlowTaskDetector(0, 1, 0);
    final Map<ExecutionVertexID, Collection<ExecutionAttemptID>> slowTasks =
            slowTaskDetector.findSlowTasks(executionGraph);
    assertThat(slowTasks).hasSize(parallelism);
}
@Override public int run(String[] args) throws Exception { if (args.length != 2) { return usage(args); } String action = args[0]; String name = args[1]; int result; if (A_LOAD.equals(action)) { result = loadClass(name); } else if (A_CREATE.equals(action)) { //first load to separate load errors from create result = loadClass(name); if (result == SUCCESS) { //class loads, so instantiate it result = createClassInstance(name); } } else if (A_RESOURCE.equals(action)) { result = loadResource(name); } else if (A_PRINTRESOURCE.equals(action)) { result = dumpResource(name); } else { result = usage(args); } return result; }
/** Loading a class that does not exist must exit with E_NOT_FOUND. */
@Test
public void testLoadFailsNoSuchClass() throws Throwable {
    run(FindClass.E_NOT_FOUND, FindClass.A_LOAD, "org.apache.hadoop.util.ThereIsNoSuchClass");
}
/**
 * Writes the data row to the payload: a 2-byte column count followed by each
 * cell, binary cells in binary format and everything else as text.
 */
@Override
protected void write(final PostgreSQLPacketPayload payload) {
    payload.writeInt2(data.size());
    for (Object cell : data) {
        if (!(cell instanceof BinaryCell)) {
            writeTextValue(payload, cell);
        } else {
            writeBinaryValue(payload, (BinaryCell) cell);
        }
    }
}
/** A string cell must be written as its UTF-8 length followed by its UTF-8 bytes. */
@Test
void assertWriteWithString() {
    PostgreSQLDataRowPacket actual = new PostgreSQLDataRowPacket(Collections.singleton("value"));
    assertThat(actual.getData(), is(Collections.singleton("value")));
    actual.write(payload);
    byte[] valueBytes = "value".getBytes(StandardCharsets.UTF_8);
    verify(payload).writeInt4(valueBytes.length);
    verify(payload).writeBytes(valueBytes);
}
/**
 * Compresses the given bytes with DEFLATE.
 *
 * @param bytes data to compress; must not be {@code null}
 * @return the compressed bytes
 * @throws NullPointerException if {@code bytes} is {@code null}
 * @throws RuntimeException if writing the compressed output fails
 */
public static byte[] compress(byte[] bytes) {
    if (bytes == null) {
        throw new NullPointerException("bytes is null");
    }
    Deflater deflater = new Deflater();
    try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
        deflater.setInput(bytes);
        deflater.finish();
        byte[] outputBytes = new byte[BUFFER_SIZE];
        while (!deflater.finished()) {
            // Fixed typo: "lenght" -> "length"; scoped to the loop.
            int length = deflater.deflate(outputBytes);
            bos.write(outputBytes, 0, length);
        }
        return bos.toByteArray();
    } catch (IOException e) {
        throw new RuntimeException("Deflater compress error", e);
    } finally {
        // end() releases native zlib resources; the original leaked them if
        // an exception was thrown before reaching end().
        deflater.end();
    }
}
/** Null input must be rejected with a NullPointerException. */
@Test
public void test_compress() {
    Assertions.assertThrows(NullPointerException.class, () -> DeflaterUtil.compress(null));
}
/**
 * Obtains a new reservation id from one of the active sub-clusters, retrying
 * across sub-clusters on failure. On success, records latency metrics and an
 * audit entry; on policy failure returns 503, otherwise 500.
 *
 * @param hsr the originating HTTP request
 * @return an HTTP {@link Response}: SC_OK with the new reservation on success,
 *         SERVICE_UNAVAILABLE on policy errors, INTERNAL_SERVER_ERROR otherwise
 */
@Override
public Response createNewReservation(HttpServletRequest hsr)
    throws AuthorizationException, IOException, InterruptedException {
    long startTime = clock.getTime();
    try {
        Map<SubClusterId, SubClusterInfo> subClustersActive = federationFacade.getSubClusters(true);
        // We declare blackList and retries. Failed sub-clusters accumulate in
        // blackList so retries target a different sub-cluster each time.
        List<SubClusterId> blackList = new ArrayList<>();
        int actualRetryNums = federationFacade.getRetryNumbers(numSubmitRetries);
        Response response = ((FederationActionRetry<Response>) (retryCount) ->
            invokeCreateNewReservation(subClustersActive, blackList, hsr, retryCount)).
            runWithRetries(actualRetryNums, submitIntervalTime);
        // If the response is not empty and the status is SC_OK,
        // this request can be returned directly.
        if (response != null && response.getStatus() == HttpServletResponse.SC_OK) {
            long stopTime = clock.getTime();
            RouterAuditLogger.logSuccess(getUser().getShortUserName(), GET_NEW_RESERVATION,
                TARGET_WEB_SERVICE);
            routerMetrics.succeededGetNewReservationRetrieved(stopTime - startTime);
            return response;
        }
    } catch (FederationPolicyException e) {
        // If a FederationPolicyException is thrown, the service is unavailable.
        routerMetrics.incrGetNewReservationFailedRetrieved();
        RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_NEW_RESERVATION, UNKNOWN,
            TARGET_WEB_SERVICE, e.getLocalizedMessage());
        return Response.status(Status.SERVICE_UNAVAILABLE).entity(e.getLocalizedMessage()).build();
    } catch (Exception e) {
        routerMetrics.incrGetNewReservationFailedRetrieved();
        RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_NEW_RESERVATION, UNKNOWN,
            TARGET_WEB_SERVICE, e.getLocalizedMessage());
        return Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getLocalizedMessage()).build();
    }
    // return error message directly.
    // Reached when retries exhausted without an SC_OK response.
    String errMsg = "Fail to create a new reservation.";
    LOG.error(errMsg);
    routerMetrics.incrGetNewReservationFailedRetrieved();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_NEW_RESERVATION, UNKNOWN,
        TARGET_WEB_SERVICE, errMsg);
    return Response.status(Status.INTERNAL_SERVER_ERROR).entity(errMsg).build();
}
// A successful call yields a NewReservation whose id contains "reservation".
@Test
public void testCreateNewReservation() throws Exception {
    Response response = interceptor.createNewReservation(null);
    Assert.assertNotNull(response);
    Object entity = response.getEntity();
    Assert.assertNotNull(entity);
    Assert.assertTrue(entity instanceof NewReservation);
    NewReservation newReservation = (NewReservation) entity;
    Assert.assertNotNull(newReservation);
    Assert.assertTrue(newReservation.getReservationId().contains("reservation"));
}
/**
 * Updates the post text and reconciles the attached images: files are converted
 * and appended, and the listed image ids are converted and removed.
 *
 * @return the images that were added and removed by this update
 */
public BoardUpdateResult update(
        final String title,
        final String content,
        final List<MultipartFile> imageFiles,
        final List<Long> deletedImageIds,
        final ImageConverter imageConverter
) {
    // Apply the textual changes first; image bookkeeping follows.
    post.update(title, content);
    final List<Image> imagesToAdd = imageConverter.convertImageFilesToImages(imageFiles);
    final List<Image> imagesToRemove = imageConverter.convertImageIdsToImages(deletedImageIds, images);
    images.addAll(imagesToAdd);
    images.removeAll(imagesToRemove);
    return new BoardUpdateResult(imagesToAdd, imagesToRemove);
}
// Updating a board without images changes only the post text; no images are
// added or removed.
@Test
void 게시글을_업데이트한다() {
    // given
    Board board = 게시글_생성_사진없음();

    // when
    BoardUpdateResult result =
        board.update("수정", "수정", new ArrayList<>(), new ArrayList<>(), new ImageConverterImpl());

    // then
    assertSoftly(softly -> {
        softly.assertThat(result.addedImages()).isEmpty();
        softly.assertThat(result.deletedImages()).isEmpty();
        softly.assertThat(board.getPost().getTitle()).isEqualTo("수정");
        softly.assertThat(board.getPost().getContent()).isEqualTo("수정");
    });
}
/**
 * {@inheritDoc} Handles the bundle's completion report. Parses the monitoringInfos in the
 * response, then updates the MetricsRegistry.
 */
@Override
public void onCompleted(BeamFnApi.ProcessBundleResponse response) {
    response.getMonitoringInfosList().stream()
        // Skip infos with an empty payload — there is no metric value to parse.
        .filter(monitoringInfo -> !monitoringInfo.getPayload().isEmpty())
        .map(this::parseAndUpdateMetric)
        // De-duplicate the parsed results before pushing them to the container.
        .distinct()
        .forEach(samzaMetricsContainer::updateMetrics);
}
// A LATEST_INT64 monitoring info must surface as a gauge with the decoded value
// and a timestamp near "now".
@Test
public void testGauge() {
    // TimeStamp = 0, Value = 123
    byte[] payload = "\000\173".getBytes(Charset.defaultCharset());
    MetricsApi.MonitoringInfo monitoringInfo =
        MetricsApi.MonitoringInfo.newBuilder()
            .setType(LATEST_INT64_TYPE)
            .setPayload(ByteString.copyFrom(payload))
            .putLabels(MonitoringInfoConstants.Labels.NAMESPACE, EXPECTED_NAMESPACE)
            .putLabels(MonitoringInfoConstants.Labels.NAME, EXPECTED_COUNTER_NAME)
            .build();
    BeamFnApi.ProcessBundleResponse response =
        BeamFnApi.ProcessBundleResponse.newBuilder().addMonitoringInfos(monitoringInfo).build();

    // Execute
    samzaMetricsBundleProgressHandler.onCompleted(response);

    // Verify
    MetricName metricName = MetricName.named(EXPECTED_NAMESPACE, EXPECTED_COUNTER_NAME);
    GaugeCell gauge = (GaugeCell) samzaMetricsContainer.getContainer(stepName).getGauge(metricName);
    assertEquals(123L, gauge.getCumulative().value());
    assertTrue(
        gauge.getCumulative().timestamp().isBefore(Instant.now().plus(Duration.millis(500))));
    assertTrue(
        gauge.getCumulative().timestamp().isAfter(Instant.now().minus(Duration.millis(500))));
}
@Override
public void removeListener(int listenerId) {
    // First detach this listener from the keyspace-notification channel for
    // native hash-field expiration events, then run the base cleanup.
    removeListener(listenerId, "__keyevent@*:hexpired");
    super.removeListener(listenerId);
}
// A MapRemoveListener must fire when an entry is removed from the native map cache.
// Requires keyspace notifications enabled ("Eh") on the Redis side.
@Test
public void testRemoveListener() {
    testWithParams(redisson -> {
        RMapCacheNative<Long, String> rMapCache = redisson.getMapCacheNative("test");
        AtomicBoolean removed = new AtomicBoolean();
        rMapCache.addListener(new MapRemoveListener() {
            @Override
            public void onRemove(String name) {
                removed.set(true);
            }
        });

        rMapCache.put(1L, "1");
        rMapCache.remove(1L);

        Awaitility.await().atMost(Duration.ofSeconds(5)).untilTrue(removed);
    }, NOTIFY_KEYSPACE_EVENTS, "Eh");
}
@Override
public void applyFlowRules(FlowRule... flowRules) {
    // Caller must hold write permission on flow rules.
    checkPermission(FLOWRULE_WRITE);
    // NOTE(review): the boolean argument presumably selects the "add" operation
    // and the null argument is an absent callback — confirm against
    // buildFlowRuleOperations' signature.
    apply(buildFlowRuleOperations(true, null, flowRules));
}
// When the provider reports a flow as removed, the manager must reapply it.
@Test
public void fallbackFlowRemoved() {
    FlowRule f1 = flowRule(FOO_DID, 1, 1);
    mgr.applyFlowRules(f1);
    flowRules.clear();
    providerService.flowRemoved(new DefaultFlowEntry(f1));
    assertTrue("flow rule not reapplied", flowRules.contains(f1));
}
/**
 * Executes the LDAP operation configured on the endpoint against the message
 * body, a map carrying DN/filter/attributes/etc. keyed by the producer's
 * well-known constants. Search-like operations replace the message body with
 * the LDAP result.
 *
 * @throws UnsupportedOperationException if the operation is null, or the DN is
 *         empty for an operation that requires one
 */
@Override
public void process(Exchange exchange) throws Exception {
    @SuppressWarnings("unchecked")
    Map<String, Object> body = exchange.getIn().getBody(Map.class);
    LdapOperation operation = endpoint.getOperation();
    if (null == operation) {
        throw new UnsupportedOperationException("LDAP operation must not be empty, but you provided an empty operation");
    }
    LdapTemplate ldapTemplate = endpoint.getLdapTemplate();
    String dn = (String) body.get(DN);
    boolean dnSetOnLdapTemplate = false;
    ContextSource contextSource = ldapTemplate.getContextSource();
    if (contextSource instanceof BaseLdapPathContextSource) {
        if (ObjectHelper.isNotEmpty(((BaseLdapPathContextSource) contextSource).getBaseLdapPathAsString())) {
            dn = ""; // DN already set on the ldapTemplate
            dnSetOnLdapTemplate = true;
        }
    }
    // FUNCTION_DRIVEN supplies its own addressing, so it is exempt from the DN check.
    if (operation != LdapOperation.FUNCTION_DRIVEN && ObjectHelper.isEmpty(dn) && !dnSetOnLdapTemplate) {
        throw new UnsupportedOperationException("DN must not be empty, but you provided an empty DN");
    }
    switch (operation) {
    case SEARCH:
        String filter = (String) body.get(FILTER);
        // The search result becomes the new message body.
        exchange.getIn().setBody(ldapTemplate.search(dn, filter, endpoint.scopeValue(), mapper));
        break;
    case BIND:
        Attributes attributes = (Attributes) body.get(ATTRIBUTES);
        ldapTemplate.bind(dn, null, attributes);
        break;
    case UNBIND:
        ldapTemplate.unbind(dn);
        break;
    case AUTHENTICATE:
        ldapTemplate.authenticate(LdapQueryBuilder.query().base(dn).filter((String) body.get(FILTER)),
                (String) body.get(PASSWORD));
        break;
    case MODIFY_ATTRIBUTES:
        ModificationItem[] modificationItems = (ModificationItem[]) body.get(MODIFICATION_ITEMS);
        ldapTemplate.modifyAttributes(dn, modificationItems);
        break;
    case FUNCTION_DRIVEN:
        // Caller-supplied function receives the template plus an arbitrary request
        // object; its result becomes the new message body.
        BiFunction<LdapOperations, Object, ?> ldapOperationFunction =
                (BiFunction<LdapOperations, Object, ?>) body.get(FUNCTION);
        Object ldapOperationRequest = body.get(REQUEST);
        exchange.getIn().setBody(ldapOperationFunction.apply(ldapTemplate, ldapOperationRequest));
        break;
    default:
        throw new UnsupportedOperationException(
                "Bug in the Spring-LDAP component. Despite of all assertions, you managed to call an unsupported operation '"
                + operation + "'");
    }
}
// A non-Map body yields no usable DN, so the producer must reject the exchange.
@Test
public void testWrongBodyType() throws Exception {
    Exchange exchange = new DefaultExchange(context);
    Message in = new DefaultMessage(context);
    in.setBody("");
    exchange.setIn(in);
    assertThrows(UnsupportedOperationException.class, () -> ldapProducer.process(exchange));
}
/**
 * Builds a left-deep join tree from the given joins. The first join's left
 * source seeds the tree; each subsequent join must reference exactly one
 * source already in the tree, which becomes the join's left side.
 *
 * @param joins the joins in declaration order; may be empty
 * @return the root of the join tree, or {@code null} if {@code joins} is empty
 * @throws KsqlException if a join references two sources already in the tree
 *         (circular join) or neither source in the tree (disconnected join)
 */
public static Node build(final List<JoinInfo> joins) {
    Node root = null;

    for (final JoinInfo join : joins) {
        if (root == null) {
            root = new Leaf(join.getLeftSource());
        }

        if (root.containsSource(join.getRightSource()) && root.containsSource(join.getLeftSource())) {
            // Fix: report the two *sources* that clash. The original message mixed
            // getRightSource() with getLeftJoinExpression(), pairing a source with
            // an expression.
            throw new KsqlException("Cannot perform circular join - both " + join.getRightSource()
                + " and " + join.getLeftSource()
                + " are already included in the current join tree: " + root.debugString(0));
        } else if (root.containsSource(join.getLeftSource())) {
            root = new Join(root, new Leaf(join.getRightSource()), join);
        } else if (root.containsSource(join.getRightSource())) {
            // The tree contains the right source, so flip the join to keep the
            // tree left-deep.
            root = new Join(root, new Leaf(join.getLeftSource()), join.flip());
        } else {
            throw new KsqlException(
                "Cannot build JOIN tree; neither source in the join is the FROM source or included "
                    + "in a previous JOIN: " + join + ". The current join tree is "
                    + root.debugString(0)
            );
        }
    }

    return root;
}
// Joining a source to itself must be rejected as a circular join.
@Test
public void shouldThrowOnSelfJoin() {
    // Given:
    when(j1.getLeftSource()).thenReturn(a);
    when(j1.getRightSource()).thenReturn(a);
    final List<JoinInfo> joins = ImmutableList.of(j1);

    // When:
    final KsqlException e = assertThrows(KsqlException.class, () -> JoinTree.build(joins));

    // Then:
    assertThat(e.getMessage(), containsString("Cannot perform circular join"));
}
/**
 * Resolves a dot-separated field path (e.g. "a.b.c") against an Avro record,
 * descending through nested records.
 *
 * @param record the record to read from
 * @param fieldName dot-separated field path
 * @param returnNullIfNotFound when true, missing fields/paths yield null
 *        instead of an exception
 * @param consistentLogicalTimestampEnabled forwarded to the logical-type value
 *        conversion of the final field
 * @return the (type-converted) leaf value, or null if the value is null or the
 *         path is missing and returnNullIfNotFound is set
 * @throws HoodieException if the path cannot be resolved and
 *         returnNullIfNotFound is false
 */
public static Object getNestedFieldVal(GenericRecord record, String fieldName, boolean returnNullIfNotFound,
                                       boolean consistentLogicalTimestampEnabled) {
    String[] parts = fieldName.split("\\.");
    GenericRecord valueNode = record;

    for (int i = 0; i < parts.length; i++) {
        String part = parts[i];
        Object val;
        try {
            val = HoodieAvroUtils.getFieldVal(valueNode, part, returnNullIfNotFound);
        } catch (AvroRuntimeException e) {
            // Field name not present in the current record's schema.
            if (returnNullIfNotFound) {
                return null;
            } else {
                throw new HoodieException(
                    fieldName + "(Part -" + parts[i] + ") field not found in record. Acceptable fields were :"
                        + valueNode.getSchema().getFields().stream().map(Field::name).collect(Collectors.toList()));
            }
        }

        if (i == parts.length - 1) {
            // return, if last part of name
            if (val == null) {
                return null;
            } else {
                Schema fieldSchema = valueNode.getSchema().getField(part).schema();
                // Apply logical-type conversion (dates, timestamps, decimals, ...).
                return convertValueForSpecificDataTypes(fieldSchema, val, consistentLogicalTimestampEnabled);
            }
        } else {
            // Intermediate path components must themselves be records to descend into.
            if (!(val instanceof GenericRecord)) {
                if (returnNullIfNotFound) {
                    return null;
                } else {
                    throw new HoodieException("Cannot find a record at part value :" + part);
                }
            } else {
                valueNode = (GenericRecord) val;
            }
        }
    }

    // This can only be reached if the length of parts is 0
    if (returnNullIfNotFound) {
        return null;
    } else {
        throw new HoodieException(
            fieldName + " field not found in record. Acceptable fields were :"
                + valueNode.getSchema().getFields().stream().map(Field::name).collect(Collectors.toList()));
    }
}
// Covers present fields, missing fields with and without null-fallback, and a
// field that exists in the schema but holds no value.
@Test
public void testGetNestedFieldVal() {
    GenericRecord rec = new GenericData.Record(new Schema.Parser().parse(EXAMPLE_SCHEMA));
    rec.put("_row_key", "key1");
    rec.put("non_pii_col", "val1");
    rec.put("pii_col", "val2");

    Object rowKey = HoodieAvroUtils.getNestedFieldVal(rec, "_row_key", true, false);
    assertEquals("key1", rowKey);

    Object rowKeyNotExist = HoodieAvroUtils.getNestedFieldVal(rec, "fake_key", true, false);
    assertNull(rowKeyNotExist);

    // Field does not exist
    assertEquals("fake_key(Part -fake_key) field not found in record. Acceptable fields were :[timestamp, _row_key, non_pii_col, pii_col]",
        assertThrows(HoodieException.class, () ->
            HoodieAvroUtils.getNestedFieldVal(rec, "fake_key", false, false)).getMessage());

    // Field exists while value not
    assertNull(HoodieAvroUtils.getNestedFieldVal(rec, "timestamp", false, false));
}
/**
 * Selects the sharding route engine appropriate for the statement category:
 * TCL statements broadcast to all databases; DDL goes through cursor or DDL
 * routing; DAL/DCL/DQL each have dedicated engine factories.
 *
 * @return the route engine to use for this query
 */
public static ShardingRouteEngine newInstance(final ShardingRule shardingRule, final ShardingSphereDatabase database,
                                              final QueryContext queryContext, final ShardingConditions shardingConditions,
                                              final ConfigurationProperties props, final ConnectionContext connectionContext) {
    SQLStatementContext sqlStatementContext = queryContext.getSqlStatementContext();
    SQLStatement sqlStatement = sqlStatementContext.getSqlStatement();
    // Transaction control affects every shard, so broadcast.
    if (sqlStatement instanceof TCLStatement) {
        return new ShardingDatabaseBroadcastRoutingEngine();
    }
    if (sqlStatement instanceof DDLStatement) {
        // Cursor-related DDL has its own routing path.
        if (sqlStatementContext instanceof CursorAvailable) {
            return getCursorRouteEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props);
        }
        return getDDLRoutingEngine(shardingRule, database, sqlStatementContext);
    }
    if (sqlStatement instanceof DALStatement) {
        return getDALRoutingEngine(shardingRule, database, sqlStatementContext, connectionContext);
    }
    if (sqlStatement instanceof DCLStatement) {
        return getDCLRoutingEngine(shardingRule, database, sqlStatementContext);
    }
    // Everything else is treated as a query (DQL).
    return getDQLRoutingEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props, connectionContext);
}
// SHOW DATABASES (a DAL statement) should route via database broadcast.
@Test
void assertNewInstanceForDALShow() {
    DALStatement dalStatement = mock(MySQLShowDatabasesStatement.class);
    when(sqlStatementContext.getSqlStatement()).thenReturn(dalStatement);
    QueryContext queryContext = new QueryContext(sqlStatementContext, "", Collections.emptyList(), new HintValueContext(),
        mockConnectionContext(), mock(ShardingSphereMetaData.class));
    ShardingRouteEngine actual = ShardingRouteEngineFactory.newInstance(shardingRule, database, queryContext,
        shardingConditions, props, new ConnectionContext(Collections::emptySet));
    assertThat(actual, instanceOf(ShardingDatabaseBroadcastRoutingEngine.class));
}
/**
 * Returns the smallest power of two that is greater than or equal to
 * {@code num}. Inputs of 0 or 1 (and negative smear results) yield 1.
 */
public static int computeMinGEPower2(int num) {
    int smeared = num - 1;
    // Propagate the highest set bit into every lower position; after doubling
    // shifts of 1,2,4,8,16 all 32 bits below the top set bit are filled.
    for (int shift = 1; shift <= 16; shift <<= 1) {
        smeared |= smeared >>> shift;
    }
    // A negative smear (num <= 0) collapses to 1; otherwise round up.
    return smeared < 0 ? 1 : smeared + 1;
}
// For every i in [1, 10000) the result is the least power of two >= i,
// i.e. i lies in (out/2, out].
@Test
public void testComputeMinGEPower2() {
    Assert.assertEquals(1, Utils.computeMinGEPower2(0));
    for (int i = 1; i < 10000; i++) {
        int out = Utils.computeMinGEPower2(i);
        // The number i belongs to the range (out/2, out].
        Assert.assertTrue(out >= i);
        Assert.assertTrue(out / 2 < i);
    }
}
@Override
public Collection<Subscriber> getSubscribers(String namespaceId, String serviceName) {
    // The incoming name is group-qualified; NamingUtils splits out the group
    // and the plain service name before delegating to the Service overload.
    final String groupName = NamingUtils.getGroupName(serviceName);
    final String plainServiceName = NamingUtils.getServiceName(serviceName);
    return getSubscribers(Service.newService(namespaceId, groupName, plainServiceName));
}
// Looking up subscribers by the group-qualified name yields the registered
// subscriber with its grouped service name intact.
@Test
void testGetSubscribersByString() {
    Collection<Subscriber> actual = subscriberService.getSubscribers(service.getNamespace(), service.getGroupedServiceName());
    assertEquals(1, actual.size());
    assertEquals(service.getGroupedServiceName(), actual.iterator().next().getServiceName());
}
/**
 * Partitions the rows of a block across the destination mailboxes by the hash
 * of each row's key, then sends one sub-block per non-empty partition. A single
 * destination (or the empty key selector) short-circuits to a direct send.
 */
@Override
protected void route(List<SendingMailbox> destinations, TransferableBlock block)
    throws Exception {
    int numMailboxes = destinations.size();
    if (numMailboxes == 1 || _keySelector == EmptyKeySelector.INSTANCE) {
        sendBlock(destinations.get(0), block);
        return;
    }

    //noinspection unchecked
    List<Object[]>[] mailboxIdToRowsMap = new List[numMailboxes];
    for (int i = 0; i < numMailboxes; i++) {
        mailboxIdToRowsMap[i] = new ArrayList<>();
    }
    List<Object[]> rows = block.getContainer();
    for (Object[] row : rows) {
        // NOTE(review): assumes computeHash never returns a negative value;
        // a negative hash would make this index negative — confirm the
        // KeySelector contract.
        int mailboxId = _keySelector.computeHash(row) % numMailboxes;
        mailboxIdToRowsMap[mailboxId].add(row);
    }
    for (int i = 0; i < numMailboxes; i++) {
        // Skip empty partitions entirely rather than sending empty blocks.
        if (!mailboxIdToRowsMap[i].isEmpty()) {
            sendBlock(destinations.get(i),
                new TransferableBlock(mailboxIdToRowsMap[i], block.getDataSchema(), block.getType()));
        }
    }
}
// Rows hashing to the same mailbox travel together; rows hashing elsewhere go
// to the other mailbox.
@Test
public void shouldSplitAndRouteBlocksBasedOnPartitionKey()
    throws Exception {
    // Given:
    TestSelector selector = new TestSelector(Iterators.forArray(2, 0, 1));
    Mockito.when(_block.getContainer())
        .thenReturn(ImmutableList.of(new Object[]{0}, new Object[]{1}, new Object[]{2}));
    ImmutableList<SendingMailbox> destinations = ImmutableList.of(_mailbox1, _mailbox2);

    // When:
    new HashExchange(destinations, selector, TransferableBlockUtils::splitBlock).route(destinations, _block);

    // Then:
    ArgumentCaptor<TransferableBlock> captor = ArgumentCaptor.forClass(TransferableBlock.class);
    Mockito.verify(_mailbox1, Mockito.times(1)).send(captor.capture());
    Assert.assertEquals(captor.getValue().getContainer().get(0), new Object[]{0});
    Assert.assertEquals(captor.getValue().getContainer().get(1), new Object[]{1});
    Mockito.verify(_mailbox2, Mockito.times(1)).send(captor.capture());
    Assert.assertEquals(captor.getValue().getContainer().get(0), new Object[]{2});
}
/**
 * Two sketches are equal when they share the same format and identical
 * register/sparse contents.
 *
 * <p>NOTE(review): this method mutates both sketches (mergeTempList flushes
 * pending sparse inserts) before comparing, and no matching hashCode override
 * is visible here — confirm the hashCode contract is honored elsewhere.
 */
@Override
public boolean equals(Object obj) {
    if (!(obj instanceof HyperLogLogPlus)) {
        return false;
    }
    HyperLogLogPlus other = (HyperLogLogPlus) obj;
    // Flush any buffered sparse insertions on both sides so the comparison
    // sees settled state.
    if (format == Format.SPARSE) {
        mergeTempList();
    }
    if (other.format == Format.SPARSE) {
        other.mergeTempList();
    }
    if (other.format != format) {
        return false;
    }
    if (format == Format.NORMAL) {
        return Arrays.equals(registerSet.readOnlyBits(), other.registerSet.readOnlyBits());
    } else {
        return Arrays.equals(sparseSet, other.sparseSet);
    }
}
// Equal after identical offers; unequal after divergent offers and across a
// sparse-to-normal format promotion (50k elements).
@Test
public void testEquals() {
    HyperLogLogPlus hll1 = new HyperLogLogPlus(5, 25);
    HyperLogLogPlus hll2 = new HyperLogLogPlus(5, 25);
    hll1.offer("A");
    hll2.offer("A");
    assertEquals(hll1, hll2);
    hll2.offer("B");
    hll2.offer("C");
    hll2.offer("D");
    assertNotEquals(hll1, hll2);
    HyperLogLogPlus hll3 = new HyperLogLogPlus(5, 25);
    for (int i = 0; i < 50000; i++) {
        hll3.offer("" + i);
    }
    assertNotEquals(hll1, hll3);
}
/**
 * Reads the configured (device, VLAN) pairs on which DHCP should be ignored.
 *
 * @return a multimap from device id to the VLANs ignored on that device
 */
public Multimap<DeviceId, VlanId> ignoredVlans() {
    final Multimap<DeviceId, VlanId> ignored = ArrayListMultimap.create();
    for (JsonNode node : array) {
        final DeviceId deviceId = DeviceId.deviceId(node.get(DEVICE_ID).asText());
        final VlanId vlanId = VlanId.vlanId((short) node.get(VLAN_ID).asInt());
        ignored.put(deviceId, vlanId);
    }
    return ignored;
}
// Parsing the sample config yields one ignored VLAN per device, both the same
// VLAN id.
@Test
public void testIgnoredDhcpConfig() throws IOException {
    ObjectMapper om = new ObjectMapper();
    JsonNode json = om.readTree(Resources.getResource(CONFIG_FILE_PATH));
    IgnoreDhcpConfig config = new IgnoreDhcpConfig();
    json = json.path("apps").path(DHCP_RELAY_APP).path(IgnoreDhcpConfig.KEY);
    config.init(APP_ID, IgnoreDhcpConfig.KEY, json, om, null);

    assertEquals(2, config.ignoredVlans().size());
    Collection<VlanId> vlanForDev1 = config.ignoredVlans().get(DEV_1_ID);
    Collection<VlanId> vlanForDev2 = config.ignoredVlans().get(DEV_2_ID);

    assertEquals(1, vlanForDev1.size());
    assertEquals(1, vlanForDev2.size());
    assertTrue(vlanForDev1.contains(IGNORED_VLAN));
    assertTrue(vlanForDev2.contains(IGNORED_VLAN));
}
/**
 * Incremental SPDY frame decoder. Consumes as many complete units from the
 * buffer as are available and dispatches callbacks on {@code delegate};
 * returns as soon as the buffer lacks the bytes the current state needs, so it
 * can be re-invoked when more data arrives. Decoder position is kept in the
 * instance fields {@code state}, {@code streamId}, {@code flags},
 * {@code length} and {@code numSettings} across calls.
 */
public void decode(ByteBuf buffer) {
    boolean last;
    int statusCode;

    while (true) {
        switch(state) {
            case READ_COMMON_HEADER:
                if (buffer.readableBytes() < SPDY_HEADER_SIZE) {
                    return;
                }
                int frameOffset  = buffer.readerIndex();
                int flagsOffset  = frameOffset + SPDY_HEADER_FLAGS_OFFSET;
                int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET;
                buffer.skipBytes(SPDY_HEADER_SIZE);

                // The top bit of the first byte distinguishes control frames
                // from data frames.
                boolean control = (buffer.getByte(frameOffset) & 0x80) != 0;

                int version;
                int type;
                if (control) {
                    // Decode control frame common header
                    version = getUnsignedShort(buffer, frameOffset) & 0x7FFF;
                    type = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET);
                    streamId = 0; // Default to session Stream-ID
                } else {
                    // Decode data frame common header
                    version = spdyVersion; // Default to expected version
                    type = SPDY_DATA_FRAME;
                    streamId = getUnsignedInt(buffer, frameOffset);
                }

                flags  = buffer.getByte(flagsOffset);
                length = getUnsignedMedium(buffer, lengthOffset);

                // Check version first then validity
                if (version != spdyVersion) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SPDY Version");
                } else if (!isValidFrameHeader(streamId, type, flags, length)) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid Frame Error");
                } else {
                    state = getNextState(type, length);
                }
                break;

            case READ_DATA_FRAME:
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                    delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0));
                    break;
                }

                // Generate data frames that do not exceed maxChunkSize
                int dataLength = Math.min(maxChunkSize, length);

                // Wait until entire frame is readable
                if (buffer.readableBytes() < dataLength) {
                    return;
                }

                ByteBuf data = buffer.alloc().buffer(dataLength);
                data.writeBytes(buffer, dataLength);
                length -= dataLength;

                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                }

                // FIN is only reported on the final chunk of the frame.
                last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN);

                delegate.readDataFrame(streamId, last, data);
                break;

            case READ_SYN_STREAM_FRAME:
                if (buffer.readableBytes() < 10) {
                    return;
                }

                int offset = buffer.readerIndex();
                streamId = getUnsignedInt(buffer, offset);
                int associatedToStreamId = getUnsignedInt(buffer, offset + 4);
                // Priority is the top 3 bits of the 9th byte.
                byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07);
                last = hasFlag(flags, SPDY_FLAG_FIN);
                boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL);
                buffer.skipBytes(10);
                length -= 10;

                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SYN_STREAM Frame");
                } else {
                    state = State.READ_HEADER_BLOCK;
                    delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional);
                }
                break;

            case READ_SYN_REPLY_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }

                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                last = hasFlag(flags, SPDY_FLAG_FIN);

                buffer.skipBytes(4);
                length -= 4;

                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SYN_REPLY Frame");
                } else {
                    state = State.READ_HEADER_BLOCK;
                    delegate.readSynReplyFrame(streamId, last);
                }
                break;

            case READ_RST_STREAM_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }

                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);

                if (streamId == 0 || statusCode == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid RST_STREAM Frame");
                } else {
                    state = State.READ_COMMON_HEADER;
                    delegate.readRstStreamFrame(streamId, statusCode);
                }
                break;

            case READ_SETTINGS_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }

                boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR);

                numSettings = getUnsignedInt(buffer, buffer.readerIndex());
                buffer.skipBytes(4);
                length -= 4;

                // Validate frame length against number of entries. Each ID/Value entry is 8 bytes.
                if ((length & 0x07) != 0 || length >> 3 != numSettings) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SETTINGS Frame");
                } else {
                    state = State.READ_SETTING;
                    delegate.readSettingsFrame(clear);
                }
                break;

            case READ_SETTING:
                if (numSettings == 0) {
                    state = State.READ_COMMON_HEADER;
                    delegate.readSettingsEnd();
                    break;
                }

                if (buffer.readableBytes() < 8) {
                    return;
                }

                // Entry layout: 1 flags byte, 3-byte id, 4-byte value.
                byte settingsFlags = buffer.getByte(buffer.readerIndex());
                int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
                int value = getSignedInt(buffer, buffer.readerIndex() + 4);
                boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE);
                boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED);
                buffer.skipBytes(8);

                --numSettings;

                delegate.readSetting(id, value, persistValue, persisted);
                break;

            case READ_PING_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }

                int pingId = getSignedInt(buffer, buffer.readerIndex());
                buffer.skipBytes(4);
                state = State.READ_COMMON_HEADER;

                delegate.readPingFrame(pingId);
                break;

            case READ_GOAWAY_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }

                int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex());
                statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);
                state = State.READ_COMMON_HEADER;

                delegate.readGoAwayFrame(lastGoodStreamId, statusCode);
                break;

            case READ_HEADERS_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }

                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                last = hasFlag(flags, SPDY_FLAG_FIN);

                buffer.skipBytes(4);
                length -= 4;

                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid HEADERS Frame");
                } else {
                    state = State.READ_HEADER_BLOCK;
                    delegate.readHeadersFrame(streamId, last);
                }
                break;

            case READ_WINDOW_UPDATE_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }

                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);

                if (deltaWindowSize == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid WINDOW_UPDATE Frame");
                } else {
                    state = State.READ_COMMON_HEADER;
                    delegate.readWindowUpdateFrame(streamId, deltaWindowSize);
                }
                break;

            case READ_HEADER_BLOCK:
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                    delegate.readHeaderBlockEnd();
                    break;
                }

                if (!buffer.isReadable()) {
                    return;
                }

                // Forward whatever compressed header bytes are currently available.
                int compressedBytes = Math.min(buffer.readableBytes(), length);
                ByteBuf headerBlock = buffer.alloc().buffer(compressedBytes);
                headerBlock.writeBytes(buffer, compressedBytes);
                length -= compressedBytes;

                delegate.readHeaderBlock(headerBlock);
                break;

            case DISCARD_FRAME:
                int numBytes = Math.min(buffer.readableBytes(), length);
                buffer.skipBytes(numBytes);
                length -= numBytes;
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                    break;
                }
                return;

            case FRAME_ERROR:
                // Terminal state: swallow all remaining input.
                buffer.skipBytes(buffer.readableBytes());
                return;

            default:
                throw new Error("Shouldn't reach here.");
        }
    }
}
// A zero-length data frame must be reported with an empty buffer and the whole
// header consumed.
@Test
public void testEmptySpdyDataFrame() throws Exception {
    int streamId = RANDOM.nextInt() & 0x7FFFFFFF | 0x01;
    byte flags = 0;
    int length = 0;

    ByteBuf buf = Unpooled.buffer(SPDY_HEADER_SIZE + length);
    encodeDataFrameHeader(buf, streamId, flags, length);

    decoder.decode(buf);
    verify(delegate).readDataFrame(streamId, false, Unpooled.EMPTY_BUFFER);
    assertFalse(buf.isReadable());
    buf.release();
}
/**
 * Reports whether the given feature is supported at the given layout version.
 *
 * @param map mapping from layout version to the features it supports
 * @param f   the feature to look up
 * @param lv  the layout version of interest
 * @return true when the version is known and includes the feature
 */
public static boolean supports(Map<Integer, SortedSet<LayoutFeature>> map,
    final LayoutFeature f, final int lv) {
    final SortedSet<LayoutFeature> featuresAtVersion = map.get(lv);
    if (featuresAtVersion == null) {
        // Unknown layout version: nothing is supported.
        return false;
    }
    return featuresAtVersion.contains(f);
}
// DELEGATION_TOKEN must be supported at the 0.20.203 reserved layout version.
@Test
public void testRelease203() {
    assertTrue(NameNodeLayoutVersion.supports(LayoutVersion.Feature.DELEGATION_TOKEN,
        Feature.RESERVED_REL20_203.getInfo().getLayoutVersion()));
}
/**
 * Reacts to varbit/varplayer changes by creating, updating or removing the
 * corresponding timers and counters. Each branch checks both the varbit (or
 * varp) id and its feature toggle in {@code config}. Note: several DIVINE
 * potion branches end the whole handler with an early {@code return} when a
 * stronger combined potion is active, so no later branch runs for that event.
 */
@Subscribe
public void onVarbitChanged(VarbitChanged event) {
    if (event.getVarbitId() == Varbits.IN_RAID) {
        // Leaving/entering a raid invalidates raid-scoped timers.
        removeVarTimer(OVERLOAD_RAID);
        removeGameTimer(PRAYER_ENHANCE);
    }

    if (event.getVarbitId() == Varbits.VENGEANCE_COOLDOWN && config.showVengeance()) {
        if (event.getValue() == 1) {
            createGameTimer(VENGEANCE);
        } else {
            removeGameTimer(VENGEANCE);
        }
    }

    if (event.getVarbitId() == Varbits.SPELLBOOK_SWAP && config.showSpellbookSwap()) {
        if (event.getValue() == 1) {
            createGameTimer(SPELLBOOK_SWAP);
        } else {
            removeGameTimer(SPELLBOOK_SWAP);
        }
    }

    if (event.getVarbitId() == Varbits.HEAL_GROUP_COOLDOWN && config.showHealGroup()) {
        if (event.getValue() == 1) {
            createGameTimer(HEAL_GROUP);
        } else {
            removeGameTimer(HEAL_GROUP);
        }
    }

    if (event.getVarbitId() == Varbits.DEATH_CHARGE_COOLDOWN && config.showArceuusCooldown()) {
        if (event.getValue() == 1) {
            createGameTimer(DEATH_CHARGE_COOLDOWN);
        } else {
            removeGameTimer(DEATH_CHARGE_COOLDOWN);
        }
    }

    if (event.getVarbitId() == Varbits.CORRUPTION_COOLDOWN && config.showArceuusCooldown()) {
        if (event.getValue() == 1) {
            createGameTimer(CORRUPTION_COOLDOWN);
        } else {
            removeGameTimer(CORRUPTION_COOLDOWN);
        }
    }

    if (event.getVarbitId() == Varbits.RESURRECT_THRALL_COOLDOWN && config.showArceuusCooldown()) {
        if (event.getValue() == 1) {
            createGameTimer(RESURRECT_THRALL_COOLDOWN);
        } else {
            removeGameTimer(RESURRECT_THRALL_COOLDOWN);
        }
    }

    if (event.getVarbitId() == Varbits.SHADOW_VEIL_COOLDOWN && config.showArceuusCooldown()) {
        if (event.getValue() == 1) {
            createGameTimer(SHADOW_VEIL_COOLDOWN);
        } else {
            removeGameTimer(SHADOW_VEIL_COOLDOWN);
        }
    }

    if (event.getVarbitId() == Varbits.WARD_OF_ARCEUUS_COOLDOWN && config.showArceuusCooldown()) {
        if (event.getValue() == 1) {
            createGameTimer(WARD_OF_ARCEUUS_COOLDOWN);
        } else {
            removeGameTimer(WARD_OF_ARCEUUS_COOLDOWN);
        }
    }

    if (event.getVarbitId() == Varbits.VENGEANCE_ACTIVE && config.showVengeanceActive()) {
        updateVarCounter(VENGEANCE_ACTIVE, event.getValue());
    }

    if (event.getVarbitId() == Varbits.DEATH_CHARGE && config.showArceuus()) {
        if (event.getValue() == 1) {
            // Death charge duration scales with the player's real Magic level.
            createGameTimer(DEATH_CHARGE, Duration.of(client.getRealSkillLevel(Skill.MAGIC), RSTimeUnit.GAME_TICKS));
        } else {
            removeGameTimer(DEATH_CHARGE);
        }
    }

    if (event.getVarbitId() == Varbits.RESURRECT_THRALL && event.getValue() == 0 && config.showArceuus()) {
        removeGameTimer(RESURRECT_THRALL);
    }

    if (event.getVarbitId() == Varbits.SHADOW_VEIL && event.getValue() == 0 && config.showArceuus()) {
        removeGameTimer(SHADOW_VEIL);
    }

    if (event.getVarpId() == VarPlayer.POISON && config.showAntiPoison()) {
        final int poisonVarp = event.getValue();
        final int tickCount = client.getTickCount();

        // Track when the next poison damage tick lands so the remaining timer
        // duration can be computed from the varp value.
        if (poisonVarp == 0) {
            nextPoisonTick = -1;
        } else if (nextPoisonTick - tickCount <= 0) {
            nextPoisonTick = tickCount + POISON_TICK_LENGTH;
        }

        updateVarTimer(ANTIPOISON, event.getValue(),
            i -> i >= 0 || i < VENOM_VALUE_CUTOFF,
            i -> nextPoisonTick - tickCount + Math.abs((i + 1) * POISON_TICK_LENGTH));
        updateVarTimer(ANTIVENOM, event.getValue(),
            i -> i >= VENOM_VALUE_CUTOFF,
            i -> nextPoisonTick - tickCount + Math.abs((i + 1 - VENOM_VALUE_CUTOFF) * POISON_TICK_LENGTH));
    }

    if ((event.getVarbitId() == Varbits.NMZ_OVERLOAD_REFRESHES_REMAINING
        || event.getVarbitId() == Varbits.COX_OVERLOAD_REFRESHES_REMAINING) && config.showOverload()) {
        final int overloadVarb = event.getValue();
        final int tickCount = client.getTickCount();

        if (overloadVarb <= 0) {
            nextOverloadRefreshTick = -1;
        } else if (nextOverloadRefreshTick - tickCount <= 0) {
            nextOverloadRefreshTick = tickCount + OVERLOAD_TICK_LENGTH;
        }

        // Inside a raid the overload timer uses the raid-specific variant.
        GameTimer overloadTimer = client.getVarbitValue(Varbits.IN_RAID) == 1 ? OVERLOAD_RAID : OVERLOAD;
        updateVarTimer(overloadTimer, overloadVarb,
            i -> nextOverloadRefreshTick - tickCount + (i - 1) * OVERLOAD_TICK_LENGTH);
    }

    if (event.getVarbitId() == Varbits.TELEBLOCK && config.showTeleblock()) {
        updateVarTimer(TELEBLOCK, event.getValue() - 100, i -> i <= 0, IntUnaryOperator.identity());
    }

    if (event.getVarpId() == VarPlayer.CHARGE_GOD_SPELL && config.showCharge()) {
        updateVarTimer(CHARGE, event.getValue(), i -> i * 2);
    }

    if (event.getVarbitId() == Varbits.IMBUED_HEART_COOLDOWN && config.showImbuedHeart()) {
        updateVarTimer(IMBUEDHEART, event.getValue(), i -> i * 10);
    }

    if (event.getVarbitId() == Varbits.DRAGONFIRE_SHIELD_COOLDOWN && config.showDFSSpecial()) {
        updateVarTimer(DRAGON_FIRE_SHIELD, event.getValue(), i -> i * 8);
    }

    if (event.getVarpId() == LAST_HOME_TELEPORT && config.showHomeMinigameTeleports()) {
        checkTeleport(LAST_HOME_TELEPORT);
    }

    if (event.getVarpId() == LAST_MINIGAME_TELEPORT && config.showHomeMinigameTeleports()) {
        checkTeleport(LAST_MINIGAME_TELEPORT);
    }

    if (event.getVarbitId() == Varbits.RUN_SLOWED_DEPLETION_ACTIVE
        || event.getVarbitId() == Varbits.STAMINA_EFFECT
        || event.getVarbitId() == Varbits.RING_OF_ENDURANCE_EFFECT) {
        // staminaEffectActive is checked to match https://github.com/Joshua-F/cs2-scripts/blob/741271f0c3395048c1bad4af7881a13734516adf/scripts/%5Bproc%2Cbuff_bar_get_value%5D.cs2#L25
        int staminaEffectActive = client.getVarbitValue(Varbits.RUN_SLOWED_DEPLETION_ACTIVE);
        int staminaPotionEffectVarb = client.getVarbitValue(Varbits.STAMINA_EFFECT);
        int enduranceRingEffectVarb = client.getVarbitValue(Varbits.RING_OF_ENDURANCE_EFFECT);

        final int totalStaminaEffect = staminaPotionEffectVarb + enduranceRingEffectVarb;
        if (staminaEffectActive == 1 && config.showStamina()) {
            updateVarTimer(STAMINA, totalStaminaEffect, i -> i * 10);
        }
    }

    if (event.getVarbitId() == Varbits.ANTIFIRE && config.showAntiFire()) {
        final int antifireVarb = event.getValue();
        final int tickCount = client.getTickCount();

        if (antifireVarb == 0) {
            nextAntifireTick = -1;
        } else if (nextAntifireTick - tickCount <= 0) {
            nextAntifireTick = tickCount + ANTIFIRE_TICK_LENGTH;
        }

        updateVarTimer(ANTIFIRE, antifireVarb, i -> nextAntifireTick - tickCount + (i - 1) * ANTIFIRE_TICK_LENGTH);
    }

    if (event.getVarbitId() == Varbits.SUPER_ANTIFIRE && config.showAntiFire()) {
        final int superAntifireVarb = event.getValue();
        final int tickCount = client.getTickCount();

        if (superAntifireVarb == 0) {
            nextSuperAntifireTick = -1;
        } else if (nextSuperAntifireTick - tickCount <= 0) {
            nextSuperAntifireTick = tickCount + SUPERANTIFIRE_TICK_LENGTH;
        }

        updateVarTimer(SUPERANTIFIRE, event.getValue(),
            i -> nextSuperAntifireTick - tickCount + (i - 1) * SUPERANTIFIRE_TICK_LENGTH);
    }

    if (event.getVarbitId() == Varbits.MAGIC_IMBUE && config.showMagicImbue()) {
        updateVarTimer(MAGICIMBUE, event.getValue(), i -> i * 10);
    }

    if (event.getVarbitId() == Varbits.DIVINE_SUPER_ATTACK && config.showDivine()) {
        // A stronger combined potion takes precedence; abort the handler.
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_COMBAT) > event.getValue()) {
            return;
        }

        updateVarTimer(DIVINE_SUPER_ATTACK, event.getValue(), IntUnaryOperator.identity());
    }

    if (event.getVarbitId() == Varbits.DIVINE_SUPER_STRENGTH && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_COMBAT) > event.getValue()) {
            return;
        }

        updateVarTimer(DIVINE_SUPER_STRENGTH, event.getValue(), IntUnaryOperator.identity());
    }

    if (event.getVarbitId() == Varbits.DIVINE_SUPER_DEFENCE && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_COMBAT) > event.getValue()
            || client.getVarbitValue(Varbits.DIVINE_BASTION) > event.getValue()
            || client.getVarbitValue(Varbits.DIVINE_BATTLEMAGE) > event.getValue()
            // When drinking a dose of moonlight potion while already under its effects, desync between
            // Varbits.MOONLIGHT_POTION and Varbits.DIVINE_SUPER_DEFENCE can occur, with the latter being 1 tick
            // greater
            || client.getVarbitValue(Varbits.MOONLIGHT_POTION) >= event.getValue()) {
            return;
        }

        if (client.getVarbitValue(Varbits.MOONLIGHT_POTION) < event.getValue()) {
            removeVarTimer(MOONLIGHT_POTION);
        }

        updateVarTimer(DIVINE_SUPER_DEFENCE, event.getValue(), IntUnaryOperator.identity());
    }

    if (event.getVarbitId() == Varbits.DIVINE_RANGING && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_BASTION) > event.getValue()) {
            return;
        }

        updateVarTimer(DIVINE_RANGING, event.getValue(), IntUnaryOperator.identity());
    }

    if (event.getVarbitId() == Varbits.DIVINE_MAGIC && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_BATTLEMAGE) > event.getValue()) {
            return;
        }

        updateVarTimer(DIVINE_MAGIC, event.getValue(), IntUnaryOperator.identity());
    }

    if (event.getVarbitId() == Varbits.DIVINE_SUPER_COMBAT && config.showDivine()) {
        // The combined potion supersedes the matching single-stat timers.
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_ATTACK) == event.getValue()) {
            removeVarTimer(DIVINE_SUPER_ATTACK);
        }
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_STRENGTH) == event.getValue()) {
            removeVarTimer(DIVINE_SUPER_STRENGTH);
        }
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == event.getValue()) {
            removeVarTimer(DIVINE_SUPER_DEFENCE);
        }

        updateVarTimer(DIVINE_SUPER_COMBAT, event.getValue(), IntUnaryOperator.identity());
    }

    if (event.getVarbitId() == Varbits.DIVINE_BASTION && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_RANGING) == event.getValue()) {
            removeVarTimer(DIVINE_RANGING);
        }
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == event.getValue()) {
            removeVarTimer(DIVINE_SUPER_DEFENCE);
        }

        updateVarTimer(DIVINE_BASTION, event.getValue(), IntUnaryOperator.identity());
    }

    if (event.getVarbitId() == Varbits.DIVINE_BATTLEMAGE && config.showDivine()) {
        if (client.getVarbitValue(Varbits.DIVINE_MAGIC) == event.getValue()) {
            removeVarTimer(DIVINE_MAGIC);
        }
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == event.getValue()) {
            removeVarTimer(DIVINE_SUPER_DEFENCE);
        }

        updateVarTimer(DIVINE_BATTLEMAGE, event.getValue(), IntUnaryOperator.identity());
    }

    if (event.getVarbitId() == Varbits.BUFF_STAT_BOOST && config.showOverload()) {
        updateVarTimer(SMELLING_SALTS, event.getValue(), i -> i * 25);
    }

    if (event.getVarbitId() == Varbits.MENAPHITE_REMEDY && config.showMenaphiteRemedy()) {
        updateVarTimer(MENAPHITE_REMEDY, event.getValue(), i -> i * 25);
    }

    if (event.getVarbitId() == Varbits.LIQUID_ADERNALINE_ACTIVE && event.getValue() == 0 && config.showLiquidAdrenaline()) {
        removeGameTimer(LIQUID_ADRENALINE);
    }

    if (event.getVarbitId() == Varbits.FARMERS_AFFINITY && config.showFarmersAffinity()) {
        updateVarTimer(FARMERS_AFFINITY, event.getValue(), i -> i * 20);
    }

    if (event.getVarbitId() == Varbits.GOD_WARS_ALTAR_COOLDOWN && config.showGodWarsAltar()) {
        updateVarTimer(GOD_WARS_ALTAR, event.getValue(), i -> i * 100);
    }

    if (event.getVarbitId() == Varbits.CURSE_OF_THE_MOONS && config.showCurseOfTheMoons()) {
        // The same varbit backs two different counters depending on region.
        final int regionID = WorldPoint.fromLocal(client, client.getLocalPlayer().getLocalLocation()).getRegionID();
        if (regionID == ECLIPSE_MOON_REGION_ID) {
            updateVarCounter(CURSE_OF_THE_MOONS_ECLIPSE, event.getValue());
        } else {
            updateVarCounter(CURSE_OF_THE_MOONS_BLUE, event.getValue());
        }
    }

    if (event.getVarbitId() == Varbits.COLOSSEUM_DOOM && config.showColosseumDoom()) {
        updateVarCounter(COLOSSEUM_DOOM, event.getValue());
    }

    if (event.getVarbitId() == Varbits.MOONLIGHT_POTION && config.showMoonlightPotion()) {
        int moonlightValue = event.getValue();
        // Increase the timer by 1 tick in case of desync due to drinking a dose of moonlight potion while already
        // under its effects. Otherwise, the timer would be 1 tick shorter than it is meant to be.
        if (client.getVarbitValue(Varbits.DIVINE_SUPER_DEFENCE) == moonlightValue + 1) {
            moonlightValue++;
        }
        updateVarTimer(MOONLIGHT_POTION, moonlightValue, IntUnaryOperator.identity());
    }
}
/**
 * Verifies the stamina timer while a charged ring of endurance is worn:
 * the infobox shows a 4-minute duration, and after the ring effect varbit
 * drops to 0 the remaining duration falls back to 2 minutes.
 */
@Test
public void testEndurance()
{
	when(timersAndBuffsConfig.showStamina()).thenReturn(true);
	when(client.getVarbitValue(Varbits.RUN_SLOWED_DEPLETION_ACTIVE)).thenReturn(1);
	when(client.getVarbitValue(Varbits.STAMINA_EFFECT)).thenReturn(20);
	when(client.getVarbitValue(Varbits.RING_OF_ENDURANCE_EFFECT)).thenReturn(20);
	VarbitChanged varbitChanged = new VarbitChanged();
	varbitChanged.setVarbitId(Varbits.RUN_SLOWED_DEPLETION_ACTIVE); // just has to be one of the vars
	timersAndBuffsPlugin.onVarbitChanged(varbitChanged);

	// The plugin should have created exactly one stamina infobox with the doubled duration.
	ArgumentCaptor<InfoBox> captor = ArgumentCaptor.forClass(InfoBox.class);
	verify(infoBoxManager).addInfoBox(captor.capture());
	TimerTimer infoBox = (TimerTimer) captor.getValue();
	assertEquals(GameTimer.STAMINA, infoBox.getTimer());
	assertEquals(Duration.ofMinutes(4), infoBox.getDuration());

	// unwield ring
	when(client.getVarbitValue(Varbits.RING_OF_ENDURANCE_EFFECT)).thenReturn(0);
	timersAndBuffsPlugin.onVarbitChanged(varbitChanged);
	int mins = (int) infoBox.getDuration().toMinutes();
	assertEquals(2, mins);
}
/**
 * Right-pads {@code input} with repeated copies of {@code padding} until it is
 * exactly {@code targetLen} characters long; an input longer than the target is
 * truncated. Invalid arguments (null/empty padding, null/negative target)
 * yield null rather than throwing, and a null input propagates as null.
 */
@Udf
public String rpad(
    @UdfParameter(description = "String to be padded") final String input,
    @UdfParameter(description = "Target length") final Integer targetLen,
    @UdfParameter(description = "Padding string") final String padding) {
  if (input == null) {
    return null;
  }
  final boolean invalidArgs =
      padding == null || padding.isEmpty() || targetLen == null || targetLen < 0;
  if (invalidArgs) {
    return null;
  }

  final StringBuilder padded = new StringBuilder(targetLen + padding.length());
  padded.append(input);
  // Append whole copies of the padding until at least targetLen chars exist...
  int remaining = targetLen - input.length();
  while (remaining > 0) {
    padded.append(padding);
    remaining -= padding.length();
  }
  // ...then trim any overshoot (or truncate a too-long input) to exactly targetLen.
  padded.setLength(targetLen);
  return padded.toString();
}
// rpad on bytes must return null (not throw) when the padding buffer is empty,
// since an empty pad can never grow the input to the target length.
@Test
public void shouldReturnNullForEmptyPaddingBytes() {
  final ByteBuffer result = udf.rpad(BYTES_123, 4, EMPTY_BYTES);
  assertThat(result, is(nullValue()));
}
/**
 * Returns the baggage-enable flag.
 * <p>
 * NOTE(review): BAGGAGE_ENABLE is a static field initialized elsewhere in this
 * class, so this accessor reflects whatever value was computed at that time.
 */
public static boolean isBaggageEnable() {
    return BAGGAGE_ENABLE;
}
/**
 * NOTE(review): this test body is empty, so it asserts nothing about
 * isBaggageEnable() — it only proves the method's class loads without error.
 * Consider asserting the expected flag value for the configured environment.
 */
@Test
public void isBaggageEnable() throws Exception {
}
/**
 * Parses the JSON-encoded authentication parameters and initializes the OAuth2
 * flow. Only the client-credentials flow type is currently supported.
 *
 * @param encodedAuthParamString JSON map of configuration parameters
 * @throws IllegalArgumentException if the string is blank, malformed JSON, or
 *         names an unsupported authentication type
 */
@Override
public void configure(String encodedAuthParamString) {
    if (StringUtils.isBlank(encodedAuthParamString)) {
        throw new IllegalArgumentException("No authentication parameters were provided");
    }

    final Map<String, String> params;
    try {
        params = AuthenticationUtil.configureFromJsonString(encodedAuthParamString);
    } catch (IOException e) {
        throw new IllegalArgumentException("Malformed authentication parameters", e);
    }

    // Missing "type" defaults to the client-credentials flow.
    final String type = params.getOrDefault(CONFIG_PARAM_TYPE, TYPE_CLIENT_CREDENTIALS);
    switch (type) {
        case TYPE_CLIENT_CREDENTIALS:
            this.flow = ClientCredentialsFlow.fromParameters(params);
            break;
        default:
            throw new IllegalArgumentException("Unsupported authentication type: " + type);
    }
}
// An empty parameter string must be rejected up front; the ".*No.*" regex
// matches the "No authentication parameters were provided" message.
@Test(expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = ".*No.*")
public void testConfigureNoParams() throws Exception {
    this.auth.configure("");
}
/**
 * Renders the given text as an image and writes it to {@code out} as PNG.
 * Delegates to the BufferedImage-producing overload with an ARGB buffer, so the
 * background color may carry transparency.
 *
 * @param str             text to draw
 * @param font            font used to render the text
 * @param backgroundColor background fill color
 * @param fontColor       text color
 * @param out             destination image output stream
 * @throws IORuntimeException on write failure
 */
public static void createImage(String str, Font font, Color backgroundColor, Color fontColor, ImageOutputStream out) throws IORuntimeException {
	writePng(createImage(str, font, backgroundColor, fontColor, BufferedImage.TYPE_INT_ARGB), out);
}
// Disabled: writes to a hard-coded local path (d:/test/...) and is only useful
// for manual, visual verification of the rendered image.
@Test
@Disabled
public void createImageTest() throws IORuntimeException, IOException {
	ImgUtil.createImage(
		"版权所有",
		new Font("黑体", Font.BOLD, 50),
		Color.WHITE,
		Color.BLACK,
		ImageIO.createImageOutputStream(new File("d:/test/createImageTest.png"))
	);
}
/**
 * Returns primary-key metadata for the given logical table, translating the
 * logical catalog, schema and table names to their actual physical counterparts
 * before delegating to the underlying driver metadata, and wrapping the result
 * in the adapted result set.
 */
@Override
public ResultSet getPrimaryKeys(final String catalog, final String schema, final String table) throws SQLException {
    // Resolve the actual catalog once instead of twice: it is needed both for the
    // metadata call itself and for resolving the actual table name.
    final String actualCatalog = getActualCatalog(catalog);
    return createDatabaseMetaDataResultSet(
            getDatabaseMetaData().getPrimaryKeys(actualCatalog, getActualSchema(schema), getActualTable(actualCatalog, table)));
}
// getPrimaryKeys must wrap the underlying driver's result set in the
// DatabaseMetaDataResultSet adapter.
@Test
void assertGetPrimaryKeys() throws SQLException {
    when(databaseMetaData.getPrimaryKeys("test", null, null)).thenReturn(resultSet);
    assertThat(shardingSphereDatabaseMetaData.getPrimaryKeys("test", null, null), instanceOf(DatabaseMetaDataResultSet.class));
}
/**
 * Verifies and persists a CSCA certificate.
 * <p>
 * The very first certificate stored for a document type cannot be chained to an
 * existing trust anchor, so it is only checked for a valid self-signature and an
 * operator must set its trusted flag manually. Subsequent certificates are
 * verified against the existing store; when {@code allowAddingExpired} is set,
 * the certificate's own notAfter instant is passed as the verification date.
 *
 * @param cert the X.509 certificate to add
 * @return the saved database entity
 * @throws BadRequestException if the certificate cannot be verified
 * @throws RuntimeException    if the certificate cannot be encoded
 */
public Certificate add(X509Certificate cert) {
    final Certificate db;
    try {
        db = Certificate.from(cert);
    } catch (CertificateEncodingException e) {
        logger.error("Encoding error in certificate", e);
        throw new RuntimeException("Encoding error in certificate", e);
    }
    try {
        // Special case for first CSCA certificate for this document type
        if (repository.countByDocumentType(db.getDocumentType()) == 0) {
            // Self-signature check only; trust must be granted manually.
            cert.verify(cert.getPublicKey());
            logger.warn("Added first CSCA certificate for {}, set trusted flag manually", db.getDocumentType());
        } else {
            verify(cert, allowAddingExpired ? cert.getNotAfter() : null);
        }
    } catch (GeneralSecurityException | VerificationException e) {
        logger.error(
            String.format("Could not verify certificate of %s issued by %s",
                cert.getSubjectX500Principal(),
                cert.getIssuerX500Principal()
            ), e
        );
        throw new BadRequestException("Could not verify certificate", e);
    }
    return repository.saveAndFlush(db);
}
// Adding a CRL whose issuing certificate has not been loaded yet must be
// rejected with a BadRequestException.
@Test
public void shouldDisallowToAddCRLIfCertificateIsNotLoaded() throws Exception {
    Exception exception = assertThrows(BadRequestException.class, () -> {
        service.add(readCRL("rdw/02.crl"));
    });
    assertEquals("Could not get certificate to verify", exception.getMessage());
}
/**
 * Synchronously reads data by delegating to the asynchronous variant and
 * waiting for its completion.
 *
 * @param request the read request to execute
 * @return the response produced by the async read
 * @throws Exception on failure, interruption, or if no result arrives in time
 */
@Override
public Response getData(ReadRequest request) throws Exception {
    // Bound the wait to 5 seconds so a stalled read cannot block the caller forever.
    return aGetData(request).get(5_000L, TimeUnit.MILLISECONDS);
}
// getData should route the read request to the underlying server's get().
@Test
void testGetData() throws Exception {
    raftProtocol.getData(readRequest);
    verify(serverMock).get(readRequest);
}
/**
 * Serializes a logging event to a single JSON object followed by a line
 * separator, emitting only the members enabled by the {@code with*} flags.
 * <p>
 * Separator discipline: members appended inline here add their own trailing
 * VALUE_SEPARATOR; the append* helper methods are expected to manage their own
 * separators. The statement order therefore fixes the member order in the
 * output — do not reorder.
 *
 * @param event the event to encode
 * @return the UTF-8 bytes of the JSON line
 */
@Override
public byte[] encode(ILoggingEvent event) {
    // Pre-size the buffer; events carrying a throwable are typically much larger.
    final int initialCapacity = event.getThrowableProxy() == null ? DEFAULT_SIZE : DEFAULT_SIZE_WITH_THROWABLE;

    StringBuilder sb = new StringBuilder(initialCapacity);
    sb.append(OPEN_OBJ);

    if (withSequenceNumber) {
        appenderMemberWithLongValue(sb, SEQUENCE_NUMBER_ATTR_NAME, event.getSequenceNumber());
        sb.append(VALUE_SEPARATOR);
    }

    if (withTimestamp) {
        appenderMemberWithLongValue(sb, TIMESTAMP_ATTR_NAME, event.getTimeStamp());
        sb.append(VALUE_SEPARATOR);
    }

    if (withNanoseconds) {
        appenderMemberWithLongValue(sb, NANOSECONDS_ATTR_NAME, event.getNanoseconds());
        sb.append(VALUE_SEPARATOR);
    }

    if (withLevel) {
        // A null level is rendered as the literal null string.
        String levelStr = event.getLevel() != null ? event.getLevel().levelStr : NULL_STR;
        appenderMember(sb, LEVEL_ATTR_NAME, levelStr);
        sb.append(VALUE_SEPARATOR);
    }

    if (withThreadName) {
        appenderMember(sb, THREAD_NAME_ATTR_NAME, jsonEscape(event.getThreadName()));
        sb.append(VALUE_SEPARATOR);
    }

    if (withLoggerName) {
        appenderMember(sb, LOGGER_ATTR_NAME, event.getLoggerName());
        sb.append(VALUE_SEPARATOR);
    }

    if (withContext) {
        appendLoggerContext(sb, event.getLoggerContextVO());
        sb.append(VALUE_SEPARATOR);
    }

    if (withMarkers)
        appendMarkers(sb, event);

    if (withMDC)
        appendMDC(sb, event);

    if (withKVPList)
        appendKeyValuePairs(sb, event);

    if (withMessage) {
        appenderMember(sb, MESSAGE_ATTR_NAME, jsonEscape(event.getMessage()));
        sb.append(VALUE_SEPARATOR);
    }

    if (withFormattedMessage) {
        appenderMember(sb, FORMATTED_MESSAGE_ATTR_NAME, jsonEscape(event.getFormattedMessage()));
        sb.append(VALUE_SEPARATOR);
    }

    if (withArguments)
        appendArgumentArray(sb, event);

    // The throwable member is last: no trailing separator is added after it.
    if (withThrowable)
        appendThrowableProxy(sb, THROWABLE_ATTR_NAME, event.getThrowableProxy());

    sb.append(CLOSE_OBJ);
    sb.append(CoreConstants.JSON_LINE_SEPARATOR);
    return sb.toString().getBytes(UTF_8_CHARSET);
}
// Round-trip: an event with an attached throwable is encoded to JSON, parsed
// back, and compared field-by-field against the original.
@Test
void withThrowable() throws JsonProcessingException {
    Throwable t = new RuntimeException("test");
    LoggingEvent event = new LoggingEvent("in withThrowable test", logger, Level.WARN, "hello kvp", t, null);
    byte[] resultBytes = jsonEncoder.encode(event);
    String resultString = new String(resultBytes, StandardCharsets.UTF_8);
    JsonLoggingEvent resultEvent = stringToLoggingEventMapper.mapStringToLoggingEvent(resultString);
    compareEvents(event, resultEvent);
}
/**
 * Opens an input stream on the resolved delegate file object.
 * requireResolvedFileObject() supplies (and, presumably, lazily resolves) the
 * delegate — verify its contract if resolution side effects matter here.
 */
@Override
public InputStream getInputStream() throws FileSystemException {
  return requireResolvedFileObject().getInputStream();
}
// getInputStream must return exactly the stream produced by the resolved
// delegate (same reference, no wrapping).
@Test
public void testDelegatesGetInputStream() throws FileSystemException {
  InputStream inputStream = mock( InputStream.class );
  when( resolvedFileObject.getInputStream() ).thenReturn( inputStream );
  assertSame( inputStream, fileObject.getInputStream() );
}
/**
 * Creates a Jersey {@link Client} whose connect and read timeouts come from the
 * router configuration. A configured timeout outside the valid
 * (0, Integer.MAX_VALUE] range falls back to the default so the client is never
 * created with a nonsensical or integer-overflowed timeout.
 */
protected static Client createJerseyClient(Configuration conf) {
  Client client = Client.create();

  // Read the raw configured value as a long so int overflow can be detected.
  long checkConnectTimeOut = conf.getLong(YarnConfiguration.ROUTER_WEBAPP_CONNECT_TIMEOUT, 0);
  int connectTimeOut = (int) conf.getTimeDuration(YarnConfiguration.ROUTER_WEBAPP_CONNECT_TIMEOUT,
      YarnConfiguration.DEFAULT_ROUTER_WEBAPP_CONNECT_TIMEOUT, TimeUnit.MILLISECONDS);
  if (checkConnectTimeOut <= 0 || checkConnectTimeOut > Integer.MAX_VALUE) {
    LOG.warn("Configuration {} = {} ms error. We will use the default value({} ms).",
        YarnConfiguration.ROUTER_WEBAPP_CONNECT_TIMEOUT, checkConnectTimeOut,
        YarnConfiguration.DEFAULT_ROUTER_WEBAPP_CONNECT_TIMEOUT);
    connectTimeOut = (int) TimeUnit.MILLISECONDS.convert(
        YarnConfiguration.DEFAULT_ROUTER_WEBAPP_CONNECT_TIMEOUT, TimeUnit.MILLISECONDS);
  }
  client.setConnectTimeout(connectTimeOut);

  long checkReadTimeout = conf.getLong(YarnConfiguration.ROUTER_WEBAPP_READ_TIMEOUT, 0);
  int readTimeout = (int) conf.getTimeDuration(YarnConfiguration.ROUTER_WEBAPP_READ_TIMEOUT,
      YarnConfiguration.DEFAULT_ROUTER_WEBAPP_READ_TIMEOUT, TimeUnit.MILLISECONDS);
  // Bug fixes: (1) also guard against values > Integer.MAX_VALUE, since the
  // (int) cast above overflows them to a negative timeout; (2) log and fall
  // back using the READ timeout key/default, not the CONNECT ones.
  if (checkReadTimeout < 0 || checkReadTimeout > Integer.MAX_VALUE) {
    LOG.warn("Configuration {} = {} ms error. We will use the default value({} ms).",
        YarnConfiguration.ROUTER_WEBAPP_READ_TIMEOUT, checkReadTimeout,
        YarnConfiguration.DEFAULT_ROUTER_WEBAPP_READ_TIMEOUT);
    readTimeout = (int) TimeUnit.MILLISECONDS.convert(
        YarnConfiguration.DEFAULT_ROUTER_WEBAPP_READ_TIMEOUT, TimeUnit.MILLISECONDS);
  }
  client.setReadTimeout(readTimeout);
  return client;
}
/**
 * Covers the three timeout scenarios of createJerseyClient: defaults (30s),
 * negative configured values (fall back to 30s), and values beyond
 * Integer.MAX_VALUE (fall back to 30s instead of overflowing).
 */
@Test
public void testCreateJerseyClient() {
  // Case1, default timeout, The default timeout is 30s.
  YarnConfiguration configuration = new YarnConfiguration();
  Client client01 = RouterWebServiceUtil.createJerseyClient(configuration);
  Map<String, Object> properties = client01.getProperties();
  int readTimeOut = (int) properties.get(ClientConfig.PROPERTY_READ_TIMEOUT);
  int connectTimeOut = (int) properties.get(ClientConfig.PROPERTY_CONNECT_TIMEOUT);
  Assert.assertEquals(30000, readTimeOut);
  Assert.assertEquals(30000, connectTimeOut);
  client01.destroy();

  // Case2, set a negative timeout, We'll get the default timeout(30s)
  YarnConfiguration configuration2 = new YarnConfiguration();
  configuration2.setLong(YarnConfiguration.ROUTER_WEBAPP_CONNECT_TIMEOUT, -1L);
  configuration2.setLong(YarnConfiguration.ROUTER_WEBAPP_READ_TIMEOUT, -1L);
  Client client02 = RouterWebServiceUtil.createJerseyClient(configuration2);
  Map<String, Object> properties02 = client02.getProperties();
  int readTimeOut02 = (int) properties02.get(ClientConfig.PROPERTY_READ_TIMEOUT);
  int connectTimeOut02 = (int) properties02.get(ClientConfig.PROPERTY_CONNECT_TIMEOUT);
  Assert.assertEquals(30000, readTimeOut02);
  Assert.assertEquals(30000, connectTimeOut02);
  client02.destroy();

  // Case3, Set the maximum value that exceeds the integer
  // We'll get the default timeout(30s)
  YarnConfiguration configuration3 = new YarnConfiguration();
  long connectTimeOutLong = (long) Integer.MAX_VALUE + 1;
  long readTimeOutLong = (long) Integer.MAX_VALUE + 1;
  configuration3.setLong(YarnConfiguration.ROUTER_WEBAPP_CONNECT_TIMEOUT, connectTimeOutLong);
  configuration3.setLong(YarnConfiguration.ROUTER_WEBAPP_READ_TIMEOUT, readTimeOutLong);
  Client client03 = RouterWebServiceUtil.createJerseyClient(configuration3);
  Map<String, Object> properties03 = client03.getProperties();
  int readTimeOut03 = (int) properties03.get(ClientConfig.PROPERTY_READ_TIMEOUT);
  int connectTimeOut03 = (int) properties03.get(ClientConfig.PROPERTY_CONNECT_TIMEOUT);
  Assert.assertEquals(30000, readTimeOut03);
  Assert.assertEquals(30000, connectTimeOut03);
  client03.destroy();
}
/**
 * Resolves a hostname to its textual IP address.
 *
 * @param hostname the hostname to resolve; must be non-null and non-empty
 * @return the IP address string for the host
 * @throws NullPointerException     if {@code hostname} is null
 * @throws IllegalArgumentException if {@code hostname} is empty
 * @throws UnknownHostException     if the hostname cannot be resolved
 */
public static String resolveIpAddress(String hostname) throws UnknownHostException {
  // Plain JDK precondition checks, behavior-identical to the previous Guava
  // Preconditions calls (NPE on null, IAE on empty) without the extra dependency.
  if (hostname == null) {
    throw new NullPointerException("hostname");
  }
  if (hostname.isEmpty()) {
    throw new IllegalArgumentException("Cannot resolve IP address for empty hostname");
  }
  return InetAddress.getByName(hostname).getHostAddress();
}
// A null hostname must fail fast with a NullPointerException (JUnit4-style
// expected-exception test).
@Test(expected = NullPointerException.class)
public void resolveNullIpAddress() throws UnknownHostException {
  NetworkAddressUtils.resolveIpAddress(null);
}
/**
 * Formatter entry point: parses the command-line arguments, handles the
 * informational --version/--help flags, builds the formatter options, and then
 * formats either standard input or the listed files.
 *
 * @param args the raw command-line arguments
 * @return the process exit code (0 on success)
 * @throws UsageException when --help is requested or usage is invalid
 */
public int format(String... args) throws UsageException {
  CommandLineOptions parameters = processArgs(args);

  // Informational flags short-circuit before any formatting work.
  if (parameters.version()) {
    errWriter.println(versionString());
    return 0;
  }
  if (parameters.help()) {
    throw new UsageException();
  }

  JavaFormatterOptions options =
      JavaFormatterOptions.builder()
          .style(parameters.aosp() ? Style.AOSP : Style.GOOGLE)
          .formatJavadoc(parameters.formatJavadoc())
          .build();

  return parameters.stdin() ? formatStdin(parameters, options) : formatFiles(parameters, options);
}
/**
 * When formatting is restricted to a line range (-lines 4), unused imports are
 * still pruned, but only the selected line is reformatted — line 5 keeps its
 * original indentation.
 */
@Test
public void importRemovalLines() throws Exception {
  String[] input = {
    "import java.util.ArrayList;",
    "import java.util.List;",
    "class Test {",
    "ArrayList<String> a = new ArrayList<>();",
    "ArrayList<String> b = new ArrayList<>();",
    "}",
  };
  String[] expected = {
    "import java.util.ArrayList;",
    "",
    "class Test {",
    " ArrayList<String> a = new ArrayList<>();",
    "ArrayList<String> b = new ArrayList<>();",
    "}",
  };
  StringWriter out = new StringWriter();
  Main main = new Main(
      new PrintWriter(out, true),
      new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.err, UTF_8)), true),
      new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8)));
  assertThat(main.format("-", "-lines", "4")).isEqualTo(0);
  assertThat(out.toString()).isEqualTo(joiner.join(expected));
}
/**
 * Drains all delayed offset syncs into the pending queue so they are published
 * on the next flush. Synchronized to keep the move atomic with respect to
 * other synchronized accessors of these two maps.
 */
synchronized void promoteDelayedOffsetSyncs() {
    pendingOffsetSyncs.putAll(delayedOffsetSyncs);
    delayedOffsetSyncs.clear();
}
// After promotion the delayed map must be empty and the pending map must hold
// the most recent sync for the partition (upstream 1 -> downstream 101).
@Test
public void testPromoteDelayedOffsetSyncs() {
    int maxOffsetLag = 50;

    @SuppressWarnings("unchecked")
    KafkaProducer<byte[], byte[]> producer = mock(KafkaProducer.class);
    Semaphore outstandingOffsetSyncs = new Semaphore(1);
    OffsetSyncWriter offsetSyncWriter = new OffsetSyncWriter(producer, topicName, outstandingOffsetSyncs, maxOffsetLag);

    // The second sync within the lag window supersedes the first.
    offsetSyncWriter.maybeQueueOffsetSyncs(topicPartition, 0, 100);
    offsetSyncWriter.maybeQueueOffsetSyncs(topicPartition, 1, 101);
    offsetSyncWriter.promoteDelayedOffsetSyncs();

    assertTrue(offsetSyncWriter.getDelayedOffsetSyncs().isEmpty());
    Map<TopicPartition, OffsetSync> pendingOffsetSyncs = offsetSyncWriter.getPendingOffsetSyncs();
    assertEquals(1, pendingOffsetSyncs.size());
    assertEquals(1, pendingOffsetSyncs.get(topicPartition).upstreamOffset());
    assertEquals(101, pendingOffsetSyncs.get(topicPartition).downstreamOffset());
}
/**
 * Materializes this scan as a Spark {@link Batch}, handing over the context,
 * table, read configuration, grouping key type, planned task groups and
 * expected schema. The scan's hashCode is passed along so the batch can stay
 * associated with the scan it was created from.
 */
@Override
public Batch toBatch() {
  return new SparkBatch(
      sparkContext, table, readConf, groupingKeyType(), taskGroups(), expectedSchema, hashCode());
}
/**
 * Pushes an equality predicate on the years(ts) transform (via the UDF form)
 * into the scan and checks the planned partition count, then repeats with the
 * negated predicate.
 */
@TestTemplate
public void testPartitionedYears() throws Exception {
  createPartitionedTable(spark, tableName, "years(ts)");

  SparkScanBuilder builder = scanBuilder();

  YearsFunction.TimestampToYearsFunction function = new YearsFunction.TimestampToYearsFunction();
  UserDefinedScalarFunc udf = toUDF(function, expressions(fieldRef("ts")));
  Predicate predicate =
      new Predicate(
          "=",
          expressions(
              udf, intLit(timestampStrToYearOrdinal("2017-11-22T00:00:00.000000+00:00"))));
  pushFilters(builder, predicate);
  Batch scan = builder.build().toBatch();

  assertThat(scan.planInputPartitions().length).isEqualTo(5);

  // NOT Equal
  builder = scanBuilder();

  predicate = new Not(predicate);
  pushFilters(builder, predicate);
  scan = builder.build().toBatch();

  assertThat(scan.planInputPartitions().length).isEqualTo(5);
}
/**
 * Sends this request synchronously over the configured service and returns the
 * response deserialized into {@code responseType}.
 *
 * @return the deserialized response
 * @throws IOException on transport failure
 */
public T send() throws IOException {
    return web3jService.send(this, responseType);
}
// Verifies the exact JSON-RPC payload produced for an eth_compileLLL request.
@Test
public void testEthCompileLLL() throws Exception {
    web3j.ethCompileLLL("(returnlll (suicide (caller)))").send();

    verifyResult(
            "{\"jsonrpc\":\"2.0\",\"method\":\"eth_compileLLL\","
                    + "\"params\":[\"(returnlll (suicide (caller)))\"],\"id\":1}");
}
@VisibleForTesting static void validateFips(final KsqlConfig config, final KsqlRestConfig restConfig) { if (config.getBoolean(ConfluentConfigs.ENABLE_FIPS_CONFIG)) { final FipsValidator fipsValidator = ConfluentConfigs.buildFipsValidator(); // validate cipher suites and TLS version validateCipherSuites(fipsValidator, restConfig); // validate broker validateBroker(fipsValidator, config); // validate ssl endpoint algorithm validateSslEndpointAlgo(fipsValidator, restConfig); // validate schema registry url validateSrUrl(fipsValidator, restConfig); // validate all listeners validateListeners(fipsValidator, restConfig); log.info("FIPS mode enabled for ksqlDB!"); } }
// FIPS validation must reject a configuration that enables FIPS but never
// specifies the broker security protocol.
@Test
public void shouldFailOnNullBrokerSecurityProtocol() {
  // Given:
  final KsqlConfig config = configWith(ImmutableMap.of(
      ConfluentConfigs.ENABLE_FIPS_CONFIG, true
  ));
  final KsqlRestConfig restConfig = new KsqlRestConfig(ImmutableMap.<String, Object>builder()
      .put(KsqlRestConfig.SSL_CIPHER_SUITES_CONFIG,
          Collections.singletonList("TLS_RSA_WITH_AES_256_CCM"))
      .build()
  );

  // When:
  final Exception e = assertThrows(
      SecurityException.class,
      () -> KsqlServerMain.validateFips(config, restConfig)
  );

  // Then:
  assertThat(e.getMessage(), containsString(
      "The security protocol ('"
          + CommonClientConfigs.SECURITY_PROTOCOL_CONFIG
          + "') is not specified."));
}
/**
 * Processes an order: the product must validate and then the payment must go
 * through (payment is skipped entirely when validation fails, via
 * short-circuit evaluation).
 *
 * @return a human-readable success or failure message
 */
public String processOrder() {
    boolean succeeded = validateProduct() && processPayment();
    return succeeded ? "Order processed successfully" : "Order processing failed";
}
// Product validation succeeds but payment processing fails -> the overall
// order must report failure.
@Test
void testProcessOrder_FailureWithPaymentProcessingFailure() {
    // Arrange
    when(restTemplate.postForEntity(eq("http://localhost:30302/product/validate"), anyString(), eq(Boolean.class)))
            .thenReturn(ResponseEntity.ok(true));
    when(restTemplate.postForEntity(eq("http://localhost:30301/payment/process"), anyString(), eq(Boolean.class)))
            .thenReturn(ResponseEntity.ok(false));

    // Act
    String result = orderService.processOrder();

    // Assert
    assertEquals("Order processing failed", result);
}
public static String subtractPaths(String path, String prefix) throws InvalidPathException { String cleanedPath = cleanPath(path); String cleanedPrefix = cleanPath(prefix); if (cleanedPath.equals(cleanedPrefix)) { return ""; } if (!hasPrefix(cleanedPath, cleanedPrefix)) { throw new RuntimeException( String.format("Cannot subtract %s from %s because it is not a prefix", prefix, path)); } // The only clean path which ends in '/' is the root. int prefixLen = cleanedPrefix.length(); int charsToDrop = PathUtils.isRoot(cleanedPrefix) ? prefixLen : prefixLen + 1; return cleanedPath.substring(charsToDrop, cleanedPath.length()); }
// Covers trailing-slash handling on the prefix, the root prefix, and the
// path-equals-prefix cases (which reduce to "" or a relative remainder).
@Test
public void subtractPaths() throws InvalidPathException {
  assertEquals("b/c", PathUtils.subtractPaths("/a/b/c", "/a"));
  assertEquals("b/c", PathUtils.subtractPaths("/a/b/c", "/a/"));
  assertEquals("b/c", PathUtils.subtractPaths("/a/b/c", "/a/"));
  assertEquals("c", PathUtils.subtractPaths("/a/b/c", "/a/b"));
  assertEquals("a/b/c", PathUtils.subtractPaths("/a/b/c", "/"));
  assertEquals("", PathUtils.subtractPaths("/", "/"));
  assertEquals("", PathUtils.subtractPaths("/a/b/", "/a/b"));
  assertEquals("", PathUtils.subtractPaths("/a/b", "/a/b"));
}
public static String concat(CharSequence... strings) { if (strings.length == 0) { return ""; } if (strings.length == 1) { return strings[0].toString(); } int length = 0; // -1 = no result, -2 = multiple results int indexOfSingleNonEmptyString = -1; for (int i = 0; i < strings.length; i++) { CharSequence charSequence = strings[i]; int len = charSequence.length(); length += len; if (indexOfSingleNonEmptyString != -2 && len > 0) { if (indexOfSingleNonEmptyString == -1) { indexOfSingleNonEmptyString = i; } else { indexOfSingleNonEmptyString = -2; } } } if (length == 0) { return ""; } if (indexOfSingleNonEmptyString > 0) { return strings[indexOfSingleNonEmptyString].toString(); } StringBuilder sb = new StringBuilder(length); for (CharSequence charSequence : strings) { sb.append(charSequence); } return sb.toString(); }
// Exercises the empty, single-argument and multi-argument paths, including the
// case where only one element (possibly surrounded by empties) is non-empty.
@Test
public void testConcat() {
    Assertions.assertEquals("", Utils.concat());
    Assertions.assertEquals("", Utils.concat(""));
    Assertions.assertEquals("", Utils.concat("", ""));
    Assertions.assertEquals("a", Utils.concat("a"));
    Assertions.assertEquals("a", Utils.concat("", "a", ""));
    Assertions.assertEquals("abc", Utils.concat("a", "b", "c"));
}
/**
 * Computes the set of classes the generated entity must import, driven by the
 * entity strategy (super class, ActiveRecord, serialVersionUID, table-name
 * conversion, explicit IdType) and by each field's flags (primary key,
 * converted column name, fill strategy, version, logic delete).
 */
public void importPackage() {
    String superEntity = entity.getSuperClass();
    if (StringUtils.isNotBlank(superEntity)) {
        // Custom parent class configured: import it.
        this.importPackages.add(superEntity);
    } else {
        if (entity.isActiveRecord()) {
            // No parent class but ActiveRecord mode enabled: extend MP's Model.
            this.importPackages.add(Model.class.getCanonicalName());
        }
    }
    if (entity.isSerialVersionUID() || entity.isActiveRecord()) {
        this.importPackages.add(Serializable.class.getCanonicalName());
    }
    if (this.isConvert()) {
        // Table name differs from the entity name: @TableName is required.
        this.importPackages.add(TableName.class.getCanonicalName());
    }
    IdType idType = entity.getIdType();
    if (null != idType && this.isHavePrimaryKey()) {
        // An explicit IdType is configured and the table has a primary key.
        this.importPackages.add(IdType.class.getCanonicalName());
        this.importPackages.add(TableId.class.getCanonicalName());
    }
    this.fields.forEach(field -> {
        IColumnType columnType = field.getColumnType();
        if (null != columnType && null != columnType.getPkg()) {
            importPackages.add(columnType.getPkg());
        }
        if (field.isKeyFlag()) {
            // Primary key field
            if (field.isConvert() || field.isKeyIdentityFlag()) {
                importPackages.add(TableId.class.getCanonicalName());
            }
            // Auto-increment key
            if (field.isKeyIdentityFlag()) {
                importPackages.add(IdType.class.getCanonicalName());
            }
        } else if (field.isConvert()) {
            // Ordinary field whose column name differs from its property name
            importPackages.add(com.baomidou.mybatisplus.annotation.TableField.class.getCanonicalName());
        }
        if (null != field.getFill()) {
            // Auto-fill field
            importPackages.add(com.baomidou.mybatisplus.annotation.TableField.class.getCanonicalName());
            // TODO: FieldFill.DEFAULT probably needs no handling either; optimization item.
            importPackages.add(FieldFill.class.getCanonicalName());
        }
        if (field.isVersionField()) {
            this.importPackages.add(Version.class.getCanonicalName());
        }
        if (field.isLogicDeleteField()) {
            this.importPackages.add(TableLogic.class.getCanonicalName());
        }
    });
}
/**
 * Walks importPackage() through every configuration branch: the base case,
 * table-name conversion, a custom super class, ActiveRecord mode, primary keys
 * (with and without column-name conversion), logic delete, an explicit IdType
 * with and without a primary key, field fills, and the version field.
 */
@Test
void importPackageTest() {
    TableInfo tableInfo;
    StrategyConfig strategyConfig;
    ConfigBuilder configBuilder;

    // Base case: only Serializable is required.
    tableInfo = new TableInfo(new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, GeneratorBuilder.strategyConfig(), null, null, null), "user");
    tableInfo.importPackage();
    Assertions.assertEquals(1, tableInfo.getImportPackages().size());
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Serializable.class.getName()));

    // Entity name differs from table name: @TableName is also imported.
    tableInfo = new TableInfo(new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, GeneratorBuilder.strategyConfig(), null, null, null), "user").setEntityName("userEntity").setConvert();
    tableInfo.importPackage();
    Assertions.assertEquals(2, tableInfo.getImportPackages().size());
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Serializable.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(TableName.class.getName()));

    // Custom super class: it is imported verbatim.
    strategyConfig = GeneratorBuilder.strategyConfigBuilder().entityBuilder().superClass("con.baomihua.demo.SuperEntity").build();
    configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, strategyConfig, null, null, null);
    tableInfo = new TableInfo(configBuilder, "user");
    tableInfo.importPackage();
    Assertions.assertEquals(2, tableInfo.getImportPackages().size());
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Serializable.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains("con.baomihua.demo.SuperEntity"));

    // ActiveRecord mode without a super class: Model is imported.
    tableInfo = new TableInfo(new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, GeneratorBuilder.strategyConfigBuilder().entityBuilder().enableActiveRecord().build(), null, null, null), "user");
    tableInfo.importPackage();
    Assertions.assertEquals(2, tableInfo.getImportPackages().size());
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Serializable.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Model.class.getName()));

    // Primary key with a converted column name: TableId and IdType are imported.
    strategyConfig = GeneratorBuilder.strategyConfig();
    configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, strategyConfig, null, GeneratorBuilder.globalConfig(), null);
    tableInfo = new TableInfo(configBuilder, "user");
    tableInfo.addField(new TableField(configBuilder, "u_id").setColumnName("u_id").primaryKey(true).setPropertyName("uid", DbColumnType.LONG));
    tableInfo.importPackage();
    Assertions.assertEquals(3, tableInfo.getImportPackages().size());
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Serializable.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(TableId.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(IdType.class.getName()));

    // Same, without an explicit column name.
    strategyConfig = GeneratorBuilder.strategyConfig();
    configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, strategyConfig, null, GeneratorBuilder.globalConfig(), null);
    tableInfo = new TableInfo(configBuilder, "user");
    tableInfo.addField(new TableField(configBuilder, "u_id").setPropertyName("uid", DbColumnType.LONG).primaryKey(true));
    tableInfo.importPackage();
    Assertions.assertEquals(3, tableInfo.getImportPackages().size());
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Serializable.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(TableId.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(IdType.class.getName()));

    // Logic-delete column configured: TableLogic is also imported.
    strategyConfig = GeneratorBuilder.strategyConfigBuilder().entityBuilder().logicDeleteColumnName("delete_flag").build();
    configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, strategyConfig, null, GeneratorBuilder.globalConfig(), null);
    tableInfo = new TableInfo(configBuilder, "user");
    tableInfo.addField(new TableField(configBuilder, "u_id").setColumnName("u_id").primaryKey(true).setPropertyName("uid", DbColumnType.LONG));
    tableInfo.addField(new TableField(configBuilder, "delete_flag").setColumnName("delete_flag").setPropertyName("deleteFlag", DbColumnType.BOOLEAN));
    tableInfo.importPackage();
    Assertions.assertEquals(4, tableInfo.getImportPackages().size());
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Serializable.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(TableLogic.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(TableId.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(IdType.class.getName()));

    // Explicit IdType but no primary key flagged: no id-related imports.
    strategyConfig = GeneratorBuilder.strategyConfig();
    configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, strategyConfig.entityBuilder().idType(IdType.ASSIGN_ID).build(), null, null, null);
    tableInfo = new TableInfo(configBuilder, "user");
    tableInfo.addField(new TableField(configBuilder, "name").setPropertyName("name", DbColumnType.STRING));
    tableInfo.importPackage();
    Assertions.assertEquals(1, tableInfo.getImportPackages().size());
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Serializable.class.getName()));

    // Explicit IdType with a primary key present: TableId and IdType are imported.
    strategyConfig = GeneratorBuilder.strategyConfig();
    configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, strategyConfig.entityBuilder().idType(IdType.ASSIGN_ID).build(), null, null, null);
    tableInfo = new TableInfo(configBuilder, "user").setHavePrimaryKey(true);
    tableInfo.addField(new TableField(configBuilder, "u_id").setColumnName("u_id").primaryKey(true).setPropertyName("uid", DbColumnType.LONG));
    tableInfo.importPackage();
    Assertions.assertEquals(3, tableInfo.getImportPackages().size());
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Serializable.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(TableId.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(IdType.class.getName()));

    // Table-fill on create_time: TableField, FieldFill and the column's own type (Date) are imported.
    strategyConfig = GeneratorBuilder.strategyConfig().entityBuilder().addTableFills(new Column("create_time", FieldFill.DEFAULT)).build();
    configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, strategyConfig, null, GeneratorBuilder.globalConfig(), null);
    tableInfo = new TableInfo(configBuilder, "user").setHavePrimaryKey(true);
    tableInfo.addField(new TableField(configBuilder, "u_id").setColumnName("u_id").primaryKey(true).setPropertyName("uid", DbColumnType.LONG));
    tableInfo.addField(new TableField(configBuilder, "create_time").setColumnName("create_time").setPropertyName("createTime", DbColumnType.DATE));
    tableInfo.importPackage();
    Assertions.assertEquals(6, tableInfo.getImportPackages().size());
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Date.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Serializable.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(IdType.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(TableId.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(com.baomidou.mybatisplus.annotation.TableField.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(FieldFill.class.getName()));

    // Version column configured: Version is also imported.
    strategyConfig = GeneratorBuilder.strategyConfigBuilder().entityBuilder().versionColumnName("version").build();
    configBuilder = new ConfigBuilder(GeneratorBuilder.packageConfig(), dataSourceConfig, strategyConfig, null, GeneratorBuilder.globalConfig(), null);
    tableInfo = new TableInfo(configBuilder, "user").setHavePrimaryKey(true);
    tableInfo.addField(new TableField(configBuilder, "u_id").primaryKey(true).setPropertyName("uid", DbColumnType.LONG));
    tableInfo.addField(new TableField(configBuilder, "version").setPropertyName("version", DbColumnType.LONG));
    tableInfo.importPackage();
    Assertions.assertEquals(4, tableInfo.getImportPackages().size());
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Serializable.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(IdType.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(TableId.class.getName()));
    Assertions.assertTrue(tableInfo.getImportPackages().contains(Version.class.getName()));
}
public static String normalizeUri(String uri) throws URISyntaxException { // try to parse using the simpler and faster Camel URI parser String[] parts = CamelURIParser.fastParseUri(uri); if (parts != null) { // we optimized specially if an empty array is returned if (parts == URI_ALREADY_NORMALIZED) { return uri; } // use the faster and more simple normalizer return doFastNormalizeUri(parts); } else { // use the legacy normalizer as the uri is complex and may have unsafe URL characters return doComplexNormalizeUri(uri); } }
@Test
public void testNormalizeEndpointUriWithFragments() throws Exception {
    // The scheme-specific part with and without "//" must normalize to the same URI.
    String withSlashes = URISupport.normalizeUri("irc://someserver/#camel?user=davsclaus");
    String withoutSlashes = URISupport.normalizeUri("irc:someserver/#camel?user=davsclaus");
    assertEquals(withSlashes, withoutSlashes);

    // URIs that differ in their query parameters must not normalize to the same value.
    withSlashes = URISupport.normalizeUri("irc://someserver/#camel?user=davsclaus");
    withoutSlashes = URISupport.normalizeUri("irc:someserver/#camel?user=hadrian");
    assertNotSame(withSlashes, withoutSlashes);
}
/**
 * Derives a {@code Crossing} value for the edge from the node tags attached to the way.
 * Tag precedence per node (first match wins and stops further nodes from being read,
 * except for the UNMARKED case which lets a later node provide a more specific value):
 * railway crossing > crossing:signals=yes > crossing:markings=yes > explicit crossing value.
 */
@Override
public void handleWayTags(int edgeId, EdgeIntAccess edgeIntAccess, ReaderWay readerWay, IntsRef relationFlags) {
    // Node tags are collected onto the way under the synthetic "node_tags" key; nothing to do without them.
    List<Map<String, Object>> nodeTags = readerWay.getTag("node_tags", null);
    if (nodeTags == null) return;
    for (int i = 0; i < nodeTags.size(); i++) {
        Map<String, Object> tags = nodeTags.get(i);
        if ("crossing".equals(tags.get("railway")) || "level_crossing".equals(tags.get("railway"))) {
            // A railway crossing is terminal: with a barrier it becomes RAILWAY_BARRIER, otherwise RAILWAY.
            String barrierVal = (String) tags.get("crossing:barrier");
            crossingEnc.setEnum(false, edgeId, edgeIntAccess,
                    (Helper.isEmpty(barrierVal) || "no".equals(barrierVal)) ? Crossing.RAILWAY : Crossing.RAILWAY_BARRIER);
            return;
        }
        String crossingSignals = (String) tags.get("crossing:signals");
        if ("yes".equals(crossingSignals)) {
            crossingEnc.setEnum(false, edgeId, edgeIntAccess, Crossing.TRAFFIC_SIGNALS);
            return;
        }
        String crossingMarkings = (String) tags.get("crossing:markings");
        if ("yes".equals(crossingMarkings)) {
            crossingEnc.setEnum(false, edgeId, edgeIntAccess, Crossing.MARKED);
            return;
        }
        String crossingValue = (String) tags.get("crossing");
        // some crossing values like "no" do not require highway=crossing and sometimes no crossing value
        // exists although highway=crossing; in those cases fall back to UNMARKED but keep scanning,
        // because the next node could carry a more specific Crossing value.
        if (Helper.isEmpty(crossingValue)
                && ("no".equals(crossingSignals) || "no".equals(crossingMarkings)
                || "crossing".equals(tags.get("highway")) || "crossing".equals(tags.get("footway"))
                || "crossing".equals(tags.get("cycleway")))) {
            crossingEnc.setEnum(false, edgeId, edgeIntAccess, Crossing.UNMARKED);
            // next node could have more specific Crossing value
            continue;
        }
        // Only store explicitly recognised crossing values; unknown ones leave the edge untouched.
        Crossing crossing = Crossing.find(crossingValue);
        if (crossing != Crossing.MISSING)
            crossingEnc.setEnum(false, edgeId, edgeIntAccess, crossing);
    }
}
@Test
public void testRailway() {
    // A node tagged railway=level_crossing without any barrier must map to Crossing.RAILWAY.
    EdgeIntAccess access = new ArrayEdgeIntAccess(1);
    final int edge = 0;
    ReaderWay way = createReader(new PMap().putObject("railway", "level_crossing").toMap());
    parser.handleWayTags(edge, access, way, null);
    assertEquals(Crossing.RAILWAY, crossingEV.getEnum(false, edge, access));
}
/**
 * Strips every literal double-quote character from the given string.
 *
 * @param string the input, may be null
 * @return the input without any {@code "} characters, or null when the input is null
 */
public static String removeDoubleQuotes(String string) {
    // Null-safe: propagate null instead of throwing.
    if (string == null) {
        return null;
    }
    return string.replace("\"", "");
}
@Test
void removeDoubleQuotesWhenStrNull() {
    // A null input must pass through unchanged (no NullPointerException).
    String result = Numeric.removeDoubleQuotes(null);
    assertNull(result);
}
@Override public BasicTypeDefine reconvert(Column column) { BasicTypeDefine.BasicTypeDefineBuilder builder = BasicTypeDefine.builder() .name(column.getName()) .nullable(column.isNullable()) .comment(column.getComment()) .defaultValue(column.getDefaultValue()); switch (column.getDataType().getSqlType()) { case BOOLEAN: builder.columnType(PG_BOOLEAN); builder.dataType(PG_BOOLEAN); break; case TINYINT: case SMALLINT: builder.columnType(PG_SMALLINT); builder.dataType(PG_SMALLINT); break; case INT: builder.columnType(PG_INTEGER); builder.dataType(PG_INTEGER); break; case BIGINT: builder.columnType(PG_BIGINT); builder.dataType(PG_BIGINT); break; case FLOAT: builder.columnType(PG_REAL); builder.dataType(PG_REAL); break; case DOUBLE: builder.columnType(PG_DOUBLE_PRECISION); builder.dataType(PG_DOUBLE_PRECISION); break; case DECIMAL: if (column.getSourceType() != null && column.getSourceType().equalsIgnoreCase(PG_MONEY)) { builder.columnType(PG_MONEY); builder.dataType(PG_MONEY); } else { DecimalType decimalType = (DecimalType) column.getDataType(); long precision = decimalType.getPrecision(); int scale = decimalType.getScale(); if (precision <= 0) { precision = DEFAULT_PRECISION; scale = DEFAULT_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is precision less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (precision > MAX_PRECISION) { scale = (int) Math.max(0, scale - (precision - MAX_PRECISION)); precision = MAX_PRECISION; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum precision of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_PRECISION, precision, scale); } if (scale < 0) { scale = 0; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is scale less than 0, " + "it 
will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (scale > MAX_SCALE) { scale = MAX_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_SCALE, precision, scale); } builder.columnType(String.format("%s(%s,%s)", PG_NUMERIC, precision, scale)); builder.dataType(PG_NUMERIC); builder.precision(precision); builder.scale(scale); } break; case BYTES: builder.columnType(PG_BYTEA); builder.dataType(PG_BYTEA); break; case STRING: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType(PG_TEXT); builder.dataType(PG_TEXT); } else if (column.getColumnLength() <= MAX_VARCHAR_LENGTH) { builder.columnType( String.format("%s(%s)", PG_VARCHAR, column.getColumnLength())); builder.dataType(PG_VARCHAR); } else { builder.columnType(PG_TEXT); builder.dataType(PG_TEXT); } break; case DATE: builder.columnType(PG_DATE); builder.dataType(PG_DATE); break; case TIME: Integer timeScale = column.getScale(); if (timeScale != null && timeScale > MAX_TIME_SCALE) { timeScale = MAX_TIME_SCALE; log.warn( "The time column {} type time({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to time({})", column.getName(), column.getScale(), MAX_SCALE, timeScale); } if (timeScale != null && timeScale > 0) { builder.columnType(String.format("%s(%s)", PG_TIME, timeScale)); } else { builder.columnType(PG_TIME); } builder.dataType(PG_TIME); builder.scale(timeScale); break; case TIMESTAMP: Integer timestampScale = column.getScale(); if (timestampScale != null && timestampScale > MAX_TIMESTAMP_SCALE) { timestampScale = MAX_TIMESTAMP_SCALE; log.warn( "The timestamp column {} type timestamp({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be 
converted to timestamp({})", column.getName(), column.getScale(), MAX_TIMESTAMP_SCALE, timestampScale); } if (timestampScale != null && timestampScale > 0) { builder.columnType(String.format("%s(%s)", PG_TIMESTAMP, timestampScale)); } else { builder.columnType(PG_TIMESTAMP); } builder.dataType(PG_TIMESTAMP); builder.scale(timestampScale); break; case ARRAY: ArrayType arrayType = (ArrayType) column.getDataType(); SeaTunnelDataType elementType = arrayType.getElementType(); switch (elementType.getSqlType()) { case BOOLEAN: builder.columnType(PG_BOOLEAN_ARRAY); builder.dataType(PG_BOOLEAN_ARRAY); break; case TINYINT: case SMALLINT: builder.columnType(PG_SMALLINT_ARRAY); builder.dataType(PG_SMALLINT_ARRAY); break; case INT: builder.columnType(PG_INTEGER_ARRAY); builder.dataType(PG_INTEGER_ARRAY); break; case BIGINT: builder.columnType(PG_BIGINT_ARRAY); builder.dataType(PG_BIGINT_ARRAY); break; case FLOAT: builder.columnType(PG_REAL_ARRAY); builder.dataType(PG_REAL_ARRAY); break; case DOUBLE: builder.columnType(PG_DOUBLE_PRECISION_ARRAY); builder.dataType(PG_DOUBLE_PRECISION_ARRAY); break; case BYTES: builder.columnType(PG_BYTEA); builder.dataType(PG_BYTEA); break; case STRING: builder.columnType(PG_TEXT_ARRAY); builder.dataType(PG_TEXT_ARRAY); break; default: throw CommonError.convertToConnectorTypeError( DatabaseIdentifier.POSTGRESQL, elementType.getSqlType().name(), column.getName()); } break; default: throw CommonError.convertToConnectorTypeError( DatabaseIdentifier.POSTGRESQL, column.getDataType().getSqlType().name(), column.getName()); } return builder.build(); }
/** Verifies that every SeaTunnel array type reconverts to the expected PostgreSQL array type. */
@Test
public void testReconvertArray() {
    // BOOLEAN[] -> PG boolean array
    Column column =
            PhysicalColumn.builder()
                    .name("test")
                    .dataType(ArrayType.BOOLEAN_ARRAY_TYPE)
                    .build();
    BasicTypeDefine typeDefine = PostgresTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(PostgresTypeConverter.PG_BOOLEAN_ARRAY, typeDefine.getColumnType());
    Assertions.assertEquals(PostgresTypeConverter.PG_BOOLEAN_ARRAY, typeDefine.getDataType());

    // BYTE[] -> PG smallint array (same mapping as SHORT[], per the asserts below)
    column = PhysicalColumn.builder().name("test").dataType(ArrayType.BYTE_ARRAY_TYPE).build();
    typeDefine = PostgresTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(
            PostgresTypeConverter.PG_SMALLINT_ARRAY, typeDefine.getColumnType());
    Assertions.assertEquals(PostgresTypeConverter.PG_SMALLINT_ARRAY, typeDefine.getDataType());

    // SHORT[] -> PG smallint array
    column = PhysicalColumn.builder().name("test").dataType(ArrayType.SHORT_ARRAY_TYPE).build();
    typeDefine = PostgresTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(
            PostgresTypeConverter.PG_SMALLINT_ARRAY, typeDefine.getColumnType());
    Assertions.assertEquals(PostgresTypeConverter.PG_SMALLINT_ARRAY, typeDefine.getDataType());

    // INT[] -> PG integer array
    column = PhysicalColumn.builder().name("test").dataType(ArrayType.INT_ARRAY_TYPE).build();
    typeDefine = PostgresTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(PostgresTypeConverter.PG_INTEGER_ARRAY, typeDefine.getColumnType());
    Assertions.assertEquals(PostgresTypeConverter.PG_INTEGER_ARRAY, typeDefine.getDataType());

    // LONG[] -> PG bigint array
    column = PhysicalColumn.builder().name("test").dataType(ArrayType.LONG_ARRAY_TYPE).build();
    typeDefine = PostgresTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(PostgresTypeConverter.PG_BIGINT_ARRAY, typeDefine.getColumnType());
    Assertions.assertEquals(PostgresTypeConverter.PG_BIGINT_ARRAY, typeDefine.getDataType());

    // FLOAT[] -> PG real array
    column = PhysicalColumn.builder().name("test").dataType(ArrayType.FLOAT_ARRAY_TYPE).build();
    typeDefine = PostgresTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(PostgresTypeConverter.PG_REAL_ARRAY, typeDefine.getColumnType());
    Assertions.assertEquals(PostgresTypeConverter.PG_REAL_ARRAY, typeDefine.getDataType());

    // DOUBLE[] -> PG double precision array
    column = PhysicalColumn.builder().name("test").dataType(ArrayType.DOUBLE_ARRAY_TYPE).build();
    typeDefine = PostgresTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(
            PostgresTypeConverter.PG_DOUBLE_PRECISION_ARRAY, typeDefine.getColumnType());
    Assertions.assertEquals(
            PostgresTypeConverter.PG_DOUBLE_PRECISION_ARRAY, typeDefine.getDataType());

    // STRING[] -> PG text array
    column = PhysicalColumn.builder().name("test").dataType(ArrayType.STRING_ARRAY_TYPE).build();
    typeDefine = PostgresTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(PostgresTypeConverter.PG_TEXT_ARRAY, typeDefine.getColumnType());
    Assertions.assertEquals(PostgresTypeConverter.PG_TEXT_ARRAY, typeDefine.getDataType());

    // NOTE(review): this BYTE[] block duplicates the one near the top of the test — consider removing.
    column = PhysicalColumn.builder().name("test").dataType(ArrayType.BYTE_ARRAY_TYPE).build();
    typeDefine = PostgresTypeConverter.INSTANCE.reconvert(column);
    Assertions.assertEquals(column.getName(), typeDefine.getName());
    Assertions.assertEquals(
            PostgresTypeConverter.PG_SMALLINT_ARRAY, typeDefine.getColumnType());
    Assertions.assertEquals(PostgresTypeConverter.PG_SMALLINT_ARRAY, typeDefine.getDataType());
}
/**
 * Checks that the subject iterable is ordered according to the natural ordering of its
 * elements. Delegates to the comparator-based overload with {@code Ordering.natural()}.
 */
public void isInOrder() {
    isInOrder(Ordering.natural());
}
@Test
public void iterableIsInOrder() {
    // Empty and single-element iterables are trivially in order.
    assertThat(asList()).isInOrder();
    assertThat(asList(1)).isInOrder();
    // Non-strict ordering: equal adjacent elements are allowed.
    assertThat(asList(1, 1, 2, 3, 3, 3, 4)).isInOrder();
}
/**
 * Builds a human-readable description of this batch change: the comma-joined field
 * names of all affected plugin handles plus the lower-cased event type.
 */
@Override
public String buildContext() {
    Collection<?> source = (Collection<?>) getSource();
    String fields = source.stream()
            .map(item -> ((PluginHandleDO) item).getField())
            .collect(Collectors.joining(","));
    String eventType = StringUtils.lowerCase(getType().getType().toString());
    return String.format("the plugin handle[%s] is %s", fields, eventType);
}
@Test
public void batchChangePluginHandleContextTest() {
    // Build a delete event over two handles and verify the rendered context message.
    BatchPluginHandleChangedEvent event = new BatchPluginHandleChangedEvent(
            Arrays.asList(one, two), null, EventTypeEnum.PLUGIN_HANDLE_DELETE, "test-operator");
    String expected = String.format("the plugin handle[%s] is %s",
            "testFieldOne,testFieldTwo",
            EventTypeEnum.PLUGIN_HANDLE_DELETE.getType().toString().toLowerCase());
    assertEquals(expected, event.buildContext());
}
/**
 * Returns the index of the step's currently assigned partition schema within the list of
 * available schema names, or 0 when the schema is unset, unnamed, not found, or the list
 * cannot be loaded.
 */
public int getDefaultSelectedSchemaIndex() {
    // Best-effort lookup of the available schema names; treat a failure as "no schemas".
    List<String> names;
    try {
        names = schemasProvider.getPartitionSchemasNames( transMeta );
    } catch ( KettleException e ) {
        names = Collections.emptyList();
    }
    PartitionSchema current = stepMeta.getStepPartitioningMeta().getPartitionSchema();
    if ( current == null || current.getName() == null || names.isEmpty() ) {
        return 0;
    }
    // indexOfString returns -1 when the name is absent; fall back to the first entry then.
    int index = Const.indexOfString( current.getName(), names );
    return index == -1 ? 0 : index;
}
@Test
public void defaultSelectedSchemaIndexIsFoundBySchemaName() throws Exception {
    // The step's partitioning meta points at a schema named "qwerty".
    PartitionSchema qwerty = new PartitionSchema( "qwerty", Collections.<String>emptyList() );
    StepPartitioningMeta partitioningMeta = mock( StepPartitioningMeta.class );
    when( partitioningMeta.getPartitionSchema() ).thenReturn( qwerty );
    when( stepMeta.getStepPartitioningMeta() ).thenReturn( partitioningMeta );

    // The provider exposes three names with the plugin's name at index 1.
    List<String> names = Arrays.asList( "1", plugin.getName(), "2" );
    when( partitionSchemasProvider.getPartitionSchemasNames( any( TransMeta.class ) ) ).thenReturn( names );

    assertEquals( 1, settings.getDefaultSelectedSchemaIndex() );
}
/**
 * Fetches all file groups for the partition, preferring the primary view and falling
 * back to the lazily-created secondary view via {@code execute}.
 */
@Override
public Stream<HoodieFileGroup> getAllFileGroups(String partitionPath) {
    return execute(
        partitionPath,
        path -> preferredView.getAllFileGroups(path),
        path -> getSecondaryView().getAllFileGroups(path));
}
/** Exercises the fail-over behaviour of getAllFileGroups across the primary and secondary views. */
@Test
public void testGetAllFileGroups() {
    Stream<HoodieFileGroup> actual;
    String partitionPath = "/table2";
    Stream<HoodieFileGroup> expected = Collections.singleton(
        new HoodieFileGroup(partitionPath, "id1", new MockHoodieTimeline(Stream.empty(), Stream.empty()))).stream();

    // 1) Primary succeeds: the secondary view must never even be created.
    when(primary.getAllFileGroups(partitionPath)).thenReturn(expected);
    actual = fsView.getAllFileGroups(partitionPath);
    assertEquals(expected, actual);
    verify(secondaryViewSupplier, never()).get();
    resetMocks();

    // 2) Primary throws: the call falls back to the lazily-supplied secondary view.
    when(secondaryViewSupplier.get()).thenReturn(secondary);
    when(primary.getAllFileGroups(partitionPath)).thenThrow(new RuntimeException());
    when(secondary.getAllFileGroups(partitionPath)).thenReturn(expected);
    actual = fsView.getAllFileGroups(partitionPath);
    assertEquals(expected, actual);
    resetMocks();

    // 3) After the fail-over, subsequent calls keep using the secondary view.
    when(secondary.getAllFileGroups(partitionPath)).thenReturn(expected);
    actual = fsView.getAllFileGroups(partitionPath);
    assertEquals(expected, actual);
    resetMocks();

    // 4) Once on the secondary, its failures propagate to the caller.
    when(secondary.getAllFileGroups(partitionPath)).thenThrow(new RuntimeException());
    assertThrows(RuntimeException.class, () -> {
        fsView.getAllFileGroups(partitionPath);
    });
}
/**
 * Returns a Labels instance containing these labels merged with {@code additionalLabels}
 * (additional labels win on key collision). Returns {@code this} unchanged when there is
 * nothing to add.
 *
 * @param additionalLabels extra labels to merge in; may be null or empty
 * @return the merged Labels, or this instance when additionalLabels is null/empty
 */
public Labels withAdditionalLabels(Map<String, String> additionalLabels) {
    if (additionalLabels == null || additionalLabels.isEmpty()) {
        return this;
    } else {
        // Size the map for both sources to avoid rehashing during the merge
        // (previously only labels.size() was used).
        Map<String, String> newLabels = new HashMap<>(labels.size() + additionalLabels.size());
        newLabels.putAll(labels);
        newLabels.putAll(Labels.additionalLabels(additionalLabels).toMap());
        return new Labels(newLabels);
    }
}
@Test
public void testParseNullLabelsInUserLabels() {
    // Passing null additional labels must be a no-op that returns the same (empty) instance.
    assertThat(Labels.EMPTY.withAdditionalLabels(null), is(Labels.EMPTY));
}
/**
 * Converts a Flink table expression into an Iceberg filter {@link Expression}.
 * Returns {@link Optional#empty()} for anything that cannot be mapped, so callers can
 * fall back to un-pushed-down filtering.
 */
public static Optional<Expression> convert(
    org.apache.flink.table.expressions.Expression flinkExpression) {
    // Only function-call expressions can be mapped to Iceberg filters.
    if (!(flinkExpression instanceof CallExpression)) {
        return Optional.empty();
    }
    CallExpression call = (CallExpression) flinkExpression;
    // FILTERS maps Flink built-in function definitions to our Operation enum.
    Operation op = FILTERS.get(call.getFunctionDefinition());
    if (op != null) {
        switch (op) {
            case IS_NULL:
                return onlyChildAs(call, FieldReferenceExpression.class)
                    .map(FieldReferenceExpression::getName)
                    .map(Expressions::isNull);
            case NOT_NULL:
                return onlyChildAs(call, FieldReferenceExpression.class)
                    .map(FieldReferenceExpression::getName)
                    .map(Expressions::notNull);
            // For comparisons the second function argument is the flipped operator, used when
            // the literal is on the left-hand side (e.g. 1 < field becomes field > 1).
            case LT:
                return convertFieldAndLiteral(Expressions::lessThan, Expressions::greaterThan, call);
            case LT_EQ:
                return convertFieldAndLiteral(
                    Expressions::lessThanOrEqual, Expressions::greaterThanOrEqual, call);
            case GT:
                return convertFieldAndLiteral(Expressions::greaterThan, Expressions::lessThan, call);
            case GT_EQ:
                return convertFieldAndLiteral(
                    Expressions::greaterThanOrEqual, Expressions::lessThanOrEqual, call);
            case EQ:
                // NaN never compares equal, so equality against NaN becomes an isNaN predicate.
                return convertFieldAndLiteral(
                    (ref, lit) -> {
                        if (NaNUtil.isNaN(lit)) {
                            return Expressions.isNaN(ref);
                        } else {
                            return Expressions.equal(ref, lit);
                        }
                    },
                    call);
            case NOT_EQ:
                return convertFieldAndLiteral(
                    (ref, lit) -> {
                        if (NaNUtil.isNaN(lit)) {
                            return Expressions.notNaN(ref);
                        } else {
                            return Expressions.notEqual(ref, lit);
                        }
                    },
                    call);
            case NOT:
                // NOT wraps a single convertible child expression.
                return onlyChildAs(call, CallExpression.class)
                    .flatMap(FlinkFilters::convert)
                    .map(Expressions::not);
            case AND:
                return convertLogicExpression(Expressions::and, call);
            case OR:
                return convertLogicExpression(Expressions::or, call);
            case STARTS_WITH:
                return convertLike(call);
        }
    }
    return Optional.empty();
}
@Test
public void testGreaterThan() {
    UnboundPredicate<Integer> expected =
        org.apache.iceberg.expressions.Expressions.greaterThan("field1", 1);

    // field1 > 1 converts directly.
    Optional<org.apache.iceberg.expressions.Expression> direct =
        FlinkFilters.convert(resolve(Expressions.$("field1").isGreater(Expressions.lit(1))));
    assertThat(direct).isPresent();
    assertPredicatesMatch(expected, direct.get());

    // 1 < field1 is flipped to field1 > 1 during conversion.
    Optional<org.apache.iceberg.expressions.Expression> flipped =
        FlinkFilters.convert(resolve(Expressions.lit(1).isLess(Expressions.$("field1"))));
    assertThat(flipped).isPresent();
    assertPredicatesMatch(expected, flipped.get());
}
/** Returns the locally accumulated maximum. */
@Override
public Long getLocalValue() {
    return max;
}
@Test
void testGet() {
    // A fresh accumulator starts at the identity element for max: Long.MIN_VALUE.
    LongMaximum accumulator = new LongMaximum();
    assertThat(accumulator.getLocalValue().longValue()).isEqualTo(Long.MIN_VALUE);
}
/**
 * Hashes a CDC event by converting it to a Paimon row and delegating to the table's
 * channel computer, so events for the same bucket land on the same channel.
 */
@Override
public int hashcode(DataChangeEvent event) {
    GenericRow row = PaimonWriterHelper.convertEventToGenericRow(event, fieldGetters);
    return channelComputer.channel(row);
}
/**
 * Verifies that for a fixed-bucket table, insert/update/delete events sharing the same
 * primary-key and partition-key values all hash to the same bucket.
 */
@Test
public void testHashCodeForFixedBucketTable() {
    // Create a fixed-bucket (bucket=10) partitioned table with a composite primary key (col1, pt).
    TableId tableId = TableId.tableId(TEST_DATABASE, "test_table");
    Map<String, String> tableOptions = new HashMap<>();
    tableOptions.put("bucket", "10");
    MetadataApplier metadataApplier =
            new PaimonMetadataApplier(catalogOptions, tableOptions, new HashMap<>());
    Schema schema =
            Schema.newBuilder()
                    .physicalColumn("col1", DataTypes.STRING().notNull())
                    .physicalColumn("col2", DataTypes.STRING())
                    .physicalColumn("pt", DataTypes.STRING())
                    .primaryKey("col1", "pt")
                    .partitionKey("pt")
                    .build();
    CreateTableEvent createTableEvent = new CreateTableEvent(tableId, schema);
    metadataApplier.applySchemaChange(createTableEvent);
    BinaryRecordDataGenerator generator =
            new BinaryRecordDataGenerator(schema.getColumnDataTypes().toArray(new DataType[0]));
    PaimonHashFunction hashFunction =
            new PaimonHashFunction(catalogOptions, tableId, schema, ZoneId.systemDefault(), 4);

    // Insert with key (col1="1", pt="2024").
    DataChangeEvent dataChangeEvent1 =
            DataChangeEvent.insertEvent(
                    tableId,
                    generator.generate(
                            new Object[] {
                                BinaryStringData.fromString("1"),
                                BinaryStringData.fromString("1"),
                                BinaryStringData.fromString("2024")
                            }));
    int key1 = hashFunction.hashcode(dataChangeEvent1);

    // Update that changes only the non-key column col2, key unchanged.
    DataChangeEvent dataChangeEvent2 =
            DataChangeEvent.updateEvent(
                    tableId,
                    generator.generate(
                            new Object[] {
                                BinaryStringData.fromString("1"),
                                BinaryStringData.fromString("1"),
                                BinaryStringData.fromString("2024")
                            }),
                    generator.generate(
                            new Object[] {
                                BinaryStringData.fromString("1"),
                                BinaryStringData.fromString("2"),
                                BinaryStringData.fromString("2024")
                            }));
    int key2 = hashFunction.hashcode(dataChangeEvent2);

    // Delete of the same key.
    DataChangeEvent dataChangeEvent3 =
            DataChangeEvent.deleteEvent(
                    tableId,
                    generator.generate(
                            new Object[] {
                                BinaryStringData.fromString("1"),
                                BinaryStringData.fromString("2"),
                                BinaryStringData.fromString("2024")
                            }));
    int key3 = hashFunction.hashcode(dataChangeEvent3);

    // Bucketing depends only on the key columns, so all three events hash identically.
    assertThat(key1).isEqualTo(key2);
    assertThat(key1).isEqualTo(key3);
}
@Udf(description = "Returns a new string encoded using the outputEncoding ") public String encode( @UdfParameter( description = "The source string. If null, then function returns null.") final String str, @UdfParameter( description = "The input encoding." + " If null, then function returns null.") final String inputEncoding, @UdfParameter( description = "The output encoding." + " If null, then function returns null.") final String outputEncoding) { if (str == null || inputEncoding == null || outputEncoding == null) { return null; } final String encodedString = inputEncoding.toLowerCase() + outputEncoding.toLowerCase(); final Encode.Encoder encoder = ENCODER_MAP.get(encodedString); if (encoder == null) { throw new KsqlFunctionException("Supported input and output encodings are: " + "hex, utf8, ascii and base64"); } return encoder.apply(str); }
@Test
public void shouldEncodeBase64ToHex() {
    // base64 -> hex conversions covering ASCII, multi-byte UTF-8 (Greek) and Latin-1 content.
    assertThat(udf.encode("RXhhbXBsZSE=", "base64", "hex"), is("4578616d706c6521"));
    assertThat(udf.encode("UGxhbnQgdHJlZXM=", "base64", "hex"), is("506c616e74207472656573"));
    assertThat(udf.encode("MSArIDEgPSAx", "base64", "hex"), is("31202b2031203d2031"));
    assertThat(udf.encode("zpXOu867zqzOtM6x", "base64", "hex"), is("ce95cebbcebbceacceb4ceb1"));
    // Input without base64 padding must also decode correctly.
    assertThat(udf.encode("w5xiZXJtZW5zY2g", "base64", "hex"), is("c39c6265726d656e736368"));
}
/**
 * Builds the telemetry payload for the given user. Telemetry is reported only when it is
 * enabled both cluster-wide and in the user's own settings; otherwise a "disabled"
 * response carrying just the user settings is returned.
 */
public Map<String, Object> getTelemetryResponse(User currentUser) {
    TelemetryUserSettings telemetryUserSettings = getTelemetryUserSettings(currentUser);
    if (isTelemetryEnabled && telemetryUserSettings.telemetryEnabled()) {
        // Creation date may be absent; the factory tolerates null here.
        DateTime clusterCreationDate = telemetryClusterService.getClusterCreationDate().orElse(null);
        String clusterId = telemetryClusterService.getClusterId();
        List<TelemetryLicenseStatus> licenseStatuses = enterpriseDataProvider.licenseStatus();
        return telemetryResponseFactory.createTelemetryResponse(
                getClusterInfo(clusterId, clusterCreationDate, licenseStatuses),
                getUserInfo(currentUser, clusterId),
                getPluginInfo(),
                getSearchClusterInfo(),
                licenseStatuses,
                telemetryUserSettings,
                getDataNodeInfo());
    } else {
        // Disabled: still report the user's settings so the UI can show the opt-out state.
        return telemetryResponseFactory.createTelemetryDisabledResponse(telemetryUserSettings);
    }
}
/**
 * With several variants of the enterprise license present (older, invalid, expired, valid),
 * the telemetry response is expected to report only the valid unexpired enterprise license
 * and the security license.
 */
@Test
void test_licenses() {
    TelemetryService telemetryService = createTelemetryService(true);
    mockUserTelemetryEnabled(true);
    mockTrafficData(trafficCounterService);
    // One good enterprise license plus expired/invalid/older variants of it, and a security license.
    TelemetryLicenseStatus enterpriseLicense = createLicense("/license/enterprise");
    TelemetryLicenseStatus expiredEnterpriseLicense = enterpriseLicense.toBuilder().expired(true).build();
    TelemetryLicenseStatus invalidEnterpriseLicense = enterpriseLicense.toBuilder().valid(false).build();
    TelemetryLicenseStatus olderEnterpriseLicense = enterpriseLicense.toBuilder()
            .expirationDate(enterpriseLicense.expirationDate().minusDays(1)).build();
    TelemetryLicenseStatus securityLicense = createLicense("/license/security");
    when(enterpriseDataProvider.licenseStatus()).thenReturn(List.of(
            olderEnterpriseLicense, invalidEnterpriseLicense, enterpriseLicense, expiredEnterpriseLicense,
            securityLicense));

    Map<String, Object> response = telemetryService.getTelemetryResponse(user);

    // Only the valid, unexpired enterprise license and the security license appear in the payload.
    assertThat(response.get(LICENSE)).isEqualTo(merge(
            toMap(enterpriseLicense, "enterprise"),
            toMap(securityLicense, "security")));
}
/**
 * Convenience overload that builds a {@link PartitionKey} from string values using
 * HIVE table semantics. Delegates to the three-argument variant.
 *
 * @param values  the partition values, one per partition column
 * @param columns the partition columns
 * @throws AnalysisException if a value cannot be converted to its column type
 */
public static PartitionKey createPartitionKey(List<String> values, List<Column> columns) throws AnalysisException {
    return createPartitionKey(values, columns, Table.TableType.HIVE);
}
@Test
public void testCreateIcebergPartitionKey() throws AnalysisException {
    // Iceberg's NULL sentinel partition value must render as "NULL" in the key's SQL form.
    List<String> values =
        Lists.newArrayList("1", "a", "3.0", IcebergApiConverter.PARTITION_NULL_VALUE);
    PartitionKey key = createPartitionKey(values, partColumns, Table.TableType.ICEBERG);
    Assert.assertEquals("(\"1\", \"a\", \"3.0\", \"NULL\")", key.toSql());
}