focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Routes the instance list for the given target before delegating to the parent handler.
 * Without request data the parent is invoked directly; instances this handler should not
 * process are returned untouched.
 */
@Override
public List<Object> handle(String targetName, List<Object> instances, RequestData requestData) {
    // No request context at all: let the parent handler decide.
    if (requestData == null) {
        return super.handle(targetName, instances, null);
    }
    // These instances are out of scope for this handler; pass them through unchanged.
    if (!shouldHandle(instances)) {
        return instances;
    }
    // Pick the matching strategy configured for this router.
    List<Object> matched;
    if (routerConfig.isUseRequestRouter()) {
        matched = getTargetInstancesByRequest(targetName, instances, requestData.getTag());
    } else {
        matched = getTargetInstancesByRules(targetName, instances, requestData.getPath(), requestData.getTag());
    }
    return super.handle(targetName, matched, requestData);
}
/**
 * Request-router mode: a header tag listed in requestTags selects the instance whose
 * metadata carries that tag; a "version" header selects by instance version.
 */
@Test
public void testGetTargetInstancesByRequestWithTags() {
    config.setUseRequestRouter(true);
    config.setRequestTags(Arrays.asList("foo", "bar", "version"));
    List<Object> instances = new ArrayList<>();
    ServiceInstance instance1 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.0", Collections.singletonMap("bar", "bar1"));
    instances.add(instance1);
    ServiceInstance instance2 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.1", Collections.singletonMap("foo", "bar2"));
    instances.add(instance2);
    // instance3 has no metadata tags at all.
    ServiceInstance instance3 = TestDefaultServiceInstance.getTestDefaultServiceInstance("1.0.1");
    instances.add(instance3);
    Map<String, List<String>> header = new HashMap<>();
    // Match the foo:bar2 instance ("foo1" is not a configured request tag, so it is ignored)
    header.clear();
    header.put("foo", Collections.singletonList("bar2"));
    header.put("foo1", Collections.singletonList("bar2"));
    List<Object> targetInstances = flowRouteHandler.handle("foo", instances, new RequestData(header, null, null));
    Assert.assertEquals(1, targetInstances.size());
    Assert.assertEquals(instance2, targetInstances.get(0));
    // The instance that matches version 1.0.0 is matched
    header.clear();
    header.put("version", Collections.singletonList("1.0.0"));
    targetInstances = flowRouteHandler.handle("foo", instances, new RequestData(header, null, null));
    Assert.assertEquals(1, targetInstances.size());
    Assert.assertEquals(instance1, targetInstances.get(0));
}
/**
 * Builds a {@link Getter} that extracts values by invoking the given method.
 * Delegates to {@code newGetter}, supplying the method's return type, an invoker,
 * and a factory that wraps the result in a {@link MethodGetter}.
 *
 * @throws Exception whatever the underlying reflective access may raise
 */
public static Getter newMethodGetter(Object object, Getter parent, Method method, String modifier) throws Exception {
    return newGetter(object, parent, modifier, method.getReturnType(), method::invoke,
            (t, et) -> new MethodGetter(parent, method, modifier, t, et));
}
/**
 * An "[any]" reducer over an empty inner collection must yield the shared
 * NULL_MULTIVALUE_GETTER sentinel rather than a regular getter.
 */
@Test
public void newMethodGetter_whenExtractingFromEmpty_Collection_AndReducerSuffixInNotEmpty_thenReturnNullGetter() throws Exception {
    OuterObject object = OuterObject.emptyInner("name");
    Getter getter = GetterFactory.newMethodGetter(object, null, innersCollectionMethod, "[any]");
    assertSame(NullMultiValueGetter.NULL_MULTIVALUE_GETTER, getter);
}
/**
 * Returns a pseudo-random int over the full int range, using the shared
 * random source from {@code getRandom()}.
 */
public static int randomInt() {
    return getRandom().nextInt();
}
/**
 * Checks the bounded overload randomInt(min, max): result is in [10, 100).
 * NOTE(review): this exercises the two-arg overload, not the no-arg randomInt() — confirm intended coverage.
 */
@Test
public void randomIntTest() {
    final int c = RandomUtil.randomInt(10, 100);
    assertTrue(c >= 10 && c < 100);
}
/**
 * Merges the given system state into the config map's data under the GROUP key
 * using a JSON merge patch, so fields absent from {@code systemState} are preserved
 * in the stored JSON.
 *
 * @throws JsonParseException if applying the merge patch fails
 */
public static void update(@NonNull SystemState systemState, @NonNull ConfigMap configMap) {
    Map<String, String> data = configMap.getData();
    // Lazily initialize the data map on a fresh ConfigMap.
    if (data == null) {
        data = new LinkedHashMap<>();
        configMap.setData(data);
    }
    JsonNode modifiedJson = JsonUtils.mapper().convertValue(systemState, JsonNode.class);
    // original
    JsonNode sourceJson = JsonUtils.jsonToObject(data.getOrDefault(GROUP, emptyJsonObject()), JsonNode.class);
    try {
        // patch
        JsonMergePatch jsonMergePatch = JsonMergePatch.fromJson(modifiedJson);
        // apply patch to original
        JsonNode patchedNode = jsonMergePatch.apply(sourceJson);
        data.put(GROUP, JsonUtils.objectToJson(patchedNode));
    } catch (JsonPatchException e) {
        throw new JsonParseException(e);
    }
}
/**
 * update() must create the data map when missing, overwrite conflicting keys,
 * and preserve unrelated keys (merge-patch semantics).
 */
@Test
void update() {
    SystemState newSystemState = new SystemState();
    newSystemState.setIsSetup(true);
    ConfigMap configMap = new ConfigMap();
    // Fresh ConfigMap: data map is created and populated.
    SystemState.update(newSystemState, configMap);
    assertThat(configMap.getData().get(SystemState.GROUP)).isEqualTo("{\"isSetup\":true}");
    var data = new LinkedHashMap<String, String>();
    configMap.setData(data);
    data.put(SystemState.GROUP, "{\"isSetup\":false}");
    // Existing value is overwritten by the new state.
    SystemState.update(newSystemState, configMap);
    assertThat(configMap.getData().get(SystemState.GROUP)).isEqualTo("{\"isSetup\":true}");
    data.clear();
    data.put(SystemState.GROUP, "{\"isSetup\":true, \"foo\":\"bar\"}");
    newSystemState.setIsSetup(false);
    // Merge patch keeps the unrelated "foo" key.
    SystemState.update(newSystemState, configMap);
    assertThat(configMap.getData().get(SystemState.GROUP))
        .isEqualTo("{\"isSetup\":false,\"foo\":\"bar\"}");
}
/**
 * Returns a view of the float array as an iterable whose element comparisons
 * use the given absolute tolerance.
 */
public FloatArrayAsIterable usingTolerance(double tolerance) {
    return new FloatArrayAsIterable(tolerance(tolerance), iterableSubject());
}
/** A long expected value (2L) matches a float within tolerance of 2.0f. */
@Test
public void usingTolerance_contains_successWithExpectedLong() {
    assertThat(array(1.0f, TOLERABLE_TWO, 3.0f)).usingTolerance(DEFAULT_TOLERANCE).contains(2L);
}
/**
 * Prepares fetch requests for the assigned partitions and hands them to the
 * internal poll loop together with the success/failure handlers.
 * The {@code currentTimeMs} parameter is unused here; request preparation derives timing internally.
 */
@Override
public PollResult poll(long currentTimeMs) {
    return pollInternal(
        prepareFetchRequests(),
        this::handleFetchSuccess,
        this::handleFetchFailure
    );
}
/**
 * With max.poll.records = 2, each fetchRecords() call returns at most two records,
 * the consumer position advances past the returned batch, and a new fetch is only
 * sent once buffered data is exhausted.
 */
@Test
public void testFetchMaxPollRecords() {
    buildFetcher(2);
    assignFromUser(singleton(tp0));
    subscriptions.seek(tp0, 1);
    client.prepareResponse(matchesOffset(tidp0, 1), fullFetchResponse(tidp0, records, Errors.NONE, 100L, 0));
    client.prepareResponse(matchesOffset(tidp0, 4), fullFetchResponse(tidp0, nextRecords, Errors.NONE, 100L, 0));
    assertEquals(1, sendFetches());
    networkClientDelegate.poll(time.timer(0));
    // First poll: capped at 2 records, position moves to 3.
    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> recordsByPartition = fetchRecords();
    List<ConsumerRecord<byte[], byte[]>> recordsToTest = recordsByPartition.get(tp0);
    assertEquals(2, recordsToTest.size());
    assertEquals(3L, subscriptions.position(tp0).offset);
    assertEquals(1, recordsToTest.get(0).offset());
    assertEquals(2, recordsToTest.get(1).offset());
    // Buffered data remains, so no new fetch is sent yet.
    assertEquals(0, sendFetches());
    networkClientDelegate.poll(time.timer(0));
    // Second poll drains the remaining buffered record.
    recordsByPartition = fetchRecords();
    recordsToTest = recordsByPartition.get(tp0);
    assertEquals(1, recordsToTest.size());
    assertEquals(4L, subscriptions.position(tp0).offset);
    assertEquals(3, recordsToTest.get(0).offset());
    // Buffer empty: a new fetch goes out and yields the next batch.
    assertTrue(sendFetches() > 0);
    networkClientDelegate.poll(time.timer(0));
    recordsByPartition = fetchRecords();
    recordsToTest = recordsByPartition.get(tp0);
    assertEquals(2, recordsToTest.size());
    assertEquals(6L, subscriptions.position(tp0).offset);
    assertEquals(4, recordsToTest.get(0).offset());
    assertEquals(5, recordsToTest.get(1).offset());
}
/**
 * Returns every stored search the given user may read, either through direct
 * search permissions or via read access to a view containing the search.
 */
public List<Search> getAllForUser(SearchPermissions searchPermissions, Predicate<ViewDTO> viewReadPermission) {
    final List<Search> readable = new ArrayList<>();
    // Walk all persisted searches and keep only those the caller can read.
    try (final var allSearches = dbService.streamAll()) {
        allSearches.forEach(search -> {
            if (hasReadPermissionFor(searchPermissions, viewReadPermission, search)) {
                readable.add(search);
            }
        });
    }
    return readable;
}
/**
 * A search owned by someone else is still listed when the user can read a view
 * that references it; the second unpermitted search is filtered out.
 */
@Test
public void includesSearchesPermittedViaViewsInList() {
    final Search permittedSearch = mockSearchWithOwner("someone else");
    // Second search with no view access: must not appear in the result.
    mockSearchWithOwner("someone else");
    final SearchUser searchUser = mock(SearchUser.class);
    final ViewDTO viewDTO = mock(ViewDTO.class);
    when(viewService.forSearch(permittedSearch.id())).thenReturn(ImmutableList.of(viewDTO));
    when(searchUser.canReadView(viewDTO)).thenReturn(true);
    List<Search> result = sut.getAllForUser(searchUser, searchUser::canReadView);
    assertThat(result).containsExactly(permittedSearch);
}
/**
 * Returns a serializable function converting a Beam {@link Row} into a Spanner
 * {@link Mutation} of the requested operation type against the given table.
 *
 * @throws IllegalArgumentException (from the returned function) for unsupported operations
 */
public static SerializableFunction<Row, Mutation> beamRowToMutationFn(
    Mutation.Op operation, String table) {
    return (row -> {
        switch (operation) {
            case INSERT:
                return MutationUtils.createMutationFromBeamRows(Mutation.newInsertBuilder(table), row);
            case DELETE:
                // DELETE mutates by key only; the row is reduced to its key columns.
                return Mutation.delete(table, MutationUtils.createKeyFromBeamRow(row));
            case UPDATE:
                return MutationUtils.createMutationFromBeamRows(Mutation.newUpdateBuilder(table), row);
            case REPLACE:
                return MutationUtils.createMutationFromBeamRows(Mutation.newReplaceBuilder(table), row);
            case INSERT_OR_UPDATE:
                return MutationUtils.createMutationFromBeamRows(
                    Mutation.newInsertOrUpdateBuilder(table), row);
            default:
                throw new IllegalArgumentException(
                    String.format("Unknown mutation operation type: %s", operation));
        }
    });
}
/** REPLACE conversion preserves null-valued columns from the source row. */
@Test
public void testCreateReplaceMutationFromRowWithNulls() {
    Mutation expectedMutation = createMutationNulls(Mutation.Op.REPLACE);
    Mutation mutation = beamRowToMutationFn(Mutation.Op.REPLACE, TABLE).apply(WRITE_ROW_NULLS);
    assertEquals(expectedMutation, mutation);
}
/**
 * Resolves a "/"-separated path into the backing map and returns the value found,
 * or {@link Optional#empty()} if any intermediate segment is missing or not traversable.
 * The root path "/" returns the whole map.
 *
 * @throws IllegalArgumentException if the path is null, blank, or fails validatePath
 */
public Optional<Object> get(String path) {
    if (path == null || path.trim().isEmpty()) {
        throw new IllegalArgumentException(String.format("path [%s] is invalid", path));
    }
    path = validatePath(path);
    if (path.equals("/")) {
        return Optional.of(map);
    }
    String[] pathTokens = path.split(Pattern.quote("/"));
    Object object = map;
    // Index 0 is the empty segment before the leading "/", so traversal starts at 1.
    for (int i = 1; i < pathTokens.length; i++) {
        try {
            object = resolve(pathTokens[i], object);
        } catch (NullPointerException | ClassCastException e) {
            // Missing key or non-map intermediate value: treat as absent, not an error.
            return Optional.empty();
        }
    }
    return Optional.ofNullable(object);
}
@Test public void testInvalidPaths() throws IOException { YamlMapAccessor yamlMapAccessor = createYamlMapAccessor("/YamlMapAccessorTest.yaml"); try { yamlMapAccessor.get(""); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { // DO NOTHING } try { yamlMapAccessor.get("//"); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { // DO NOTHING } try { yamlMapAccessor.get("foo"); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { // DO NOTHING } try { yamlMapAccessor.get("/foo/"); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { // DO NOTHING } }
/**
 * Compares the property values two beans share, ignoring the named properties.
 * Properties present on only one side are not compared. Two nulls are equal;
 * one null is not.
 *
 * @param source           first bean (may be null)
 * @param target           second bean (may be null)
 * @param ignoreProperties property names excluded from the comparison
 * @return true if all common, non-ignored properties are equal
 */
public static boolean isCommonFieldsEqual(Object source, Object target, String... ignoreProperties) {
    if (null == source && null == target) {
        return true;
    }
    if (null == source || null == target) {
        return false;
    }
    final Map<String, Object> sourceFieldsMap = BeanUtil.beanToMap(source);
    final Map<String, Object> targetFieldsMap = BeanUtil.beanToMap(target);
    final Set<String> sourceFields = sourceFieldsMap.keySet();
    // keySet() is a live view: removing here also drops the ignored entries from the map.
    sourceFields.removeAll(Arrays.asList(ignoreProperties));
    for (String field : sourceFields) {
        // Fix: the original also checked sourceFieldsMap.containsKey(field), which is
        // always true because `field` is iterated from that very key set.
        if (targetFieldsMap.containsKey(field)
                && ObjectUtil.notEqual(sourceFieldsMap.get(field), targetFieldsMap.get(field))) {
            return false;
        }
    }
    return true;
}
/**
 * Covers equal beans, a differing field, ignore-lists restoring equality,
 * and the null-handling contract.
 */
@Test
public void isCommonFieldsEqualTest() {
    final TestUserEntity userEntity = new TestUserEntity();
    final TestUserDTO userDTO = new TestUserDTO();
    userDTO.setAge(20);
    userDTO.setName("takaki");
    userDTO.setSex(1);
    userDTO.setMobile("17812312023");
    BeanUtil.copyProperties(userDTO, userEntity);
    assertTrue(BeanUtil.isCommonFieldsEqual(userDTO, userEntity));
    userEntity.setAge(13);
    assertFalse(BeanUtil.isCommonFieldsEqual(userDTO, userEntity));
    // Ignoring the differing field restores equality.
    assertTrue(BeanUtil.isCommonFieldsEqual(userDTO, userEntity, "age"));
    assertTrue(BeanUtil.isCommonFieldsEqual(null, null));
    assertFalse(BeanUtil.isCommonFieldsEqual(null, userEntity));
    assertFalse(BeanUtil.isCommonFieldsEqual(userEntity, null));
    userEntity.setSex(0);
    assertTrue(BeanUtil.isCommonFieldsEqual(userDTO, userEntity, "age", "sex"));
}
/**
 * Always reports a logged-in session — this session type is implicitly
 * authenticated (e.g. created from a verified webhook), so there is no
 * separate login state to track.
 */
@Override
public boolean isLoggedIn() {
    return true;
}
/** The webhook session is always considered logged in. */
@Test
public void isLoggedIn() {
    assertThat(githubWebhookUserSession.isLoggedIn()).isTrue();
}
/**
 * Runs every known {@link Rule} pattern against the crash log and collects a
 * {@link Result} for each rule whose pattern matches at least once.
 *
 * @param log the raw crash-report/log text
 * @return the set of matched results (empty if nothing matched)
 */
public static Set<Result> analyze(String log) {
    Set<Result> results = new HashSet<>();
    for (Rule rule : Rule.values()) {
        Matcher matcher = rule.pattern.matcher(log);
        if (matcher.find()) {
            results.add(new Result(rule, log, matcher));
        }
    }
    return results;
}

/**
 * @deprecated misspelled name kept for backward compatibility with existing
 *             callers; use {@link #analyze(String)} instead.
 */
@Deprecated
public static Set<Result> anaylze(String log) {
    return analyze(log);
}
/**
 * Analyzing the sample crash report must surface the NEED_JDK11 rule.
 */
@Test
public void needJDK11() throws IOException {
    CrashReportAnalyzer.Result result = findResultByRule(
        CrashReportAnalyzer.anaylze(loadLog("/crash-report/need_jdk11.txt")),
        CrashReportAnalyzer.Rule.NEED_JDK11);
    // Fix: the original assigned `result` and asserted nothing, so the test
    // could pass vacuously if findResultByRule returned null.
    assertNotNull(result);
}
/**
 * Applies a dictionary-word transform: writes the transform's prefix, then the word
 * trimmed by the transform's omit-first/omit-last counts, optionally uppercases the
 * first character or the whole trimmed word, then writes the suffix.
 *
 * @param dst        destination buffer (must have room for prefix + trimmed word + suffix)
 * @param dstOffset  start offset in {@code dst}
 * @param word       source dictionary word bytes
 * @param wordOffset start offset of the word
 * @param len        length of the word in bytes
 * @param transform  prefix/suffix and trim/uppercase operation to apply
 * @return the number of bytes written
 */
static int transformDictionaryWord(byte[] dst, int dstOffset, byte[] word, int wordOffset, int len, Transform transform) {
    int offset = dstOffset;
    // Copy prefix.
    byte[] string = transform.prefix;
    int tmp = string.length;
    int i = 0;
    // In most cases tmp < 10 -> no benefits from System.arrayCopy
    while (i < tmp) {
        dst[offset++] = string[i++];
    }
    // Copy trimmed word.
    int op = transform.type;
    tmp = WordTransformType.getOmitFirst(op);
    if (tmp > len) {
        tmp = len;
    }
    wordOffset += tmp;
    len -= tmp;
    len -= WordTransformType.getOmitLast(op);
    i = len;
    while (i > 0) {
        dst[offset++] = word[wordOffset++];
        i--;
    }
    if (op == UPPERCASE_ALL || op == UPPERCASE_FIRST) {
        // Rewind to the start of the just-written (trimmed) word.
        int uppercaseOffset = offset - len;
        if (op == UPPERCASE_FIRST) {
            len = 1;
        }
        while (len > 0) {
            tmp = dst[uppercaseOffset] & 0xFF;
            if (tmp < 0xc0) {
                // Single-byte (ASCII-range) character: flip the 0x20 case bit for a-z.
                if (tmp >= 'a' && tmp <= 'z') {
                    dst[uppercaseOffset] ^= (byte) 32;
                }
                uppercaseOffset += 1;
                len -= 1;
            } else if (tmp < 0xe0) {
                // Two-byte UTF-8 sequence: case bit lives in the continuation byte.
                dst[uppercaseOffset + 1] ^= (byte) 32;
                uppercaseOffset += 2;
                len -= 2;
            } else {
                // Three-byte UTF-8 sequence; XOR with 5 — presumably the brotli
                // spec's uppercase mapping for this range (TODO confirm vs. spec).
                dst[uppercaseOffset + 2] ^= (byte) 5;
                uppercaseOffset += 3;
                len -= 3;
            }
        }
    }
    // Copy suffix.
    string = transform.suffix;
    tmp = string.length;
    i = 0;
    while (i < tmp) {
        dst[offset++] = string[i++];
    }
    return offset - dstOffset;
}
/**
 * OMIT_FIRST_5 on a 4-byte word trims the entire word (omit count is clamped
 * to the word length), leaving only the prefix and suffix: "[]".
 */
@Test
public void testTrimAll() {
    byte[] output = new byte[2];
    byte[] input = "word".getBytes(StandardCharsets.UTF_8);
    Transform transform = new Transform("[", WordTransformType.OMIT_FIRST_5, "]");
    Transform.transformDictionaryWord(output, 0, input, 0, input.length, transform);
    // Fix: JUnit's assertArrayEquals takes (expected, actual) — the original
    // passed them reversed, which garbles failure messages.
    assertArrayEquals("[]".getBytes(StandardCharsets.UTF_8), output);
}
/**
 * Deserializes a JSON object into a {@code HashMap<String, T>}.
 * On any parse/IO failure the error is logged and an empty mutable map is
 * returned instead of propagating the exception.
 *
 * @param json         the JSON object text
 * @param valueTypeRef the value class for the map entries
 * @return the parsed map, or an empty map on failure
 */
public static <T> Map<String, T> jsonToMap(final String json, final Class<T> valueTypeRef) {
    try {
        JavaType t = MAPPER.getTypeFactory().constructParametricType(HashMap.class, String.class, valueTypeRef);
        return MAPPER.readValue(json, t);
    } catch (IOException e) {
        // Fix: the original message said "write to map error" for what is a
        // read/deserialization failure, which misleads anyone grepping logs.
        LOG.warn("read json to map error: " + json, e);
        return new LinkedHashMap<>();
    }
}
/** Parsing the expected JSON yields a map whose "name" entry is preserved. */
@Test
public void testJsonToMap() {
    Map<String, Object> stringObjectMap = JsonUtils.jsonToMap(EXPECTED_JSON);
    // Fix: JUnit's assertEquals takes (expected, actual) — the original passed
    // them reversed, which garbles failure messages.
    assertEquals("test object", stringObjectMap.get("name"));
}
/**
 * Returns the human-readable name for a node id, served from the name cache
 * (the cache loader resolves misses; getUnchecked wraps loader failures unchecked).
 */
public Optional<String> getNodeName(String nodeId) {
    return nodeNameCache.getUnchecked(nodeId);
}
/** A known node id resolves to its cluster-provided display name. */
@Test
public void getNodeNameReturnsNodeNameIfNodeIdIsValid() {
    when(cluster.nodeIdToName("node_id")).thenReturn(Optional.of("Node Name"));
    assertThat(nodeInfoCache.getNodeName("node_id")).contains("Node Name");
}
/**
 * HTTP endpoint (show=all) returning the full config record for dataId/group/tenant,
 * with the stored content decrypted before it is returned.
 *
 * @return the config record, or null if not found (serialized as an empty body)
 * @throws NacosException if parameter validation fails
 */
@GetMapping(params = "show=all")
@Secured(action = ActionTypes.READ, signType = SignType.CONFIG)
public ConfigAllInfo detailConfigInfo(@RequestParam("dataId") String dataId, @RequestParam("group") String group,
        @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant)
        throws NacosException {
    // check tenant
    ParamUtils.checkTenant(tenant);
    // check params
    // NOTE(review): "datumId"/"content" are passed as fixed placeholder values here,
    // presumably because this read path has no datum/content to validate — confirm
    // against ParamUtils.checkParam's contract.
    ParamUtils.checkParam(dataId, group, "datumId", "content");
    ConfigAllInfo configAllInfo = configInfoPersistService.findConfigAllInfo(dataId, group, tenant);
    // decrypted
    if (Objects.nonNull(configAllInfo)) {
        String encryptedDataKey = configAllInfo.getEncryptedDataKey();
        Pair<String, String> pair = EncryptionHandler.decryptHandler(dataId, encryptedDataKey, configAllInfo.getContent());
        configAllInfo.setContent(pair.getSecond());
    }
    return configAllInfo;
}
/**
 * GET with show=all returns the persisted config serialized as JSON, with
 * dataId/group/createIp/createUser round-tripping intact.
 */
@Test
void testDetailConfigInfo() throws Exception {
    ConfigAllInfo configAllInfo = new ConfigAllInfo();
    configAllInfo.setDataId("test");
    configAllInfo.setGroup("test");
    configAllInfo.setCreateIp("localhost");
    configAllInfo.setCreateUser("test");
    when(configInfoPersistService.findConfigAllInfo("test", "test", "")).thenReturn(configAllInfo);
    MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.get(Constants.CONFIG_CONTROLLER_PATH).param("show", "all")
        .param("dataId", "test").param("group", "test").param("tenant", "");
    String actualValue = mockmvc.perform(builder).andReturn().getResponse().getContentAsString();
    ConfigAllInfo resConfigAllInfo = JacksonUtils.toObj(actualValue, ConfigAllInfo.class);
    assertEquals(configAllInfo.getDataId(), resConfigAllInfo.getDataId());
    assertEquals(configAllInfo.getGroup(), resConfigAllInfo.getGroup());
    assertEquals(configAllInfo.getCreateIp(), resConfigAllInfo.getCreateIp());
    assertEquals(configAllInfo.getCreateUser(), resConfigAllInfo.getCreateUser());
}
/**
 * Deletes a page through the wrapped store, bounding the call with a time limiter.
 * Timeouts and executor rejections are counted in metrics and surfaced as IOException;
 * interruption re-interrupts the thread; PageNotFoundException passes through.
 *
 * @throws IOException           on timeout, rejection, or any other wrapped failure
 * @throws PageNotFoundException if the underlying store reports the page missing
 */
@Override
public void delete(PageId pageId) throws IOException, PageNotFoundException {
    Callable<Void> callable = () -> {
        mPageStore.delete(pageId);
        return null;
    };
    try {
        mTimeLimter.callWithTimeout(callable, mTimeoutMs, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        // Task got cancelled by others, interrupt the current thread
        // and then throw a runtime ex to make the higher level stop.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    } catch (TimeoutException e) {
        Metrics.STORE_DELETE_TIMEOUT.inc();
        throw new IOException(e);
    } catch (RejectedExecutionException e) {
        Metrics.STORE_THREADS_REJECTED.inc();
        throw new IOException(e);
    } catch (Throwable t) {
        // Re-throw the declared checked types as-is; wrap anything else.
        Throwables.propagateIfPossible(t, IOException.class, PageNotFoundException.class);
        throw new IOException(t);
    }
}
/** A hanging delete must surface as IOException caused by TimeoutException. */
@Test
public void deleteTimeout() throws Exception {
    mPageStore.setDeleteHanging(true);
    try {
        mTimeBoundPageStore.delete(PAGE_ID);
        fail();
    } catch (IOException e) {
        assertTrue(e.getCause() instanceof TimeoutException);
    }
}
/**
 * Builds JoinParams for joining two schemas under the given key column.
 * If neither side already contains the key column, the value joiner reserves
 * one extra slot so the key can be appended to the joined row.
 */
public static JoinParams create(
    final ColumnName keyColName,
    final LogicalSchema leftSchema,
    final LogicalSchema rightSchema
) {
    final boolean appendKey = neitherContain(keyColName, leftSchema, rightSchema);
    return new JoinParams(
        new KsqlValueJoiner(leftSchema.value().size(), rightSchema.value().size(),
            appendKey ? 1 : 0
        ),
        createSchema(keyColName, leftSchema, rightSchema)
    );
}
/**
 * Joining on the left schema's own key yields a schema keyed by L_K with
 * both sides' value columns (including the copied key columns) in order.
 */
@Test
public void shouldBuildCorrectLeftKeyedSchema() {
    // Given:
    final ColumnName keyName = Iterables.getOnlyElement(LEFT_SCHEMA.key()).name();
    // When:
    final JoinParams joinParams = JoinParamsFactory.create(keyName, LEFT_SCHEMA, RIGHT_SCHEMA);
    // Then:
    assertThat(joinParams.getSchema(), is(LogicalSchema.builder()
        .keyColumn(ColumnName.of("L_K"), SqlTypes.STRING)
        .valueColumn(ColumnName.of("L_BLUE"), SqlTypes.STRING)
        .valueColumn(ColumnName.of("L_GREEN"), SqlTypes.INTEGER)
        .valueColumn(ColumnName.of("L_K"), SqlTypes.STRING)
        .valueColumn(ColumnName.of("R_RED"), SqlTypes.BIGINT)
        .valueColumn(ColumnName.of("R_ORANGE"), SqlTypes.DOUBLE)
        .valueColumn(ColumnName.of("R_K"), SqlTypes.STRING)
        .build())
    );
}
/**
 * Looks the key up in whichever backing store this wrapper was initialized with.
 * For a versioned store, the latest versioned record is adapted into a
 * {@code ValueAndTimestamp}; a missing record maps to null.
 *
 * @throws IllegalStateException if the wrapper has neither backing store
 */
public ValueAndTimestamp<V> get(final K key) {
    if (timestampedStore != null) {
        return timestampedStore.get(key);
    }
    if (versionedStore != null) {
        final VersionedRecord<V> latest = versionedStore.get(key);
        if (latest == null) {
            return null;
        }
        return ValueAndTimestamp.make(latest.value(), latest.timestamp());
    }
    throw new IllegalStateException("KeyValueStoreWrapper must be initialized with either timestamped or versioned store");
}
/** A timestamped-store miss propagates as null from the wrapper. */
@Test
public void shouldGetNullFromTimestampedStore() {
    givenWrapperWithTimestampedStore();
    when(timestampedStore.get(KEY)).thenReturn(null);
    assertThat(wrapper.get(KEY), nullValue());
}
/**
 * On Spring context shutdown, triggers graceful offline via the grace service —
 * but only when graceful shutdown is enabled and the service is available.
 */
@EventListener(value = ContextClosedEvent.class)
public void listener() {
    if (!isEnableGraceDown() || graceService == null) {
        return;
    }
    graceService.shutdown();
}
/**
 * shutdown() is not invoked while the grace flags are off, and invoked exactly
 * once after enabling spring + graceful shutdown + offline notify.
 */
@Test
public void listener() {
    MockitoAnnotations.openMocks(this);
    final GraceConfig graceConfig = new GraceConfig();
    try (final MockedStatic<PluginServiceManager> pluginServiceManagerMockedStatic = Mockito.mockStatic(PluginServiceManager.class);
         final MockedStatic<PluginConfigManager> pluginConfigManagerMockedStatic = Mockito.mockStatic(PluginConfigManager.class);) {
        pluginConfigManagerMockedStatic.when(() -> PluginConfigManager.getPluginConfig(GraceConfig.class))
            .thenReturn(graceConfig);
        pluginServiceManagerMockedStatic.when(() -> PluginServiceManager.getPluginService(GraceService.class))
            .thenReturn(graceService);
        final ContextClosedEventListener contextClosedEventListener = new ContextClosedEventListener();
        // Flags still off: no shutdown expected.
        contextClosedEventListener.listener();
        Mockito.verify(graceService, Mockito.times(0)).shutdown();
        graceConfig.setEnableSpring(true);
        graceConfig.setEnableGraceShutdown(true);
        graceConfig.setEnableOfflineNotify(true);
        contextClosedEventListener.listener();
        Mockito.verify(graceService, Mockito.times(1)).shutdown();
    }
}
/**
 * Opens a download stream for a Drive file.
 * Placeholder files are materialized as a generated web-link document; regular
 * files are streamed via the Drive API, honoring append/range requests, previous
 * revisions for duplicates, and the acknowledge-abuse retry flow when Drive
 * flags the content.
 *
 * @throws BackgroundException wrapping any Drive API IOException
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    if (file.isPlaceholder()) {
        final DescriptiveUrl link = new DriveUrlProvider().toUrl(file).find(DescriptiveUrl.Type.http);
        if (DescriptiveUrl.EMPTY.equals(link)) {
            log.warn(String.format("Missing web link for file %s", file));
            // No link available: return a stream of the advertised size so the
            // transfer machinery still completes.
            return new NullInputStream(file.attributes().getSize());
        }
        // Write web link file
        return IOUtils.toInputStream(UrlFileWriterFactory.get().write(link), Charset.defaultCharset());
    }
    else {
        final HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MEDIA_TYPE);
        if (status.isAppend()) {
            // Resume: request only the remaining byte range.
            final HttpRange range = HttpRange.withStatus(status);
            final String header;
            if (TransferStatus.UNKNOWN_LENGTH == range.getEnd()) {
                header = String.format("bytes=%d-", range.getStart());
            }
            else {
                header = String.format("bytes=%d-%d", range.getStart(), range.getEnd());
            }
            if (log.isDebugEnabled()) {
                log.debug(String.format("Add range header %s for file %s", header, file));
            }
            headers.setRange(header);
            // Disable compression
            headers.setAcceptEncoding("identity");
        }
        if (file.attributes().isDuplicate()) {
            // Read previous version
            try {
                final Drive.Revisions.Get request = session.getClient().revisions().get(fileid.getFileId(file), file.attributes().getVersionId());
                request.setRequestHeaders(headers);
                return request.executeMediaAsInputStream();
            }
            catch (IOException e) {
                throw new DriveExceptionMappingService(fileid).map("Download {0} failed", e, file);
            }
        }
        else {
            try {
                try {
                    final Drive.Files.Get request = session.getClient().files().get(fileid.getFileId(file));
                    request.setRequestHeaders(headers);
                    request.setSupportsTeamDrives(new HostPreferences(session.getHost()).getBoolean("googledrive.teamdrive.enable"));
                    return request.executeMediaAsInputStream();
                }
                catch (IOException e) {
                    throw new DriveExceptionMappingService(fileid).map("Download {0} failed", e, file);
                }
            }
            catch (RetriableAccessDeniedException e) {
                // Transient denial: let the retry machinery handle it unchanged.
                throw e;
            }
            catch (AccessDeniedException e) {
                // Drive flagged the file (e.g. suspected malware). Ask the user once,
                // then retry with acknowledgeAbuse set.
                if (!PreferencesFactory.get().getBoolean(String.format("connection.unsecure.download.%s", session.getHost().getHostname()))) {
                    // Not previously dismissed
                    callback.warn(session.getHost(),
                        MessageFormat.format(LocaleFactory.localizedString("Download {0} failed", "Error"), file.getName()),
                        "Acknowledge the risk of downloading known malware or other abusive file.",
                        LocaleFactory.localizedString("Continue", "Credentials"),
                        LocaleFactory.localizedString("Cancel", "Localizable"),
                        String.format("connection.unsecure.download.%s", session.getHost().getHostname()));
                }
                try {
                    final Drive.Files.Get request = session.getClient().files().get(fileid.getFileId(file));
                    request.setAcknowledgeAbuse(true);
                    request.setRequestHeaders(headers);
                    request.setSupportsTeamDrives(new HostPreferences(session.getHost()).getBoolean("googledrive.teamdrive.enable"));
                    return request.executeMediaAsInputStream();
                }
                catch (IOException f) {
                    throw new DriveExceptionMappingService(fileid).map("Download {0} failed", f, file);
                }
            }
        }
    }
}
/** Reading a freshly created empty file yields a zero-byte stream. */
@Test
public void testReadPath() throws Exception {
    final DriveFileIdProvider fileid = new DriveFileIdProvider(session);
    final Path directory = new DriveDirectoryFeature(session, fileid).mkdir(new Path(DriveHomeFinderService.MYDRIVE_FOLDER,
        new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    final Path file = new DriveTouchFeature(session, fileid).touch(new Path(directory,
        String.format("t %s", new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.file)), new TransferStatus());
    assertEquals(0, new DriveAttributesFinderFeature(session, fileid).find(file).getSize());
    final CountingInputStream in = new CountingInputStream(new DriveReadFeature(session, fileid).read(file, new TransferStatus(), new DisabledConnectionCallback()));
    in.close();
    assertEquals(0L, in.getByteCount(), 0L);
    // Cleanup of the remote fixtures.
    new DriveDeleteFeature(session, fileid).delete(Arrays.asList(file, directory), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/** Returns this object's configured name. */
public String getName() {
    return name;
}
/** The handler exposes its configured name. */
@Test
public void hasAName() throws Exception {
    assertThat(handler.getName())
        .isEqualTo("handler");
}
/** Convenience overload: searches for {@code what} starting at position 0. */
public int find(String what) {
    return find(what, 0);
}
/**
 * find() returns -1 for absent substrings and a byte position for multi-byte
 * characters (the euro sign occupies 3 UTF-8 bytes, hence offsets 4 and 11).
 */
@Test
public void testFind() throws Exception {
    Text text = new Text("abcd\u20acbdcd\u20ac");
    assertThat(text.find("abd")).isEqualTo(-1);
    assertThat(text.find("ac")).isEqualTo(-1);
    assertThat(text.find("\u20ac")).isEqualTo(4);
    // Search starting past the first euro sign finds the second occurrence.
    assertThat(text.find("\u20ac", 5)).isEqualTo(11);
}
/**
 * Maps an Iceberg namespace to a Glue database name (its first level),
 * optionally validating the namespace first.
 *
 * @param skipNameValidation when true, validation is bypassed entirely
 */
static String toDatabaseName(Namespace namespace, boolean skipNameValidation) {
    if (!skipNameValidation) {
        validateNamespace(namespace);
    }
    return namespace.level(0);
}
/**
 * Multi-level, hyphenated, empty, blank, and over-long namespaces are all
 * rejected with a ValidationException carrying the Glue naming-rule message.
 */
@Test
public void testToDatabaseNameFailure() {
    List<Namespace> badNames = Lists.newArrayList(
        Namespace.of("db", "a"),
        Namespace.of("db-1"),
        Namespace.empty(),
        Namespace.of(""),
        // 600 chars exceeds the 252-char Glue limit.
        Namespace.of(new String(new char[600]).replace("\0", "a")));
    for (Namespace name : badNames) {
        assertThatThrownBy(() -> IcebergToGlueConverter.toDatabaseName(name, false))
            .isInstanceOf(ValidationException.class)
            .hasMessageStartingWith("Cannot convert namespace")
            .hasMessageEndingWith(
                "to Glue database name, "
                    + "because it must be 1-252 chars of lowercase letters, numbers, underscore");
    }
}
/**
 * Orders resource configs lexicographically by name.
 * NOTE(review): consistency with equals() depends on equals also being
 * name-based — confirm against the class's equals/hashCode.
 */
@Override
public int compareTo(ResourceConfig other) {
    return name.compareTo(other.name);
}
/** compareTo is name-based: a&lt;b, b&gt;a, and self-comparison is zero. */
@Test
public void shouldCompareBasedOnName() {
    ResourceConfig resourceConfigA = new ResourceConfig("aaa");
    ResourceConfig resourceConfigB = new ResourceConfig("bbb");
    assertThat(resourceConfigA.compareTo(resourceConfigB), is(org.hamcrest.Matchers.lessThan(0)));
    assertThat(resourceConfigB.compareTo(resourceConfigA), is(greaterThan(0)));
    assertThat(resourceConfigA.compareTo(resourceConfigA), is(0));
}
/**
 * Fetches master metrics, serializes them to JSON, and prints the result.
 *
 * @return 0 on success, -1 if JSON serialization fails
 * @throws IOException if fetching metrics from the master fails
 */
public int run() throws IOException {
    ObjectMapper objectMapper = new ObjectMapper();
    MetricsOutput metricsInfo = new MetricsOutput(mMetricsMasterClient.getMetrics());
    try {
        String json = objectMapper.writeValueAsString(metricsInfo);
        mPrintStream.println(json);
    } catch (JsonProcessingException e) {
        mPrintStream.println("Failed to convert metricsInfo output to JSON. "
            + "Check the command line log for the detailed error message.");
        // Fix: attach the exception to the logger instead of printStackTrace(),
        // so the stack trace actually lands in the command line log the
        // user-facing message points to.
        LOG.error("Failed to output JSON object {}", metricsInfo, e);
        return -1;
    }
    return 0;
}
/** run() prints the metrics JSON; checkOutput() validates the captured stream. */
@Test
public void metrics() throws IOException {
    MetricsCommand metricsCommand = new MetricsCommand(mMetricsMasterClient, mPrintStream);
    metricsCommand.run();
    checkOutput();
}
/**
 * Thrift-constructible buffer result.
 *
 * @param taskInstanceId      identifying task instance; must be non-null and non-empty
 * @param thriftSerializedPages pages payload; defensively copied, must be non-null
 * @throws IllegalArgumentException if taskInstanceId is null or empty
 * @throws NullPointerException     if thriftSerializedPages is null
 */
@ThriftConstructor
public ThriftBufferResult(
    String taskInstanceId,
    long token,
    long nextToken,
    boolean bufferComplete,
    long bufferedBytes,
    List<ThriftSerializedPage> thriftSerializedPages) {
    // Fix: the condition rejects both null and empty, but the original message
    // claimed only "is null", misreporting the empty-string case.
    checkArgument(!isNullOrEmpty(taskInstanceId), "taskInstanceId is null or empty");
    this.taskInstanceId = taskInstanceId;
    this.token = token;
    this.nextToken = nextToken;
    this.bufferComplete = bufferComplete;
    this.bufferedBytes = bufferedBytes;
    this.thriftSerializedPages = ImmutableList.copyOf(requireNonNull(thriftSerializedPages, "thriftSerializedPages is null"));
}
/** BufferResult survives a round-trip through its Thrift representation. */
@Test
public void testThriftBufferResult() {
    BufferResult bufferResult = new BufferResult("task-instance-id", 0, 0, false, 0, ImmutableList.of());
    ThriftBufferResult thriftBufferResult = fromBufferResult(bufferResult);
    BufferResult newBufferResult = thriftBufferResult.toBufferResult();
    assertEquals(bufferResult, newBufferResult);
}
/**
 * Encrypts config content when the dataId requests a cipher (by naming convention).
 * Returns a pair of (encrypted secret key, encrypted content); when no cipher
 * applies or no matching plugin is found, returns ("", original content) unchanged.
 */
public static Pair<String, String> encryptHandler(String dataId, String content) {
    if (!checkCipher(dataId)) {
        return Pair.with("", content);
    }
    Optional<String> algorithmName = parseAlgorithmName(dataId);
    Optional<EncryptionPluginService> optional = algorithmName.flatMap(
        EncryptionPluginManager.instance()::findEncryptionService);
    if (!optional.isPresent()) {
        LOGGER.warn("[EncryptionHandler] [encryptHandler] No encryption program with the corresponding name found");
        return Pair.with("", content);
    }
    EncryptionPluginService encryptionPluginService = optional.get();
    // Fresh per-call secret key; both the key and the content are returned encrypted.
    String secretKey = encryptionPluginService.generateSecretKey();
    String encryptContent = encryptionPluginService.encrypt(secretKey, content);
    return Pair.with(encryptionPluginService.encryptSecretKey(secretKey), encryptContent);
}
/**
 * encryptHandler returns a non-null pair for a dataId with no cipher marker.
 * NOTE(review): only null-ness is asserted; the pass-through content could
 * also be verified — consider strengthening.
 */
@Test
void testEncryptHandler() {
    Pair<String, String> pair = EncryptionHandler.encryptHandler("test-dataId", "content");
    assertNotNull(pair);
}
/**
 * Cancels the timer registered under the given correlation id.
 * Removes it from the binary-heap backing array by swapping in the last
 * element and re-sifting, then recycles the Timer object to the free list.
 *
 * @return true if a timer was found and cancelled, false otherwise
 */
public boolean cancelTimerByCorrelationId(final long correlationId) {
    final Timer removedTimer = timerByCorrelationId.remove(correlationId);
    if (null == removedTimer) {
        return false;
    }
    // Detach the heap's last element to fill the vacated slot.
    final int lastIndex = --size;
    final Timer lastTimer = timers[lastIndex];
    timers[lastIndex] = null;
    if (lastIndex != removedTimer.index) {
        // Restore the heap invariant: sift the moved element down, and if it
        // stayed in place, it may instead need to move up.
        shiftDown(timers, lastIndex, removedTimer.index, lastTimer);
        if (timers[removedTimer.index] == lastTimer) {
            shiftUp(timers, removedTimer.index, lastTimer);
        }
    }
    // Recycle the cancelled Timer for future registrations.
    addToFreeList(removedTimer);
    return true;
}
/** Cancelling an unknown correlation id on an empty service returns false. */
@Test
void cancelTimerByCorrelationIdIsANoOpIfNoTimersRegistered() {
    final PriorityHeapTimerService timerService = new PriorityHeapTimerService(mock(TimerHandler.class));
    assertFalse(timerService.cancelTimerByCorrelationId(100));
}
/**
 * Converts a SARIF region into a SonarQube TextRange on the given file.
 * Missing end line defaults to the start line; missing columns default to
 * column 0 (start) or the end of the end line (end); SARIF's 1-based columns
 * are shifted to SQ's 0-based indexing. An empty resulting range falls back
 * to selecting the whole start line.
 *
 * @throws NullPointerException if the region has no start line
 */
Optional<TextRange> mapRegion(@Nullable Region region, InputFile file) {
    if (region == null) {
        return Optional.empty();
    }
    int startLine = Objects.requireNonNull(region.getStartLine(), "No start line defined for the region.");
    int endLine = Optional.ofNullable(region.getEndLine()).orElse(startLine);
    int startColumn = Optional.ofNullable(region.getStartColumn()).map(RegionMapper::adjustSarifColumnIndexToSqIndex).orElse(0);
    int endColumn = Optional.ofNullable(region.getEndColumn()).map(RegionMapper::adjustSarifColumnIndexToSqIndex)
        .orElseGet(() -> file.selectLine(endLine).end().lineOffset());
    if (rangeIsEmpty(startLine, endLine, startColumn, endColumn)) {
        return Optional.of(file.selectLine(startLine));
    } else {
        return Optional.of(file.newRange(startLine, startColumn, endLine, endColumn));
    }
}
/** A null SARIF region maps to Optional.empty(). */
@Test
public void mapRegion_whenNullRegion_returnsEmpty() {
    assertThat(regionMapper.mapRegion(null, INPUT_FILE)).isEmpty();
}
/**
 * Cancels every non-terminal task in the workflow. On full success, finalizes
 * the workflow and removes it from the decider queue; any per-task failure is
 * collected and rethrown as a retryable error so cancellation is retried
 * (at-least-once semantics).
 *
 * @return the reference names of tasks whose cancellation failed (empty on success —
 *         a non-empty list is never actually returned because the method throws first)
 * @throws MaestroRetryableError if any task failed to cancel
 */
@VisibleForTesting
@Override
List<String> cancelNonTerminalTasks(Workflow workflow) {
    List<String> erroredTasks = new ArrayList<>();
    // Update non-terminal tasks' status to CANCELED
    for (Task task : workflow.getTasks()) {
        if (!task.getStatus().isTerminal()) {
            // Cancel the ones which are not completed yet....
            task.setStatus(CANCELED);
            // all of our tasks are system tasks.
            Checks.checkTrue(
                SystemTaskType.is(task.getTaskType()),
                "Invalid task type [%s], all tasks should have a known maestro task type.",
                task.getTaskType());
            WorkflowSystemTask workflowSystemTask = WorkflowSystemTask.get(task.getTaskType());
            try {
                workflowSystemTask.cancel(workflow, task, this);
                executionDAOFacade.updateTask(task); // only update if cancelled
            } catch (Exception e) {
                erroredTasks.add(task.getReferenceTaskName());
                LOG.error(
                    "Error canceling system task:{}/{} in workflow: {}",
                    workflowSystemTask.getName(),
                    task.getTaskId(),
                    workflow.getWorkflowId(),
                    e);
            }
        }
    }
    if (erroredTasks.isEmpty()) {
        try {
            workflowStatusListener.onWorkflowFinalizedIfEnabled(workflow);
            queueDAO.remove(DECIDER_QUEUE, workflow.getWorkflowId());
        } catch (Exception e) {
            LOG.error("Error removing workflow: {} from decider queue", workflow.getWorkflowId(), e);
            throw e; // we need to throw it to get at least once guarantee.
        }
    } else {
        // also throw to retry errored tasks later.
        throw new MaestroRetryableError(
            "Error canceling tasks [%s] in workflow: [%s]", erroredTasks, workflow.getWorkflowId());
    }
    return erroredTasks;
}
/**
 * Only the three non-terminal tasks are cancelled and persisted (the COMPLETED
 * gate task is skipped); the workflow is finalized and dequeued exactly once.
 */
@Test
public void testCancelNonTerminalTasks() {
    Task startTask = new Task();
    startTask.setTaskId(UUID.randomUUID().toString());
    startTask.setTaskType(Constants.DEFAULT_START_STEP_NAME);
    startTask.setStatus(Task.Status.IN_PROGRESS);
    Task maestroTask = new Task();
    maestroTask.setTaskId(UUID.randomUUID().toString());
    maestroTask.setTaskType(Constants.MAESTRO_TASK_NAME);
    maestroTask.setStatus(Task.Status.SCHEDULED);
    // Terminal task: must be left untouched by cancellation.
    Task gateTask = new Task();
    gateTask.setTaskId(UUID.randomUUID().toString());
    gateTask.setTaskType(SystemTaskType.EXCLUSIVE_JOIN.name());
    gateTask.setStatus(Task.Status.COMPLETED);
    Task endTask = new Task();
    endTask.setTaskId(UUID.randomUUID().toString());
    endTask.setTaskType(SystemTaskType.JOIN.name());
    endTask.setStatus(Task.Status.IN_PROGRESS);
    workflow.getTasks().addAll(Arrays.asList(startTask, maestroTask, gateTask, endTask));
    List<String> erroredTasks = maestroWorkflowExecutor.cancelNonTerminalTasks(workflow);
    assertTrue(erroredTasks.isEmpty());
    ArgumentCaptor<Task> argumentCaptor = ArgumentCaptor.forClass(Task.class);
    verify(executionDAOFacade, times(3)).updateTask(argumentCaptor.capture());
    assertEquals(3, argumentCaptor.getAllValues().size());
    assertEquals(
        Constants.DEFAULT_START_STEP_NAME, argumentCaptor.getAllValues().get(0).getTaskType());
    assertEquals(Task.Status.CANCELED, argumentCaptor.getAllValues().get(0).getStatus());
    assertEquals(Constants.MAESTRO_TASK_NAME, argumentCaptor.getAllValues().get(1).getTaskType());
    assertEquals(Task.Status.CANCELED, argumentCaptor.getAllValues().get(1).getStatus());
    assertEquals(SystemTaskType.JOIN.name(), argumentCaptor.getAllValues().get(2).getTaskType());
    assertEquals(Task.Status.CANCELED, argumentCaptor.getAllValues().get(2).getStatus());
    verify(workflowStatusListener, times(1)).onWorkflowFinalizedIfEnabled(any(Workflow.class));
    verify(queueDAO, times(1)).remove(any(), any());
}
/**
 * Wraps the given hash string in a {@link DescriptorDigest} after validating it
 * against {@code HASH_REGEX}.
 *
 * @param hash the raw hash string
 * @return the digest wrapping the validated hash
 * @throws DigestException if the hash does not match the expected format
 */
public static DescriptorDigest fromHash(String hash) throws DigestException {
  if (hash.matches(HASH_REGEX)) {
    return new DescriptorDigest(hash);
  }
  throw new DigestException("Invalid hash: " + hash);
}
// Verifies that a hash one character too long is rejected with a DigestException carrying the
// "Invalid hash: ..." message.
@Test public void testCreateFromHash_failIncorrectLength() { String badHash = createGoodHash('a') + 'a'; try { DescriptorDigest.fromHash(badHash); Assert.fail("Invalid hash should have caused digest creation failure."); } catch (DigestException ex) { Assert.assertEquals("Invalid hash: " + badHash, ex.getMessage()); } }
// Deserializes the given JSON bytes into an instance of cls via the shared Jackson mapper,
// wrapping any failure (parse error, mapping error, I/O) in a NacosDeserializationException.
public static <T> T toObj(byte[] json, Class<T> cls) { try { return mapper.readValue(json, cls); } catch (Exception e) { throw new NacosDeserializationException(cls, e); } }
// Verifies byte-array deserialization with parameterized types: a JSON object into Map<String,String>
// and a JSON array into List<Map<String,String>>.
@Test void testToObject3() { assertEquals(Collections.singletonMap("key", "value"), JacksonUtils.toObj("{\"key\":\"value\"}".getBytes(), TypeUtils.parameterize(Map.class, String.class, String.class))); assertEquals(Collections.singletonList(Collections.singletonMap("key", "value")), JacksonUtils.toObj("[{\"key\":\"value\"}]".getBytes(), TypeUtils.parameterize(List.class, TypeUtils.parameterize(Map.class, String.class, String.class)))); }
// Resolves the per-queue offsets for a multi-dispatch message: for each LMQ queue named in the
// INNER_MULTI_DISPATCH property (when LMQ is enabled) the current queue offset is looked up, and the
// joined offsets are written back as INNER_MULTI_QUEUE_OFFSET before clearing the wait-store flag.
// NOTE(review): assumes the INNER_MULTI_DISPATCH property is present — a missing property would NPE
// on split(); confirm callers guarantee it.
// NOTE(review): non-LMQ queue slots leave a null entry in queueOffsets, which joins as "null" —
// presumably intentional placeholder behavior; verify downstream parsing.
public void wrapMultiDispatch(final MessageExtBrokerInner msg) { String multiDispatchQueue = msg.getProperty(MessageConst.PROPERTY_INNER_MULTI_DISPATCH); String[] queues = multiDispatchQueue.split(MixAll.MULTI_DISPATCH_QUEUE_SPLITTER); Long[] queueOffsets = new Long[queues.length]; if (messageStore.getMessageStoreConfig().isEnableLmq()) { for (int i = 0; i < queues.length; i++) { String key = queueKey(queues[i], msg); if (MixAll.isLmq(key)) { queueOffsets[i] = messageStore.getQueueStore().getLmqQueueOffset(key); } } } MessageAccessor.putProperty(msg, MessageConst.PROPERTY_INNER_MULTI_QUEUE_OFFSET, StringUtils.join(queueOffsets, MixAll.MULTI_DISPATCH_QUEUE_SPLITTER)); msg.removeWaitStorePropertyString(); }
// Verifies that a freshly built multi-queue message gets "0,0" as its joined multi-queue offsets.
@Test public void wrapMultiDispatch() throws RocksDBException { MessageExtBrokerInner messageExtBrokerInner = buildMessageMultiQueue(); multiDispatch.wrapMultiDispatch(messageExtBrokerInner); assertEquals(messageExtBrokerInner.getProperty(MessageConst.PROPERTY_INNER_MULTI_QUEUE_OFFSET), "0,0"); }
// Creates a CouchDbEndpoint for the given URI and applies the remaining URI parameters
// as endpoint properties via the standard Camel setProperties mechanism.
@Override protected CouchDbEndpoint createEndpoint(String uri, String remaining, Map<String, Object> params) throws Exception { CouchDbEndpoint endpoint = new CouchDbEndpoint(uri, remaining, this); setProperties(endpoint, params); return endpoint; }
// Verifies that the couchdb component resolves a well-formed URI into a non-null endpoint.
@Test void testEndpointCreated() throws Exception { Map<String, Object> params = new HashMap<>(); String uri = "couchdb:http://localhost:5984/db"; String remaining = "http://localhost:5984/db"; CouchDbEndpoint endpoint = context.getComponent("couchdb", CouchDbComponent.class).createEndpoint(uri, remaining, params); assertNotNull(endpoint); }
/**
 * Merges this table's statistics with another's.
 *
 * <p>Row counts are summed; column statistics are merged via
 * {@code mergeColumnStates}. If either side has an unknown row count
 * (negative), the merge result is {@link TableStats#UNKNOWN}.
 *
 * @param other the statistics to merge in
 * @param partitionKeys columns that are partition keys (affects how ndv is
 *     merged downstream), may be {@code null}
 * @return the merged statistics, or {@code UNKNOWN} if either input is unknown
 */
public TableStats merge(TableStats other, @Nullable Set<String> partitionKeys) {
    // Unknown stats are contagious: if either side is unknown, so is the merge.
    if (this.rowCount < 0 || other.rowCount < 0) {
        return TableStats.UNKNOWN;
    }
    // Both counts are known (>= 0) after the guard above, so they can be summed
    // directly; the original re-checked ">= 0" here, which was dead code.
    return new TableStats(this.rowCount + other.rowCount, mergeColumnStates(other, partitionKeys));
}
// Verifies merge behavior when one side lacks stats for a column: the missing column ("b") is
// dropped, row counts are summed, and ndv for a partition-key column is summed instead of maxed.
@Test void testMergeLackColumnStats() { Map<String, ColumnStats> colStats1 = new HashMap<>(); colStats1.put("a", new ColumnStats(4L, 5L, 2D, 3, 15, 2)); colStats1.put("b", new ColumnStats(4L, 5L, 2D, 3, 15, 2)); TableStats stats1 = new TableStats(30, colStats1); Map<String, ColumnStats> colStats2 = new HashMap<>(); colStats2.put("a", new ColumnStats(3L, 15L, 12D, 23, 35, 6)); TableStats stats2 = new TableStats(32, colStats2); Map<String, ColumnStats> colStatsMerge = new HashMap<>(); colStatsMerge.put("a", new ColumnStats(4L, 20L, 7D, 23, 35, 2)); assertThat(stats1.merge(stats2, null)).isEqualTo(new TableStats(62, colStatsMerge)); Map<String, ColumnStats> colStatsMerge2 = new HashMap<>(); colStatsMerge2.put("a", new ColumnStats(4L, 20L, 7D, 23, 35, 2)); assertThat(stats1.merge(stats2, new HashSet<>())) .isEqualTo(new TableStats(62, colStatsMerge2)); // test column stats merge while column 'a' is partition key. Merged Ndv for columns which // are partition keys using sum instead of max. Map<String, ColumnStats> colStatsMerge3 = new HashMap<>(); colStatsMerge3.put("a", new ColumnStats(7L, 20L, 7D, 23, 35, 2)); assertThat(stats1.merge(stats2, new HashSet<>(Collections.singletonList("a")))) .isEqualTo(new TableStats(62, colStatsMerge3)); }
/**
 * Creates a new random blob key of the requested type.
 *
 * @param type the blob type to create a key for
 * @return a {@code PermanentBlobKey} for permanent blobs, otherwise a
 *     {@code TransientBlobKey}
 */
@VisibleForTesting
static BlobKey createKey(BlobType type) {
    return type == PERMANENT_BLOB ? new PermanentBlobKey() : new TransientBlobKey();
}
// Verifies that a freshly created permanent blob key round-trips through its string form.
@Test void testToFromStringPermanentKey() { testToFromString(BlobKey.createKey(PERMANENT_BLOB)); }
/**
 * Picks the page store directory for a file by hashing its id.
 *
 * @param fileId the file identifier to hash
 * @param fileLength the file length (unused by this hash-based strategy)
 * @return the directory the file is assigned to
 */
@Override
public PageStoreDir allocate(String fileId, long fileLength) {
    // floorMod keeps the index non-negative even when the hash is negative.
    int dirIndex = Math.floorMod(mHashFunction.apply(fileId), mDirs.size());
    return mDirs.get(dirIndex);
}
// Verifies that the hash allocator spreads 10k file ids roughly evenly across the dirs
// (every dir within +/-20% of the uniform share).
@Test public void hashDistributionTest() { mAllocator = new HashAllocator(mDirs); Map<Path, Integer> result = new HashMap<>(); int numFiles = 1_000_0; for (int i = 0; i < numFiles; i++) { PageStoreDir dir = mAllocator.allocate(String.valueOf(i), 0); result.put(dir.getRootPath(), result.getOrDefault(dir.getRootPath(), 0) + 1); } assertTrue(result.values().stream() .allMatch(count -> count >= numFiles / mDirs.size() * 0.8 && count <= numFiles / mDirs.size() * 1.2)); }
/**
 * Closes immediately by delegating to the timed close variant with a zero timeout.
 */
@Override
public void close() {
    close(Duration.ZERO);
}
// Verifies that fencing a producer after it has been closed fails with IllegalStateException.
@Test public void shouldThrowOnFenceProducerIfProducerIsClosed() { buildMockProducer(true); producer.close(); assertThrows(IllegalStateException.class, producer::fenceProducer); }
// Parses raw SAML metadata XML and, if it validates, stores it as either an EntitiesDescriptor or
// an EntityDescriptor depending on the root element. Returns false when validation fails.
public boolean addMetadataString(String rawMetadata) throws UnmarshallingException { InputStream inputStream = new ByteArrayInputStream(rawMetadata.getBytes(UTF_8)); XMLObject metadata = super.unmarshallMetadata(inputStream); if (!isValid(metadata)) { return false; } if (metadata instanceof EntitiesDescriptor) { this.entitiesDescriptor = (EntitiesDescriptor) metadata; } if (metadata instanceof EntityDescriptor) { this.entityDescriptor = (EntityDescriptor) metadata; } return true; }
// Verifies that resolving an entity id not present in the loaded metadata yields null.
@Test public void resolveNonExistingIdTest() throws UnmarshallingException, ResolverException { stringMetadataResolver.addMetadataString(metadata); CriteriaSet criteria = new CriteriaSet(new EntityIdCriterion("urn:nl-eid-gdi:1:0:entities:0000000999999999900")); EntityDescriptor entityDescriptor = stringMetadataResolver.resolveSingle(criteria); assertNull(entityDescriptor); }
/**
 * Looks up the Cassandra version of the given cluster.
 *
 * @param clusterId id of the cluster whose session should be queried
 * @return the Cassandra version reported through that session
 */
public Version getCassandraVersion(String clusterId) {
    // Resolve the cluster's session and delegate the actual version query to it.
    return getCassandraVersionWithSession(cqlSessionFactory.get(clusterId));
}
// Verifies that a non-null version of at least 1.0.0 is returned for the test cluster.
@Test void get_cassandra_version() { // when Version version = clusterVersionGetCommander.getCassandraVersion(CLUSTER_ID); // then assertThat(version).isNotNull(); assertThat(version).isGreaterThanOrEqualTo(Version.parse("1.0.0")); }
// Returns the future's value without blocking, with a layered lookup: (1) an explicitly set result
// wins; (2) a previously deserialized value is reused; (3) otherwise the underlying future's
// getNow(valueIfAbsent) is resolved/deserialized on the fly — deliberately NOT cached, because the
// returned value might be the caller's default rather than the real result. Serialization failures
// surface as CompletionException.
@Override public V getNow(V valueIfAbsent) { // if there is an explicit value set, we use that if (result != null) { return (V) result; } // if there already is a deserialized value set, use it. if (deserializedValue != VOID) { return (V) deserializedValue; } // otherwise, do not cache the value returned from future.getNow // because it might be the default valueIfAbsent Object value = future.getNow(valueIfAbsent); try { if (value instanceof ClientMessage) { return resolve(value); } else { return (value instanceof Data && deserializeResponse) ? serializationService.toObject(value) : (V) value; } } catch (HazelcastSerializationException exc) { throw new CompletionException(exc); } }
// Verifies that once get() has deserialized the response, getNow returns the same cached instance
// rather than the provided default.
@Test public void getNow_cachedValue() throws Exception { invocationFuture.complete(response); assertTrue(delegatingFuture.isDone()); String cachedValue = delegatingFuture.get(); assertEquals(DESERIALIZED_VALUE, cachedValue); assertSame(cachedValue, delegatingFuture.getNow(DESERIALIZED_DEFAULT_VALUE)); }
/**
 * Loads a rule by its title.
 *
 * @param name the rule title to look up
 * @return the stored rule
 * @throws NotFoundException if no rule with that title exists
 */
@Override
public RuleDao loadByName(String name) throws NotFoundException {
    // The title index maps rule titles to ids; no mapping means no such rule.
    final String ruleId = titleToId.get(name);
    if (ruleId != null) {
        return load(ruleId);
    }
    throw new NotFoundException("No rule with name " + name);
}
// Verifies that loading a non-existent rule name throws NotFoundException with the expected message.
@Test public void loadByNameNotFound() { assertThatThrownBy(() -> service.loadByName("Foobar")) .isInstanceOf(NotFoundException.class) .hasMessage("No rule with name Foobar"); }
// This metric is a numeric gauge.
@Override public MetricType getType() { return MetricType.GAUGE_NUMBER; }
// Verifies a NumberGauge starts with a null value, stores a set value, and reports GAUGE_NUMBER.
@Test public void set() { NumberGauge gauge = new NumberGauge("bar"); assertThat(gauge.getValue()).isNull(); gauge.set(123l); assertThat(gauge.getValue()).isEqualTo(123l); assertThat(gauge.getType()).isEqualTo(MetricType.GAUGE_NUMBER); }
// Convenience overload: extracts the output type of a MapFunction without a function name and
// without allowing missing type info (delegates with functionName=null, allowMissing=false).
@PublicEvolving public static <IN, OUT> TypeInformation<OUT> getMapReturnTypes( MapFunction<IN, OUT> mapInterface, TypeInformation<IN> inType) { return getMapReturnTypes(mapInterface, inType, null, false); }
// Verifies that an identity mapper's extracted return type equals its input type (Boolean).
@Test void testFunctionDependingOnInputFromInput() { IdentityMapper<Boolean> function = new IdentityMapper<Boolean>(); TypeInformation<?> ti = TypeExtractor.getMapReturnTypes(function, BasicTypeInfo.BOOLEAN_TYPE_INFO); assertThat(ti.isBasicType()).isTrue(); assertThat(ti).isEqualTo(BasicTypeInfo.BOOLEAN_TYPE_INFO); }
// Builds the KafkaBasedLog backing the config topic: derives producer/consumer/admin client configs
// from the worker config (forcing String/byte[] deserializers and the shared client id), enforces
// read_committed isolation on the consumer when exactly-once source support is enabled, and defines
// the topic as compacted with a single partition and the configured replication factor. User-supplied
// topic settings are applied first so the mandatory settings override them.
KafkaBasedLog<String, byte[]> setupAndCreateKafkaBasedLog(String topic, final WorkerConfig config) { String clusterId = config.kafkaClusterId(); Map<String, Object> originals = config.originals(); Map<String, Object> producerProps = new HashMap<>(baseProducerProps); producerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId); Map<String, Object> consumerProps = new HashMap<>(originals); consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName()); consumerProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId); ConnectUtils.addMetricsContextProperties(consumerProps, config, clusterId); if (config.exactlyOnceSourceEnabled()) { ConnectUtils.ensureProperty( consumerProps, ConsumerConfig.ISOLATION_LEVEL_CONFIG, IsolationLevel.READ_COMMITTED.toString(), "for the worker's config topic consumer when exactly-once source support is enabled", true ); } Map<String, Object> adminProps = new HashMap<>(originals); ConnectUtils.addMetricsContextProperties(adminProps, config, clusterId); adminProps.put(CommonClientConfigs.CLIENT_ID_CONFIG, clientId); Map<String, Object> topicSettings = config instanceof DistributedConfig ? ((DistributedConfig) config).configStorageTopicSettings() : Collections.emptyMap(); NewTopic topicDescription = TopicAdmin.defineTopic(topic) .config(topicSettings) // first so that we override user-supplied settings as needed .compacted() .partitions(1) .replicationFactor(config.getShort(DistributedConfig.CONFIG_STORAGE_REPLICATION_FACTOR_CONFIG)) .build(); return createKafkaBasedLog(topic, producerProps, consumerProps, new ConsumeCallback(), topicDescription, topicAdminSupplier, config, time); }
// Verifies that when exactly-once source support is only "preparing" (not enabled), the consumer's
// isolation.level is not forcibly inserted.
@Test public void testConsumerPropertiesNotInsertedByDefaultWithoutExactlyOnceSourceEnabled() { props.put(EXACTLY_ONCE_SOURCE_SUPPORT_CONFIG, "preparing"); props.remove(ISOLATION_LEVEL_CONFIG); createStore(); configStorage.setupAndCreateKafkaBasedLog(TOPIC, config); verifyConfigure(); assertNull(capturedConsumerProps.getValue().get(ISOLATION_LEVEL_CONFIG)); }
/**
 * Routes a broadcast statement to every data source of the sharding rule that
 * corresponds to a known instance data source, mapping each source onto itself.
 *
 * @param shardingRule the sharding rule supplying candidate data source names
 * @return the route context containing one unit per matching data source
 */
@Override
public RouteContext route(final ShardingRule shardingRule) {
    RouteContext routeContext = new RouteContext();
    for (String dataSourceName : shardingRule.getDataSourceNames()) {
        // Skip data sources that do not belong to any known instance.
        if (!resourceMetaData.getAllInstanceDataSourceNames().contains(dataSourceName)) {
            continue;
        }
        routeContext.getRouteUnits().add(new RouteUnit(new RouteMapper(dataSourceName, dataSourceName), Collections.emptyList()));
    }
    return routeContext;
}
// Verifies that broadcast routing produces exactly one route unit mapped to the expected data source.
@Test void assertRoute() { RouteContext routeContext = shardingInstanceBroadcastRoutingEngine.route(shardingRule); assertThat(routeContext.getRouteUnits().size(), is(1)); assertThat(routeContext.getRouteUnits().iterator().next().getDataSourceMapper().getActualName(), is(DATASOURCE_NAME)); }
// Tears down this coordinator shard: cancels the group-expiration timer, deactivates the metrics
// shard, and lets the group metadata manager clean up its own state.
@Override public void onUnloaded() { timer.cancel(GROUP_EXPIRATION_KEY); coordinatorMetrics.deactivateMetricsShard(metricsShard); groupMetadataManager.onUnloaded(); }
// Verifies that unloading the shard clears all pending timers and forwards onUnloaded to the
// group metadata manager.
@Test public void testOnUnloaded() { GroupMetadataManager groupMetadataManager = mock(GroupMetadataManager.class); OffsetMetadataManager offsetMetadataManager = mock(OffsetMetadataManager.class); Time mockTime = new MockTime(); MockCoordinatorTimer<Void, CoordinatorRecord> timer = new MockCoordinatorTimer<>(mockTime); GroupCoordinatorShard coordinator = new GroupCoordinatorShard( new LogContext(), groupMetadataManager, offsetMetadataManager, mockTime, timer, GroupCoordinatorConfigTest.createGroupCoordinatorConfig(4096, 1000L, 24 * 60), mock(CoordinatorMetrics.class), mock(CoordinatorMetricsShard.class) ); coordinator.onUnloaded(); assertEquals(0, timer.size()); verify(groupMetadataManager, times(1)).onUnloaded(); }
/**
 * Cancels the slot request associated with the given execution attempt, if any.
 * Cancelling the logical slot future also cancels the underlying physical slot
 * request; unknown attempt ids are ignored.
 *
 * @param executionAttemptId the execution attempt whose request should be cancelled
 */
@Override
public void cancel(ExecutionAttemptID executionAttemptId) {
    final CompletableFuture<LogicalSlot> pendingSlot =
            this.requestedPhysicalSlots.getValueByKeyA(executionAttemptId);
    if (pendingSlot == null) {
        return;
    }
    pendingSlot.cancel(false);
}
// Verifies that cancelling an execution attempt's logical slot also cancels the physical slot
// request, surfacing as a CancellationException on the slot future.
@Test void testLogicalSlotCancellationCancelsPhysicalSlotRequest() throws Exception { testLogicalSlotRequestCancellationOrRelease( false, true, (context, slotFuture) -> { assertThatThrownBy( () -> { context.getAllocator().cancel(EXECUTION_ATTEMPT_ID); slotFuture.get(); }) .as("The logical future must finish with a cancellation exception.") .isInstanceOf(CancellationException.class); }); }
// Static factory: starts a Builder seeded with the given property value.
public static Build withPropertyValue(String propertyValue) { return new Builder(propertyValue); }
// Verifies that the builder maps the property value "https" onto the HTTPS client type.
@Test void it_should_return_client_type_when_property_value_exists() { //GIVEN String clientType = "https"; //WHEN ElasticsearchClientType esClientType = ElasticsearchClientTypeBuilder.withPropertyValue(clientType).build(); //THEN assertEquals(HTTPS, esClientType); }
// Computes the azimuth (compass bearing in degrees, 0 = north, clockwise) from point 1 to point 2
// by converting the mathematical orientation (counter-clockwise from east) into a north-based
// clockwise angle, normalized into [0, 360).
public double calcAzimuth(double lat1, double lon1, double lat2, double lon2) { double orientation = Math.PI / 2 - calcOrientation(lat1, lon1, lat2, lon2); if (orientation < 0) orientation += 2 * Math.PI; return Math.toDegrees(Helper.round4(orientation)) % 360; }
// Verifies azimuths for the four cardinal/diagonal directions and a real-coordinate due-north case.
@Test public void testCalcAzimuth() { assertEquals(45.0, AC.calcAzimuth(0, 0, 1, 1), 0.001); assertEquals(90.0, AC.calcAzimuth(0, 0, 0, 1), 0.001); assertEquals(180.0, AC.calcAzimuth(0, 0, -1, 0), 0.001); assertEquals(270.0, AC.calcAzimuth(0, 0, 0, -1), 0.001); assertEquals(0.0, AC.calcAzimuth(49.942, 11.580, 49.944, 11.580), 0.001); }
public static Map<String, String> parseToMap(String attributesModification) { if (Strings.isNullOrEmpty(attributesModification)) { return new HashMap<>(); } // format: +key1=value1,+key2=value2,-key3,+key4=value4 Map<String, String> attributes = new HashMap<>(); String[] kvs = attributesModification.split(ATTR_ARRAY_SEPARATOR_COMMA); for (String kv : kvs) { String key; String value; if (kv.contains(ATTR_KEY_VALUE_EQUAL_SIGN)) { String[] splits = kv.split(ATTR_KEY_VALUE_EQUAL_SIGN); key = splits[0]; value = splits[1]; if (!key.contains(ATTR_ADD_PLUS_SIGN)) { throw new RuntimeException("add/alter attribute format is wrong: " + key); } } else { key = kv; value = ""; if (!key.contains(ATTR_DELETE_MINUS_SIGN)) { throw new RuntimeException("delete attribute format is wrong: " + key); } } String old = attributes.put(key, value); if (old != null) { throw new RuntimeException("key duplication: " + key); } } return attributes; }
// Verifies that an add/alter entry missing its '+' prefix ("key2=value2") is rejected.
@Test(expected = RuntimeException.class) public void parseToMap_InvalidAddAttributeFormat_ThrowsRuntimeException() { String attributesModification = "+key1=value1,key2=value2,-key3,+key4=value4"; AttributeParser.parseToMap(attributesModification); }
// Convenience wrapper: offsets the given date by the specified number of months.
public static DateTime offsetMonth(Date date, int offset) { return offset(date, DateField.MONTH, offset); }
// Verifies month offsets from a month-end date: the day clamps to the shorter month's last day
// (May 31 + 1 month = Jun 30) and returns to 31 for longer months.
@Test public void offsetMonthTest() { final DateTime st = DateUtil.parseDate("2018-05-31"); final List<DateTime> list = new ArrayList<>(); for (int i = 0; i < 4; i++) { list.add(DateUtil.offsetMonth(st, i)); } assertEquals("2018-05-31 00:00:00", list.get(0).toString()); assertEquals("2018-06-30 00:00:00", list.get(1).toString()); assertEquals("2018-07-31 00:00:00", list.get(2).toString()); assertEquals("2018-08-31 00:00:00", list.get(3).toString()); }
// Builds a matcher that accepts HTTP requests whose path begins with the given prefix.
// NOTE(review): an empty (non-null) prefix is rejected with NullPointerException rather than
// IllegalArgumentException — presumably a deliberate project-wide validation convention; confirm
// before changing the exception type.
public static Matcher<HttpRequest> pathStartsWith(String pathPrefix) { if (pathPrefix == null) throw new NullPointerException("pathPrefix == null"); if (pathPrefix.isEmpty()) throw new NullPointerException("pathPrefix is empty"); return new PathStartsWith(pathPrefix); }
// Verifies that a request path exactly equal to the prefix matches.
@Test void pathStartsWith_matched_exact() { when(httpRequest.path()).thenReturn("/foo"); assertThat(pathStartsWith("/foo").matches(httpRequest)).isTrue(); }
// Injects schema information into CREATE SOURCE / CREATE AS SELECT statements; all other statement
// types pass through untouched. KsqlStatementExceptions propagate as-is, while other KsqlExceptions
// are re-wrapped with a fully built error message and the statement's masked text for context.
@SuppressWarnings("unchecked") @Override public <T extends Statement> ConfiguredStatement<T> inject( final ConfiguredStatement<T> statement ) { if (!(statement.getStatement() instanceof CreateSource) && !(statement.getStatement() instanceof CreateAsSelect)) { return statement; } try { if (statement.getStatement() instanceof CreateSource) { final ConfiguredStatement<CreateSource> createStatement = (ConfiguredStatement<CreateSource>) statement; return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement); } else { final ConfiguredStatement<CreateAsSelect> createStatement = (ConfiguredStatement<CreateAsSelect>) statement; return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse( createStatement); } } catch (final KsqlStatementException e) { throw e; } catch (final KsqlException e) { throw new KsqlStatementException( ErrorMessageUtil.buildErrorMessage(e), statement.getMaskedStatementText(), e.getCause()); } }
// Verifies that a CTAS whose value columns are reordered relative to the schema-registry schema is
// rejected with an error naming the changed/missing/reordered columns.
@Test public void shouldThrowIfCtasKeyTableElementsNotCompatibleReorderedValue() { // Given: givenFormatsAndProps("kafka", "avro", ImmutableMap.of("VALUE_SCHEMA_ID", new IntegerLiteral(42))); givenDDLSchemaAndFormats(LOGICAL_SCHEMA_VALUE_REORDERED, "kafka", "avro", SerdeFeature.UNWRAP_SINGLES, SerdeFeature.UNWRAP_SINGLES); // When: final Exception e = assertThrows( KsqlException.class, () -> injector.inject(ctasStatement) ); // Then: assertThat(e.getMessage(), containsString("The following value columns are changed, missing or reordered: " + "[`bigIntField` BIGINT, `intField` INTEGER]. Schema from schema registry is [" + "`intField` INTEGER, " + "`bigIntField` BIGINT, " + "`doubleField` DOUBLE, " + "`stringField` STRING, " + "`booleanField` BOOLEAN, " + "`arrayField` ARRAY<INTEGER>, " + "`mapField` MAP<STRING, BIGINT>, " + "`structField` STRUCT<`s0` BIGINT>, " + "`decimalField` DECIMAL(4, 2)]" ) ); }
// Sub-resources are intentionally left untouched; only top-level resources get prefix handling.
@Override public ResourceModel processSubResource(ResourceModel model, Configuration config) { return model; }
// Verifies that processSubResource returns the exact same model instance it was given.
@Test public void processSubResourceDoesNothing() throws Exception { final Map<String, String> packagePrefixes = ImmutableMap.of(PACKAGE_NAME, "/test/prefix"); when(configuration.isCloud()).thenReturn(false); final PrefixAddingModelProcessor modelProcessor = new PrefixAddingModelProcessor(packagePrefixes); final ResourceModel originalResourceModel = new ResourceModel.Builder(false) .addResource(Resource.from(TestResource.class)).build(); final ResourceModel resourceModel = modelProcessor.processSubResource(originalResourceModel, new ResourceConfig()); assertThat(originalResourceModel).isSameAs(resourceModel); }
// Stores the custom model and encoded-value lookup for later weight/speed evaluation.
// NOTE(review): the `areas` parameter is currently ignored here — confirm whether it is consumed
// elsewhere or intentionally unused.
public void init(CustomModel customModel, EncodedValueLookup lookup, Map<String, JsonFeature> areas) { this.lookup = lookup; this.customModel = customModel; }
// Verifies that a speed statement producing a negative value makes calcMaxSpeed fail with a
// descriptive IllegalArgumentException.
@Test public void testNegativeMax() { CustomModel customModel = new CustomModel(); customModel.addToSpeed(If("true", LIMIT, VehicleSpeed.key("car"))); customModel.addToSpeed(If("road_class == PRIMARY", MULTIPLY, "0.5")); customModel.addToSpeed(Else(MULTIPLY, "-0.5")); CustomWeightingHelper helper = new CustomWeightingHelper(); EncodingManager lookup = new EncodingManager.Builder().add(VehicleSpeed.create("car", 5, 5, true)).build(); helper.init(customModel, lookup, null); IllegalArgumentException ret = assertThrows(IllegalArgumentException.class, helper::calcMaxSpeed); assertTrue(ret.getMessage().startsWith("statement resulted in negative value")); }
/**
 * Maps a primitive class name (e.g. {@code "int"}) to its internal descriptor
 * (e.g. {@code "I"}).
 *
 * @param name the primitive class name to convert
 * @return the matching primitive's internal name
 * @throws IllegalArgumentException if the name is not a primitive class name
 */
@Nonnull
public static String classToPrimitive(@Nonnull String name) {
    for (Type primitiveType : PRIMITIVES) {
        if (primitiveType.getClassName().equals(name)) {
            return primitiveType.getInternalName();
        }
    }
    throw new IllegalArgumentException("Descriptor was not a primitive class name!");
}
// Verifies the descriptor mapping for all nine primitive names and rejection of a non-primitive.
@Test void testClassToPrimitive() { assertEquals("V", Types.classToPrimitive("void")); assertEquals("Z", Types.classToPrimitive("boolean")); assertEquals("B", Types.classToPrimitive("byte")); assertEquals("C", Types.classToPrimitive("char")); assertEquals("S", Types.classToPrimitive("short")); assertEquals("I", Types.classToPrimitive("int")); assertEquals("F", Types.classToPrimitive("float")); assertEquals("D", Types.classToPrimitive("double")); assertEquals("J", Types.classToPrimitive("long")); assertThrows(IllegalArgumentException.class, () -> Types.classToPrimitive("foo")); }
// Removes the metric with the given name (if present), notifying every registered reporter of the
// removal; reporter failures are logged but do not abort removal. Returns the removed metric or
// null when no such metric existed. Synchronized to keep the registry and reporters consistent.
public synchronized KafkaMetric removeMetric(MetricName metricName) { KafkaMetric metric = this.metrics.remove(metricName); if (metric != null) { for (MetricsReporter reporter : reporters) { try { reporter.metricRemoval(metric); } catch (Exception e) { log.error("Error when removing metric from " + reporter.getClass().getName(), e); } } log.trace("Removed metric named {}", metricName); } return metric; }
// Verifies that removing metrics returns the removed instance, deletes it from the registry, leaves
// other metrics intact, and restores the original registry size once all added metrics are removed.
@Test public void testRemoveMetric() { int size = metrics.metrics().size(); metrics.addMetric(metrics.metricName("test1", "grp1"), new WindowedCount()); metrics.addMetric(metrics.metricName("test2", "grp1"), new WindowedCount()); assertNotNull(metrics.removeMetric(metrics.metricName("test1", "grp1"))); assertNull(metrics.metrics().get(metrics.metricName("test1", "grp1"))); assertNotNull(metrics.metrics().get(metrics.metricName("test2", "grp1"))); assertNotNull(metrics.removeMetric(metrics.metricName("test2", "grp1"))); assertNull(metrics.metrics().get(metrics.metricName("test2", "grp1"))); assertEquals(size, metrics.metrics().size()); }
// Static factory: wraps the given offset/epoch pair with the VALID kind.
public static ValidOffsetAndEpoch valid(OffsetAndEpoch offsetAndEpoch) { return new ValidOffsetAndEpoch(Kind.VALID, offsetAndEpoch); }
// Verifies that the valid() factory produces an instance whose kind is VALID.
@Test void valid() { ValidOffsetAndEpoch validOffsetAndEpoch = ValidOffsetAndEpoch.valid(new OffsetAndEpoch(0, 0)); assertEquals(ValidOffsetAndEpoch.Kind.VALID, validOffsetAndEpoch.kind()); }
// Builds the complement iterator over [0, numDocs): it yields every doc id NOT produced by the
// child iterator. Primes the first non-matching id from the child; an immediately exhausted child
// means every id up to numDocs matches.
public NotDocIdIterator(BlockDocIdIterator childDocIdIterator, int numDocs) { _childDocIdIterator = childDocIdIterator; _nextDocId = 0; int currentDocIdFromChildIterator = childDocIdIterator.next(); _nextNonMatchingDocId = currentDocIdFromChildIterator == Constants.EOF ? numDocs : currentDocIdFromChildIterator; _numDocs = numDocs; }
// Exercises NotDocIdIterator against bitmap-backed children: interleaved advance()/next() calls, a
// full sequential walk of the complement, composition over an OR iterator, a child covering the
// whole range (immediate EOF), and a child covering only a prefix of the range.
@Test public void testNotDocIdIterator() { // OR result: [0, 1, 2, 4, 5, 6, 8, 10, 13, 15, 16, 17, 18, 19, 20] int[] docIds1 = new int[]{1, 4, 6, 10, 15, 17, 18, 20}; int[] docIds2 = new int[]{0, 1, 5, 8, 15, 18}; int[] docIds3 = new int[]{1, 2, 6, 13, 16, 19}; int[] docIds4 = new int[]{0, 1, 2, 3, 4, 5}; MutableRoaringBitmap bitmap1 = new MutableRoaringBitmap(); bitmap1.add(docIds1); MutableRoaringBitmap bitmap2 = new MutableRoaringBitmap(); bitmap2.add(docIds2); MutableRoaringBitmap bitmap3 = new MutableRoaringBitmap(); bitmap3.add(docIds3); MutableRoaringBitmap bitmap4 = new MutableRoaringBitmap(); bitmap4.add(docIds4); OrDocIdIterator orDocIdIterator = new OrDocIdIterator(new BlockDocIdIterator[]{ new RangelessBitmapDocIdIterator(bitmap1), new RangelessBitmapDocIdIterator( bitmap2), new RangelessBitmapDocIdIterator(bitmap3) }); NotDocIdIterator notDocIdIterator = new NotDocIdIterator(new RangelessBitmapDocIdIterator(bitmap1), 25); assertEquals(notDocIdIterator.advance(1), 2); assertEquals(notDocIdIterator.next(), 3); assertEquals(notDocIdIterator.next(), 5); assertEquals(notDocIdIterator.advance(7), 7); assertEquals(notDocIdIterator.advance(13), 13); assertEquals(notDocIdIterator.next(), 14); assertEquals(notDocIdIterator.advance(18), 19); assertEquals(notDocIdIterator.advance(21), 21); assertEquals(notDocIdIterator.advance(26), Constants.EOF); notDocIdIterator = new NotDocIdIterator(new RangelessBitmapDocIdIterator(bitmap1), 25); assertEquals(notDocIdIterator.next(), 0); assertEquals(notDocIdIterator.next(), 2); assertEquals(notDocIdIterator.next(), 3); assertEquals(notDocIdIterator.next(), 5); assertEquals(notDocIdIterator.next(), 7); assertEquals(notDocIdIterator.next(), 8); assertEquals(notDocIdIterator.next(), 9); assertEquals(notDocIdIterator.next(), 11); assertEquals(notDocIdIterator.next(), 12); assertEquals(notDocIdIterator.next(), 13); assertEquals(notDocIdIterator.next(), 14); assertEquals(notDocIdIterator.next(), 16); 
assertEquals(notDocIdIterator.next(), 19); assertEquals(notDocIdIterator.next(), 21); assertEquals(notDocIdIterator.next(), 22); assertEquals(notDocIdIterator.next(), 23); assertEquals(notDocIdIterator.next(), 24); assertEquals(notDocIdIterator.next(), Constants.EOF); notDocIdIterator = new NotDocIdIterator(orDocIdIterator, 25); assertEquals(notDocIdIterator.next(), 3); assertEquals(notDocIdIterator.next(), 7); assertEquals(notDocIdIterator.next(), 9); assertEquals(notDocIdIterator.next(), 11); assertEquals(notDocIdIterator.next(), 12); assertEquals(notDocIdIterator.next(), 14); assertEquals(notDocIdIterator.next(), 21); assertEquals(notDocIdIterator.next(), 22); assertEquals(notDocIdIterator.next(), 23); assertEquals(notDocIdIterator.next(), 24); assertEquals(notDocIdIterator.next(), Constants.EOF); notDocIdIterator = new NotDocIdIterator(new RangelessBitmapDocIdIterator(bitmap4), 6); assertEquals(notDocIdIterator.next(), Constants.EOF); notDocIdIterator = new NotDocIdIterator(new RangelessBitmapDocIdIterator(bitmap4), 9); assertEquals(notDocIdIterator.next(), 6); assertEquals(notDocIdIterator.next(), 7); assertEquals(notDocIdIterator.next(), 8); }
// Creates a new element (with optional text content) inside containerNode. When a valid position
// within the current children is given, the node is inserted before the child at that index;
// otherwise (null position or position past the end) it is appended. Returns the created node.
public static Node createNodeAtPosition(Node containerNode, String nodeToCreateName, String nodeContent, Integer position) { Node toReturn = containerNode.getOwnerDocument().createElement(nodeToCreateName); if (nodeContent != null) { toReturn.setTextContent(nodeContent); } if (containerNode.hasChildNodes() && position != null && position < containerNode.getChildNodes().getLength()) { Node positionNode = containerNode.getChildNodes().item(position); containerNode.insertBefore(toReturn, positionNode); } else { containerNode.appendChild(toReturn); } return toReturn; }
// Verifies the three insertion modes: null position appends at the end, position 0 inserts first,
// and a mid-range position inserts at that child index; each created node carries its name/content.
@Test public void createNodeAtPosition() throws Exception { String newNodeName = "NEW_NODE_NAME_0"; String newNodeValue = "NEW_NODE_VALUE_="; Document document = DOMParserUtil.getDocument(XML); Map<Node, List<Node>> testNodesMap = DOMParserUtil.getChildrenNodesMap(document, MAIN_NODE, TEST_NODE); assertThat(testNodesMap).hasSize(1); Node mainNode = testNodesMap.keySet().iterator().next(); Node retrieved = DOMParserUtil.createNodeAtPosition(mainNode, newNodeName, newNodeValue, null); assertThat(retrieved).isNotNull(); assertThat(retrieved.getNodeName()).isEqualTo(newNodeName); assertThat(retrieved.getTextContent()).isEqualTo(newNodeValue); assertThat(mainNode.getChildNodes().item(mainNode.getChildNodes().getLength() - 1)).isEqualTo(retrieved); newNodeName = "NEW_NODE_NAME_1"; newNodeValue = "NEW_NODE_VALUE_1"; retrieved = DOMParserUtil.createNodeAtPosition(mainNode, newNodeName, newNodeValue, 0); assertThat(retrieved).isNotNull(); assertThat(retrieved.getNodeName()).isEqualTo(newNodeName); assertThat(retrieved.getTextContent()).isEqualTo(newNodeValue); assertThat(mainNode.getChildNodes().item(0)).isEqualTo(retrieved); newNodeName = "NEW_NODE_NAME_2"; newNodeValue = "NEW_NODE_VALUE_2"; retrieved = DOMParserUtil.createNodeAtPosition(mainNode, newNodeName, newNodeValue, 2); assertThat(retrieved).isNotNull(); assertThat(retrieved.getNodeName()).isEqualTo(newNodeName); assertThat(retrieved.getTextContent()).isEqualTo(newNodeValue); assertThat(mainNode.getChildNodes().item(2)).isEqualTo(retrieved); }
// Accessor for this segment's predicate.
public KiePMMLPredicate getKiePMMLPredicate() { return kiePMMLPredicate; }
// Verifies the getter returns the predicate the segment fixture was built with.
@Test void getKiePMMLPredicate() { assertThat(KIE_PMML_SEGMENT.getKiePMMLPredicate()).isEqualTo(KIE_PMML_PREDICATE); }
// Cleans up a parameter-definition map in three passes: (1) recursively clean nested MAP params
// (keeping null-valued OPTIONAL maps as-is); (2) drop OPTIONAL params that carry neither a value
// nor an expression (including MAP params whose cleaned value is empty), while required params must
// have a value or expression or validation fails; (3) strip intermediate metadata from the result.
// NOTE(review): the MapParamDefinition builder sets .name(...) twice — harmless but presumably a
// copy/paste slip.
@SuppressWarnings({"SimplifyBooleanReturn"}) public static Map<String, ParamDefinition> cleanupParams(Map<String, ParamDefinition> params) { if (params == null || params.isEmpty()) { return params; } Map<String, ParamDefinition> mapped = params.entrySet().stream() .collect( MapHelper.toListMap( Map.Entry::getKey, p -> { ParamDefinition param = p.getValue(); if (param.getType() == ParamType.MAP) { MapParamDefinition mapParamDef = param.asMapParamDef(); if (mapParamDef.getValue() == null && (mapParamDef.getInternalMode() == InternalParamMode.OPTIONAL)) { return mapParamDef; } return MapParamDefinition.builder() .name(mapParamDef.getName()) .value(cleanupParams(mapParamDef.getValue())) .expression(mapParamDef.getExpression()) .name(mapParamDef.getName()) .validator(mapParamDef.getValidator()) .tags(mapParamDef.getTags()) .mode(mapParamDef.getMode()) .meta(mapParamDef.getMeta()) .build(); } else { return param; } })); Map<String, ParamDefinition> filtered = mapped.entrySet().stream() .filter( p -> { ParamDefinition param = p.getValue(); if (param.getInternalMode() == InternalParamMode.OPTIONAL) { if (param.getValue() == null && param.getExpression() == null) { return false; } else if (param.getType() == ParamType.MAP && param.asMapParamDef().getValue() != null && param.asMapParamDef().getValue().isEmpty()) { return false; } else { return true; } } else { Checks.checkTrue( param.getValue() != null || param.getExpression() != null, String.format( "[%s] is a required parameter (type=[%s])", p.getKey(), param.getType())); return true; } }) .collect(MapHelper.toListMap(Map.Entry::getKey, Map.Entry::getValue)); return cleanIntermediateMetadata(filtered); }
// Verifies that, for every param mode, a param that has a value survives cleanup unchanged.
@Test public void testCleanupAllPresentParams() throws JsonProcessingException { for (ParamMode mode : ParamMode.values()) { Map<String, ParamDefinition> allParams = parseParamDefMap( String.format( "{'optional': {'type': 'STRING', 'mode': '%s', 'value': 'hello'}}", mode.toString())); Map<String, ParamDefinition> cleanedParams = ParamsMergeHelper.cleanupParams(allParams); assertEquals(1, cleanedParams.size()); } }
// Decorates the supplier with the observation and invokes it immediately.
static <T> T executeSupplier(Observation observation, Supplier<T> supplier) { return decorateSupplier(observation, supplier).get(); }
// Verifies the supplier is executed under the observation and that the observation is started and
// finished without recorded errors even though a later stubbed call would throw.
@Test public void shouldExecuteSupplier() throws Throwable { given(helloWorldService.returnHelloWorld()).willReturn("Hello world") .willThrow(new IllegalArgumentException("BAM!")); try { Observations.executeSupplier(observation, helloWorldService::returnHelloWorld); } catch (Exception e) { assertThat(e).isInstanceOf(IllegalArgumentException.class); } assertThatObservationWasStartedAndFinishedWithoutErrors(); then(helloWorldService).should().returnHelloWorld(); }
// True if any property in this environment holds the given value.
public boolean hasValue(String value) { return props.containsValue(value); }
// Verifies that an empty environment reports no value as present.
@Test public void testHasValue() { Environment environment = Environment.empty(); assertEquals(Boolean.FALSE, environment.hasValue("hello")); }
// Reads the entire input stream into a String using the given encoding, falling back to the
// project default charset (Constants.ENCODE) when encoding is null.
public static String toString(InputStream input, String encoding) throws IOException { return (null == encoding) ? toString(new InputStreamReader(input, Constants.ENCODE)) : toString(new InputStreamReader(input, encoding)); }
// Verifies that a UTF-8 input stream containing "test" reads back as "test".
@Test void testToStringV1() { try { InputStream input = IOUtils.toInputStream("test", StandardCharsets.UTF_8); String actualValue = MD5Util.toString(input, "UTF-8"); assertEquals("test", actualValue); } catch (IOException e) { System.out.println(e.toString()); } }
// Builds the root URL of a network service. Non-web services default to a plain-HTTP URL on the
// endpoint authority; web services choose http/https by plain-HTTP detection and append the web
// application root path. The result always ends with a trailing slash.
public static String buildWebApplicationRootUrl(NetworkService networkService) { checkNotNull(networkService); if (!isWebService(networkService)) { return "http://" + NetworkEndpointUtils.toUriAuthority(networkService.getNetworkEndpoint()) + "/"; } String rootUrl = (isPlainHttp(networkService) ? "http://" : "https://") + buildWebUriAuthority(networkService) + buildWebAppRootPath(networkService); return rootUrl.endsWith("/") ? rootUrl : rootUrl + "/"; }
// Verifies an https service with an application root lacking a trailing slash produces
// "https://host:port/test_root/".
@Test public void buildWebApplicationRootUrl_whenHttpsWithoutRoot_buildsExpectedUrl() { assertThat( NetworkServiceUtils.buildWebApplicationRootUrl( NetworkService.newBuilder() .setNetworkEndpoint(forIpAndPort("127.0.0.1", 8443)) .setServiceName("ssl/https") .setServiceContext( ServiceContext.newBuilder() .setWebServiceContext( WebServiceContext.newBuilder().setApplicationRoot("test_root"))) .build())) .isEqualTo("https://127.0.0.1:8443/test_root/"); }
/**
 * Returns a new {@link RequestSender} configured with the given HTTP method.
 * The current client is left untouched: its configuration is duplicated into a fresh
 * finalizer so the method assignment does not leak back.
 *
 * @param method the HTTP method to use; must not be null
 */
public RequestSender request(HttpMethod method) {
    Objects.requireNonNull(method, "method");
    final HttpClientFinalizer copy = new HttpClientFinalizer(new HttpClientConfig(configuration()));
    copy.configuration().method = method;
    return copy;
}
// Regression test for issue 694: buffers handed to the client via send(...) must be fully
// released (refCnt reaches 0) whether the publisher is passed through the BiFunction form or
// directly, even when the same buffer instance is emitted ten times (retain(9) pre-balances
// the extra emissions).
@Test void testIssue694() { disposableServer = createServer() .handle((req, res) -> { req.receive() .subscribe(); return Mono.empty(); }) .bindNow(); HttpClient client = createHttpClientForContextWithPort(); ByteBufAllocator alloc = ByteBufAllocator.DEFAULT; ByteBuf buffer1 = alloc.buffer() .writeInt(1) .retain(9); client.request(HttpMethod.GET) .send((req, out) -> out.send(Flux.range(0, 10) .map(i -> buffer1))) .response() .block(Duration.ofSeconds(30)); assertThat(buffer1.refCnt()).isEqualTo(0); ByteBuf buffer2 = alloc.buffer() .writeInt(1) .retain(9); client.request(HttpMethod.GET) .send(Flux.range(0, 10) .map(i -> buffer2)) .response() .block(Duration.ofSeconds(30)); assertThat(buffer2.refCnt()).isEqualTo(0); }
/**
 * @deprecated kept for compatibility; delegates to the timestamped overload, stamping the
 *     scan with the current wall-clock time and a fresh {@code AltBeacon} instance.
 */
@Deprecated
@Override
public Beacon fromScanData(byte[] scanData, int rssi, BluetoothDevice device) {
    final long timestampMsec = System.currentTimeMillis();
    return fromScanData(scanData, rssi, device, timestampMsec, new AltBeacon());
}
// A scan record whose beacon type code does not match the AltBeacon layout must not parse:
// fromScanData is expected to return null rather than a bogus Beacon.
@Test public void testParseWrongFormatReturnsNothing() { BeaconManager.setDebug(true); org.robolectric.shadows.ShadowLog.stream = System.err; LogManager.d("XXX", "testParseWrongFormatReturnsNothing start"); byte[] bytes = hexStringToByteArray("02011a1aff1801ffff2f234454cf6d4a0fadf2f4911ba9ffa600010002c509"); AltBeaconParser parser = new AltBeaconParser(); Beacon beacon = parser.fromScanData(bytes, -55, null, 123456L); LogManager.d("XXX", "testParseWrongFormatReturnsNothing end"); assertNull("Beacon should be null if not parsed successfully", beacon); }
/**
 * Populates the load-balancer zone metadata key if it is not already present.
 * Resolution order: existing metadata entry (kept as-is), then the plugin configuration,
 * then the registered client info. When no zone can be resolved the metadata is untouched.
 *
 * @param meta mutable instance metadata to fill in
 */
public static void setZone(Map<String, String> meta) {
    final String existingZone = meta.get(SpringRegistryConstants.LOAD_BALANCER_ZONE_META_KEY);
    if (existingZone != null) {
        // Caller already set a zone; just log and keep it.
        LoggerFactory.getLogger().info(String.format(Locale.ENGLISH, "Registry instance with zone [%s]", existingZone));
        return;
    }
    String zone = PluginConfigManager.getPluginConfig(RegisterConfig.class).getZone();
    if (zone == null) {
        // Fall back to the zone reported by the client itself.
        zone = RegisterContext.INSTANCE.getClientInfo().getZone();
    }
    if (zone == null) {
        return;
    }
    meta.put(SpringRegistryConstants.LOAD_BALANCER_ZONE_META_KEY, zone);
    LoggerFactory.getLogger().info(String.format(Locale.ENGLISH, "Registry instance with zone [%s]", zone));
}
// Two cases: metadata that already carries a zone is left unchanged, and empty metadata is
// populated from the (mocked) plugin configuration's zone.
@Test public void testSetZone() { try (MockedStatic<PluginConfigManager> pluginConfigManagerMockedStatic = Mockito.mockStatic(PluginConfigManager.class)) { final HashMap<String, String> meta = new HashMap<>( Collections.singletonMap(SpringRegistryConstants.LOAD_BALANCER_ZONE_META_KEY, "test")); ZoneUtils.setZone(meta); Assert.assertEquals(meta.get(SpringRegistryConstants.LOAD_BALANCER_ZONE_META_KEY), "test"); final RegisterConfig registerConfig = new RegisterConfig(); String zone = "registerZone"; registerConfig.setZone(zone); pluginConfigManagerMockedStatic.when(() -> PluginConfigManager.getPluginConfig(RegisterConfig.class)) .thenReturn(registerConfig); final HashMap<String, String> map = new HashMap<>(); ZoneUtils.setZone(map); Assert.assertEquals(map.get(SpringRegistryConstants.LOAD_BALANCER_ZONE_META_KEY), zone); } }
/**
 * Extracts the bucket name from an object-storage URI — the authority component, e.g. the
 * "bucket" in {@code s3://bucket/path}.
 *
 * @param uri the UFS URI to read; NOTE(review): a URI without an authority would NPE here —
 *        callers appear to always pass scheme://bucket URIs; confirm
 * @return the bucket name
 */
public static String getBucketName(AlluxioURI uri) {
    return uri.getAuthority().toString();
}
// The bucket is the URI authority regardless of scheme (s3, s3a, gs, oss) and regardless of
// any trailing path components.
@Test public void getBucketName() throws Exception { assertEquals("s3-bucket-name", UnderFileSystemUtils.getBucketName(new AlluxioURI("s3://s3-bucket-name/"))); assertEquals("s3a_bucket_name", UnderFileSystemUtils.getBucketName(new AlluxioURI("s3a://s3a_bucket_name/"))); assertEquals("a.b.c", UnderFileSystemUtils.getBucketName(new AlluxioURI("gs://a.b.c/folder/sub-folder/"))); assertEquals("oss", UnderFileSystemUtils.getBucketName(new AlluxioURI("oss://oss/folder/.file"))); }
/**
 * Applies the given HTTP action to the native JEE response: status codes below 400 are set
 * directly, 400+ go through {@code sendError}. A location action additionally sets the
 * Location header; a content action writes its non-null content to the response writer.
 *
 * @return always {@code null}
 * @throws TechnicalException when no action is provided or an I/O error occurs
 */
@Override
public Object adapt(final HttpAction action, final WebContext context) {
    if (action == null) {
        throw new TechnicalException("No action provided");
    }
    final var code = action.getCode();
    final var response = ((JEEContext) context).getNativeResponse();
    if (code < 400) {
        response.setStatus(code);
    } else {
        try {
            response.sendError(code);
        } catch (final IOException e) {
            throw new TechnicalException(e);
        }
    }
    if (action instanceof WithLocationAction withLocationAction) {
        context.setResponseHeader(HttpConstants.LOCATION_HEADER, withLocationAction.getLocation());
    } else if (action instanceof WithContentAction withContentAction) {
        final var content = withContentAction.getContent();
        if (content != null) {
            try {
                response.getWriter().write(content);
            } catch (final IOException e) {
                throw new TechnicalException(e);
            }
        }
    }
    return null;
}
// An OkAction must set HTTP 200 and write its content to the response writer.
@Test public void testActionWithContent() { JEEHttpActionAdapter.INSTANCE.adapt(new OkAction(TestsConstants.VALUE), context); verify(response).setStatus(200); verify(writer).write(TestsConstants.VALUE); }
/**
 * Builds endpoint grouping rules from the loaded OpenAPI definitions.
 * For every service's OpenAPI documents, each path/HTTP-method pair is turned into either a
 * regex-grouped rule (when the path contains template segments like {id}) or a direct lookup
 * (literal paths). Rules are sorted at the end so more specific patterns win at match time.
 * NOTE(review): the raw casts on the "paths" node assume the standard OpenAPI YAML shape —
 * a malformed document would throw ClassCastException here.
 */
public EndpointGroupingRule4Openapi read() { EndpointGroupingRule4Openapi endpointGroupingRule = new EndpointGroupingRule4Openapi(); serviceOpenapiDefMap.forEach((serviceName, openapiDefs) -> { openapiDefs.forEach(openapiData -> { LinkedHashMap<String, LinkedHashMap<String, LinkedHashMap>> paths = (LinkedHashMap<String, LinkedHashMap<String, LinkedHashMap>>) openapiData.get( "paths"); if (paths != null) { paths.forEach((pathString, pathItem) -> { pathItem.keySet().forEach(key -> { String requestMethod = requestMethodsMap.get(key); if (!StringUtil.isEmpty(requestMethod)) { String endpointGroupName = formatEndPointName( pathString, requestMethod, openapiData); String groupRegex = getGroupRegex( pathString, requestMethod, openapiData); if (isTemplatePath(pathString)) { endpointGroupingRule.addGroupedRule( serviceName, endpointGroupName, groupRegex); } else { endpointGroupingRule.addDirectLookup( serviceName, groupRegex, endpointGroupName); } } }); }); } }); }); endpointGroupingRule.sortRulesAll(); return endpointGroupingRule; }
// End-to-end check of the OpenAPI-driven endpoint grouping: direct lookups, template-path
// grouping, per-service overrides of the match rule / name format, match priority between
// overlapping patterns, and fall-through (origin name returned) when nothing matches.
@Test public void testReadingRule() throws IOException { EndpointGroupingRuleReader4Openapi reader = new EndpointGroupingRuleReader4Openapi("openapi-definitions"); EndpointGroupingRule4Openapi rule = reader.read(); EndpointNameGrouping nameGrouping = new EndpointNameGrouping(); nameGrouping.setEndpointGroupingRule4Openapi(rule); //default x-sw-service-name x-sw-endpoint-name-match-rule and x-sw-endpoint-name-format // test direct lookup String endpointName = nameGrouping.format("serviceA", "GET:/products")._1(); Assertions.assertEquals("GET:/products", endpointName); endpointName = nameGrouping.format("serviceA", "GET:/products/123")._1(); Assertions.assertEquals("GET:/products/{id}", endpointName); endpointName = nameGrouping.format("serviceA", "GET:/products/123/abc/ef")._1(); Assertions.assertEquals("GET:/products/123/abc/ef", endpointName); endpointName = nameGrouping.format("serviceA", "GET:/products/123/relatedProducts")._1(); Assertions.assertEquals("GET:/products/{id}/relatedProducts", endpointName); endpointName = nameGrouping.format("serviceA", "GET:/products/1/relatedProducts")._1(); Assertions.assertEquals("GET:/products/{id}/relatedProducts", endpointName); //test custom x-sw-service-name same x-sw-endpoint-name-match-rule and x-sw-endpoint-name-format endpointName = nameGrouping.format("serviceA-1", "POST:/customer")._1(); Assertions.assertEquals("POST:/customer", endpointName); endpointName = nameGrouping.format("serviceA-1", "<GET>:/customers/1")._1(); Assertions.assertEquals("<GET>:/customers/{id}", endpointName); //test different x-sw-endpoint-name-match-rule and x-sw-endpoint-name-format endpointName = nameGrouping.format("serviceB", "GET:/products")._1(); Assertions.assertEquals("/products:<GET>", endpointName); endpointName = nameGrouping.format("serviceB", "GET:/products/asia/cn")._1(); Assertions.assertEquals("/products/{region}/{country}:<GET>", endpointName); //test match priority, not match /products/{region}/{country}:<GET> endpointName =
nameGrouping.format("serviceB", "GET:/products/12/relatedProducts")._1(); Assertions.assertEquals("/products/{id}/relatedProducts:<GET>", endpointName); //test not match, return the origin endpointName = nameGrouping.format("serviceA", "GET:/products/")._1(); Assertions.assertNotEquals("GET:/products", endpointName); endpointName = nameGrouping.format("serviceA", "GET:/products/123/")._1(); Assertions.assertEquals("GET:/products/123/", endpointName); endpointName = nameGrouping.format("serviceC", "GET:/products/123")._1(); Assertions.assertEquals("GET:/products/123", endpointName); endpointName = nameGrouping.format("serviceA", "GET:/products/1/ratings/123")._1(); Assertions.assertEquals("GET:/products/1/ratings/123", endpointName); endpointName = nameGrouping.format("serviceA-1", "<GET>:/customers/1/123")._1(); Assertions.assertEquals("<GET>:/customers/1/123", endpointName); endpointName = nameGrouping.format("serviceB", "/products/:<GET>")._1(); Assertions.assertEquals("/products/:<GET>", endpointName); endpointName = nameGrouping.format("serviceB", "{GET}:/products")._1(); Assertions.assertEquals("{GET}:/products", endpointName); endpointName = nameGrouping.format("serviceB", "/products/1/2/3:<GET>")._1(); Assertions.assertEquals("/products/1/2/3:<GET>", endpointName); }
/**
 * Finds the selector data cached for the given plugin, optionally filtered by selector id.
 *
 * @param pluginName plugin name to look up in the local data cache
 * @param id         optional selector id; when present only matching selectors are returned
 * @return JSON array of matching selectors, or an error message when nothing is cached
 */
@GetMapping("/plugin/selector/findList")
public Mono<String> findListSelector(@RequestParam("pluginName") final String pluginName,
                                     @RequestParam(value = "id", required = false) final String id) {
    final List<SelectorData> selectorDataList = BaseDataCache.getInstance().obtainSelectorData(pluginName);
    if (CollectionUtils.isEmpty(selectorDataList)) {
        return Mono.just("Error: can not find selector data by pluginName :" + pluginName);
    }
    if (StringUtils.isEmpty(id)) {
        // No id filter requested: return everything cached for the plugin.
        return Mono.just(JsonUtils.toJson(selectorDataList));
    }
    final List<SelectorData> filtered = selectorDataList.stream()
            .filter(selectorData -> selectorData.getId().equals(id))
            .collect(Collectors.toList());
    return Mono.just(JsonUtils.toJson(filtered));
}
// Covers the three branches of the endpoint: unfiltered lookup by plugin name, the error
// message for an unknown plugin, and the id-filtered lookup.
@Test public void testFindListSelector() throws Exception { final String selectorPluginName = "testFindListSelector"; final String testFindListSelectorId = "testFindListSelectorId"; final SelectorData selectorData = new SelectorData(); selectorData.setPluginName(selectorPluginName); selectorData.setId(testFindListSelectorId); subscriber.onSelectorSubscribe(selectorData); final Object result = this.mockMvc .perform(MockMvcRequestBuilders.get("/shenyu/plugin/selector/findList") .param("pluginName", selectorPluginName)) .andExpect(status().isOk()) .andReturn() .getAsyncResult(); assertThat(result).isInstanceOf(String.class); @SuppressWarnings("UnstableApiUsage") final List<SelectorData> list = GsonUtils.getGson().fromJson((String) result, new TypeToken<List<SelectorData>>() { }.getType()); final List<String> idList = list.stream().map(SelectorData::getPluginName).collect(Collectors.toList()); assertThat(idList).contains(selectorPluginName); final Object resultError1 = this.mockMvc .perform(MockMvcRequestBuilders.get("/shenyu/plugin/selector/findList") .param("pluginName", "testFindListSelectorError")) .andExpect(status().isOk()) .andReturn() .getAsyncResult(); assertThat(resultError1).isEqualTo("Error: can not find selector data by pluginName :testFindListSelectorError"); final Object result2 = this.mockMvc .perform(MockMvcRequestBuilders.get("/shenyu/plugin/selector/findList") .param("id", "testFindListSelectorId") .param("pluginName", selectorPluginName)) .andExpect(status().isOk()) .andReturn() .getAsyncResult(); final List<SelectorData> list2 = GsonUtils.getGson().fromJson((String) result2, new TypeToken<List<SelectorData>>() { }.getType()); final List<String> selectorDataIds = list2.stream().map(SelectorData::getId).collect(Collectors.toList()); assertThat(selectorDataIds).contains(testFindListSelectorId); }
/** Returns the host provisioner configured for this deploy state. */
public HostProvisioner getProvisioner() {
    return provisioner;
}
// The provisioner passed to the builder must be the exact instance returned by the built state.
@Test void testProvisionerIsSet() { DeployState.Builder builder = new DeployState.Builder(); HostProvisioner provisioner = new InMemoryProvisioner(true, false, "foo.yahoo.com"); builder.modelHostProvisioner(provisioner); DeployState state = builder.build(); assertEquals(provisioner, state.getProvisioner()); }
/**
 * Resolves the configured batch size as an int, substituting environment variables first.
 * Falls back to {@code DEFAULT_BATCH_SIZE} when the value is missing or not a valid number.
 *
 * @param vars variable space used to expand variables in the configured value
 */
public int getBatchSizeInt( VariableSpace vars ) {
    final String resolved = vars.environmentSubstitute( this.batchSize );
    return Const.toInt( resolved, DEFAULT_BATCH_SIZE );
}
// With no batch size configured, the default must be returned.
@Test public void testGetBatchSizeInt() { ElasticSearchBulkMeta esbm = new ElasticSearchBulkMeta(); int batchSize = esbm.getBatchSizeInt( new VariableSpaceImpl() ); assertEquals( batchSize, ElasticSearchBulkMeta.DEFAULT_BATCH_SIZE ); }
@NonNull @Override public HealthResponse healthResponse(final Map<String, Collection<String>> queryParams) { final String type = queryParams.getOrDefault(CHECK_TYPE_QUERY_PARAM, Collections.emptyList()) .stream() .findFirst() .orElse(null); final Collection<HealthStateView> views = getViews(queryParams); final String responseBody; try { responseBody = mapper.writeValueAsString(views); } catch (final Exception e) { LOGGER.error("Failed to serialize health state views: {}", views, e); throw new RuntimeException(e); } final boolean healthy = healthStatusChecker.isHealthy(type); final int status; if (healthy) { // HTTP OK status = 200; } else { // HTTP Service unavailable status = 503; } return new HealthResponse(healthy, responseBody, MEDIA_TYPE, status); }
// When the ObjectMapper fails to serialize the views, the provider must rethrow as a
// RuntimeException carrying the original cause, and must not consult the status checker.
@Test void shouldThrowExceptionWhenJsonProcessorExceptionOccurs() throws IOException { // given final ObjectMapper mapperMock = mock(ObjectMapper.class); this.jsonHealthResponseProvider = new JsonHealthResponseProvider(healthStatusChecker, healthStateAggregator, mapperMock); final HealthStateView view = new HealthStateView("foo", true, HealthCheckType.READY, true); final Map<String, Collection<String>> queryParams = Collections.singletonMap( JsonHealthResponseProvider.NAME_QUERY_PARAM, Collections.singleton(view.getName())); final JsonMappingException exception = JsonMappingException.fromUnexpectedIOE(new IOException("uh oh")); // when when(healthStateAggregator.healthStateView(view.getName())).thenReturn(Optional.of(view)); when(mapperMock.writeValueAsString(any())) .thenThrow(exception); // then assertThatThrownBy(() -> jsonHealthResponseProvider.healthResponse(queryParams)) .isInstanceOf(RuntimeException.class) .hasCauseReference(exception); verifyNoInteractions(healthStatusChecker); }
public boolean isValid(String value) { if (value == null) { return false; } URI uri; // ensure value is a valid URI try { uri = new URI(value); } catch (URISyntaxException e) { return false; } // OK, perfom additional validation String scheme = uri.getScheme(); if (!isValidScheme(scheme)) { return false; } String authority = uri.getRawAuthority(); if ("file".equals(scheme) && (authority == null || "".equals(authority))) { // Special case - file: allows an empty authority return true; // this is a local file - nothing more to do here } else if ("file".equals(scheme) && authority != null && authority.contains(":")) { return false; } else { // Validate the authority if (!isValidAuthority(authority)) { return false; } } if (!isValidPath(uri.getRawPath())) { return false; } if (!isValidQuery(uri.getRawQuery())) { return false; } if (!isValidFragment(uri.getRawFragment())) { return false; } return true; }
// Internationalized domain names: a single trailing dot on the host is valid, multiple
// trailing dots are not (VALIDATOR-339).
@Test public void testValidator339IDN() { UrlValidator urlValidator = new UrlValidator(); assertTrue(urlValidator.isValid("http://президент.рф/WORLD/?hpt=sitenav")); // without assertTrue(urlValidator.isValid("http://президент.рф./WORLD/?hpt=sitenav")); // with assertFalse(urlValidator.isValid("http://президент.рф..../")); // very dotty assertFalse(urlValidator.isValid("http://президент.рф.../")); // triply dotty assertFalse(urlValidator.isValid("http://президент.рф../")); // doubly dotty }
/**
 * Derives the Python-dependency configuration from the given config.
 *
 * @param config source configuration holding the python dependency options
 * @return a fresh Configuration populated by the dependency manager
 */
public static Configuration configurePythonDependencies(ReadableConfig config) {
    final Configuration pythonDependencyConfig = new Configuration();
    new PythonDependencyManager(config).applyToConfiguration(pythonDependencyConfig);
    return pythonDependencyConfig;
}
// Python files (including a duplicate entry) must be deduplicated into content-hashed cache
// keys, registered as distributed-cache files, and mapped back to their base file names in
// the derived configuration.
@Test void testPythonFiles() { Configuration config = new Configuration(); config.set( PythonOptions.PYTHON_FILES, "hdfs:///tmp_dir/test_file1.py,tmp_dir/test_file2.py,tmp_dir/test_dir,hdfs:///tmp_dir/test_file1.py"); Configuration actual = configurePythonDependencies(config); Map<String, String> expectedCachedFiles = new HashMap<>(); expectedCachedFiles.put( "python_file_83bbdaee494ad7d9b334c02ec71dc86a0868f7f8e49d1249a37c517dc6ee15a7", "hdfs:///tmp_dir/test_file1.py"); expectedCachedFiles.put( "python_file_e57a895cb1256500098be0874128680cd9f56000d48fcd393c48d6371bd2d947", "tmp_dir/test_file2.py"); expectedCachedFiles.put( "python_file_e56bc55ff643576457b3d012b2bba888727c71cf05a958930f2263398c4e9798", "tmp_dir/test_dir"); verifyCachedFiles(expectedCachedFiles, config); Configuration expectedConfiguration = new Configuration(); expectedConfiguration.set(PYTHON_FILES_DISTRIBUTED_CACHE_INFO, new HashMap<>()); expectedConfiguration .get(PYTHON_FILES_DISTRIBUTED_CACHE_INFO) .put( "python_file_83bbdaee494ad7d9b334c02ec71dc86a0868f7f8e49d1249a37c517dc6ee15a7", "test_file1.py"); expectedConfiguration .get(PYTHON_FILES_DISTRIBUTED_CACHE_INFO) .put( "python_file_e57a895cb1256500098be0874128680cd9f56000d48fcd393c48d6371bd2d947", "test_file2.py"); expectedConfiguration .get(PYTHON_FILES_DISTRIBUTED_CACHE_INFO) .put( "python_file_e56bc55ff643576457b3d012b2bba888727c71cf05a958930f2263398c4e9798", "test_dir"); verifyConfiguration(expectedConfiguration, actual); }
public FEELFnResult<Boolean> invoke(@ParameterName( "list" ) List list) { if ( list == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "list", "cannot be null")); } boolean result = true; boolean containsNull = false; // Spec. definition: return false if any item is false, else true if all items are true, else null for ( final Object element : list ) { if (element != null && !(element instanceof Boolean)) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "an element in the list is not a Boolean")); } else { if (element != null) { result &= (Boolean) element; } else if (!containsNull) { containsNull = true; } } } if (containsNull && result) { return FEELFnResult.ofResult( null ); } else { return FEELFnResult.ofResult( result ); } }
// all([true, true]) must be true.
@Test void invokeListParamReturnTrue() { FunctionTestUtil.assertResult(allFunction.invoke(Arrays.asList(Boolean.TRUE, Boolean.TRUE)), true); }
/**
 * Strips the document id prefix, returning the raw entity UUID; ids without the prefix are
 * returned unchanged.
 *
 * @param id authorization document id, possibly prefixed with {@code ID_PREFIX}
 */
public static String entityUuidOf(String id) {
    return id.startsWith(ID_PREFIX) ? id.substring(ID_PREFIX.length()) : id;
}
// The "auth_" prefix must be stripped: the bare prefix maps to the empty string, and a
// prefixed random id maps back to the id.
@Test public void projectUuidOf_returns_substring_if_starts_with_id_prefix() { assertThat(AuthorizationDoc.entityUuidOf("auth_")).isEmpty(); String id = randomAlphabetic(1 + new Random().nextInt(10)); assertThat(AuthorizationDoc.entityUuidOf("auth_" + id)).isEqualTo(id); }
/**
 * Returns an unmodifiable view of all config definitions, keyed by definition key.
 * Note: this is a view over the underlying map, not a snapshot copy.
 */
@Override
public Map<ConfigDefinitionKey, ConfigDefinition> getConfigDefinitions() {
    return Collections.unmodifiableMap(configDefinitions);
}
// Two .def files written to a temp directory must both be visible through the repo.
@Test public void testThatRepoIsCorrectlyInitialized() throws IOException { File topDir = folder.newFolder(); File defDir = new File(topDir, "classes"); defDir.mkdir(); addFile(defDir, new ConfigDefinitionKey("foo", "foons"), "namespace=foons\nval int\n"); addFile(defDir, new ConfigDefinitionKey("bar", "barns"), "namespace=barns\nval string\n"); ConfigDefinitionRepo repo = new StaticConfigDefinitionRepo(defDir); assertEquals(2, repo.getConfigDefinitions().size()); }
/**
 * Returns the length of the backing bitset array.
 * NOTE(review): the constants compared against in tests (LOWER_BOUND_BYTES, 1024) suggest the
 * unit is bytes — confirm against the field's declaration.
 */
@Override
public int getBitsetSize() {
    return this.bitset.length;
}
// Requested sizes are clamped/rounded: 0 snaps up to the lower bound, 1000 rounds to 1024.
@Test public void testConstructor() { BloomFilter bloomFilter1 = new BlockSplitBloomFilter(0); assertEquals(bloomFilter1.getBitsetSize(), BlockSplitBloomFilter.LOWER_BOUND_BYTES); BloomFilter bloomFilter3 = new BlockSplitBloomFilter(1000); assertEquals(bloomFilter3.getBitsetSize(), 1024); }
/**
 * Exports the grok pattern identified by the descriptor as a content-pack entity.
 * Missing patterns are logged at debug level and reported as an empty Optional rather than
 * an error.
 */
@Override
public Optional<Entity> exportEntity(EntityDescriptor entityDescriptor, EntityDescriptorIds entityDescriptorIds) {
    final ModelId modelId = entityDescriptor.id();
    try {
        final GrokPattern pattern = grokPatternService.load(modelId.id());
        return Optional.of(exportNativeEntity(pattern, entityDescriptorIds));
    } catch (NotFoundException notFound) {
        LOG.debug("Couldn't find grok pattern {}", entityDescriptor, notFound);
        return Optional.empty();
    }
}
// Exporting a saved grok pattern must yield an EntityV1 with the descriptor-mapped id, the
// grok-pattern type, and a data payload carrying the name and pattern.
@Test public void exportEntity() throws ValidationException { grokPatternService.save(GrokPattern.create("Test1", "[a-z]+")); grokPatternService.save(GrokPattern.create("Test2", "[a-z]+")); final EntityDescriptor descriptor = EntityDescriptor.create("1", ModelTypes.GROK_PATTERN_V1); final EntityDescriptorIds entityDescriptorIds = EntityDescriptorIds.of(descriptor); final Map<String, Object> entity = ImmutableMap.of( "name", "Test1", "pattern", "[a-z]+"); final JsonNode entityData = objectMapper.convertValue(entity, JsonNode.class); final Optional<Entity> collectedEntity = facade.exportEntity(descriptor, entityDescriptorIds); assertThat(collectedEntity) .isPresent(); final EntityV1 entityV1 = (EntityV1) collectedEntity.get(); assertThat(entityV1.id()).isEqualTo(ModelId.of(entityDescriptorIds.get(descriptor).orElse(null))); assertThat(entityV1.type()).isEqualTo(ModelTypes.GROK_PATTERN_V1); assertThat(entityV1.data()).isEqualTo(entityData); }
/** Debug representation listing every component of the change event. */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder("QGChangeEvent{");
    sb.append("project=").append(toString(project));
    sb.append(", branch=").append(toString(branch));
    sb.append(", analysis=").append(toString(analysis));
    sb.append(", projectConfiguration=").append(projectConfiguration);
    sb.append(", previousStatus=").append(previousStatus);
    sb.append(", qualityGateSupplier=").append(qualityGateSupplier);
    return sb.append('}').toString();
}
// Pins the exact toString() layout of QGChangeEvent, including nested component rendering.
@Test public void overrides_toString() { QGChangeEvent underTest = new QGChangeEvent(project, branch, analysis, configuration, previousStatus, supplier); assertThat(underTest) .hasToString("QGChangeEvent{project=bar:foo, branch=BRANCH:bar:doh:zop, analysis=pto:8999999765" + ", projectConfiguration=" + configuration.toString() + ", previousStatus=" + previousStatus + ", qualityGateSupplier=" + supplier + "}"); }
/**
 * Plans the replica migrations needed to take a partition from {@code oldReplicas} to
 * {@code newReplicas}, emitting one {@code callback.migrate(...)} per decision.
 * Decision kinds visible below: removal (new owner is null), COPY (fresh replica from the
 * owner), SHIFT UP (replica moves to a lower index), MOVE (replica changes owner at the same
 * index) and SHIFT DOWN (current owner moves to a higher index while a new owner takes its
 * place). Cyclic replica movements are broken up first via {@code fixCycle}; the working
 * {@code state} array tracks ownership as decisions are applied, and the final assert checks
 * that the plan converges to {@code newReplicas}.
 */
@SuppressWarnings({"checkstyle:npathcomplexity", "checkstyle:cyclomaticcomplexity", "checkstyle:methodlength"})
void planMigrations(int partitionId, PartitionReplica[] oldReplicas, PartitionReplica[] newReplicas,
        MigrationDecisionCallback callback) {
    assert oldReplicas.length == newReplicas.length : "Replica addresses with different lengths! Old: "
            + Arrays.toString(oldReplicas) + ", New: " + Arrays.toString(newReplicas);
    if (logger.isFinestEnabled()) {
        logger.finest("partitionId=%d, Initial state: %s", partitionId, Arrays.toString(oldReplicas));
        logger.finest("partitionId=%d, Final state: %s", partitionId, Arrays.toString(newReplicas));
    }
    initState(oldReplicas);
    assertNoDuplicate(partitionId, oldReplicas, newReplicas);

    // fix cyclic partition replica movements
    if (fixCycle(oldReplicas, newReplicas)) {
        if (logger.isFinestEnabled()) {
            logger.finest("partitionId=%d, Final state (after cycle fix): %s", partitionId,
                    Arrays.toString(newReplicas));
        }
    }

    int currentIndex = 0;
    while (currentIndex < oldReplicas.length) {
        if (logger.isFinestEnabled()) {
            logger.finest("partitionId=%d, Current index: %d, state: %s", partitionId, currentIndex,
                    Arrays.toString(state));
        }
        assertNoDuplicate(partitionId, oldReplicas, newReplicas);

        if (newReplicas[currentIndex] == null) {
            if (state[currentIndex] != null) {
                // replica owner is removed and no one will own this replica
                logger.finest("partitionId=%d, New address is null at index: %d", partitionId, currentIndex);
                callback.migrate(state[currentIndex], currentIndex, -1, null, -1, -1);
                state[currentIndex] = null;
            }
            currentIndex++;
            continue;
        }

        if (state[currentIndex] == null) {
            int i = getReplicaIndex(state, newReplicas[currentIndex]);
            if (i == -1) {
                // fresh replica copy is needed, so COPY replica to newReplicas[currentIndex] from partition owner
                logger.finest("partitionId=%d, COPY %s to index: %d", partitionId, newReplicas[currentIndex],
                        currentIndex);
                callback.migrate(null, -1, -1, newReplicas[currentIndex], -1, currentIndex);
                state[currentIndex] = newReplicas[currentIndex];
                currentIndex++;
                continue;
            }

            if (i > currentIndex) {
                // SHIFT UP replica from i to currentIndex, copy data from partition owner
                logger.finest("partitionId=%d, SHIFT UP-2 %s from old addresses index: %d to index: %d",
                        partitionId, state[i], i, currentIndex);
                callback.migrate(null, -1, -1, state[i], i, currentIndex);
                state[currentIndex] = state[i];
                state[i] = null;
                // note: currentIndex is intentionally NOT advanced — the slot is re-examined
                continue;
            }

            throw new AssertionError("partitionId=" + partitionId
                    + "Migration decision algorithm failed during SHIFT UP! INITIAL: " + Arrays.toString(oldReplicas)
                    + ", CURRENT: " + Arrays.toString(state) + ", FINAL: " + Arrays.toString(newReplicas));
        }

        if (newReplicas[currentIndex].equals(state[currentIndex])) {
            // no change, no action needed
            currentIndex++;
            continue;
        }

        if (getReplicaIndex(newReplicas, state[currentIndex]) == -1
                && getReplicaIndex(state, newReplicas[currentIndex]) == -1) {
            // MOVE partition replica from its old owner to new owner
            logger.finest("partitionId=%d, MOVE %s to index: %d", partitionId, newReplicas[currentIndex],
                    currentIndex);
            callback.migrate(state[currentIndex], currentIndex, -1, newReplicas[currentIndex], -1, currentIndex);
            state[currentIndex] = newReplicas[currentIndex];
            currentIndex++;
            continue;
        }

        if (getReplicaIndex(state, newReplicas[currentIndex]) == -1) {
            int newIndex = getReplicaIndex(newReplicas, state[currentIndex]);
            assert newIndex > currentIndex : "partitionId=" + partitionId
                    + ", Migration decision algorithm failed during SHIFT DOWN! INITIAL: "
                    + Arrays.toString(oldReplicas) + ", CURRENT: " + Arrays.toString(state)
                    + ", FINAL: " + Arrays.toString(newReplicas);

            if (state[newIndex] == null) {
                // it is a SHIFT DOWN
                logger.finest("partitionId=%d, SHIFT DOWN %s to index: %d, COPY %s to index: %d", partitionId,
                        state[currentIndex], newIndex, newReplicas[currentIndex], currentIndex);
                callback.migrate(state[currentIndex], currentIndex, newIndex, newReplicas[currentIndex], -1,
                        currentIndex);
                state[newIndex] = state[currentIndex];
            } else {
                logger.finest("partitionId=%d, MOVE-3 %s to index: %d", partitionId, newReplicas[currentIndex],
                        currentIndex);
                callback.migrate(state[currentIndex], currentIndex, -1, newReplicas[currentIndex], -1, currentIndex);
            }
            state[currentIndex] = newReplicas[currentIndex];
            currentIndex++;
            continue;
        }

        // Complex case: delegate to the index-aware overload for this slot.
        planMigrations(partitionId, oldReplicas, newReplicas, callback, currentIndex);
    }

    assert Arrays.equals(state, newReplicas) : "partitionId=" + partitionId
            + ", Migration decisions failed! INITIAL: " + Arrays.toString(oldReplicas) + " CURRENT: "
            + Arrays.toString(state) + ", FINAL: " + Arrays.toString(newReplicas);
}
// When a replica slot goes from an owner to null, the planner must emit exactly one removal
// migration (destination null, destination indices -1).
@Test public void test_MOVE_toNull() throws UnknownHostException { final PartitionReplica[] oldReplicas = { new PartitionReplica(new Address("localhost", 5701), uuids[0]), new PartitionReplica(new Address("localhost", 5702), uuids[1]), new PartitionReplica(new Address("localhost", 5703), uuids[2]), new PartitionReplica(new Address("localhost", 5705), uuids[4]), null, null, null, }; final PartitionReplica[] newReplicas = { new PartitionReplica(new Address("localhost", 5701), uuids[0]), new PartitionReplica(new Address("localhost", 5702), uuids[1]), new PartitionReplica(new Address("localhost", 5703), uuids[2]), null, null, null, null, }; migrationPlanner.planMigrations(0, oldReplicas, newReplicas, callback); verify(callback).migrate(new PartitionReplica(new Address("localhost", 5705), uuids[4]), 3, -1, null, -1, -1); }
/**
 * Opens a new commit-work stream, wiring the dispatcher's Windmill service stub to the
 * commit-work throttle timer.
 */
@Override
public CommitWorkStream commitWorkStream() {
    return windmillStreamFactory.createCommitWorkStream(
        dispatcherClient.getWindmillServiceStub(), throttleTimers.commitWorkThrottleTimer());
}
// Streams 500 commit requests over a commitWorkStream against an in-process gRPC service,
// verifying each commit is acknowledged and that the stream terminates after halfClose.
@Test public void testStreamingCommit() throws Exception { ConcurrentHashMap<Long, WorkItemCommitRequest> commitRequests = new ConcurrentHashMap<>(); serviceRegistry.addService( new CloudWindmillServiceV1Alpha1ImplBase() { @Override public StreamObserver<StreamingCommitWorkRequest> commitWorkStream( StreamObserver<StreamingCommitResponse> responseObserver) { return getTestCommitStreamObserver(responseObserver, commitRequests); } }); // Make the commit requests, waiting for each of them to be verified and acknowledged. CommitWorkStream stream = client.commitWorkStream(); commitWorkTestHelper(stream, commitRequests, 0, 500); stream.halfClose(); assertTrue(stream.awaitTermination(30, TimeUnit.SECONDS)); }
/**
 * Splits the inline expression, evaluates each segment with Groovy, and returns the
 * flattened results; a null or empty expression yields an empty list.
 */
@Override
public List<String> splitAndEvaluate() {
    if (Strings.isNullOrEmpty(inlineExpression)) {
        return Collections.emptyList();
    }
    // Expand placeholders first, then split and evaluate each segment.
    return flatten(evaluate(GroovyUtils.split(handlePlaceHolder(inlineExpression))));
}
// A ${null} placeholder evaluates to the empty string, yielding the single value "t_order_".
@Test void assertEvaluateForNull() { List<String> expected = TypedSPILoader.getService(InlineExpressionParser.class, "GROOVY", PropertiesBuilder.build( new PropertiesBuilder.Property(InlineExpressionParser.INLINE_EXPRESSION_KEY, "t_order_${null}"))).splitAndEvaluate(); assertThat(expected.size(), is(1)); assertThat(expected, hasItems("t_order_")); }
/**
 * Scans every classpath: glue path for classes implementing {@code InjectorSource} and
 * registers each distinct match with the container. Non-classpath URIs are ignored.
 */
@Override
public void loadGlue(Glue glue, List<URI> gluePaths) {
    gluePaths.stream()
            .filter(path -> CLASSPATH_SCHEME.equals(path.getScheme()))
            .flatMap(path -> classFinder.scanForClassesInPackage(ClasspathSupport.packageName(path)).stream())
            .filter(candidate -> InjectorSource.class.isAssignableFrom(candidate))
            .distinct()
            .forEach(candidate -> container.addClass(candidate));
}
// Scanning the integration package must discover and register the InjectorSource impl.
@Test void finds_injector_source_impls_by_classpath_url() { GuiceBackend backend = new GuiceBackend(factory, classLoader); backend.loadGlue(glue, singletonList(URI.create("classpath:io/cucumber/guice/integration"))); verify(factory).addClass(YourInjectorSource.class); }
/** Runs the analysis once per supported language (xoo and xoo2), each against its own rule repository. */
@Override
public void execute(SensorContext context) {
    analyse(context, Xoo.KEY, XooRulesDefinition.XOO_REPOSITORY);
    analyse(context, Xoo2.KEY, XooRulesDefinition.XOO2_REPOSITORY);
}
// With the effort-to-fix property set, the sensor must raise one issue per line, each
// carrying the configured gap value.
@Test public void testProvideGap() throws IOException { DefaultInputFile inputFile = new TestInputFileBuilder("foo", "src/Foo.xoo") .setLanguage(Xoo.KEY) .initMetadata("a\nb\nc\nd\ne\nf\ng\nh\ni\n") .build(); SensorContextTester context = SensorContextTester.create(temp.newFolder()); context.fileSystem().add(inputFile); context.setSettings(new MapSettings().setProperty(OneIssuePerLineSensor.EFFORT_TO_FIX_PROPERTY, "1.2")); sensor.execute(context); assertThat(context.allIssues()).hasSize(10); // One issue per line for (Issue issue : context.allIssues()) { assertThat(issue.gap()).isEqualTo(1.2d); } }
/**
 * Appends the handler to the end of the chain. The very first handler added becomes both
 * this chain head's next element and the tail; subsequent handlers are linked after the
 * current tail.
 *
 * @param handler handler to append; becomes the new tail
 */
public void addLastHandler(AbstractChainHandler handler) {
    if (tail != null) {
        tail.setNext(handler);
        tail = handler;
        return;
    }
    // Empty chain: the new handler is both the head's successor and the tail.
    tail = handler;
    setNext(handler);
}
// End-to-end test of the handler chain: verifies that the three handlers are
// linked behind the head in insertion order and that each lifecycle callback
// (onBefore/onThrow/onResult) reaches every handler exactly once.
@Test
public void testChain() {
    final HandlerChain handlerChain = new HandlerChain();
    final BulkheadRequestHandler bulkheadRequestHandler = Mockito.spy(BulkheadRequestHandler.class);
    final CircuitBreakerRequestHandler circuitBreakerClientReqHandler = Mockito.spy(CircuitBreakerRequestHandler.class);
    final FaultRequestHandler faultRequestHandler = Mockito.spy(FaultRequestHandler.class);
    handlerChain.addLastHandler(bulkheadRequestHandler);
    handlerChain.addLastHandler(circuitBreakerClientReqHandler);
    handlerChain.addLastHandler(faultRequestHandler);
    // Walk the chain: exactly 3 handlers must be reachable from the head.
    int num = 3;
    AbstractChainHandler handler = handlerChain;
    while (handler.getNext() != null) {
        num--;
        handler = handler.getNext();
    }
    assertEquals(0, num);
    // test call
    final RequestContext requestContext = ChainContext.getThreadLocalContext("test");
    final HttpRequestEntity build = new Builder().setRequestType(RequestType.CLIENT).setApiPath("/api").build();
    requestContext.setRequestEntity(build);
    final Set<String> businessNames = Collections.singleton("test");
    final Exception exception = new IllegalArgumentException("error");
    final Object result = new Object();
    handlerChain.onBefore(requestContext, businessNames);
    handlerChain.onThrow(requestContext, businessNames, exception);
    handlerChain.onResult(requestContext, businessNames, result);
    // Each callback must have propagated to every handler exactly once.
    Mockito.verify(bulkheadRequestHandler, Mockito.times(1)).onBefore(requestContext, businessNames);
    Mockito.verify(bulkheadRequestHandler, Mockito.times(1)).onThrow(requestContext, businessNames, exception);
    Mockito.verify(bulkheadRequestHandler, Mockito.times(1)).onResult(requestContext, businessNames, result);
    Mockito.verify(circuitBreakerClientReqHandler, Mockito.times(1)).onBefore(requestContext, businessNames);
    Mockito.verify(circuitBreakerClientReqHandler, Mockito.times(1)).onThrow(requestContext, businessNames, exception);
    Mockito.verify(circuitBreakerClientReqHandler, Mockito.times(1)).onResult(requestContext, businessNames, result);
    Mockito.verify(faultRequestHandler, Mockito.times(1)).onBefore(requestContext, businessNames);
    Mockito.verify(faultRequestHandler, Mockito.times(1)).onThrow(requestContext, businessNames, exception);
    Mockito.verify(faultRequestHandler, Mockito.times(1)).onResult(requestContext, businessNames, result);
}
/**
 * Returns a deserializer for ICMPv6 Neighbor Advertisement payloads
 * (RFC 4861 section 4.4). Wire layout: a 32-bit word carrying the R/S/O flags
 * plus reserved bits, a 16-byte IPv6 target address, then optional
 * Neighbor Discovery options.
 */
public static Deserializer<NeighborAdvertisement> deserializer() {
    return (data, offset, length) -> {
        checkInput(data, offset, length, HEADER_LENGTH);
        NeighborAdvertisement neighborAdvertisement = new NeighborAdvertisement();
        ByteBuffer bb = ByteBuffer.wrap(data, offset, length);
        int iscratch;
        // First 32 bits: Router (bit 31), Solicited (bit 30), Override (bit 29);
        // the remaining 29 bits are reserved and ignored.
        iscratch = bb.getInt();
        neighborAdvertisement.routerFlag = (byte) (iscratch >> 31 & 0x1);
        neighborAdvertisement.solicitedFlag = (byte) (iscratch >> 30 & 0x1);
        neighborAdvertisement.overrideFlag = (byte) (iscratch >> 29 & 0x1);
        // 128-bit IPv6 target address follows the flags word.
        bb.get(neighborAdvertisement.targetAddress, 0, Ip6Address.BYTE_LENGTH);
        // Any trailing bytes are Neighbor Discovery options
        // (e.g. target link-layer address).
        if (bb.limit() - bb.position() > 0) {
            NeighborDiscoveryOptions options = NeighborDiscoveryOptions.deserializer()
                    .deserialize(data, bb.position(), bb.limit() - bb.position());
            for (NeighborDiscoveryOptions.Option option : options.options()) {
                neighborAdvertisement.addOption(option.type(), option.data());
            }
        }
        return neighborAdvertisement;
    };
}
// The deserializer must reject truncated or otherwise malformed byte arrays
// with the expected exceptions rather than crashing.
@Test
public void testDeserializeBadInput() throws Exception {
    PacketTestUtils.testDeserializeBadInput(NeighborAdvertisement.deserializer());
}
/**
 * Records an authentication failure in the debug log.
 * The request and exception are validated unconditionally; the log line is
 * only built when DEBUG is enabled, so the formatting work is skipped otherwise.
 *
 * @param request the HTTP request that triggered the login attempt; must not be null
 * @param e the authentication failure; must not be null
 */
@Override
public void loginFailure(HttpRequest request, AuthenticationException e) {
    checkRequest(request);
    requireNonNull(e, "AuthenticationException can't be null");
    if (LOGGER.isDebugEnabled()) {
        Source source = e.getSource();
        // Login is truncated via preventLogFlood to keep the log line bounded.
        LOGGER.debug("login failure [cause|{}][method|{}][provider|{}|{}][IP|{}|{}][login|{}]",
                emptyIfNull(e.getMessage()),
                source.getMethod(),
                source.getProvider(),
                source.getProviderName(),
                request.getRemoteAddr(),
                getAllIps(request),
                preventLogFlood(emptyIfNull(e.getLogin())));
    }
}
// Logins of 128+ characters must be truncated in the debug log and suffixed
// with "...(<actual length>)" to prevent log flooding.
@Test
public void login_failure_prevents_log_flooding_on_login_starting_from_128_chars() {
    AuthenticationException exception = newBuilder()
            .setSource(Source.realm(Method.BASIC, "some provider name"))
            .setMessage("pop")
            .setLogin(LOGIN_129_CHARS)
            .build();
    underTest.loginFailure(mockRequest(), exception);
    verifyLog("login failure [cause|pop][method|BASIC][provider|REALM|some provider name][IP||][login|012345678901234567890123456789012345678901234567890123456789"
            + "01234567890123456789012345678901234567890123456789012345678901234567...(129)]", Set.of("logout", "login success"));
}
/**
 * Returns the single database name referenced by this context, if any.
 *
 * @return the database name, or {@link Optional#empty()} when no database is referenced
 * @throws IllegalStateException if more than one distinct database is referenced
 */
public Optional<String> getDatabaseName() {
    Preconditions.checkState(databaseNames.size() <= 1, "Can not support multiple different database.");
    if (databaseNames.isEmpty()) {
        return Optional.empty();
    }
    return Optional.of(databaseNames.iterator().next());
}
// Two segments for the same table owned by different databases must make
// getDatabaseName() fail with IllegalStateException.
@Test
void assertGetSchemaNameWithDifferentSchemaAndSameTable() {
    SimpleTableSegment tableSegment1 = createTableSegment("table_1", "tbl_1");
    tableSegment1.setOwner(new OwnerSegment(0, 0, new IdentifierValue("sharding_db_1")));
    SimpleTableSegment tableSegment2 = createTableSegment("table_1", "tbl_1");
    tableSegment2.setOwner(new OwnerSegment(0, 0, new IdentifierValue("sharding_db_2")));
    assertThrows(IllegalStateException.class, () -> new TablesContext(Arrays.asList(tableSegment1, tableSegment2),
            TypedSPILoader.getService(DatabaseType.class, "FIXTURE"), DefaultDatabase.LOGIC_NAME).getDatabaseName());
}
/**
 * Schedules {@code command} to be submitted to {@code executor} after the given delay.
 * Returns a disabled future when the underlying scheduled executor is already shut down.
 *
 * @param executor the executor that will ultimately run the command
 * @param command the task to run
 * @param delay how long to wait before submitting
 * @param unit the time unit of {@code delay}
 * @return a future for the scheduled submission, or a disabled future if shut down
 */
@Override
public Future<?> schedule(Executor executor, Runnable command, long delay, TimeUnit unit) {
    requireNonNull(executor);
    requireNonNull(command);
    requireNonNull(unit);
    if (scheduledExecutorService.isShutdown()) {
        return DisabledFuture.INSTANCE;
    }
    // The scheduled task only hands the command off to the target executor;
    // submission failures are logged and rethrown so the future reflects them.
    Runnable submission = () -> {
        try {
            executor.execute(command);
        } catch (Throwable t) {
            logger.log(Level.WARNING, "Exception thrown when submitting scheduled task", t);
            throw t;
        }
    };
    return scheduledExecutorService.schedule(submission, delay, unit);
}
// schedule() must delegate to the underlying ScheduledExecutorService and,
// when the captured task eventually runs, forward the command to the supplied executor.
@Test
public void scheduledExecutorService_schedule() {
    ScheduledExecutorService scheduledExecutor = Mockito.mock();
    var task = ArgumentCaptor.forClass(Runnable.class);
    Executor executor = Mockito.mock();
    Runnable command = () -> {};
    var scheduler = Scheduler.forScheduledExecutorService(scheduledExecutor);
    var future = scheduler.schedule(executor, command, 1L, TimeUnit.MINUTES);
    assertThat(future).isNotSameInstanceAs(DisabledFuture.INSTANCE);
    verify(scheduledExecutor).isShutdown();
    verify(scheduledExecutor).schedule(task.capture(), eq(1L), eq(TimeUnit.MINUTES));
    verifyNoMoreInteractions(scheduledExecutor);
    // Running the captured task must hand the command to the target executor.
    task.getValue().run();
    verify(executor).execute(command);
    verifyNoMoreInteractions(executor);
}
/**
 * Returns the human-readable name of this analyzer.
 *
 * @return the analyzer name
 */
@Override
public String getName() {
    return "Dart Package Analyzer";
}
// Analyzing the addressbook pubspec.yaml fixture must yield exactly one
// dependency, "protobuf" — presumably unpinned in the fixture, hence the
// empty version string (verify against the test resource).
@Test
public void testDartPubspecYamlAnalyzerAddressbook() throws AnalysisException {
    final Engine engine = new Engine(getSettings());
    final Dependency result = new Dependency(BaseTest.getResourceAsFile(this, "dart.addressbook/pubspec.yaml"));
    dartAnalyzer.analyze(result, engine);
    assertThat(engine.getDependencies().length, equalTo(1));
    Dependency dependency1 = engine.getDependencies()[0];
    assertThat(dependency1.getName(), equalTo("protobuf"));
    assertThat(dependency1.getVersion(), equalTo(""));
}
/**
 * Lists the Iceberg tables visible within the given scope by issuing a
 * {@code SHOW ICEBERG TABLES} query against Snowflake.
 *
 * @param scope a ROOT, DATABASE, or SCHEMA identifier restricting the listing
 * @return identifiers of the discovered tables (all of type TABLE)
 * @throws IllegalArgumentException if the scope type is not supported
 */
@Override
public List<SnowflakeIdentifier> listIcebergTables(SnowflakeIdentifier scope) {
    StringBuilder baseQuery = new StringBuilder("SHOW ICEBERG TABLES");
    String[] queryParams = null;
    switch (scope.type()) {
        case ROOT:
            // account-level listing
            baseQuery.append(" IN ACCOUNT");
            break;
        case DATABASE:
            // database-level listing
            baseQuery.append(" IN DATABASE IDENTIFIER(?)");
            queryParams = new String[] {scope.toIdentifierString()};
            break;
        case SCHEMA:
            // schema-level listing
            baseQuery.append(" IN SCHEMA IDENTIFIER(?)");
            queryParams = new String[] {scope.toIdentifierString()};
            break;
        default:
            throw new IllegalArgumentException(
                String.format("Unsupported scope type for listIcebergTables: %s", scope));
    }
    // Effectively-final copies so the query lambda below can capture them.
    final String finalQuery = baseQuery.toString();
    final String[] finalQueryParams = queryParams;
    List<SnowflakeIdentifier> tables;
    try {
        tables = connectionPool.run(
            conn -> queryHarness.query(conn, finalQuery, TABLE_RESULT_SET_HANDLER, finalQueryParams));
    } catch (SQLException e) {
        // Translate Snowflake SQL errors into the corresponding Iceberg exception types.
        throw snowflakeExceptionToIcebergException(
            scope, e, String.format("Failed to list tables for scope '%s'", scope));
    } catch (InterruptedException e) {
        // NOTE(review): the thread's interrupt status is not restored here —
        // presumably UncheckedInterruptedException handles that; verify.
        throw new UncheckedInterruptedException(
            e, "Interrupted while listing tables for scope '%s'", scope);
    }
    // Sanity check: the result-set handler should only produce TABLE identifiers.
    tables.forEach(
        table -> Preconditions.checkState(
            table.type() == SnowflakeIdentifier.Type.TABLE,
            "Expected TABLE, got identifier '%s' for scope '%s'",
            table, scope));
    return tables;
}
// Schema-level SQLExceptions carrying "schema not found" error codes must be
// translated into NoSuchNamespaceException, preserving the original cause.
@SuppressWarnings("unchecked")
@Test
public void testListIcebergTablesSQLExceptionAtSchemaLevel() throws SQLException, InterruptedException {
    for (Integer errorCode : SCHEMA_NOT_FOUND_ERROR_CODES) {
        Exception injectedException = new SQLException(
                String.format("SQL exception with Error Code %d", errorCode), "2000", errorCode, null);
        when(mockClientPool.run(any(ClientPool.Action.class))).thenThrow(injectedException);
        assertThatExceptionOfType(NoSuchNamespaceException.class)
                .isThrownBy(() -> snowflakeClient.listIcebergTables(SnowflakeIdentifier.ofSchema("DB_1", "SCHEMA_1")))
                .withMessageContaining(String.format(
                        "Identifier not found: 'SCHEMA: 'DB_1.SCHEMA_1''. Underlying exception: 'SQL exception with Error Code %d'",
                        errorCode))
                .withCause(injectedException);
    }
}
/**
 * Checks whether this runtime can handle the given input: true only when a
 * generated executable resource exists for the input's model URI in the context.
 *
 * @param toEvaluate the input whose model URI is looked up
 * @param context the runtime context holding the generated-resources map
 * @return true when a matching executable resource is present
 */
public static boolean canManage(EfestoInput toEvaluate, EfestoRuntimeContext context) {
    Optional<?> executableResource =
            getGeneratedExecutableResource(toEvaluate.getModelLocalUriId(), context.getGeneratedResourcesMap());
    return executableResource.isPresent();
}
// canManage must return true only when an executable resource exists for the
// model's local URI in the runtime context, and false otherwise.
@Test
void canManage() {
    ModelLocalUriId modelLocalUriId = new ModelLocalUriId(LocalUri.parse("/drl/" + basePath));
    EfestoRuntimeContext context = EfestoRuntimeContextUtils.buildWithParentClassLoader(Thread.currentThread().getContextClassLoader());
    BaseEfestoInput darInputDrlMap = new EfestoInputDrlMap(modelLocalUriId, new EfestoMapInputDTO(null, null, null, null, null, null));
    assertThat(DrlRuntimeHelper.canManage(darInputDrlMap, context)).isTrue();
    // A URI with no generated resource must not be manageable.
    modelLocalUriId = new ModelLocalUriId(LocalUri.parse("/drl/notexisting"));
    darInputDrlMap = new EfestoInputDrlMap(modelLocalUriId, null);
    assertThat(DrlRuntimeHelper.canManage(darInputDrlMap, context)).isFalse();
}
public int compare(Session session, PlanCostEstimate left, PlanCostEstimate right) { requireNonNull(session, "session is null"); requireNonNull(left, "left is null"); requireNonNull(right, "right is null"); checkArgument(!left.hasUnknownComponents() && !right.hasUnknownComponents(), "cannot compare unknown costs"); // TODO when one left.getMaxMemory() and right.getMaxMemory() exceeds query memory limit * configurable safety margin, choose the plan with lower memory usage double leftCost = left.getCpuCost() * cpuWeight + left.getMaxMemory() * memoryWeight + left.getNetworkCost() * networkWeight; double rightCost = right.getCpuCost() * cpuWeight + right.getMaxMemory() * memoryWeight + right.getNetworkCost() * networkWeight; return Double.compare(leftCost, rightCost); }
// compare() must reject cost estimates with unknown components on either side
// (or both) with IllegalArgumentException.
@Test
public void testUnknownCost() {
    CostComparator costComparator = new CostComparator(1.0, 1.0, 1.0);
    Session session = testSessionBuilder().build();
    assertThrows(IllegalArgumentException.class, () -> costComparator.compare(session, PlanCostEstimate.zero(), PlanCostEstimate.unknown()));
    assertThrows(IllegalArgumentException.class, () -> costComparator.compare(session, PlanCostEstimate.unknown(), PlanCostEstimate.zero()));
    assertThrows(IllegalArgumentException.class, () -> costComparator.compare(session, PlanCostEstimate.unknown(), PlanCostEstimate.unknown()));
}