focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Paged query of process instances for a project, filtered by definition code,
 * search text, executor, state, host and start/end date range.
 */
@Operation(summary = "queryProcessInstanceListPaging", description = "QUERY_PROCESS_INSTANCE_LIST_NOTES")
@Parameters({
        @Parameter(name = "processDefineCode", description = "PROCESS_DEFINITION_CODE", schema = @Schema(implementation = long.class, example = "100")),
        @Parameter(name = "searchVal", description = "SEARCH_VAL", schema = @Schema(implementation = String.class)),
        @Parameter(name = "executorName", description = "EXECUTOR_NAME", schema = @Schema(implementation = String.class)),
        @Parameter(name = "stateType", description = "EXECUTION_STATUS", schema = @Schema(implementation = WorkflowExecutionStatus.class)),
        @Parameter(name = "host", description = "HOST", schema = @Schema(implementation = String.class)),
        @Parameter(name = "startDate", description = "START_DATE", schema = @Schema(implementation = String.class)),
        @Parameter(name = "endDate", description = "END_DATE", schema = @Schema(implementation = String.class)),
        @Parameter(name = "pageNo", description = "PAGE_NO", required = true, schema = @Schema(implementation = int.class, example = "1")),
        @Parameter(name = "pageSize", description = "PAGE_SIZE", required = true, schema = @Schema(implementation = int.class, example = "10"))
})
@GetMapping()
@ResponseStatus(HttpStatus.OK)
@ApiException(Status.QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR)
public Result queryProcessInstanceList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                       @Parameter(name = "projectCode", description = "PROJECT_CODE", required = true) @PathVariable long projectCode,
                                       @RequestParam(value = "processDefineCode", required = false, defaultValue = "0") long processDefineCode,
                                       @RequestParam(value = "searchVal", required = false) String searchVal,
                                       @RequestParam(value = "executorName", required = false) String executorName,
                                       @RequestParam(value = "stateType", required = false) WorkflowExecutionStatus stateType,
                                       @RequestParam(value = "host", required = false) String host,
                                       @RequestParam(value = "startDate", required = false) String startTime,
                                       @RequestParam(value = "endDate", required = false) String endTime,
                                       @RequestParam(value = "otherParamsJson", required = false) String otherParamsJson,
                                       @RequestParam("pageNo") Integer pageNo,
                                       @RequestParam("pageSize") Integer pageSize) {
    // Reject invalid paging values before touching the service layer.
    checkPageParams(pageNo, pageSize);
    // Escape user-supplied search text so special characters survive downstream queries.
    final String escapedSearchVal = ParameterUtils.handleEscapes(searchVal);
    return processInstanceService.queryProcessInstanceList(loginUser, projectCode, processDefineCode,
            startTime, endTime, escapedSearchVal, executorName, stateType, host, otherParamsJson,
            pageNo, pageSize);
}
@Test
public void testQueryProcessInstanceList() throws Exception {
    // Stub the service layer to report success regardless of the query arguments.
    Result mockResult = new Result<>();
    mockResult.setCode(Status.SUCCESS.getCode());
    Mockito.when(processInstanceService.queryProcessInstanceList(Mockito.any(), Mockito.anyLong(),
            Mockito.anyLong(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(),
            Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()))
            .thenReturn(mockResult);

    // Build the query string for the paging endpoint.
    MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<>();
    queryParams.add("processDefineCode", "91");
    queryParams.add("searchVal", "cxc");
    queryParams.add("stateType", WorkflowExecutionStatus.SUCCESS.name());
    queryParams.add("host", "192.168.1.13");
    queryParams.add("startDate", "2019-12-15 00:00:00");
    queryParams.add("endDate", "2019-12-16 00:00:00");
    queryParams.add("pageNo", "2");
    queryParams.add("pageSize", "2");

    MvcResult mvcResult = mockMvc.perform(get("/projects/1113/process-instances")
                    .header("sessionId", sessionId)
                    .params(queryParams))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON))
            .andReturn();

    // The controller must relay the service's success code in the JSON body.
    Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
    Assertions.assertNotNull(result);
    Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
/**
 * Builds a sanitized index name from {@code baseString} by delegating to
 * {@link #generateResourceId} with the index-name character rules, the
 * index-name length limit and the shared timestamp format.
 */
static String generateIndexName(String baseString) {
    return generateResourceId(baseString,
            ILLEGAL_INDEX_NAME_CHARS,
            REPLACE_INDEX_NAME_CHAR,
            MAX_INDEX_NAME_LENGTH,
            TIME_FORMAT);
}
@Test
public void testGenerateIndexNameShouldReplaceComma() {
    // Commas are illegal index-name characters; the test regex also expects the
    // result to be lower-cased and suffixed with a date/time/sequence stamp.
    String actual = generateIndexName("Test,DB,Name");
    assertThat(actual).matches("test-db-name-\\d{8}-\\d{6}-\\d{6}");
}
/**
 * Builds the source map of generated table classes for the given compilation DTO:
 * a single regression table when the model is a plain regression, otherwise the
 * full set of classification tables.
 */
static Map<String, KiePMMLTableSourceCategory> getRegressionTablesMap(final RegressionCompilationDTO compilationDTO) {
    if (compilationDTO.isRegression()) {
        // Plain regression: only the first table applies; keep the model's own normalization method.
        final List<RegressionTable> tables =
                Collections.singletonList(compilationDTO.getModel().getRegressionTables().get(0));
        final RegressionCompilationDTO regressionDTO =
                RegressionCompilationDTO.fromCompilationDTORegressionTablesAndNormalizationMethod(
                        compilationDTO, tables, compilationDTO.getModel().getNormalizationMethod());
        return KiePMMLRegressionTableFactory.getRegressionTableBuilders(regressionDTO);
    }
    // Classification: all tables are used and normalization is forced to NONE here.
    final List<RegressionTable> tables = compilationDTO.getModel().getRegressionTables();
    final RegressionCompilationDTO classificationDTO =
            RegressionCompilationDTO.fromCompilationDTORegressionTablesAndNormalizationMethod(
                    compilationDTO, tables, RegressionModel.NormalizationMethod.NONE);
    return KiePMMLClassificationTableFactory.getClassificationTableBuilders(classificationDTO);
}
@Test void getRegressionTablesMap() { final CompilationDTO<RegressionModel> compilationDTO = CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME, pmml, regressionModel, new PMMLCompilationContextMock(), "FILENAME"); Map<String, KiePMMLTableSourceCategory> retrieved = KiePMMLRegressionModelFactory .getRegressionTablesMap(RegressionCompilationDTO.fromCompilationDTO(compilationDTO)); int expectedSize = regressionTables.size() + 1; // One for classification assertThat(retrieved).hasSize(expectedSize); final Collection<KiePMMLTableSourceCategory> values = retrieved.values(); regressionTables.forEach(regressionTable -> assertThat(values.stream().anyMatch(kiePMMLTableSourceCategory -> kiePMMLTableSourceCategory.getCategory().equals(regressionTable.getTargetCategory()))).isTrue()); }
/**
 * Applies environment/region overrides to a copy of {@code input}.
 * The input document itself is never mutated; callers get the processed copy.
 */
public Document process(Document input) throws TransformerException {
    log.log(Level.FINE, () -> "Preprocessing overrides with " + environment + "." + region);
    final Document copy = Xml.copyDocument(input);
    applyOverrides(copy.getDocumentElement(), Context.empty());
    return copy;
}
@Test(expected = IllegalArgumentException.class)
public void testParsingDifferentEnvInParentAndChild() throws TransformerException {
    // A child element declaring a different deploy:environment than its parent must be rejected.
    String in = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>" +
            "<services xmlns:deploy=\"vespa\" xmlns:preprocess=\"?\" version=\"1.0\">" +
            " <admin deploy:environment=\"prod\" version=\"2.0\">" +
            " <adminserver deploy:environment=\"test\" hostalias=\"node1\"/>" +
            " </admin>" +
            "</services>";
    Document inputDoc = Xml.getDocument(new StringReader(in));
    new OverrideProcessor(InstanceName.from("default"), Environment.from("prod"), RegionName.from("us-west"),
            Cloud.defaultCloud().name(), Tags.empty()).process(inputDoc);
}
/**
 * Applies the mining field's missing-value treatment to the request data.
 * When no treatment is declared, RETURN_INVALID is assumed and an input-data
 * exception is thrown. For the value-substituting treatments, a declared
 * replacement value is converted to the field's data type and added to
 * {@code requestData}; without a replacement, nothing is added.
 *
 * @throws KiePMMLInputDataException when the (defaulted) treatment is RETURN_INVALID
 * @throws KiePMMLException for treatment methods not handled by this switch
 */
static void manageMissingValues(final KiePMMLMiningField miningField, final PMMLRequestData requestData) {
    // RETURN_INVALID is the fallback when no treatment method is declared.
    MISSING_VALUE_TREATMENT_METHOD missingValueTreatmentMethod =
            miningField.getMissingValueTreatmentMethod() != null
                    ? miningField.getMissingValueTreatmentMethod()
                    : MISSING_VALUE_TREATMENT_METHOD.RETURN_INVALID;
    switch (missingValueTreatmentMethod) {
        case RETURN_INVALID:
            throw new KiePMMLInputDataException("Missing required value for " + miningField.getName());
        case AS_IS:
        case AS_MEAN:
        case AS_MODE:
        case AS_MEDIAN:
        case AS_VALUE:
            String missingValueReplacement = miningField.getMissingValueReplacement();
            if (missingValueReplacement != null) {
                // Convert the textual replacement to the field's declared data type.
                Object requiredValue = miningField.getDataType().getActualValue(missingValueReplacement);
                requestData.addRequestParam(miningField.getName(), requiredValue);
            }
            break;
        default:
            // Fix: the message previously said "INVALID_VALUE_TREATMENT_METHOD", but this
            // switch dispatches on the missing-value treatment method.
            throw new KiePMMLException("Unmanaged MISSING_VALUE_TREATMENT_METHOD " + missingValueTreatmentMethod);
    }
}
@Test
void manageMissingValuesNotReturnInvalid() {
    // Exercise every treatment method except RETURN_INVALID (which would throw).
    List<MISSING_VALUE_TREATMENT_METHOD> nonInvalidTreatments =
            Arrays.stream(MISSING_VALUE_TREATMENT_METHOD.values())
                    .filter(treatmentMethod -> !treatmentMethod.equals(MISSING_VALUE_TREATMENT_METHOD.RETURN_INVALID))
                    .collect(Collectors.toList());
    final String fieldName = "FIELD";
    nonInvalidTreatments.forEach(treatment -> {
        // Without a replacement value nothing may be added to the request data.
        KiePMMLMiningField miningField = KiePMMLMiningField.builder(fieldName, null)
                .withMissingValueTreatmentMethod(treatment)
                .build();
        PMMLRequestData pmmlRequestData = new PMMLRequestData();
        PreProcess.manageMissingValues(miningField, pmmlRequestData);
        assertThat(pmmlRequestData.getRequestParams()).isEmpty();
        // With a replacement value the converted value must be injected as a request param.
        String missingValueReplacement = "REPLACEMENT";
        miningField = KiePMMLMiningField.builder(fieldName, null)
                .withDataType(DATA_TYPE.STRING)
                .withMissingValueTreatmentMethod(treatment)
                .withMissingValueReplacement(missingValueReplacement)
                .build();
        pmmlRequestData = new PMMLRequestData();
        PreProcess.manageMissingValues(miningField, pmmlRequestData);
        assertThat(pmmlRequestData.getRequestParams()).hasSize(1);
        assertThat(pmmlRequestData.getMappedRequestParams()).containsKey(fieldName);
        ParameterInfo parameterInfo = pmmlRequestData.getMappedRequestParams().get(fieldName);
        assertThat(parameterInfo.getValue()).isEqualTo(missingValueReplacement);
        assertThat(parameterInfo.getType()).isEqualTo(String.class);
    });
}
/**
 * Checks out the pluggable SCM material via the SCM plugin extension.
 * Any failure is reported on the build console before being rethrown.
 */
@Override
public void prepare() {
    try {
        PluggableSCMMaterial material = (PluggableSCMMaterial) revision.getMaterial();
        Modification latest = revision.getLatestModification();
        // Translate the latest modification into the plugin API's revision type.
        SCMRevision scmRevision = new SCMRevision(latest.getRevision(), latest.getModifiedTime(),
                null, null, latest.getAdditionalDataMap(), null);
        File destinationFolder = material.workingDirectory(workingDirectory);
        // Delegate the actual checkout to the SCM plugin.
        Result result = scmExtension.checkout(material.getScmConfig().getPluginConfiguration().getId(),
                buildSCMPropertyConfigurations(material.getScmConfig()),
                destinationFolder.getAbsolutePath(), scmRevision);
        handleCheckoutResult(material, result);
    } catch (Exception e) {
        consumer.taggedErrOutput(PREP_ERR, String.format("Material %s checkout failed: %s",
                revision.getMaterial().getDisplayName(), e.getMessage()));
        throw e;
    }
}
// Verifies that secret parameters ({{SECRET:[...][...]}} placeholders) in the SCM
// configuration are resolved to their looked-up values before being handed to the
// plugin's checkout call, and that revision metadata (revision id, timestamp and
// the additional-data map) is passed through unchanged.
// NOTE(review): the expectations on keys "k1"/"k2" and plugin id "pluginid" come
// from MaterialsMother.pluggableSCMMaterial() — confirm against that fixture.
@Test void shouldSendResolvedValuesToPluginCheckoutForPrepareWhenSecretParamsArePresent() { PluggableSCMMaterial pluggableSCMMaterial = MaterialsMother.pluggableSCMMaterial(); pluggableSCMMaterial.getScmConfig().getConfiguration().get(0).setConfigurationValue(new ConfigurationValue("{{SECRET:[secret_config_id][lookup_username]}}")); pluggableSCMMaterial.getScmConfig().getConfiguration().get(0).getSecretParams().get(0).setValue("some-dummy-value"); pluggableSCMMaterial.setFolder("destination-folder"); Modification modification = ModificationsMother.oneModifiedFile("r1"); Map<String, String> additionalData = new HashMap<>(); additionalData.put("a1", "v1"); additionalData.put("a2", "v2"); modification.setAdditionalData(new Gson().toJson(additionalData)); MaterialRevision revision = new MaterialRevision(pluggableSCMMaterial, modification); String pipelineFolder = new File(System.getProperty("java.io.tmpdir")).getAbsolutePath(); String destinationFolder = new File(pipelineFolder, "destination-folder").getAbsolutePath(); PluggableSCMMaterialAgent pluggableSCMMaterialAgent = new PluggableSCMMaterialAgent(scmExtension, revision, new File(pipelineFolder), consumer); when(scmExtension.checkout(eq("pluginid"), scmConfiguration.capture(), eq(destinationFolder), scmRevision.capture())).thenReturn(new Result()); pluggableSCMMaterialAgent.prepare(); verify(scmExtension).checkout(any(String.class), any(SCMPropertyConfiguration.class), any(String.class), any(SCMRevision.class)); assertThat(scmConfiguration.getValue().size()).isEqualTo(2); assertThat(scmConfiguration.getValue().get("k1").getValue()).isEqualTo("some-dummy-value"); assertThat(scmConfiguration.getValue().get("k2").getValue()).isEqualTo("v2"); assertThat(scmRevision.getValue().getRevision()).isEqualTo("r1"); assertThat(scmRevision.getValue().getTimestamp()).isEqualTo(modification.getModifiedTime()); assertThat(scmRevision.getValue().getData().size()).isEqualTo(2); 
assertThat(scmRevision.getValue().getDataFor("a1")).isEqualTo("v1"); assertThat(scmRevision.getValue().getDataFor("a2")).isEqualTo("v2"); }
/**
 * Fallback handler for any otherwise-unhandled exception: logs the full stack
 * trace and returns a generic error result without leaking internal details.
 */
@ExceptionHandler(Exception.class)
protected ShenyuAdminResult handleExceptionHandler(final Exception exception) {
    LOG.error(exception.getMessage(), exception);
    return ShenyuAdminResult.error("The system is busy, please try again later");
}
@Test
public void testServerExceptionHandlerByException() {
    Exception exception = new Exception();
    ShenyuAdminResult result = exceptionHandlersUnderTest.handleExceptionHandler(exception);
    // Fix: JUnit's convention is assertEquals(expected, actual); the original had the
    // arguments swapped, which produces misleading failure messages.
    Assertions.assertEquals(CommonErrorCode.ERROR, result.getCode().intValue());
    Assertions.assertEquals("The system is busy, please try again later", result.getMessage());
}
/**
 * Returns a parameter summary for every view whose backing search declares at
 * least one parameter; views backed by parameterless searches are omitted.
 */
public Collection<ViewParameterSummaryDTO> forValue() {
    // Collect the search ids referenced by any view.
    final Set<String> searches = viewService.streamAll()
            .map(ViewDTO::searchId)
            .collect(Collectors.toSet());
    // Keep only the searches that actually declare parameters.
    final Map<String, Search> qualifyingSearches = this.searchDbService.findByIds(searches).stream()
            .filter(search -> !search.parameters().isEmpty())
            .collect(Collectors.toMap(Search::id, Functions.identity()));
    // Summarize every view whose search qualifies.
    return viewService.streamAll()
            // containsKey is the direct (and cheaper) form of keySet().contains(...).
            .filter(view -> qualifyingSearches.containsKey(view.searchId()))
            .map(view -> ViewParameterSummaryDTO.create(view, qualifyingSearches.get(view.searchId())))
            .collect(Collectors.toSet());
}
@Test
public void returnViewWhenBothSearchesWithAndWithoutParametersIsPresent() {
    // One search declares a parameter, the other does not.
    final Search search1 = Search.builder()
            .id("searchWithParameter")
            .parameters(ImmutableSet.of(ValueParameter.any("foobar")))
            .build();
    final Search search2 = Search.builder()
            .parameters(ImmutableSet.of())
            .build();
    final ViewDTO view1 = createView("searchWithParameter", "viewWithParameter");
    final ViewDTO view2 = createView("anotherView", "viewWithParameter");
    final QualifyingViewsService service = new QualifyingViewsService(
            mockSearchService(search1, search2),
            mockViewService(view1, view2)
    );

    final Collection<ViewParameterSummaryDTO> result = service.forValue();

    // Only the view backed by the parameterized search may be returned.
    assertThat(result)
            .hasOnlyOneElementSatisfying(summary -> {
                assertThat(summary.id()).isEqualTo("viewWithParameter");
                assertThat(summary.title()).isEqualTo("My View");
                assertThat(summary.summary()).isEqualTo("My Summary");
                assertThat(summary.description()).isEqualTo("My Description");
            });
}
/**
 * Returns all known security groups, delegating straight to the backing store.
 */
@Override
public Set<SecurityGroup> securityGroups() {
    return osSecurityGroupStore.securityGroups();
}
@Test
public void testGetSecurityGroups() {
    // Two groups are created by the fixture helper; the service must report both.
    createBasicSecurityGroups();
    assertEquals("Number of security group did not match", 2, target.securityGroups().size());
}
public static void put(ByteBuffer dst, ByteBuffer src) { if (src.remaining() <= dst.remaining()) { // there is enough space in the dst buffer to copy the src dst.put(src); } else { // there is not enough space in the dst buffer, so we need to // copy as much as we can. int srcOldLimit = src.limit(); src.limit(src.position() + dst.remaining()); dst.put(src); src.limit(srcOldLimit); } }
@Test
public void test_put_exactlyEnoughSpace() {
    // src holds exactly 8 bytes; dst has exactly 8 bytes of room.
    ByteBuffer src = ByteBuffer.allocate(8);
    src.putInt(1);
    src.putInt(2);
    src.flip();
    int srcPos = src.position();
    int srcLimit = src.limit();
    ByteBuffer dst = ByteBuffer.allocate(8);

    BufferUtil.put(dst, src);

    dst.flip();
    // The whole payload must have been copied...
    assertEquals(8, dst.remaining());
    assertEquals(1, dst.getInt());
    assertEquals(2, dst.getInt());
    // ...and src fully consumed with its limit untouched.
    assertEquals(srcPos + 8, src.position());
    assertEquals(srcLimit, src.limit());
}
// Streams file content to Google Drive:
// - Existing file: a PATCH against the file id uploads the new content directly;
//   on HTTP 200 the parsed File is returned after the requested modification
//   timestamp is re-applied via DriveTimestampFeature.
// - New file: a POST with JSON metadata (name, created/modified timestamps, MIME
//   type and parent folder id) initiates a resumable upload session; the content
//   entity is then PUT to the URL returned in the Location header, and the
//   returned file id is cached before the parsed File is returned.
// Any non-success status is mapped to a BackgroundException; response entities
// are always consumed so connections can be reused.
// NOTE(review): the metadata JSON is built by string concatenation — file names
// containing quotes/backslashes would break it; confirm upstream sanitization.
@Override public HttpResponseOutputStream<File> write(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException { final DelayedHttpEntityCallable<File> command = new DelayedHttpEntityCallable<File>(file) { @Override public File call(final HttpEntity entity) throws BackgroundException { try { // Initiate a resumable upload final HttpEntityEnclosingRequestBase request; if(status.isExists()) { final String fileid = DriveWriteFeature.this.fileid.getFileId(file); request = new HttpPatch(String.format("%supload/drive/v3/files/%s?supportsAllDrives=true&fields=%s", session.getClient().getRootUrl(), fileid, DriveAttributesFinderFeature.DEFAULT_FIELDS)); if(StringUtils.isNotBlank(status.getMime())) { request.setHeader(HttpHeaders.CONTENT_TYPE, status.getMime()); } // Upload the file request.setEntity(entity); } else { request = new HttpPost(String.format("%supload/drive/v3/files?uploadType=resumable&supportsAllDrives=%s&fields=%s", session.getClient().getRootUrl(), new HostPreferences(session.getHost()).getBoolean("googledrive.teamdrive.enable"), DriveAttributesFinderFeature.DEFAULT_FIELDS)); final StringBuilder metadata = new StringBuilder("{"); metadata.append(String.format("\"name\":\"%s\"", file.getName())); if(null != status.getModified()) { metadata.append(String.format(",\"modifiedTime\":\"%s\"", new ISO8601DateFormatter().format(status.getModified(), TimeZone.getTimeZone("UTC")))); } if(null != status.getCreated()) { metadata.append(String.format(",\"createdTime\":\"%s\"", new ISO8601DateFormatter().format(status.getCreated(), TimeZone.getTimeZone("UTC")))); } if(StringUtils.isNotBlank(status.getMime())) { metadata.append(String.format(",\"mimeType\":\"%s\"", status.getMime())); } metadata.append(String.format(",\"parents\":[\"%s\"]", fileid.getFileId(file.getParent()))); metadata.append("}"); request.setEntity(new StringEntity(metadata.toString(), ContentType.create("application/json", 
StandardCharsets.UTF_8.name()))); if(StringUtils.isNotBlank(status.getMime())) { // Set to the media MIME type of the upload data to be transferred in subsequent requests. request.addHeader("X-Upload-Content-Type", status.getMime()); } } request.addHeader(HTTP.CONTENT_TYPE, MEDIA_TYPE); final HttpClient client = session.getHttpClient(); final HttpResponse postResponse = client.execute(request); try { switch(postResponse.getStatusLine().getStatusCode()) { case HttpStatus.SC_OK: if(status.isExists()) { final File f = session.getClient().getObjectParser().parseAndClose( new InputStreamReader(postResponse.getEntity().getContent(), StandardCharsets.UTF_8), File.class); if(null != status.getModified()) { new DriveTimestampFeature(session, fileid).setTimestamp(file, status); f.setModifiedTime(new DateTime(status.getModified())); } return f; } break; default: throw new DefaultHttpResponseExceptionMappingService().map("Upload {0} failed", new HttpResponseException(postResponse.getStatusLine().getStatusCode(), postResponse.getStatusLine().getReasonPhrase()), file); } } finally { EntityUtils.consume(postResponse.getEntity()); } if(!status.isExists()) { if(postResponse.containsHeader(HttpHeaders.LOCATION)) { final String putTarget = postResponse.getFirstHeader(HttpHeaders.LOCATION).getValue(); // Upload the file final HttpPut put = new HttpPut(putTarget); put.setEntity(entity); final HttpResponse putResponse = client.execute(put); try { switch(putResponse.getStatusLine().getStatusCode()) { case HttpStatus.SC_OK: case HttpStatus.SC_CREATED: final File response = session.getClient().getObjectParser().parseAndClose( new InputStreamReader(putResponse.getEntity().getContent(), StandardCharsets.UTF_8), File.class); fileid.cache(file, response.getId()); return response; default: throw new DefaultHttpResponseExceptionMappingService().map("Upload {0} failed", new HttpResponseException(putResponse.getStatusLine().getStatusCode(), putResponse.getStatusLine().getReasonPhrase()), file); } 
} finally { EntityUtils.consume(putResponse.getEntity()); } } else { throw new DefaultHttpResponseExceptionMappingService().map("Upload {0} failed", new HttpResponseException(postResponse.getStatusLine().getStatusCode(), postResponse.getStatusLine().getReasonPhrase()), file); } } return null; } catch(IOException e) { throw new DriveExceptionMappingService(fileid).map("Upload {0} failed", e, file); } } @Override public long getContentLength() { return status.getLength(); } }; return this.write(file, status, command); }
// Integration test against a live Google Drive account:
// 1) writes a new 2 KiB file with explicit MIME type and created/modified
//    timestamps, then verifies file id, attributes, timestamps, size, content
//    round-trip and the stored MIME type;
// 2) overwrites the same file (status.setExists(true)) with 1 KiB and verifies
//    the file id is stable and size/timestamp/attributes are updated;
// 3) cleans up by deleting the file and its parent folder.
@Test public void testWrite() throws Exception { final DriveFileIdProvider idProvider = new DriveFileIdProvider(session); final Path folder = new DriveDirectoryFeature(session, idProvider).mkdir( new Path(DriveHomeFinderService.MYDRIVE_FOLDER, UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final Path test = new Path(folder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)); String fileid; { final TransferStatus status = new TransferStatus(); status.setMime("x-application/cyberduck"); status.setModified(1620113107725L); status.setCreated(1695160857860L); final byte[] content = RandomUtils.nextBytes(2048); status.setLength(content.length); final HttpResponseOutputStream<File> out = new DriveWriteFeature(session, idProvider).write(test, status, new DisabledConnectionCallback()); new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out); fileid = out.getStatus().getId(); assertNotNull(fileid); assertTrue(new DefaultFindFeature(session).find(test)); assertEquals(status.getModified(), new DriveAttributesFinderFeature(session, idProvider).toAttributes(out.getStatus()).getModificationDate(), 0L); final PathAttributes attributes = new DriveAttributesFinderFeature(session, idProvider).find(test); assertEquals(new DriveAttributesFinderFeature(session, idProvider).toAttributes(out.getStatus()), attributes); assertEquals(fileid, attributes.getFileId()); assertEquals(1620113107725L, attributes.getModificationDate()); assertEquals(1695160857860L, attributes.getCreationDate()); assertEquals(content.length, attributes.getSize()); final byte[] buffer = new byte[content.length]; final InputStream in = new DriveReadFeature(session, idProvider).read(test, new TransferStatus(), new DisabledConnectionCallback()); IOUtils.readFully(in, buffer); in.close(); assertArrayEquals(content, buffer); assertEquals("x-application/cyberduck", 
session.getClient().files().get(test.attributes().getFileId()).execute().getMimeType()); } { // overwrite final TransferStatus status = new TransferStatus(); status.setMime("x-application/cyberduck"); status.setModified(System.currentTimeMillis()); status.setExists(true); final byte[] content = RandomUtils.nextBytes(1024); status.setLength(content.length); final HttpResponseOutputStream<File> out = new DriveWriteFeature(session, idProvider).write(test, status, new DisabledConnectionCallback()); new StreamCopier(new TransferStatus(), new TransferStatus()).transfer(new ByteArrayInputStream(content), out); assertEquals(fileid, out.getStatus().getId()); final PathAttributes attributes = new DriveListService(session, idProvider).list(test.getParent(), new DisabledListProgressListener()).get(test).attributes(); assertEquals(content.length, attributes.getSize()); assertEquals("x-application/cyberduck", session.getClient().files().get(test.attributes().getFileId()).execute().getMimeType()); assertEquals(status.getModified().longValue(), new DriveAttributesFinderFeature(session, idProvider).toAttributes(out.getStatus()).getModificationDate()); assertEquals(new DriveAttributesFinderFeature(session, idProvider).toAttributes(out.getStatus()), attributes); } new DriveDeleteFeature(session, idProvider).delete(Arrays.asList(test, folder), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
public static Path compose(final Path root, final String path) { if(StringUtils.startsWith(path, String.valueOf(Path.DELIMITER))) { // Mount absolute path final String normalized = normalize(StringUtils.replace(path, "\\", String.valueOf(Path.DELIMITER)), true); if(StringUtils.equals(normalized, String.valueOf(Path.DELIMITER))) { return root; } return new Path(normalized, normalized.equals(String.valueOf(Path.DELIMITER)) ? EnumSet.of(Path.Type.volume, Path.Type.directory) : EnumSet.of(Path.Type.directory)); } else { final String normalized; if(StringUtils.startsWith(path, String.format("%s%s", Path.HOME, Path.DELIMITER))) { // Relative path to the home directory normalized = normalize(StringUtils.removeStart(StringUtils.removeStart( StringUtils.replace(path, "\\", String.valueOf(Path.DELIMITER)), Path.HOME), String.valueOf(Path.DELIMITER)), false); } else { // Relative path normalized = normalize(StringUtils.replace(path, "\\", String.valueOf(Path.DELIMITER)), false); } if(StringUtils.equals(normalized, String.valueOf(Path.DELIMITER))) { return root; } return new Path(String.format("%s%s%s", root.getAbsolute(), root.isRoot() ? StringUtils.EMPTY : Path.DELIMITER, normalized), EnumSet.of(Path.Type.directory)); } }
@Test
public void testFindWithWorkdir() {
    // Relative and absolute forms of the same name must compose to the same path under "/".
    final Path expected = new Path("/sandbox", EnumSet.of(Path.Type.directory));
    assertEquals(expected,
            PathNormalizer.compose(new Path("/", EnumSet.of(Path.Type.directory)), "sandbox"));
    assertEquals(expected,
            PathNormalizer.compose(new Path("/", EnumSet.of(Path.Type.directory)), "/sandbox"));
}
/**
 * Creates a {@link JsonAsserter} over the parsed representation of the given JSON string.
 */
public static JsonAsserter with(String json) {
    final Object parsed = JsonPath.parse(json).json();
    return new JsonAsserterImpl(parsed);
}
@Test
public void testAssertEqualsIntegerInvalidField() throws Exception {
    // Asserting against a non-existent field must raise an AssertionError.
    assertThrows(AssertionError.class,
            () -> with(getResourceAsStream("lotto.json")).assertEquals("lotto.winners[0].winnerId1", 24));
}
/**
 * Resolves the output type of a {@link MapFunction} for the given input type.
 * Delegates to the overload with no function name and non-strict error handling.
 */
@PublicEvolving
public static <IN, OUT> TypeInformation<OUT> getMapReturnTypes(
        MapFunction<IN, OUT> mapInterface, TypeInformation<IN> inType) {
    return getMapReturnTypes(mapInterface, inType, null, false);
}
@SuppressWarnings({"unchecked", "rawtypes"}) @Test void testDuplicateValue() { TypeInformation<?> ti = TypeExtractor.getMapReturnTypes( (MapFunction) new DuplicateValue<String>(), TypeInformation.of(new TypeHint<Tuple1<String>>() {})); assertThat(ti.isTupleType()).isTrue(); assertThat(ti.getArity()).isEqualTo(2); TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti; assertThat(tti.getTypeAt(0)).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO); assertThat(tti.getTypeAt(1)).isEqualTo(BasicTypeInfo.STRING_TYPE_INFO); }
// Polls the Spark ETL job state and advances this load job accordingly:
// RUNNING updates progress, FINISHED transitions towards loading, CANCELLED
// fails the job with a LoadException; unknown states are only logged.
// When the job has reached LOADING, push tasks are created and submitted.
public void updateEtlStatus() throws Exception {
    if (!checkState(JobState.ETL)) {
        // Not in the ETL phase — nothing to poll.
        return;
    }
    // get etl status
    SparkEtlJobHandler handler = new SparkEtlJobHandler();
    EtlStatus status = handler.getEtlJobStatus(sparkLoadAppHandle, appId, id, etlOutputPath, sparkResource, brokerDesc);
    // State mutation below must happen under the write lock.
    writeLock();
    try {
        switch (status.getState()) {
            case RUNNING:
                unprotectedUpdateEtlStatusInternal(status);
                break;
            case FINISHED:
                unprotectedProcessEtlFinish(status, handler);
                break;
            case CANCELLED:
                // The lock is still released by the finally block below.
                throw new LoadException("spark etl job failed. msg: " + status.getFailMsg());
            default:
                LOG.warn("unknown etl state: {}", status.getState().name());
                break;
        }
    } finally {
        writeUnlock();
    }
    // NOTE(review): this state check runs outside the write lock — presumably
    // checkState does its own synchronization; confirm.
    if (checkState(JobState.LOADING)) {
        // create and send push tasks
        submitPushTasks();
    }
}
@Test(expected = DataQualityException.class)
public void testUpdateEtlStatusFinishedQualityFailed(@Mocked GlobalStateMgr globalStateMgr,
                                                     @Injectable String originStmt,
                                                     @Mocked SparkEtlJobHandler handler) throws Exception {
    // A finished ETL job whose abnormal-row counters breach the quality threshold
    // must make updateEtlStatus() fail with DataQualityException.
    EtlStatus status = new EtlStatus();
    status.setState(TEtlState.FINISHED);
    status.getCounters().put("dpp.norm.ALL", "8");
    status.getCounters().put("dpp.abnorm.ALL", "2");
    new Expectations() {
        {
            handler.getEtlJobStatus((SparkLoadAppHandle) any, appId, anyLong, etlOutputPath,
                    (SparkResource) any, (BrokerDesc) any);
            result = status;
        }
    };
    SparkLoadJob job = getEtlStateJob(originStmt);
    job.updateEtlStatus();
}
// Writes a message into the ring buffer using a two-phase commit: the record
// length is first published as a negative value (claim/in-progress marker),
// the payload and type are copied, then the positive length is stored last so
// concurrent readers never observe a partially written record.
public boolean write(final int msgTypeId, final DirectBuffer srcBuffer, final int offset, final int length) {
    checkTypeId(msgTypeId);
    checkMsgLength(length);
    final AtomicBuffer buffer = this.buffer;
    final int recordLength = length + HEADER_LENGTH;
    final int recordIndex = claimCapacity(buffer, recordLength);
    if (INSUFFICIENT_CAPACITY == recordIndex) {
        // Not enough space available for the record.
        return false;
    }
    // Negative length marks the record as in-progress (ordered store).
    buffer.putIntOrdered(lengthOffset(recordIndex), -recordLength);
    // Fence ensures the claim is visible before the payload stores below.
    MemoryAccess.releaseFence();
    buffer.putBytes(encodedMsgOffset(recordIndex), srcBuffer, offset, length);
    buffer.putInt(typeOffset(recordIndex), msgTypeId);
    // Publishing the positive length commits the record for readers.
    buffer.putIntOrdered(lengthOffset(recordIndex), recordLength);
    return true;
}
@Test void shouldInsertPaddingAndWriteToBuffer() { final int padding = 200; final int messageLength = 400; final int recordLength = messageLength + HEADER_LENGTH; final int alignedRecordLength = align(recordLength, ALIGNMENT); final long tail = 2 * CAPACITY - padding; final long head = tail; // free space is (200 + 300) more than message length (400) // but contiguous space (300) is less than message length (400) final long headCache = CAPACITY + 300; when(buffer.getLongVolatile(HEAD_COUNTER_INDEX)).thenReturn(head); when(buffer.getLongVolatile(TAIL_COUNTER_INDEX)).thenReturn(tail); when(buffer.getLongVolatile(HEAD_COUNTER_CACHE_INDEX)).thenReturn(headCache); when(buffer.compareAndSetLong(TAIL_COUNTER_INDEX, tail, tail + alignedRecordLength + padding)) .thenReturn(true); final UnsafeBuffer srcBuffer = new UnsafeBuffer(new byte[messageLength]); assertTrue(ringBuffer.write(MSG_TYPE_ID, srcBuffer, 0, messageLength)); }
/**
 * Tells whether {@code date} falls within the range given by {@code beginDate}
 * and {@code endDate}, delegating to {@link DateTime#isIn(Date, Date)}.
 */
public static boolean isIn(Date date, Date beginDate, Date endDate) {
    // Reuse an existing DateTime instance when possible; otherwise wrap once.
    final DateTime dateTime = date instanceof DateTime ? (DateTime) date : new DateTime(date);
    return dateTime.isIn(beginDate, endDate);
}
@Test
public void isInTest(){
    final String sourceStr = "2022-04-19 00:00:00";
    final String startTimeStr = "2022-04-19 00:00:00";
    final String endTimeStr = "2022-04-19 23:59:59";
    // Fix: isIn(date, beginDate, endDate) takes the date under test first; the
    // original call passed (start, end, source), testing the wrong containment.
    final boolean between = DateUtil.isIn(DateUtil.parse(sourceStr),
            DateUtil.parse(startTimeStr), DateUtil.parse(endTimeStr));
    assertTrue(between);
}
// Visitor entry point for statistics estimation: dispatches to the
// operator-specific visit method via double dispatch (operator.accept(this, ...)).
public void estimatorStats() { expressionContext.getOp().accept(this, expressionContext); }
// Verifies partition pruning's effect on estimated statistics for a day-partitioned
// OLAP table: selecting only partition p2 yields the single-partition row count
// (1000) and a min/max/NDV clamped to that partition's date range; selecting p2+p3
// with a predicate id_date >= 2020-04-24 doubles the row count and widens the
// max/NDV accordingly. Column statistics are stubbed via CachedStatisticStorage.
@Test public void testLogicalOlapTableScanPartitionPrune2(@Mocked CachedStatisticStorage cachedStatisticStorage) throws Exception { FeConstants.runningUnitTest = true; ColumnRefOperator idDate = columnRefFactory.create("id_date", Type.DATE, true); GlobalStateMgr globalStateMgr = connectContext.getGlobalStateMgr(); OlapTable table = (OlapTable) globalStateMgr.getDb("statistics_test").getTable("test_all_type_day_partition"); new Expectations() { { cachedStatisticStorage.getColumnStatistics(table, Lists.newArrayList("id_date")); result = new ColumnStatistic(Utils.getLongFromDateTime(LocalDateTime.of(2020, 4, 23, 0, 0, 0)), Utils.getLongFromDateTime(LocalDateTime.of(2020, 4, 25, 0, 0, 0)), 0, 0, 3); minTimes = 0; cachedStatisticStorage.getColumnStatistic(table, "id_date"); result = new ColumnStatistic(Utils.getLongFromDateTime(LocalDateTime.of(2020, 4, 23, 0, 0, 0)), Utils.getLongFromDateTime(LocalDateTime.of(2020, 4, 25, 0, 0, 0)), 0, 0, 3); minTimes = 0; } }; Collection<Partition> partitions = table.getPartitions(); // select partition p2 List<Long> partitionIds = partitions.stream().filter(partition -> partition.getName().equalsIgnoreCase("p2")). 
mapToLong(partition -> partition.getId()).boxed().collect(Collectors.toList()); for (Partition partition : partitions) { partition.getBaseIndex().setRowCount(1000); } LogicalOlapScanOperator olapScanOperator = new LogicalOlapScanOperator(table, ImmutableMap.of(idDate, new Column("id_date", Type.DATE, true)), ImmutableMap.of(new Column("id_date", Type.DATE, true), idDate), null, -1, null, ((OlapTable) table).getBaseIndexId(), partitionIds, null, false, Lists.newArrayList(), Lists.newArrayList(), Lists.newArrayList(), false); GroupExpression groupExpression = new GroupExpression(olapScanOperator, Lists.newArrayList()); groupExpression.setGroup(new Group(0)); ExpressionContext expressionContext = new ExpressionContext(groupExpression); StatisticsCalculator statisticsCalculator = new StatisticsCalculator(expressionContext, columnRefFactory, optimizerContext); statisticsCalculator.estimatorStats(); Assert.assertEquals(1000, expressionContext.getStatistics().getOutputRowCount(), 0.001); ColumnStatistic columnStatistic = expressionContext.getStatistics().getColumnStatistic(idDate); Assert.assertEquals(Utils.getLongFromDateTime(LocalDateTime.of(2020, 4, 24, 0, 0, 0)), columnStatistic.getMinValue(), 0.001); Assert.assertEquals(Utils.getLongFromDateTime(LocalDateTime.of(2020, 4, 25, 0, 0, 0)), columnStatistic.getMaxValue(), 0.001); Assert.assertEquals(1, columnStatistic.getDistinctValuesCount(), 0.001); // select partition p2, p3 partitionIds.clear(); partitionIds = partitions.stream().filter(partition -> !(partition.getName().equalsIgnoreCase("p1"))). 
mapToLong(Partition::getId).boxed().collect(Collectors.toList()); olapScanOperator = new LogicalOlapScanOperator(table, ImmutableMap.of(idDate, new Column("id_date", Type.DATE, true)), ImmutableMap.of(new Column("id_date", Type.DATE, true), idDate), null, -1, null, ((OlapTable) table).getBaseIndexId(), partitionIds, null, false, Lists.newArrayList(), Lists.newArrayList(), Lists.newArrayList(), false); olapScanOperator.setPredicate(new BinaryPredicateOperator(BinaryType.GE, idDate, ConstantOperator.createDate(LocalDateTime.of(2020, 04, 24, 0, 0, 0)))); groupExpression = new GroupExpression(olapScanOperator, Lists.newArrayList()); groupExpression.setGroup(new Group(0)); expressionContext = new ExpressionContext(groupExpression); statisticsCalculator = new StatisticsCalculator(expressionContext, columnRefFactory, optimizerContext); statisticsCalculator.estimatorStats(); columnStatistic = expressionContext.getStatistics().getColumnStatistic(idDate); // has two partitions Assert.assertEquals(2000, expressionContext.getStatistics().getOutputRowCount(), 0.001); Assert.assertEquals(Utils.getLongFromDateTime(LocalDateTime.of(2020, 4, 24, 0, 0, 0)), columnStatistic.getMinValue(), 0.001); Assert.assertEquals(Utils.getLongFromDateTime(LocalDateTime.of(2020, 4, 26, 0, 0, 0)), columnStatistic.getMaxValue(), 0.001); Assert.assertEquals(2, columnStatistic.getDistinctValuesCount(), 0.001); FeConstants.runningUnitTest = false; }
@Override
public final boolean accept(EdgeIteratorState iter) {
    // Delegate to the wrapped filter first; anything it rejects stays rejected.
    if (!edgeFilter.accept(iter)) {
        return false;
    }
    // Without a name hint every remaining edge is acceptable.
    if (pointHint.isEmpty()) {
        return true;
    }
    String rawName = iter.getName();
    if (rawName == null || rawName.isEmpty()) {
        return false;
    }
    // Cheap spatial pre-check before the more expensive name-similarity test.
    if (!pointCircle.intersects(createBBox(iter))) {
        return false;
    }
    // Normalize the edge name, then compare it to the hint via Jaro-Winkler.
    String normalizedName = prepareName(removeRelation(rawName));
    return isJaroWinklerSimilar(pointHint, normalizedName);
}
@Test
public void testDistanceFiltering() {
    // Builds a tiny graph with one far-away node and two nodes near the query
    // point, then checks the name-similarity filter also rejects by distance.
    BaseGraph g = new BaseGraph.Builder(1).create();
    NodeAccess na = g.getNodeAccess();
    GHPoint pointFarAway = new GHPoint(49.458629, 11.146124);
    // NOTE(review): names disagree on the distance (point25mAway vs. nodeId50
    // vs. the "50m" comment below) — presumably ~25-50m away; confirm.
    GHPoint point25mAway = new GHPoint(49.464871, 11.143575);
    GHPoint point200mAway = new GHPoint(49.464598, 11.149039);
    int farAwayId = 0;
    int nodeId50 = 1;
    int nodeID200 = 2;
    na.setNode(farAwayId, pointFarAway.lat, pointFarAway.lon);
    na.setNode(nodeId50, point25mAway.lat, point25mAway.lon);
    na.setNode(nodeID200, point200mAway.lat, point200mAway.lon);
    // Check that it matches a street 50m away
    EdgeIteratorState edge1 = g.edge(nodeId50, farAwayId).setKeyValues(Map.of(STREET_NAME, new KValue("Wentworth Street")));
    assertTrue(createNameSimilarityEdgeFilter("Wentworth Street").accept(edge1));
    // Check that it doesn't match streets 200m away
    EdgeIteratorState edge2 = g.edge(nodeID200, farAwayId).setKeyValues(Map.of(STREET_NAME, new KValue("Wentworth Street")));
    assertFalse(createNameSimilarityEdgeFilter("Wentworth Street").accept(edge2));
}
/**
 * Validates that the given schema is compatible with the transformer configuration:
 * field names must not conflict, the extras fields (if declared in the schema) must
 * have the expected type, and the schema must form a valid field tree.
 *
 * @param schema            table schema to validate
 * @param transformerConfig transformer configuration naming the extras fields
 */
public static void validateSchema(@Nonnull Schema schema,
    @Nonnull SchemaConformingTransformerConfig transformerConfig) {
  validateSchemaFieldNames(schema.getPhysicalColumnNames(), transformerConfig);

  String indexableExtrasFieldName = transformerConfig.getIndexableExtrasField();
  getAndValidateExtrasFieldType(schema, indexableExtrasFieldName);
  String unindexableExtrasFieldName = transformerConfig.getUnindexableExtrasField();
  if (null != unindexableExtrasFieldName) {
    // Bug fix: this previously re-validated the indexable extras field, leaving
    // the unindexable extras field's type completely unchecked.
    getAndValidateExtrasFieldType(schema, unindexableExtrasFieldName);
  }

  validateSchemaAndCreateTree(schema);
}
@Test
public void testOverlappingSchemaFields() {
    // A field ("a.b") that is also a dotted prefix of another field ("a.b.c") is
    // ambiguous for the schema-conforming transformer and must be rejected.
    Assert.assertThrows(IllegalArgumentException.class, () -> {
        Schema schema = createDefaultSchemaBuilder().addSingleValueDimension("a.b", DataType.STRING)
            .addSingleValueDimension("a.b.c", DataType.INT).build();
        SchemaConformingTransformer.validateSchema(schema,
            new SchemaConformingTransformerConfig(INDEXABLE_EXTRAS_FIELD_NAME, null, null, null));
    });

    // This is a repeat of the previous test but with fields reversed just in case they are processed in order
    Assert.assertThrows(IllegalArgumentException.class, () -> {
        Schema schema = createDefaultSchemaBuilder().addSingleValueDimension("a.b.c", DataType.INT)
            .addSingleValueDimension("a.b", DataType.STRING).build();
        SchemaConformingTransformer.validateSchema(schema,
            new SchemaConformingTransformerConfig(INDEXABLE_EXTRAS_FIELD_NAME, null, null, null));
    });
}
/**
 * Attempts public key authentication with the SSH server using the identity file
 * configured in the bookmark credentials. Returns false immediately when the
 * credentials are not configured for public key authentication; otherwise runs
 * the key exchange and returns whether the client ended up authenticated.
 *
 * @param bookmark Host whose credentials point to the private key file
 * @param prompt   Callback used to ask for the key passphrase when needed
 * @param cancel   Cancel callback (not used in this implementation)
 * @throws LoginCanceledException when the user dismisses the passphrase prompt
 */
@Override
public Boolean authenticate(final Host bookmark, final LoginCallback prompt, final CancelCallback cancel) throws BackgroundException {
    final Credentials credentials = bookmark.getCredentials();
    if(credentials.isPublicKeyAuthentication()) {
        if(log.isDebugEnabled()) {
            log.debug(String.format("Login using public key authentication with credentials %s", credentials));
        }
        final Local privKey = credentials.getIdentity();
        final Local pubKey;
        final FileKeyProvider provider;
        // Set when the user cancels the passphrase prompt so the resulting
        // IOException can be re-mapped to LoginCanceledException below.
        final AtomicBoolean canceled = new AtomicBoolean();
        try {
            // Sniff the on-disk key format to pick the matching provider implementation
            final KeyFormat format = KeyProviderUtil.detectKeyFileFormat(
                new InputStreamReader(privKey.getInputStream(), StandardCharsets.UTF_8), true);
            if(log.isInfoEnabled()) {
                log.info(String.format("Reading private key %s with key format %s", privKey, format));
            }
            switch(format) {
                case PKCS8:
                    provider = new PKCS8KeyFile.Factory().create();
                    pubKey = null;
                    break;
                case OpenSSH: {
                    provider = new OpenSSHKeyFile.Factory().create();
                    // Look for the matching .pub file next to the private key
                    final File f = OpenSSHKeyFileUtil.getPublicKeyFile(new File(privKey.getAbsolute()));
                    if(f != null) {
                        pubKey = LocalFactory.get(f.getAbsolutePath());
                    }
                    else {
                        pubKey = null;
                    }
                    break;
                }
                case OpenSSHv1: {
                    provider = new OpenSSHKeyV1KeyFile.Factory().create();
                    // Same .pub lookup as for the legacy OpenSSH format
                    final File f = OpenSSHKeyFileUtil.getPublicKeyFile(new File(privKey.getAbsolute()));
                    if(f != null) {
                        pubKey = LocalFactory.get(f.getAbsolutePath());
                    }
                    else {
                        pubKey = null;
                    }
                    break;
                }
                case PuTTY:
                    provider = new PuTTYKeyFile.Factory().create();
                    pubKey = null;
                    break;
                default:
                    throw new InteroperabilityException(String.format("Unknown key format for file %s", privKey.getName()));
            }
            // Initialize the provider with the private key, optional public key and a
            // passphrase callback that prompts the user when none is cached.
            provider.init(new InputStreamReader(privKey.getInputStream(), StandardCharsets.UTF_8), pubKey != null ?
                new InputStreamReader(pubKey.getInputStream(), StandardCharsets.UTF_8) : null, new PasswordFinder() {
                @Override
                public char[] reqPassword(Resource<?> resource) {
                    if(StringUtils.isEmpty(credentials.getIdentityPassphrase())) {
                        try {
                            // Use password prompt
                            final Credentials input = prompt.prompt(bookmark,
                                LocaleFactory.localizedString("Private key password protected", "Credentials"),
                                String.format("%s (%s)", LocaleFactory.localizedString("Enter the passphrase for the private key file", "Credentials"), privKey.getAbbreviatedPath()),
                                new LoginOptions()
                                    .icon(bookmark.getProtocol().disk())
                                    .user(false).password(true)
                            );
                            credentials.setSaved(input.isSaved());
                            credentials.setIdentityPassphrase(input.getPassword());
                        }
                        catch(LoginCanceledException e) {
                            canceled.set(true);
                            // Remember the cancellation and return an empty passphrase; the
                            // failing auth attempt is then mapped to cancel in the catch below
                            return StringUtils.EMPTY.toCharArray();
                        }
                    }
                    return credentials.getIdentityPassphrase().toCharArray();
                }

                @Override
                public boolean shouldRetry(Resource<?> resource) {
                    return false;
                }
            });
            client.auth(credentials.getUsername(), new AuthPublickey(provider));
            return client.isAuthenticated();
        }
        catch(IOException e) {
            if(canceled.get()) {
                throw new LoginCanceledException();
            }
            throw new SFTPExceptionMappingService().map(e);
        }
    }
    return false;
}
@Test(expected = InteroperabilityException.class)
public void testUnknownFormat() throws Exception {
    // A private key file whose content matches no supported key format must be
    // rejected with InteroperabilityException (declared via expected above).
    final Local key = new Local(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
    try {
        new DefaultLocalTouchFeature().touch(key);
        IOUtils.copy(new StringReader("--unknown format"), key.getOutputStream(false), StandardCharsets.UTF_8);
        // Reconnect
        session.disconnect();
        session.open(new DisabledProxyFinder(), new DisabledHostKeyCallback(), new DisabledLoginCallback(), new DisabledCancelCallback());
        session.getHost().getCredentials().setIdentity(key);
        assertTrue(new SFTPPublicKeyAuthentication(session.getClient()).authenticate(session.getHost(), new DisabledLoginCallback() {
            @Override
            public Credentials prompt(final Host bookmark, String username, String title, String reason, LoginOptions options) throws LoginCanceledException {
                // No passphrase prompt should ever be shown for an unreadable key format
                fail();
                throw new LoginCanceledException();
            }
        }, new DisabledCancelCallback()));
    }
    finally {
        // Always clean up the temporary key file
        key.delete();
    }
}
/**
 * Moves the per-index-set "default" flag out of the index set documents and into
 * the cluster config ({@code DefaultIndexSetConfig}), then records the migration
 * as completed. Safe to re-run: exits early once {@code MigrationCompleted} exists.
 */
@Override
public void upgrade() {
    // Idempotency guard: never run the migration twice.
    if (clusterConfigService.get(MigrationCompleted.class) != null) {
        LOG.debug("Migration already done.");
        return;
    }

    // Do not overwrite an existing default index config
    boolean defaultDone = clusterConfigService.get(DefaultIndexSetConfig.class) != null;

    final ImmutableSet.Builder<String> builder = ImmutableSet.builder();
    // Oldest documents first, so the earliest flagged default wins.
    final FindIterable<Document> documents = collection.find(exists(FIELD_DEFAULT)).sort(ascending(FIELD_CREATION_DATE));
    for (final Document document : documents) {
        final ObjectId id = document.getObjectId(FIELD_ID);
        final String idString = id.toHexString();
        final boolean isDefault = firstNonNull(document.getBoolean(FIELD_DEFAULT), false);
        if (!defaultDone && isDefault) {
            defaultDone = true;
            clusterConfigService.write(DefaultIndexSetConfig.create(idString));
        }
        // NOTE(review): named modifiedCount but actually reads getMatchedCount();
        // confirm whether matched-vs-modified matters here.
        final long modifiedCount = collection.updateOne(eq(FIELD_ID, id), unset(FIELD_DEFAULT)).getMatchedCount();
        if (modifiedCount > 0) {
            LOG.info("Removed <default> field from index set <{}> ({})", document.getString(FIELD_TITLE), idString);
            builder.add(idString);
        } else {
            LOG.error("Couldn't remove <default> field from index set <{}> ({})", document.getString(FIELD_TITLE), idString);
        }
    }

    // Record which index sets were touched so the migration never re-runs.
    clusterConfigService.write(MigrationCompleted.create(builder.build()));
}
@Test
@MongoDBFixtures("V20161215163900_MoveIndexSetDefaultConfigTest.json")
public void upgrade() throws Exception {
    final long count = collection.countDocuments();

    migration.upgrade();

    final MigrationCompleted migrationCompleted = clusterConfigService.get(MigrationCompleted.class);

    // The migration only mutates documents; it must never delete any.
    assertThat(collection.countDocuments())
        .withFailMessage("No document should be deleted by the migration!")
        .isEqualTo(count);
    assertThat(collection.countDocuments(Filters.exists("default")))
        .withFailMessage("The migration should have deleted the \"default\" field from the documents!")
        .isEqualTo(0L);
    assertThat(clusterConfigService.get(DefaultIndexSetConfig.class))
        .withFailMessage("The DefaultIndexSetConfig should have been written to cluster config!")
        .isNotNull();
    // Presumably the first/oldest index set flagged default in the fixture — confirm
    // against the fixture JSON.
    assertThat(clusterConfigService.get(DefaultIndexSetConfig.class).defaultIndexSetId()).isEqualTo("57f3d721a43c2d59cb750001");
    assertThat(migrationCompleted).isNotNull();
    assertThat(migrationCompleted.indexSetIds()).containsExactlyInAnyOrder("57f3d721a43c2d59cb750001", "57f3d721a43c2d59cb750003");
}
/**
 * Defines the given bytecode in the target class loader, deriving the protection
 * domain from the neighbor class when one is supplied and from the class loader's
 * own class otherwise. Delegates to the overload that takes an explicit domain.
 */
public static Class<?> tryDefineClassesInClassLoader(
    String className, Class<?> neighbor, ClassLoader classLoader, byte[] bytecode) {
  final ProtectionDomain domain;
  if (neighbor != null) {
    domain = neighbor.getProtectionDomain();
  } else {
    domain = classLoader.getClass().getProtectionDomain();
  }
  return tryDefineClassesInClassLoader(className, neighbor, classLoader, domain, bytecode);
}
@Test(dataProvider = "packages")
public void testTryDefineClassesInClassLoader(String pkg) {
    // Compiles a tiny serializable class into the given package at runtime and
    // checks it can be defined into a class loader, with and without a neighbor.
    String classname = String.format("A%d", System.currentTimeMillis());
    String classCode =
        String.format(
            ""
                + "package %s;\n"
                + "public final class %s implements java.io.Serializable {\n"
                + " public int f1;\n"
                + " public long f2;\n"
                + "}",
            pkg, classname);
    byte[] bytes =
        JaninoUtils.toBytecode(
                getClass().getClassLoader(), new CompileUnit(pkg, classname, classCode))
            .values()
            .iterator()
            .next();
    if (ClassLoaderUtils.class.getPackage().getName().equals(pkg)) {
        // Same package as ClassLoaderUtils: define with a neighbor class.
        Class<?> cls =
            ClassLoaderUtils.tryDefineClassesInClassLoader(
                pkg + "." + classname,
                ClassLoaderUtils.class,
                ClassLoaderUtils.class.getClassLoader(),
                bytes);
        Assert.assertNotNull(cls);
        Assert.assertEquals(cls.getSimpleName(), classname);
    } else {
        // Foreign package: no neighbor class is available.
        Class<?> cls =
            ClassLoaderUtils.tryDefineClassesInClassLoader(
                pkg + "." + classname, null, getClass().getClassLoader(), bytes);
        Assert.assertNotNull(cls);
    }
}
/**
 * Assembles the current issue-sync progress: how many projects are indexed out of
 * the total, whether any sync task failed or was cancelled, and whether the sync
 * is complete (no sync task pending or in progress).
 */
public IssueSyncProgress getIssueSyncProgress(DbSession dbSession) {
  int indexedProjects = dbClient.projectDao().countIndexedProjects(dbSession);
  int totalProjects = dbClient.projectDao().countProjects(dbSession);
  boolean hasFailures = dbClient.ceActivityDao().hasAnyFailedOrCancelledIssueSyncTask(dbSession);
  // Complete exactly when nothing is left pending or running.
  boolean completed = !dbClient.ceQueueDao().hasAnyIssueSyncTaskPendingOrInProgress(dbSession);
  return new IssueSyncProgress(completed, indexedProjects, totalProjects, hasFailures);
}
@Test
public void return_is_completed_true_if_in_progress_task_exist_but_all_branches_have_been_synced() {
    // NOTE(review): an IN_PROGRESS queue entry exists yet completion is expected —
    // presumably because "TASK_1" is not an issue-sync task type; confirm.
    insertCeQueue("TASK_1", Status.IN_PROGRESS);
    // only project
    IntStream.range(0, 10).forEach(value -> insertProjectWithBranches(false, 0));
    // project + additional branch
    IntStream.range(0, 10).forEach(value -> insertProjectWithBranches(false, 1));

    IssueSyncProgress result = underTest.getIssueSyncProgress(db.getSession());
    assertThat(result.isCompleted()).isTrue();
}
/**
 * Returns the {@link TokenInformation} held by this instance.
 */
public TokenInformation tokenInformation() {
    return tokenInformation;
}
@Test
public void testValues() {
    // Each delegation token datum must expose exactly the token information
    // it was constructed from, index for index.
    for (int i = 0; i < 3; i++) {
        assertEquals(TOKENINFORMATION.get(i), DELEGATIONTOKENDATA.get(i).tokenInformation());
    }
}
/**
 * Updates a job's status in the database and mirrors the change into Quartz
 * (resume on NORMAL, pause otherwise). Runs in a transaction rolled back on
 * any exception.
 *
 * @param id     job id
 * @param status target status; must be NORMAL or STOP
 * @throws SchedulerException if Quartz fails to resume/pause the job
 */
@Override
@Transactional(rollbackFor = Exception.class)
public void updateJobStatus(Long id, Integer status) throws SchedulerException {
    // Validate the target status: only NORMAL and STOP are allowed
    if (!containsAny(status, JobStatusEnum.NORMAL.getStatus(), JobStatusEnum.STOP.getStatus())) {
        throw exception(JOB_CHANGE_STATUS_INVALID);
    }
    // Validate that the job exists
    JobDO job = validateJobExists(id);
    // Reject a no-op transition to the job's current status
    if (job.getStatus().equals(status)) {
        throw exception(JOB_CHANGE_STATUS_EQUALS);
    }

    // Persist the new job status
    JobDO updateObj = JobDO.builder().id(id).status(status).build();
    jobMapper.updateById(updateObj);

    // Propagate the status change to the Quartz scheduler
    if (JobStatusEnum.NORMAL.getStatus().equals(status)) {
        // resume
        schedulerManager.resumeJob(job.getHandlerName());
    } else {
        // pause
        schedulerManager.pauseJob(job.getHandlerName());
    }
}
@Test
public void testUpdateJobStatus_normalSuccess() throws SchedulerException {
    // mock data: a job currently in STOP status
    JobDO job = randomPojo(JobDO.class, o -> o.setStatus(JobStatusEnum.STOP.getStatus()));
    jobMapper.insert(job);
    // invoke: switch it to NORMAL
    jobService.updateJobStatus(job.getId(), JobStatusEnum.NORMAL.getStatus());
    // verify the persisted record now carries the NORMAL status
    JobDO dbJob = jobMapper.selectById(job.getId());
    assertEquals(JobStatusEnum.NORMAL.getStatus(), dbJob.getStatus());
    // verify the job was resumed in the scheduler
    verify(schedulerManager).resumeJob(eq(job.getHandlerName()));
}
@Override
public boolean evaluate(Map<String, Object> values) {
    // A predicate whose field is absent from the input can never match.
    if (!values.containsKey(name)) {
        return false;
    }
    logger.debug("found matching parameter, evaluating... ");
    return evaluation(values.get(name));
}
@Test
void evaluateStringIsNotMissing() {
    // NOTE(review): the predicate is built inside the lambda, so either the
    // construction or the evaluation of IS_NOT_MISSING with a String value is
    // expected to raise IllegalArgumentException — confirm which.
    assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> {
        Object value = "43";
        KiePMMLSimplePredicate kiePMMLSimplePredicate = getKiePMMLSimplePredicate(OPERATOR.IS_NOT_MISSING, value);
        Map<String, Object> inputData = new HashMap<>();
        inputData.put(SIMPLE_PREDICATE_NAME, value);
        kiePMMLSimplePredicate.evaluate(inputData);
    });
}
/**
 * Delegates the lookup of {@code key} directly to the wrapped store and
 * returns whatever raw value it yields.
 */
@Override
public byte[] get(final Bytes key) {
    return wrapped().get(key);
}
@Test
public void shouldReturnNullOnGetWhenDoesntExist() {
    // Looking up a key that was never stored must yield null, not an exception.
    assertThat(store.get(hello), is(nullValue()));
}
/**
 * Builds the Derby paged query that fetches config rows filtered by tenant
 * (LIKE) and exact app name, using OFFSET ... FETCH NEXT pagination taken
 * from the mapper context.
 */
@Override
public MapperResult findConfigInfoByAppFetchRows(MapperContext context) {
    final String appName = (String) context.getWhereParameter(FieldConstant.APP_NAME);
    final String tenantId = (String) context.getWhereParameter(FieldConstant.TENANT_ID);
    final String select =
        "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE tenant_id LIKE ? AND app_name = ?";
    final String pagination =
        " OFFSET " + context.getStartRow() + " ROWS FETCH NEXT " + context.getPageSize() + " ROWS ONLY";
    return new MapperResult(select + pagination, CollectionUtils.list(tenantId, appName));
}
@Test
void testFindConfigInfoByAppFetchRows() {
    MapperResult mapperResult = configInfoMapperByDerby.findConfigInfoByAppFetchRows(context);
    // Derby pagination must use OFFSET ... ROWS FETCH NEXT ... ROWS ONLY
    assertEquals(mapperResult.getSql(),
            "SELECT ID,data_id,group_id,tenant_id,app_name,content FROM config_info WHERE tenant_id LIKE"
                    + " ? AND app_name = ? OFFSET " + startRow + " ROWS FETCH NEXT " + pageSize + " ROWS ONLY");
    // Bind parameters must come back in (tenantId, appName) order, matching the placeholders
    assertArrayEquals(new Object[] {tenantId, appName}, mapperResult.getParamList().toArray());
}
/**
 * Returns whether the given metric key is known. Unknown keys are logged at
 * debug level only once each, to avoid flooding the log with repeats.
 */
@Override
public boolean validate(String metricKey) {
    Metric metric = metricByKey.get(metricKey);
    if (metric != null) {
        return true;
    }
    // Unknown metric: log the rejection only the first time we see this key.
    if (!alreadyLoggedMetricKeys.contains(metricKey)) {
        LOG.debug("The metric '{}' is ignored and should not be send in the batch report", metricKey);
        alreadyLoggedMetricKeys.add(metricKey);
    }
    return false;
}
@Test
public void not_validate_metric() {
    // With no known metrics, any key must be rejected and the rejection logged.
    when(scannerMetrics.getMetrics()).thenReturn(Collections.emptySet());
    ReportMetricValidator validator = new ReportMetricValidatorImpl(scannerMetrics);

    assertThat(validator.validate(METRIC_KEY)).isFalse();
    assertThat(logTester.logs()).contains(expectedLog);
}
/**
 * Lists tables of the given data source, optionally filtered by a name substring
 * and/or a comment substring. A blank filter matches everything.
 *
 * @param dataSourceConfigId data source configuration id
 * @param nameLike           substring a table name must contain, or empty for all
 * @param commentLike        substring a table comment must contain, or empty for all
 */
@Override
public List<TableInfo> getTableList(Long dataSourceConfigId, String nameLike, String commentLike) {
    List<TableInfo> tables = getTableList0(dataSourceConfigId, null);
    return tables.stream()
            .filter(tableInfo ->
                    (StrUtil.isEmpty(nameLike)
                            // Bug fix: guard against a null table name before contains()
                            || (tableInfo.getName() != null && tableInfo.getName().contains(nameLike)))
                    && (StrUtil.isEmpty(commentLike)
                            // Bug fix: tables without a comment previously caused an NPE here
                            || (tableInfo.getComment() != null && tableInfo.getComment().contains(commentLike))))
            .collect(Collectors.toList());
}
@Test
public void testGetTableList() {
    // prepare parameters
    Long dataSourceConfigId = randomLongId();
    // mock an in-memory H2 data source configuration
    DataSourceConfigDO dataSourceConfig = new DataSourceConfigDO().setUsername("sa").setPassword("")
            .setUrl("jdbc:h2:mem:testdb");
    when(dataSourceConfigService.getDataSourceConfig(eq(dataSourceConfigId)))
            .thenReturn(dataSourceConfig);
    // invoke with a name filter and a comment filter
    List<TableInfo> tables = databaseTableService.getTableList(dataSourceConfigId, "config", "参数");
    // assert exactly one matching table is returned and its metadata is correct
    assertEquals(1, tables.size());
    assertTableInfo(tables.get(0));
}
/**
 * Builds a {@link JibContainerBuilder} for a JAR artifact: picks the base image
 * (explicit {@code --from} wins over the processor's default), assembles the file
 * layers, and applies entrypoint plus all common container configuration options.
 *
 * NOTE(review): an identical copy of this method appears elsewhere in this file —
 * consider deduplicating.
 *
 * @throws IOException                    if creating the layers fails
 * @throws InvalidImageReferenceException if the base image reference is invalid
 */
public static JibContainerBuilder toJibContainerBuilder(
    ArtifactProcessor processor,
    Jar jarOptions,
    CommonCliOptions commonCliOptions,
    CommonContainerConfigCliOptions commonContainerConfigCliOptions,
    ConsoleLogger logger)
    throws IOException, InvalidImageReferenceException {
  // Base image: explicit --from wins, otherwise derive a default from the processor.
  String imageReference =
      commonContainerConfigCliOptions.getFrom().orElseGet(() -> getDefaultBaseImage(processor));
  JibContainerBuilder containerBuilder =
      ContainerBuilders.create(imageReference, Collections.emptySet(), commonCliOptions, logger);
  List<FileEntriesLayer> layers = processor.createLayers();
  // A user-supplied entrypoint overrides the computed one.
  List<String> customEntrypoint = commonContainerConfigCliOptions.getEntrypoint();
  List<String> entrypoint =
      customEntrypoint.isEmpty()
          ? processor.computeEntrypoint(jarOptions.getJvmFlags())
          : customEntrypoint;
  containerBuilder
      .setEntrypoint(entrypoint)
      .setFileEntriesLayers(layers)
      .setExposedPorts(commonContainerConfigCliOptions.getExposedPorts())
      .setVolumes(commonContainerConfigCliOptions.getVolumes())
      .setEnvironment(commonContainerConfigCliOptions.getEnvironment())
      .setLabels(commonContainerConfigCliOptions.getLabels())
      .setProgramArguments(commonContainerConfigCliOptions.getProgramArguments());
  // Optional settings are only applied when present.
  commonContainerConfigCliOptions.getUser().ifPresent(containerBuilder::setUser);
  commonContainerConfigCliOptions.getFormat().ifPresent(containerBuilder::setFormat);
  commonContainerConfigCliOptions.getCreationTime().ifPresent(containerBuilder::setCreationTime);
  return containerBuilder;
}
@Test
public void testToJibContainerBuilder_explodedLayeredSpringBoot_basicInfo()
    throws IOException, InvalidImageReferenceException {
  // An exploded layered Spring Boot jar on Java 8, with no --from given:
  // the default base image and the computed JarLauncher entrypoint are expected.
  when(mockSpringBootExplodedProcessor.getJavaVersion()).thenReturn(8);
  FileEntriesLayer layer =
      FileEntriesLayer.builder()
          .setName("classes")
          .addEntry(
              Paths.get("path/to/tempDirectory/BOOT-INF/classes/class1.class"),
              AbsoluteUnixPath.get("/app/BOOT-INF/classes/class1.class"))
          .build();
  // Fix: this stubbing was previously duplicated a few lines below; stub once.
  when(mockCommonContainerConfigCliOptions.getFrom()).thenReturn(Optional.empty());
  when(mockSpringBootExplodedProcessor.createLayers()).thenReturn(Arrays.asList(layer));
  when(mockSpringBootExplodedProcessor.computeEntrypoint(anyList()))
      .thenReturn(
          ImmutableList.of("java", "-cp", "/app", "org.springframework.boot.loader.JarLauncher"));

  JibContainerBuilder containerBuilder =
      JarFiles.toJibContainerBuilder(
          mockSpringBootExplodedProcessor,
          mockJarCommand,
          mockCommonCliOptions,
          mockCommonContainerConfigCliOptions,
          mockLogger);
  ContainerBuildPlan buildPlan = containerBuilder.toContainerBuildPlan();

  assertThat(buildPlan.getBaseImage()).isEqualTo("eclipse-temurin:8-jre");
  assertThat(buildPlan.getPlatforms()).isEqualTo(ImmutableSet.of(new Platform("amd64", "linux")));
  assertThat(buildPlan.getCreationTime()).isEqualTo(Instant.EPOCH);
  assertThat(buildPlan.getFormat()).isEqualTo(ImageFormat.Docker);
  // No extra container configuration was supplied, so all of these stay empty/null.
  assertThat(buildPlan.getEnvironment()).isEmpty();
  assertThat(buildPlan.getLabels()).isEmpty();
  assertThat(buildPlan.getVolumes()).isEmpty();
  assertThat(buildPlan.getExposedPorts()).isEmpty();
  assertThat(buildPlan.getUser()).isNull();
  assertThat(buildPlan.getWorkingDirectory()).isNull();
  assertThat(buildPlan.getEntrypoint())
      .containsExactly("java", "-cp", "/app", "org.springframework.boot.loader.JarLauncher")
      .inOrder();
  assertThat(buildPlan.getLayers()).hasSize(1);
  assertThat(buildPlan.getLayers().get(0).getName()).isEqualTo("classes");
  assertThat(((FileEntriesLayer) buildPlan.getLayers().get(0)).getEntries())
      .containsExactlyElementsIn(
          FileEntriesLayer.builder()
              .addEntry(
                  Paths.get("path/to/tempDirectory/BOOT-INF/classes/class1.class"),
                  AbsoluteUnixPath.get("/app/BOOT-INF/classes/class1.class"))
              .build()
              .getEntries());
}
/**
 * Registers a new Hive partition in the metastore for an ALTER TABLE ADD PARTITION
 * statement. Only single-item list partition specs are handled; anything else is
 * silently ignored. The partition location is derived from the table location as
 * {@code <tableLocation>/<col>=<value>} (a single partition column is assumed —
 * only partitionColumns.get(0) is used).
 */
private void addPartition(AlterTableStmt stmt, AlterClause alterClause) {
    HiveTable table = (HiveTable) getTable(stmt.getDbName(), stmt.getTableName());
    AddPartitionClause addPartitionClause = (AddPartitionClause) alterClause;
    List<String> partitionColumns = table.getPartitionColumnNames();
    // now do not support to specify location of hive partition in add partition
    if (!(addPartitionClause.getPartitionDesc() instanceof SingleItemListPartitionDesc)) {
        return;
    }
    SingleItemListPartitionDesc partitionDesc = (SingleItemListPartitionDesc) addPartitionClause.getPartitionDesc();

    // Derive the partition directory under the table location.
    String tablePath = table.getTableLocation();
    String partitionString = partitionColumns.get(0) + "=" + partitionDesc.getValues().get(0);
    String partitionPath = tablePath + "/" + partitionString;

    HivePartition hivePartition = HivePartition.builder()
            .setDatabaseName(table.getDbName())
            .setTableName(table.getTableName())
            .setColumns(table.getDataColumnNames().stream()
                    .map(table::getColumn)
                    .collect(Collectors.toList()))
            .setValues(partitionDesc.getValues())
            // Tag the partition with the creating StarRocks version and query id.
            .setParameters(ImmutableMap.<String, String>builder()
                    .put("starrocks_version", Version.STARROCKS_VERSION + "-" + Version.STARROCKS_COMMIT_HASH)
                    .put(STARROCKS_QUERY_ID, ConnectContext.get().getQueryId().toString())
                    .buildOrThrow())
            .setStorageFormat(table.getStorageFormat())
            .setLocation(partitionPath)
            .build();

    // New partitions start out with empty statistics.
    HivePartitionWithStats partitionWithStats =
            new HivePartitionWithStats(partitionString, hivePartition, HivePartitionStats.empty());
    hmsOps.addPartitions(table.getDbName(), table.getTableName(), Lists.newArrayList(partitionWithStats));
}
@Test(expected = StarRocksConnectorException.class)
public void testAddPartition() throws Exception {
    // First finishSink call succeeds with mocked metastore/remote-file operations;
    // the second runs with addPartitions mocked to throw, which must propagate as
    // StarRocksConnectorException (declared via expected above).
    String stagingDir = "hdfs://127.0.0.1:10000/tmp/starrocks/queryid";
    THiveFileInfo fileInfo = new THiveFileInfo();
    fileInfo.setFile_name("myfile.parquet");
    fileInfo.setPartition_path("hdfs://127.0.0.1:10000/tmp/starrocks/queryid/col1=2");
    fileInfo.setRecord_count(10);
    fileInfo.setFile_size_in_bytes(100);
    TSinkCommitInfo tSinkCommitInfo = new TSinkCommitInfo();
    tSinkCommitInfo.setStaging_dir(stagingDir);
    tSinkCommitInfo.setIs_overwrite(false);
    tSinkCommitInfo.setHive_file_info(fileInfo);

    // Force the "partition does not exist yet" code path.
    new MockUp<HiveMetastoreOperations>() {
        @Mock
        public boolean partitionExists(String dbName, String tableName, List<String> partitionValues) {
            return false;
        }
    };
    // Skip the actual HDFS directory rename.
    new MockUp<RemoteFileOperations>() {
        @Mock
        public void renameDirectory(Path source, Path target, Runnable runWhenPathNotExist) {
        }
    };
    AnalyzeTestUtil.init();
    hiveMetadata.finishSink("hive_db", "hive_table", Lists.newArrayList(tSinkCommitInfo), null);

    // Now make partition registration fail and run the sink again.
    new MockUp<HiveMetastoreOperations>() {
        @Mock
        public void addPartitions(String dbName, String tableName, List<HivePartitionWithStats> partitions) {
            throw new StarRocksConnectorException("add partition failed");
        }
    };
    hiveMetadata.finishSink("hive_db", "hive_table", Lists.newArrayList(tSinkCommitInfo), null);
}
/**
 * SQL scalar function {@code tan(x)}: trigonometric tangent of {@code x},
 * where {@code x} is an angle in radians (delegates to {@link Math#tan}).
 */
@Description("tangent")
@ScalarFunction
@SqlType(StandardTypes.DOUBLE)
public static double tan(@SqlType(StandardTypes.DOUBLE) double num) {
    return Math.tan(num);
}
@Test
public void testTan() {
    // Both DOUBLE and REAL inputs must produce Math.tan over the (widened) double.
    for (double doubleValue : DOUBLE_VALUES) {
        assertFunction("tan(" + doubleValue + ")", DOUBLE, Math.tan(doubleValue));
        assertFunction("tan(REAL '" + (float) doubleValue + "')", DOUBLE, Math.tan((float) doubleValue));
    }
    // NULL input propagates to a NULL result.
    assertFunction("tan(NULL)", DOUBLE, null);
}
/**
 * FEEL date(from: string) built-in: parses the given lexical date string into a
 * LocalDate. Returns an InvalidParametersEvent error result for null input, for a
 * year not compliant with XML Schema Part 2 Datatypes, or (via
 * manageDateTimeException) for any other parse failure.
 */
public FEELFnResult<TemporalAccessor> invoke(@ParameterName( "from" ) String val) {
    if ( val == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "cannot be null"));
    }
    // please notice the regex strictly requires the beginning, so we can use find.
    if (!BEGIN_YEAR.matcher(val).find()) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "from", "year not compliant with XML Schema Part 2 Datatypes"));
    }

    try {
        return FEELFnResult.ofResult(LocalDate.from(FEEL_DATE.parse(val)));
    } catch (DateTimeException e) {
        // Delegate detailed error classification to the shared handler.
        return manageDateTimeException(e, val);
    }
}
@Test
void invokeParamTemporal() {
    // A LocalDate argument is expected to come back unchanged (identity for
    // temporal input to the date() builtin).
    FunctionTestUtil.assertResult(dateFunction.invoke(LocalDate.of(2017, 6, 12)), LocalDate.of(2017, 6, 12));
}
/**
 * Looks up the state cell for the given namespace and tag in the per-work-item
 * state table, passing a null state context.
 */
@Override
public <T extends State> T state(StateNamespace namespace, StateTag<T> address) {
    return workItemState.get(namespace, address, StateContexts.nullContext());
}
@Test
public void testOrderedListClearBeforeRead() throws Exception {
    // After clear(), the ordered-list state is fully known locally, so reads and
    // range reads must be answered without touching the backing reader.
    StateTag<OrderedListState<String>> addr = StateTags.orderedList("orderedList", StringUtf8Coder.of());
    OrderedListState<String> orderedListState = underTest.state(NAMESPACE, addr);
    final TimestampedValue<String> helloElement = TimestampedValue.of("hello", Instant.ofEpochSecond(1));
    orderedListState.clear();
    orderedListState.add(helloElement);
    assertThat(orderedListState.read(), Matchers.containsInAnyOrder(helloElement));
    // Shouldn't need to read from windmill for this.
    // NOTE(review): verifyZeroInteractions is deprecated in recent Mockito in
    // favour of verifyNoInteractions — migrate when the Mockito version allows.
    Mockito.verifyZeroInteractions(mockReader);

    assertThat(
        orderedListState.readRange(Instant.ofEpochSecond(1), Instant.ofEpochSecond(2)),
        Matchers.containsInAnyOrder(helloElement));
    // Shouldn't need to read from windmill for this.
    Mockito.verifyZeroInteractions(mockReader);

    // Ranges not covering the single added element must come back empty,
    // still without any reader interaction.
    assertThat(
        orderedListState.readRange(Instant.ofEpochSecond(100), Instant.ofEpochSecond(200)),
        Matchers.emptyIterable());
    assertThat(
        orderedListState.readRange(Instant.EPOCH, Instant.ofEpochSecond(1)),
        Matchers.emptyIterable());
    Mockito.verifyZeroInteractions(mockReader);
}
/**
 * Creates a {@code KTable} for the given topic, materialized into a key-value
 * store. The serdes configured on {@code consumed} are propagated onto the
 * materialization before the table is built. All three arguments are mandatory.
 */
public synchronized <K, V> KTable<K, V> table(final String topic,
                                              final Consumed<K, V> consumed,
                                              final Materialized<K, V, KeyValueStore<Bytes, byte[]>> materialized) {
    Objects.requireNonNull(topic, "topic can't be null");
    Objects.requireNonNull(consumed, "consumed can't be null");
    Objects.requireNonNull(materialized, "materialized can't be null");

    final ConsumedInternal<K, V> consumedWrapper = new ConsumedInternal<>(consumed);
    // Carry the consumption serdes over to the state store configuration.
    materialized.withKeySerde(consumedWrapper.keySerde()).withValueSerde(consumedWrapper.valueSerde());

    // The topic name seeds the generated store/processor names.
    final MaterializedInternal<K, V, KeyValueStore<Bytes, byte[]>> materializedWrapper =
        new MaterializedInternal<>(materialized, internalStreamsBuilder, topic + "-");

    return internalStreamsBuilder.table(topic, consumedWrapper, materializedWrapper);
}
@Test
public void shouldNotAllowTablesFromSameTopic() {
    // Registering two KTables on the same topic is only rejected at build() time.
    builder.table("topic");
    builder.table("topic");
    assertThrows(TopologyException.class, builder::build);
}
/**
 * Counts occurrences of {@code ch} in {@code s} by delegating to the bounded
 * overload with {@code -1} — presumably meaning "no limit"; confirm against the
 * three-argument overload's contract.
 */
public static int countChar(String s, char ch) {
    return countChar(s, ch, -1);
}
@Test
public void testCountChar() {
    assertEquals(0, StringHelper.countChar("Hello World", 'x'));
    assertEquals(1, StringHelper.countChar("Hello World", 'e'));
    assertEquals(3, StringHelper.countChar("Hello World", 'l'));
    assertEquals(1, StringHelper.countChar("Hello World", ' '));
    // Edge cases: empty and null input must both count as zero, not throw.
    assertEquals(0, StringHelper.countChar("", ' '));
    assertEquals(0, StringHelper.countChar(null, ' '));
}
public void validate(OptionRule rule) { List<RequiredOption> requiredOptions = rule.getRequiredOptions(); for (RequiredOption requiredOption : requiredOptions) { validate(requiredOption); for (Option<?> option : requiredOption.getOptions()) { if (SingleChoiceOption.class.isAssignableFrom(option.getClass())) { // is required option and not match condition, skip validate if (isConditionOption(requiredOption) && !matchCondition( (RequiredOption.ConditionalRequiredOptions) requiredOption)) { continue; } validateSingleChoice(option); } } } for (Option option : rule.getOptionalOptions()) { if (SingleChoiceOption.class.isAssignableFrom(option.getClass())) { validateSingleChoice(option); } } }
@Test
public void testSimpleExclusiveRequiredOptions() {
    // Two mutually exclusive options: exactly one of them must be configured.
    OptionRule rule = OptionRule.builder().exclusive(TEST_TOPIC_PATTERN, TEST_TOPIC).build();
    Map<String, Object> config = new HashMap<>();
    Executable executable = () -> validate(config, rule);
    // all absent
    assertEquals(
            "ErrorCode:[API-02], ErrorDescription:[Option item validate failed] - There are unconfigured options, these options('option.topic-pattern', 'option.topic') are mutually exclusive,"
                    + " allowing only one set(\"[] for a set\") of options to be configured.",
            assertThrows(OptionValidationException.class, executable).getMessage());
    // only one present
    config.put(TEST_TOPIC_PATTERN.key(), "asuka");
    Assertions.assertDoesNotThrow(executable);
    // present > 1
    config.put(TEST_TOPIC.key(), "[\"saitou\"]");
    assertEquals(
            "ErrorCode:[API-02], ErrorDescription:[Option item validate failed] - These options('option.topic-pattern', 'option.topic') are mutually exclusive, "
                    + "allowing only one set(\"[] for a set\") of options to be configured.",
            assertThrows(OptionValidationException.class, executable).getMessage());
}
/**
 * Dispatches the given schema to the visitor method registered for its type in
 * the static handler table, returning the visitor's result.
 *
 * @throws UnsupportedOperationException when no handler exists for the schema type
 */
@SuppressWarnings("unchecked")
public static <S, F> S visit(final Schema schema, final Visitor<S, F> visitor) {
    final BiFunction<Visitor<?, ?>, Schema, Object> typeHandler = HANDLER.get(schema.type());
    if (typeHandler == null) {
        throw new UnsupportedOperationException("Unsupported schema type: " + schema.type());
    }
    final Object result = typeHandler.apply(visitor, schema);
    return (S) result;
}
@Test
public void shouldVisitFloat32() {
    // Given:
    final Schema schema = Schema.OPTIONAL_FLOAT32_SCHEMA;
    when(visitor.visitFloat32(any())).thenReturn("Expected");

    // When:
    final String result = SchemaWalker.visit(schema, visitor);

    // Then: dispatched to visitFloat32 with the very same schema instance.
    verify(visitor).visitFloat32(same(schema));
    assertThat(result, is("Expected"));
}
/**
 * Creates the {@link RetryRegistry} Spring bean: builds the registry from the
 * configuration properties, attaches the event consumer registry, and creates a
 * retry instance for every configured entry.
 */
@Bean
public RetryRegistry retryRegistry(RetryConfigurationProperties retryConfigurationProperties,
    EventConsumerRegistry<RetryEvent> retryEventConsumerRegistry,
    RegistryEventConsumer<Retry> retryRegistryEventConsumer,
    @Qualifier("compositeRetryCustomizer") CompositeCustomizer<RetryConfigCustomizer> compositeRetryCustomizer) {
    RetryRegistry retryRegistry = createRetryRegistry(retryConfigurationProperties,
        retryRegistryEventConsumer, compositeRetryCustomizer);
    registerEventConsumer(retryRegistry, retryEventConsumerRegistry, retryConfigurationProperties);
    // Eagerly instantiate each configured retry (presumably so configuration
    // errors surface at startup rather than on first use — confirm).
    retryConfigurationProperties.getInstances()
        .forEach((name, properties) -> retryRegistry.retry(name, retryConfigurationProperties
            .createRetryConfig(name, compositeRetryCustomizer)));
    return retryRegistry;
}
@Test
public void testCreateRetryRegistryWithUnknownConfig() {
    // An instance referencing a base config that was never defined must fail
    // fast with ConfigurationNotFoundException, naming the missing config.
    RetryConfigurationProperties retryConfigurationProperties = new RetryConfigurationProperties();
    InstanceProperties instanceProperties = new InstanceProperties();
    instanceProperties.setBaseConfig("unknownConfig");
    retryConfigurationProperties.getInstances().put("backend", instanceProperties);
    RetryConfiguration retryConfiguration = new RetryConfiguration();
    DefaultEventConsumerRegistry<RetryEvent> eventConsumerRegistry = new DefaultEventConsumerRegistry<>();

    assertThatThrownBy(() -> retryConfiguration
        .retryRegistry(retryConfigurationProperties, eventConsumerRegistry,
            new CompositeRegistryEventConsumer<>(emptyList()), compositeRetryCustomizerTest()))
        .isInstanceOf(ConfigurationNotFoundException.class)
        .hasMessage("Configuration with name 'unknownConfig' does not exist");
}
/**
 * Handles a cluster terminate request: checks the server is configured, validates
 * the delete-topic patterns, validates the streams properties against the deny
 * list, and hands the terminate statement to the handler. Any failure inside the
 * try block is mapped to a server-error response instead of being propagated.
 */
public EndpointResponse terminateCluster(
    final KsqlSecurityContext securityContext,
    final ClusterTerminateRequest request
) {
  LOG.info("Received: " + request);
  throwIfNotConfigured();
  // Reject invalid topic delete patterns before doing any work.
  ensureValidPatterns(request.getDeleteTopicList());
  try {
    final Map<String, Object> streamsProperties = request.getStreamsProperties();
    // Disallow any property on the deny list.
    denyListPropertyValidator.validateAll(streamsProperties);

    final KsqlEntityList entities = handler.execute(
        securityContext,
        TERMINATE_CLUSTER,
        new SessionProperties(
            streamsProperties,
            localHost,
            localUrl,
            false
        )
    );
    return EndpointResponse.ok(entities);
  } catch (final Exception e) {
    // Attach the terminate statement text to the error response for diagnostics.
    return Errors.serverErrorForStatement(
        e, TerminateCluster.TERMINATE_CLUSTER_STATEMENT_TEXT, new KsqlEntityList());
  }
}
@Test
public void shouldFailIfCannotWriteTerminateCommand() {
    // Given: the command store rejects the enqueue of the terminate command.
    when(commandStore.enqueueCommand(any(), any(), any(Producer.class)))
        .thenThrow(new KsqlException(""));

    // When:
    final EndpointResponse response = ksqlResource.terminateCluster(
        securityContext,
        VALID_TERMINATE_REQUEST
    );

    // Then: a 500 carrying a statement error message for TERMINATE CLUSTER.
    assertThat(response.getStatus(), equalTo(500));
    assertThat(response.getEntity().toString(), CoreMatchers
        .startsWith("Could not write the statement 'TERMINATE CLUSTER;' into the command "));
    assertThat(response.getEntity(), instanceOf(KsqlStatementErrorMessage.class));
    final KsqlStatementErrorMessage entity = (KsqlStatementErrorMessage) response.getEntity();
    assertThat(entity.getStatementText(), containsString("TERMINATE CLUSTER"));
}
/**
 * Validates the given plain access config and converts it into a
 * {@link PlainAccessResource}, resolving the remote-address matching
 * strategy from the config's white-remote-address entry.
 *
 * @throws AclException if the config fails validation in checkPlainAccessConfig
 */
public PlainAccessResource buildPlainAccessResource(PlainAccessConfig plainAccessConfig) throws AclException {
    checkPlainAccessConfig(plainAccessConfig);
    return PlainAccessResource.build(plainAccessConfig, remoteAddressStrategyFactory.
        getRemoteAddressStrategy(plainAccessConfig.getWhiteRemoteAddress()));
}
// Expects buildPlainAccessResource to reject the config when the access key
// is "123" — presumably because it is shorter than the minimum allowed
// length (TODO confirm against checkPlainAccessConfig).
@Test(expected = AclException.class)
public void accountThanTest() {
    plainAccessConfig.setAccessKey("123");
    plainPermissionManager.buildPlainAccessResource(plainAccessConfig);
}
// Constant-folding implementation of weeks_add(DATETIME, INT): returns the
// datetime shifted forward by the given number of weeks (negative values
// shift backwards, per LocalDateTime.plusWeeks semantics).
@ConstantFunction(name = "weeks_add", argTypes = {DATETIME, INT}, returnType = DATETIME, isMonotonic = true)
public static ConstantOperator weeksAdd(ConstantOperator date, ConstantOperator week) {
    return ConstantOperator.createDatetimeOrNull(date.getDatetime().plusWeeks(week.getInt()));
}
@Test
public void weeksAdd() {
    // 2015-03-23 09:23:55 plus 10 weeks lands on 2015-06-01 09:23:55.
    String actual = ScalarOperatorFunctions.weeksAdd(O_DT_20150323_092355, O_INT_10)
            .getDatetime()
            .toString();
    assertEquals("2015-06-01T09:23:55", actual);
}
/**
 * Builds a {@link JibContainerBuilder} for containerizing a JAR.
 *
 * <p>The base image comes from {@code --from} when given, otherwise from a
 * default derived from the processor. The entrypoint is either the
 * user-supplied one or one computed by the processor from the JAR and JVM
 * flags. Optional user/format/creation-time settings are applied only when
 * present.
 *
 * @throws IOException if creating layers fails
 * @throws InvalidImageReferenceException if the base image reference is invalid
 */
public static JibContainerBuilder toJibContainerBuilder(
    ArtifactProcessor processor,
    Jar jarOptions,
    CommonCliOptions commonCliOptions,
    CommonContainerConfigCliOptions commonContainerConfigCliOptions,
    ConsoleLogger logger)
    throws IOException, InvalidImageReferenceException {
  // Explicit --from wins; otherwise pick a default base image for the processor.
  String imageReference =
      commonContainerConfigCliOptions.getFrom().orElseGet(() -> getDefaultBaseImage(processor));
  JibContainerBuilder containerBuilder =
      ContainerBuilders.create(imageReference, Collections.emptySet(), commonCliOptions, logger);

  List<FileEntriesLayer> layers = processor.createLayers();
  // A user-specified entrypoint overrides the computed one entirely.
  List<String> customEntrypoint = commonContainerConfigCliOptions.getEntrypoint();
  List<String> entrypoint =
      customEntrypoint.isEmpty()
          ? processor.computeEntrypoint(jarOptions.getJvmFlags())
          : customEntrypoint;

  containerBuilder
      .setEntrypoint(entrypoint)
      .setFileEntriesLayers(layers)
      .setExposedPorts(commonContainerConfigCliOptions.getExposedPorts())
      .setVolumes(commonContainerConfigCliOptions.getVolumes())
      .setEnvironment(commonContainerConfigCliOptions.getEnvironment())
      .setLabels(commonContainerConfigCliOptions.getLabels())
      .setProgramArguments(commonContainerConfigCliOptions.getProgramArguments());
  // Optional settings: only applied when the user provided them.
  commonContainerConfigCliOptions.getUser().ifPresent(containerBuilder::setUser);
  commonContainerConfigCliOptions.getFormat().ifPresent(containerBuilder::setFormat);
  commonContainerConfigCliOptions.getCreationTime().ifPresent(containerBuilder::setCreationTime);
  return containerBuilder;
}
@Test
public void testToJibContainerBuilder_packagedSpringBoot_basicInfo()
    throws IOException, InvalidImageReferenceException {
  // Given: a packaged Spring Boot processor targeting Java 8 with a single
  // jar layer and a computed entrypoint, and no --from override.
  when(mockSpringBootPackagedProcessor.getJavaVersion()).thenReturn(8);
  FileEntriesLayer layer =
      FileEntriesLayer.builder()
          .setName("jar")
          .addEntry(
              Paths.get("path/to/spring-boot.jar"), AbsoluteUnixPath.get("/app/spring-boot.jar"))
          .build();
  when(mockSpringBootPackagedProcessor.createLayers()).thenReturn(Arrays.asList(layer));
  when(mockSpringBootPackagedProcessor.computeEntrypoint(anyList()))
      .thenReturn(ImmutableList.of("java", "-jar", "/app/spring-boot.jar"));
  when(mockCommonContainerConfigCliOptions.getFrom()).thenReturn(Optional.empty());

  // When: building the container plan.
  JibContainerBuilder containerBuilder =
      JarFiles.toJibContainerBuilder(
          mockSpringBootPackagedProcessor,
          mockJarCommand,
          mockCommonCliOptions,
          mockCommonContainerConfigCliOptions,
          mockLogger);
  ContainerBuildPlan buildPlan = containerBuilder.toContainerBuildPlan();

  // Then: defaults apply everywhere except the jar layer and entrypoint.
  assertThat(buildPlan.getBaseImage()).isEqualTo("eclipse-temurin:8-jre");
  assertThat(buildPlan.getPlatforms()).isEqualTo(ImmutableSet.of(new Platform("amd64", "linux")));
  assertThat(buildPlan.getCreationTime()).isEqualTo(Instant.EPOCH);
  assertThat(buildPlan.getFormat()).isEqualTo(ImageFormat.Docker);
  assertThat(buildPlan.getEnvironment()).isEmpty();
  assertThat(buildPlan.getLabels()).isEmpty();
  assertThat(buildPlan.getVolumes()).isEmpty();
  assertThat(buildPlan.getExposedPorts()).isEmpty();
  assertThat(buildPlan.getUser()).isNull();
  assertThat(buildPlan.getWorkingDirectory()).isNull();
  assertThat(buildPlan.getEntrypoint())
      .containsExactly("java", "-jar", "/app/spring-boot.jar")
      .inOrder();
  assertThat(buildPlan.getLayers()).hasSize(1);
  assertThat(buildPlan.getLayers().get(0).getName()).isEqualTo("jar");
  assertThat(((FileEntriesLayer) buildPlan.getLayers().get(0)).getEntries())
      .isEqualTo(
          FileEntriesLayer.builder()
              .addEntry(
                  Paths.get("path/to/spring-boot.jar"),
                  AbsoluteUnixPath.get("/app/spring-boot.jar"))
              .build()
              .getEntries());
}
@Override public void addEntry( String filename, String extension ) throws IOException { // remove folder hierarchy int index = filename.lastIndexOf( Const.FILE_SEPARATOR ); String entryPath; if ( index != -1 ) { entryPath = filename.substring( index + 1 ); } else { entryPath = filename; } // remove ZIP extension index = entryPath.toLowerCase().lastIndexOf( ".zip" ); if ( index != -1 ) { entryPath = entryPath.substring( 0, index ) + entryPath.substring( index + ".zip".length() ); } // add real extension if needed if ( !Utils.isEmpty( extension ) ) { entryPath += "." + extension; } ZipEntry zipentry = new ZipEntry( entryPath ); zipentry.setComment( "Compressed by Kettle" ); ( (ZipOutputStream) delegate ).putNextEntry( zipentry ); }
// Smoke test: adding an entry named "./test.zip" (exercising both the folder
// stripping and the ".zip" removal) and then writing bytes must not throw.
@Test
public void testAddEntryAndWrite() throws IOException {
  CompressionProvider provider = outStream.getCompressionProvider();
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  outStream = new ZIPCompressionOutputStream( out, provider );

  outStream.addEntry( "./test.zip", null );
  outStream.write( "Test".getBytes() );
}
// Returns the precomputed fingerprint of this engine's stream/rule
// configuration; engines built from identical configurations share the
// same fingerprint.
public String getFingerprint() {
    return fingerprint;
}
@Test
public void testGetFingerprint() {
    // Two engines built from the same stream/rule configuration must share a
    // fingerprint; an engine built from a different configuration must not.
    final StreamMock stream1 = getStreamMock("test");
    final StreamRuleMock rule1 = new StreamRuleMock(ImmutableMap.of(
            "_id", new ObjectId(),
            "field", "testfield1",
            "type", StreamRuleType.PRESENCE.toInteger(),
            "stream_id", stream1.getId()
    ));
    final StreamRuleMock rule2 = new StreamRuleMock(ImmutableMap.of(
            "_id", new ObjectId(),
            "field", "testfield2",
            "value", "^test",
            "type", StreamRuleType.REGEX.toInteger(),
            "stream_id", stream1.getId()
    ));

    stream1.setStreamRules(Lists.newArrayList(rule1, rule2));

    final StreamMock stream2 = getStreamMock("test");
    final StreamRuleMock rule3 = new StreamRuleMock(ImmutableMap.of(
            "_id", new ObjectId(),
            "field", "testfield",
            "value", "^test",
            "type", StreamRuleType.REGEX.toInteger(),
            "stream_id", stream2.getId()
    ));

    stream2.setStreamRules(Lists.newArrayList(rule3));

    final StreamRouterEngine engine1 = newEngine(Lists.newArrayList(stream1));
    final StreamRouterEngine engine2 = newEngine(Lists.newArrayList(stream1));
    final StreamRouterEngine engine3 = newEngine(Lists.newArrayList(stream2));

    assertEquals(engine1.getFingerprint(), engine2.getFingerprint());
    assertNotEquals(engine1.getFingerprint(), engine3.getFingerprint());
}
/**
 * Loads the Grok pattern with the given ID from the backing store.
 *
 * @throws NotFoundException if no pattern exists for the ID
 */
@Override
public GrokPattern load(String patternId) throws NotFoundException {
    // The store returns null for unknown IDs; translate that into an exception.
    final GrokPattern pattern = store.get(patternId);
    if (pattern != null) {
        return pattern;
    }
    throw new NotFoundException("Couldn't find Grok pattern with ID " + patternId);
}
@Test
public void load() throws Exception {
    // A saved pattern can be loaded back by its generated ID.
    final GrokPattern pattern = service.save(GrokPattern.create("NAME", ".*"));
    final GrokPattern loaded = service.load(pattern.id());
    assertThat(loaded)
            .isNotNull()
            .isEqualTo(pattern);

    // An unknown ID raises NotFoundException with the ID in the message.
    assertThatExceptionOfType(NotFoundException.class)
            .isThrownBy(() -> service.load("whatever"))
            .withMessage("Couldn't find Grok pattern with ID whatever");
}
/**
 * Resolves the node responsible for the given key on the consistent-hash
 * ring: the first node clockwise from the key's hash, wrapping around to the
 * ring's first node when the hash is past the last node.
 *
 * @param value the key to route
 * @return the node value owning this key
 * @throws CIMException with SERVER_NOT_AVAILABLE when no nodes are registered
 */
@Override
public String getFirstNodeValue(String value) {
    // Guard first: an empty ring can serve nothing.
    if (treeMap.isEmpty()) {
        throw new CIMException(StatusEnum.SERVER_NOT_AVAILABLE);
    }
    long hash = super.hash(value);
    // All nodes at or clockwise of the hash position.
    SortedMap<Long, String> tail = treeMap.tailMap(hash);
    if (!tail.isEmpty()) {
        return tail.get(tail.firstKey());
    }
    // Wrapped past the last node: fall back to the first node on the ring.
    return treeMap.firstEntry().getValue();
}
@Test
public void getFirstNodeValue() {
    AbstractConsistentHash map = new TreeMapConsistentHash() ;
    List<String> strings = new ArrayList<String>();
    for (int i = 0; i < 10; i++) {
        strings.add("127.0.0." + i) ;
    }

    // The expected node is determined by the hash function; "zhangsan" is
    // known to land on 127.0.0.2 with this ring of ten nodes.
    String process = map.process(strings,"zhangsan");
    System.out.println(process);
    Assert.assertEquals("127.0.0.2",process);
}
// Delegates plain debug messages straight to the wrapped SLF4J logger.
@Override
public void debug(String msg) {
    logger.debug(msg);
}
// A marker + three-argument debug call must pass through to the underlying
// SLF4J logger unchanged (same marker, format string, and arguments).
@Test
void testMarkerDebugWithFormat3() {
    jobRunrDashboardLogger.debug(marker, "Debug with {} {} {}", "format1", "format2", "format3");

    verify(slfLogger).debug(marker, "Debug with {} {} {}", "format1", "format2", "format3");
}
/**
 * Database-backed extension search: loads all active extensions, filters
 * them in memory (excluded namespaces, target platform, category, free-text
 * query), sorts by the requested criterion, applies sort order and paging,
 * and wraps the result page in a {@link SearchHitsImpl}.
 *
 * <p>Results are cached under CACHE_DATABASE_SEARCH; the average-review-rating
 * cache is evicted on each call.
 */
@Transactional
@Cacheable(CACHE_DATABASE_SEARCH)
@CacheEvict(value = CACHE_AVERAGE_REVIEW_RATING, allEntries = true)
public SearchHits<ExtensionSearch> search(ISearchService.Options options) {
    // grab all extensions
    var matchingExtensions = repositories.findAllActiveExtensions();

    // no extensions in the database
    if (matchingExtensions.isEmpty()) {
        return new SearchHitsImpl<>(0,TotalHitsRelation.OFF, 0f, null, null, Collections.emptyList(), null, null);
    }

    // exclude namespaces
    if(options.namespacesToExclude != null) {
        for(var namespaceToExclude : options.namespacesToExclude) {
            matchingExtensions = matchingExtensions.filter(extension -> !extension.getNamespace().getName().equals(namespaceToExclude));
        }
    }

    // filter target platform: keep extensions with at least one version for it
    if(TargetPlatform.isValid(options.targetPlatform)) {
        matchingExtensions = matchingExtensions.filter(extension -> extension.getVersions().stream().anyMatch(ev -> ev.getTargetPlatform().equals(options.targetPlatform)));
    }

    // filter category (case-insensitive, matched against the latest version)
    if (options.category != null) {
        matchingExtensions = matchingExtensions.filter(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            return latest.getCategories().stream().anyMatch(category -> category.equalsIgnoreCase(options.category));
        });
    }

    // filter text: match name, namespace, description, or display name.
    // NOTE(review): namespace matching uses contains() without lower-casing the
    // namespace itself, so it is case-sensitive on the namespace side — confirm
    // whether that asymmetry is intended.
    if (options.queryString != null) {
        matchingExtensions = matchingExtensions.filter(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            return extension.getName().toLowerCase().contains(options.queryString.toLowerCase())
                    || extension.getNamespace().getName().contains(options.queryString.toLowerCase())
                    || (latest.getDescription() != null && latest.getDescription()
                    .toLowerCase().contains(options.queryString.toLowerCase()))
                    || (latest.getDisplayName() != null && latest.getDisplayName()
                    .toLowerCase().contains(options.queryString.toLowerCase()));
        });
    }

    // need to perform the sortBy ()
    // 'relevance' | 'timestamp' | 'rating' | 'downloadCount';
    Stream<ExtensionSearch> searchEntries;
    if("relevance".equals(options.sortBy) || "rating".equals(options.sortBy)) {
        // relevance/rating need search statistics to score each entry
        var searchStats = new SearchStats(repositories);
        searchEntries = matchingExtensions.stream().map(extension -> relevanceService.toSearchEntry(extension, searchStats));
    } else {
        searchEntries = matchingExtensions.stream().map(extension -> {
            var latest = repositories.findLatestVersion(extension, null, false, true);
            var targetPlatforms = repositories.findExtensionTargetPlatforms(extension);
            return extension.toSearch(latest, targetPlatforms);
        });
    }

    // pick the comparator that matches the requested sort criterion; an
    // unknown sortBy leaves the entries unsorted
    var comparators = new HashMap<>(Map.of(
            "relevance", new RelevanceComparator(),
            "timestamp", new TimestampComparator(),
            "rating", new RatingComparator(),
            "downloadCount", new DownloadedCountComparator()
    ));

    var comparator = comparators.get(options.sortBy);
    if(comparator != null) {
        searchEntries = searchEntries.sorted(comparator);
    }

    var sortedExtensions = searchEntries.collect(Collectors.toList());

    // need to do sortOrder
    // 'asc' | 'desc';
    if ("desc".equals(options.sortOrder)) {
        // reverse the order
        Collections.reverse(sortedExtensions);
    }

    // Paging: clamp the window to the list bounds
    var totalHits = sortedExtensions.size();
    var endIndex = Math.min(sortedExtensions.size(), options.requestedOffset + options.requestedSize);
    var startIndex = Math.min(endIndex, options.requestedOffset);
    sortedExtensions = sortedExtensions.subList(startIndex, endIndex);

    List<SearchHit<ExtensionSearch>> searchHits;
    if (sortedExtensions.isEmpty()) {
        searchHits = Collections.emptyList();
    } else {
        // client is interested only in the extension IDs
        searchHits = sortedExtensions.stream().map(extensionSearch -> new SearchHit<>(null, null, null, 0.0f, null, null, null, null, null, null, extensionSearch)).collect(Collectors.toList());
    }

    return new SearchHitsImpl<>(totalHits, TotalHitsRelation.OFF, 0f, null, null, searchHits, null, null);
}
@Test
public void testQueryStringDisplayName() {
    // Only ext2 has a display name containing "Red Hat"; the search must
    // match the display name and return exactly that extension.
    var ext1 = mockExtension("yaml", 3.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    ext1.getVersions().get(0).setDisplayName("This is a YAML extension");
    var ext2 = mockExtension("java", 4.0, 100, 0, "redhat", List.of("Snippets", "Programming Languages"));
    ext2.getVersions().get(0).setDisplayName("Red Hat");
    var ext3 = mockExtension("openshift", 4.0, 100, 0, "redhat", List.of("Snippets", "Other"));
    var ext4 = mockExtension("foo", 4.0, 100, 0, "bar", List.of("Other"));
    Mockito.when(repositories.findAllActiveExtensions()).thenReturn(Streamable.of(List.of(ext1, ext2, ext3, ext4)));

    var searchOptions = new ISearchService.Options("Red Hat", null, TargetPlatform.NAME_UNIVERSAL, 50, 0, null, null, false);
    var result = search.search(searchOptions);
    // custom displayname
    assertThat(result.getTotalHits()).isEqualTo(1);

    // Check it found the correct extension
    var hits = result.getSearchHits();
    assertThat(getIdFromExtensionHits(hits, 0)).isEqualTo(getIdFromExtensionName("java"));
}
/**
 * Drops the table from the JDBC catalog.
 *
 * <p>When {@code purge} is requested, the current metadata is captured first
 * (best-effort: a missing metadata file downgrades to a plain drop) so the
 * table's data files can be deleted after the catalog record is removed.
 *
 * @return true if a catalog record was deleted, false if the table did not exist
 */
@Override
public boolean dropTable(TableIdentifier identifier, boolean purge) {
  TableOperations ops = newTableOps(identifier);
  TableMetadata lastMetadata = null;
  if (purge) {
    try {
      lastMetadata = ops.current();
    } catch (NotFoundException e) {
      // Metadata is already gone; drop the catalog entry but skip the purge.
      LOG.warn(
          "Failed to load table metadata for table: {}, continuing drop without purge",
          identifier,
          e);
    }
  }

  // V0 and V1 schemas use different DROP statements.
  int deletedRecords =
      execute(
          (schemaVersion == JdbcUtil.SchemaVersion.V1)
              ? JdbcUtil.V1_DROP_TABLE_SQL
              : JdbcUtil.V0_DROP_TABLE_SQL,
          catalogName,
          JdbcUtil.namespaceToString(identifier.namespace()),
          identifier.name());

  if (deletedRecords == 0) {
    LOG.info("Skipping drop, table does not exist: {}", identifier);
    return false;
  }

  // Purge data only after the catalog record has been removed successfully.
  if (purge && lastMetadata != null) {
    CatalogUtil.dropTableData(ops.io(), lastMetadata);
  }

  LOG.info("Dropped table: {}", identifier);
  return true;
}
@Test
public void testConcurrentCommit() throws IOException {
  TableIdentifier tableIdentifier = TableIdentifier.of("db", "table");
  Table table = catalog.createTable(tableIdentifier, SCHEMA, PartitionSpec.unpartitioned());
  // append file and commit!
  String data = tableDir.resolve("data.parquet").toAbsolutePath().toString();
  Files.write(Paths.get(data), Lists.newArrayList(), StandardCharsets.UTF_8);
  DataFile dataFile =
      DataFiles.builder(PartitionSpec.unpartitioned())
          .withPath(data)
          .withFileSizeInBytes(10)
          .withRecordCount(1)
          .build();
  table.newAppend().appendFile(dataFile).commit();
  assertThat(table.history()).hasSize(1);
  // Simulate a concurrent drop by another process while this handle is live.
  catalog.dropTable(tableIdentifier);

  data = tableDir.resolve("data2.parquet").toAbsolutePath().toString();
  Files.write(Paths.get(data), Lists.newArrayList(), StandardCharsets.UTF_8);
  DataFile dataFile2 =
      DataFiles.builder(PartitionSpec.unpartitioned())
          .withPath(data)
          .withFileSizeInBytes(10)
          .withRecordCount(1)
          .build();

  // A commit against the stale handle must fail with NoSuchTableException.
  assertThatThrownBy(() -> table.newAppend().appendFile(dataFile2).commit())
      .isInstanceOf(NoSuchTableException.class)
      .hasMessage(
          "Failed to load table db.table from catalog test_jdbc_catalog: dropped by another process");
}
// Returns the fraction of the token ring covered by the range (start, end],
// computed as the token count in the range divided by the ring's total
// token count.
@Override
public double getRingFraction(String start, String end) {
    return getTokenCountInRange(start, end).doubleValue() / TOTAL_TOKEN_COUNT.doubleValue();
}
@Test
public void testGetRingFraction() {
    // Basic half-open ranges.
    assertEquals(tokenRing.getTokenCountInRange("0", "1"), ONE);
    assertEquals(tokenRing.getTokenCountInRange("0", "200"), new BigInteger("200"));
    assertEquals(tokenRing.getTokenCountInRange("0", "10"), new BigInteger("10"));
    assertEquals(tokenRing.getTokenCountInRange("1", "11"), new BigInteger("10"));
    // Degenerate range of equal tokens is empty.
    assertEquals(tokenRing.getTokenCountInRange("0", "0"), ZERO);
    // Wrap-around ranges cover (almost) the whole ring.
    assertEquals(tokenRing.getTokenCountInRange("-1", "-1"), BigInteger.valueOf(2).pow(127).add(ONE));
    assertEquals(tokenRing.getTokenCountInRange("1", "0"), BigInteger.valueOf(2).pow(127));
}
/**
 * Checks that the subject contains exactly the given elements (in any order;
 * call {@code inOrder()} on the result to also check order).
 *
 * <p>A {@code null} varargs array is treated as a single {@code null}
 * element. When exactly one argument is passed and it is an {@code Iterable},
 * a flag is forwarded so the failure message can warn about the likely
 * mistake of passing a collection instead of its elements.
 */
@CanIgnoreReturnValue
public final Ordered containsExactly(@Nullable Object @Nullable ... varargs) {
    List<@Nullable Object> expected =
        (varargs == null) ? newArrayList((@Nullable Object) null) : asList(varargs);
    return containsExactlyElementsIn(
        expected, varargs != null && varargs.length == 1 && varargs[0] instanceof Iterable);
}
@Test
public void iterableContainsExactlyWithEmptyStringAmongMissingItems() {
    // When one of several missing items is "", the failure should list the
    // missing and unexpected items with indexed bullets, showing "" as-is.
    expectFailureWhenTestingThat(asList("a")).containsExactly("", "b");
    assertFailureKeys(
        "missing (2)", "#1", "#2", "", "unexpected (1)", "#1", "---", "expected", "but was");
    assertFailureValueIndexed("#1", 0, "");
    assertFailureValueIndexed("#2", 0, "b");
    assertFailureValueIndexed("#1", 1, "a");
}
/**
 * Inserts one row into the target table, honoring the step's table-name-in-
 * field, partitioning, specified-fields, batch, savepoint, generated-key and
 * error-handling options.
 *
 * @param rowMeta metadata of the incoming row
 * @param r the incoming row data, or null to signal end of input
 * @return the (possibly extended) output row, or null when the row was
 *         buffered for batching, routed to error handling, or input ended
 * @throws KettleException on unrecoverable database or configuration errors
 */
protected Object[] writeToTable( RowMetaInterface rowMeta, Object[] r ) throws KettleException {

  if ( r == null ) { // Stop: last line or error encountered
    if ( log.isDetailed() ) {
      logDetailed( "Last line inserted: stop" );
    }
    return null;
  }

  PreparedStatement insertStatement = null;
  Object[] insertRowData;
  Object[] outputRowData = r;

  String tableName = null;

  boolean sendToErrorRow = false;
  String errorMessage = null;
  boolean rowIsSafe = false;
  int[] updateCounts = null;
  List<Exception> exceptionsList = null;
  boolean batchProblem = false;
  Object generatedKey = null;

  if ( meta.isTableNameInField() ) {
    // Cache the position of the table name field
    if ( data.indexOfTableNameField < 0 ) {
      String realTablename = environmentSubstitute( meta.getTableNameField() );
      data.indexOfTableNameField = rowMeta.indexOfValue( realTablename );
      if ( data.indexOfTableNameField < 0 ) {
        String message = "Unable to find table name field [" + realTablename + "] in input row";
        logError( message );
        throw new KettleStepException( message );
      }
      if ( !meta.isTableNameInTable() && !meta.specifyFields() ) {
        // The table-name column is routing info only, not payload: drop it
        // from the insert row metadata (done once, on first row).
        data.insertRowMeta.removeValueMeta( data.indexOfTableNameField );
      }
    }
    tableName = rowMeta.getString( r, data.indexOfTableNameField );
    if ( !meta.isTableNameInTable() && !meta.specifyFields() ) {
      // If the name of the table should not be inserted itself, remove the table name
      // from the input row data as well. This forcibly creates a copy of r
      //
      insertRowData = RowDataUtil.removeItem( rowMeta.cloneRow( r ), data.indexOfTableNameField );
    } else {
      insertRowData = r;
    }
  } else if ( meta.isPartitioningEnabled()
      && ( meta.isPartitioningDaily() || meta.isPartitioningMonthly() )
      && ( meta.getPartitioningField() != null && meta.getPartitioningField().length() > 0 ) ) {
    // Initialize some stuff!
    if ( data.indexOfPartitioningField < 0 ) {
      data.indexOfPartitioningField =
        rowMeta.indexOfValue( environmentSubstitute( meta.getPartitioningField() ) );
      if ( data.indexOfPartitioningField < 0 ) {
        throw new KettleStepException( "Unable to find field ["
          + meta.getPartitioningField() + "] in the input row!" );
      }

      if ( meta.isPartitioningDaily() ) {
        data.dateFormater = new SimpleDateFormat( "yyyyMMdd" );
      } else {
        data.dateFormater = new SimpleDateFormat( "yyyyMM" );
      }
    }

    ValueMetaInterface partitioningValue = rowMeta.getValueMeta( data.indexOfPartitioningField );
    if ( !partitioningValue.isDate() || r[data.indexOfPartitioningField] == null ) {
      throw new KettleStepException(
        "Sorry, the partitioning field needs to contain a data value and can't be empty!" );
    }

    // Target table name is derived from the partition date, e.g. base_20240101.
    Object partitioningValueData = rowMeta.getDate( r, data.indexOfPartitioningField );
    tableName =
      environmentSubstitute( meta.getTableName() )
        + "_" + data.dateFormater.format( (Date) partitioningValueData );
    insertRowData = r;
  } else {
    tableName = data.tableName;
    insertRowData = r;
  }

  if ( meta.specifyFields() ) {
    //
    // The values to insert are those in the fields sections
    //
    insertRowData = new Object[data.valuenrs.length];
    for ( int idx = 0; idx < data.valuenrs.length; idx++ ) {
      insertRowData[idx] = r[data.valuenrs[idx]];
    }
  }

  if ( Utils.isEmpty( tableName ) ) {
    throw new KettleStepException( "The tablename is not defined (empty)" );
  }

  // One prepared statement is cached per target table name.
  insertStatement = data.preparedStatements.get( tableName );
  if ( insertStatement == null ) {
    String sql =
      data.db.getInsertStatement( environmentSubstitute( meta.getSchemaName() ), tableName, data.insertRowMeta );
    if ( log.isDetailed() ) {
      logDetailed( "Prepared statement : " + sql );
    }
    insertStatement = data.db.prepareSQL( sql, meta.isReturningGeneratedKeys() );
    data.preparedStatements.put( tableName, insertStatement );
  }

  try {
    // For PG & GP, we add a savepoint before the row.
    // Then revert to the savepoint afterwards... (not a transaction, so hopefully still fast)
    //
    if ( data.useSafePoints ) {
      data.savepoint = data.db.setSavepoint();
    }
    data.db.setValues( data.insertRowMeta, insertRowData, insertStatement );
    data.db.insertRow( insertStatement, data.batchMode, false ); // false: no commit, it is handled in this step
                                                                 // differently
    if ( isRowLevel() ) {
      logRowlevel( "Written row: " + data.insertRowMeta.getString( insertRowData ) );
    }

    // Get a commit counter per prepared statement to keep track of separate tables, etc.
    //
    Integer commitCounter = data.commitCounterMap.get( tableName );
    if ( commitCounter == null ) {
      commitCounter = Integer.valueOf( 1 );
    } else {
      commitCounter++;
    }
    data.commitCounterMap.put( tableName, Integer.valueOf( commitCounter.intValue() ) );

    // Release the savepoint if needed
    //
    if ( data.useSafePoints ) {
      if ( data.releaseSavepoint ) {
        data.db.releaseSavepoint( data.savepoint );
      }
    }

    // Perform a commit if needed
    //
    if ( ( data.commitSize > 0 ) && ( ( commitCounter % data.commitSize ) == 0 ) ) {
      if ( data.db.getUseBatchInsert( data.batchMode ) ) {
        try {
          insertStatement.executeBatch();
          data.db.commit();
          insertStatement.clearBatch();
        } catch ( SQLException ex ) {
          throw Database.createKettleDatabaseBatchException( "Error updating batch", ex );
        } catch ( Exception ex ) {
          throw new KettleDatabaseException( "Unexpected error inserting row", ex );
        }
      } else {
        // insertRow normal commit
        data.db.commit();
      }
      // Clear the batch/commit counter...
      //
      data.commitCounterMap.put( tableName, Integer.valueOf( 0 ) );
      rowIsSafe = true;
    } else {
      rowIsSafe = false;
    }

    // See if we need to get back the keys as well...
    if ( meta.isReturningGeneratedKeys() ) {
      RowMetaAndData extraKeys = data.db.getGeneratedKeys( insertStatement );

      if ( extraKeys.getRowMeta().size() > 0 ) {
        // Send out the good word!
        // Only 1 key at the moment. (should be enough for now :-)
        generatedKey = extraKeys.getRowMeta().getInteger( extraKeys.getData(), 0 );
      } else {
        // we have to throw something here, else we don't know what the
        // type is of the returned key(s) and we would violate our own rule
        // that a hop should always contain rows of the same type.
        throw new KettleStepException( "No generated keys while \"return generated keys\" is active!" );
      }
    }
  } catch ( KettleDatabaseBatchException be ) {
    errorMessage = be.toString();
    batchProblem = true;
    sendToErrorRow = true;
    updateCounts = be.getUpdateCounts();
    exceptionsList = be.getExceptionsList();

    if ( getStepMeta().isDoingErrorHandling() ) {
      data.db.clearBatch( insertStatement );
      data.db.commit( true );
    } else {
      data.db.clearBatch( insertStatement );
      data.db.rollback();
      StringBuilder msg = new StringBuilder( "Error batch inserting rows into table [" + tableName + "]." );
      msg.append( Const.CR );
      msg.append( "Errors encountered (first 10):" ).append( Const.CR );
      for ( int x = 0; x < be.getExceptionsList().size() && x < 10; x++ ) {
        Exception exception = be.getExceptionsList().get( x );
        if ( exception.getMessage() != null ) {
          msg.append( exception.getMessage() ).append( Const.CR );
        }
      }
      throw new KettleException( msg.toString(), be );
    }
  } catch ( KettleDatabaseException dbe ) {
    if ( getStepMeta().isDoingErrorHandling() ) {
      if ( isRowLevel() ) {
        logRowlevel( "Written row to error handling : " + getInputRowMeta().getString( r ) );
      }

      if ( data.useSafePoints ) {
        data.db.rollback( data.savepoint );
        if ( data.releaseSavepoint ) {
          data.db.releaseSavepoint( data.savepoint );
        }
        // data.db.commit(true); // force a commit on the connection too.
      }

      sendToErrorRow = true;
      errorMessage = dbe.toString();
    } else {
      if ( meta.ignoreErrors() ) {
        // Log at most 20 warnings, then go quiet.
        if ( data.warnings < 20 ) {
          if ( log.isBasic() ) {
            logBasic( "WARNING: Couldn't insert row into table: "
              + rowMeta.getString( r ) + Const.CR + dbe.getMessage() );
          }
        } else if ( data.warnings == 20 ) {
          if ( log.isBasic() ) {
            logBasic( "FINAL WARNING (no more then 20 displayed): Couldn't insert row into table: "
              + rowMeta.getString( r ) + Const.CR + dbe.getMessage() );
          }
        }
        data.warnings++;
      } else {
        setErrors( getErrors() + 1 );
        data.db.rollback();
        throw new KettleException( "Error inserting row into table ["
          + tableName + "] with values: " + rowMeta.getString( r ), dbe );
      }
    }
  }

  // We need to add a key
  if ( generatedKey != null ) {
    outputRowData = RowDataUtil.addValueData( outputRowData, rowMeta.size(), generatedKey );
  }

  if ( data.batchMode ) {
    if ( sendToErrorRow ) {
      if ( batchProblem ) {
        data.batchBuffer.add( outputRowData );
        outputRowData = null;

        processBatchException( errorMessage, updateCounts, exceptionsList );
      } else {
        // Simply add this row to the error row
        putError( rowMeta, r, 1L, errorMessage, null, "TOP001" );
        outputRowData = null;
      }
    } else {
      // Buffer the row; it is only forwarded once a commit made it safe.
      data.batchBuffer.add( outputRowData );
      outputRowData = null;

      if ( rowIsSafe ) { // A commit was done and the rows are all safe (no error)
        for ( int i = 0; i < data.batchBuffer.size(); i++ ) {
          Object[] row = data.batchBuffer.get( i );
          putRow( data.outputRowMeta, row );
          incrementLinesOutput();
        }
        // Clear the buffer
        data.batchBuffer.clear();
      }
    }
  } else {
    if ( sendToErrorRow ) {
      putError( rowMeta, r, 1, errorMessage, null, "TOP001" );
      outputRowData = null;
    }
  }

  return outputRowData;
}
// Smoke test only: an empty row array with a mocked row meta must pass
// through writeToTable without throwing. No return value or interaction
// is asserted — presumably the spy stubs out the database access
// (TODO confirm against the test fixture setup).
@Test
public void testWriteToTable() throws Exception {
  tableOutputSpy.writeToTable( mock( RowMetaInterface.class ), new Object[]{} );
}
/**
 * Deprecated {@link ChannelHandlerContext}-based variant; delegates to the
 * allocator-based {@code readChunk(ByteBufAllocator)} using the context's
 * allocator.
 *
 * @deprecated use {@code readChunk(ByteBufAllocator)} instead
 */
@Deprecated
@Override
public Http2DataFrame readChunk(ChannelHandlerContext ctx) throws Exception {
    return readChunk(ctx.alloc());
}
// When the wrapped ChunkedInput yields null (no chunk available yet even
// though input is not ended), Http2DataChunkedInput must propagate null
// instead of wrapping it in a data frame.
@Test
public void testWrappedReturnNull() throws Exception {
    Http2DataChunkedInput input = new Http2DataChunkedInput(new ChunkedInput<ByteBuf>() {
        @Override
        public boolean isEndOfInput() throws Exception {
            return false;
        }

        @Override
        public void close() throws Exception {
            // NOOP
        }

        @Override
        public ByteBuf readChunk(ChannelHandlerContext ctx) throws Exception {
            return null;
        }

        @Override
        public ByteBuf readChunk(ByteBufAllocator allocator) throws Exception {
            return null;
        }

        @Override
        public long length() {
            return 0;
        }

        @Override
        public long progress() {
            return 0;
        }
    }, STREAM);
    assertNull(input.readChunk(ByteBufAllocator.DEFAULT));
}
/**
 * Validates that the given value is strictly positive.
 *
 * @param paramName name of the parameter, used in the error message
 * @param value the value to check
 * @throws IllegalArgumentException if the value is zero or negative
 */
public static void positiveCheck(String paramName, long value) {
    // Only values >= 1 are acceptable; zero and negatives are rejected.
    if (value < 1) {
        throw new IllegalArgumentException(paramName + " cannot be less than or equal to <0>!");
    }
}
@Test
public void testPositiveCheckInt() {
    // Zero and negative values must be rejected.
    assertThrows(IllegalArgumentException.class, () -> ValueValidationUtil.positiveCheck("param1", 0));
    assertThrows(IllegalArgumentException.class, () -> ValueValidationUtil.positiveCheck("param2", -1));
    // A strictly positive value passes silently.
    ValueValidationUtil.positiveCheck("param3", 1);
}
/**
 * Converts this partition registration into a {@link LeaderAndIsrPartitionState}
 * for the given topic-partition. The controller epoch is fixed at -1 here;
 * replica arrays are converted to lists via {@link Replicas#toList}.
 *
 * @param tp the topic-partition this state describes
 * @param isNew whether the partition is new on the receiving broker
 */
public LeaderAndIsrPartitionState toLeaderAndIsrPartitionState(TopicPartition tp, boolean isNew) {
    return new LeaderAndIsrPartitionState().
        setTopicName(tp.topic()).
        setPartitionIndex(tp.partition()).
        setControllerEpoch(-1).
        setLeader(leader).
        setLeaderEpoch(leaderEpoch).
        setIsr(Replicas.toList(isr)).
        setPartitionEpoch(partitionEpoch).
        setReplicas(Replicas.toList(replicas)).
        setAddingReplicas(Replicas.toList(addingReplicas)).
        setRemovingReplicas(Replicas.toList(removingReplicas)).
        setLeaderRecoveryState(leaderRecoveryState.value()).
        setIsNew(isNew);
}
@Test
public void testToLeaderAndIsrPartitionState() {
    // Two registrations with different replica sets, leaders, and epochs.
    PartitionRegistration a = new PartitionRegistration.Builder().
        setReplicas(new int[]{1, 2, 3}).
        setDirectories(new Uuid[]{
            Uuid.fromString("NSmkU0ieQuy2IHN59Ce0Bw"),
            Uuid.fromString("Y8N9gnSKSLKKFCioX2laGA"),
            Uuid.fromString("Oi7nvb8KQPyaGEqr4JtCRw")
        }).
        setIsr(new int[]{1, 2}).setLeader(1).setLeaderRecoveryState(LeaderRecoveryState.RECOVERED).setLeaderEpoch(123).setPartitionEpoch(456).build();
    PartitionRegistration b = new PartitionRegistration.Builder().
        setReplicas(new int[]{2, 3, 4}).
        setDirectories(new Uuid[]{
            Uuid.fromString("tAn3q03aQAWEYkNajXm3lA"),
            Uuid.fromString("zgj8rqatTmWMyWBsRZyiVg"),
            Uuid.fromString("bAAlGAz1TN2doZjtWlvhRQ")
        }).
        setIsr(new int[]{2, 3, 4}).setLeader(2).setLeaderRecoveryState(LeaderRecoveryState.RECOVERED).setLeaderEpoch(234).setPartitionEpoch(567).build();

    // Compare via toString(); isNew=true and isNew=false are both exercised.
    assertEquals(new LeaderAndIsrPartitionState().
        setTopicName("foo").
        setPartitionIndex(1).
        setControllerEpoch(-1).
        setLeader(1).
        setLeaderEpoch(123).
        setIsr(Arrays.asList(1, 2)).
        setPartitionEpoch(456).
        setReplicas(Arrays.asList(1, 2, 3)).
        setAddingReplicas(Collections.emptyList()).
        setRemovingReplicas(Collections.emptyList()).
        setIsNew(true).toString(),
        a.toLeaderAndIsrPartitionState(new TopicPartition("foo", 1), true).toString());
    assertEquals(new LeaderAndIsrPartitionState().
        setTopicName("bar").
        setPartitionIndex(0).
        setControllerEpoch(-1).
        setLeader(2).
        setLeaderEpoch(234).
        setIsr(Arrays.asList(2, 3, 4)).
        setPartitionEpoch(567).
        setReplicas(Arrays.asList(2, 3, 4)).
        setAddingReplicas(Collections.emptyList()).
        setRemovingReplicas(Collections.emptyList()).
        setIsNew(false).toString(),
        b.toLeaderAndIsrPartitionState(new TopicPartition("bar", 0), false).toString());
}
/**
 * Resolves the mapping's columns against fields discovered in MongoDB.
 *
 * <p>With no user-declared fields, every discovered document field becomes a
 * mapping field with its type resolved from the BSON type. With user fields,
 * each must match a discovered field (for streams, unprefixed names are
 * looked up under "fullDocument.") and its declared type is validated
 * against the discovered one. Primary-key columns are flagged via the
 * checker derived from the options.
 *
 * @throws IllegalArgumentException if a user field cannot be resolved
 */
List<MappingField> resolveFields(
        @Nonnull String[] externalName,
        @Nullable String dataConnectionName,
        @Nonnull Map<String, String> options,
        @Nonnull List<MappingField> userFields,
        boolean stream
) {
    Predicate<MappingField> pkColumnName = Options.getPkColumnChecker(options, stream);

    Map<String, DocumentField> dbFields = readFields(externalName, dataConnectionName, options, stream);
    List<MappingField> resolvedFields = new ArrayList<>();
    if (userFields.isEmpty()) {
        // No explicit fields: expose every discovered field as-is.
        for (DocumentField documentField : dbFields.values()) {
            MappingField mappingField = new MappingField(
                    documentField.columnName,
                    resolveType(documentField.columnType),
                    documentField.columnName,
                    documentField.columnType.name()
            );
            mappingField.setPrimaryKey(pkColumnName.test(mappingField));
            resolvedFields.add(mappingField);
        }
    } else {
        for (MappingField f : userFields) {
            // Stream mappings address document fields under "fullDocument.".
            String prefixIfStream = stream ? "fullDocument." : "";
            String nameInMongo = f.externalName() == null ? prefixIfStream + f.name() : f.externalName();

            DocumentField documentField = getField(dbFields, f, stream);
            if (documentField == null) {
                throw new IllegalArgumentException("Could not resolve field with name " + nameInMongo);
            }
            MappingField mappingField = new MappingField(f.name(), f.type(), documentField.columnName,
                    documentField.columnType.name());
            mappingField.setPrimaryKey(pkColumnName.test(mappingField));
            validateType(f, documentField);
            resolvedFields.add(mappingField);
        }
    }
    return resolvedFields;
}
@Test
public void testResolvesMappingFieldsViaSample() {
    // With no user-declared fields, resolution samples the collection and
    // derives one mapping field per document field, including _id as the PK.
    try (MongoClient client = MongoClients.create(mongoContainer.getConnectionString())) {
        String databaseName = "testDatabase";
        String collectionName = "people_3";
        MongoDatabase testDatabase = client.getDatabase(databaseName);
        MongoCollection<Document> collection = testDatabase.getCollection(collectionName);
        collection.insertOne(new Document("firstName", "Tomasz")
                .append("lastName", "Gawęda")
                .append("birthYear", 1992));

        FieldResolver resolver = new FieldResolver(null);
        Map<String, String> readOpts = new HashMap<>();
        readOpts.put("connectionString", mongoContainer.getConnectionString());
        List<MappingField> fields = resolver.resolveFields(new String[]{databaseName, collectionName}, null,
                readOpts, emptyList(), false);

        assertThat(fields).contains(
                fieldWithSameExternal("_id", OBJECT, BsonType.OBJECT_ID).setPrimaryKey(true),
                fieldWithSameExternal("firstName", VARCHAR, BsonType.STRING),
                fieldWithSameExternal("lastName", VARCHAR, BsonType.STRING),
                fieldWithSameExternal("birthYear", INT, BsonType.INT32)
        );
    }
}
/**
 * Validates that every given role ID refers to an existing, enabled role.
 * An empty/null collection is accepted as a no-op.
 *
 * @throws ServiceException ROLE_NOT_EXISTS when an ID is unknown,
 *         ROLE_IS_DISABLE when a role exists but is not enabled
 */
@Override
public void validateRoleList(Collection<Long> ids) {
    if (CollUtil.isEmpty(ids)) {
        return;
    }
    // Load all roles in one batch query
    List<RoleDO> roles = roleMapper.selectBatchIds(ids);
    Map<Long, RoleDO> roleMap = convertMap(roles, RoleDO::getId);
    // Validate each requested ID
    ids.forEach(id -> {
        RoleDO role = roleMap.get(id);
        if (role == null) {
            throw exception(ROLE_NOT_EXISTS);
        }
        if (!CommonStatusEnum.ENABLE.getStatus().equals(role.getStatus())) {
            throw exception(ROLE_IS_DISABLE, role.getName());
        }
    });
}
/**
 * Verifies that validating an unknown role id raises ROLE_NOT_EXISTS.
 */
@Test
public void testValidateRoleList_notFound() {
    // Prepare parameters: a random id that has no matching role record.
    List<Long> ids = singletonList(randomLongId());

    // Call and assert the expected service exception.
    assertServiceException(() -> roleService.validateRoleList(ids), ROLE_NOT_EXISTS);
}
/**
 * Interprets a multi-line command string by splitting it into individual
 * non-empty lines and delegating to the line-array overload.
 *
 * @param st      raw command text, possibly spanning several lines
 * @param context interpreter context passed through to the delegate
 * @return the combined interpretation result
 */
@Override
public InterpreterResult interpret(String st, InterpreterContext context) {
    String[] lines = splitAndRemoveEmpty(st, "\n");
    return interpret(lines, context);
}
/**
 * Verifies the Alluxio "count" command output, both in raw-byte form and with
 * the human-readable size flag ({@code -h}).
 */
@Test
void countTest() throws IOException {
    // Three files (10 + 20 + 30 bytes) across the root and one sub-directory.
    FileSystemTestUtils.createByteFile(fs, "/testRoot/testFileA", WritePType.MUST_CACHE, 10);
    FileSystemTestUtils.createByteFile(fs, "/testRoot/testDir/testFileB", WritePType.MUST_CACHE, 20);
    FileSystemTestUtils.createByteFile(fs, "/testRoot/testFileB", WritePType.MUST_CACHE, 30);

    InterpreterResult output = alluxioInterpreter.interpret("count /testRoot", null);

    // Expect 3 files, 1 folder, 60 bytes total.
    String expected = "";
    expected += String.format(COUNT_FORMAT, "File Count", "Folder Count", "Folder Size");
    expected += String.format(COUNT_FORMAT, 3, 1, 60);
    expected += "\n";
    assertEquals(expected, output.message().get(0).getData());

    // Same result with -h renders the size as "60B".
    InterpreterResult output2 = alluxioInterpreter.interpret("count -h /testRoot", null);
    String expected2 = "";
    expected2 += String.format(COUNT_FORMAT, "File Count", "Folder Count", "Folder Size");
    expected2 += String.format(COUNT_FORMAT, 3, 1, "60B");
    expected2 += "\n";
    assertEquals(expected2, output2.message().get(0).getData());
}
/**
 * Scans the remote aggregated-log directory and seeds {@code eligibleApplications}
 * with every application whose logs qualify for archiving.
 *
 * <p>An application is eligible when it has at least {@code minNumLogFiles}
 * files, no pre-existing {@code .har} archive, and a total size not exceeding
 * {@code maxTotalLogsSize}. Directory layout assumed:
 * {@code remoteRootLogDir/<user>/<suffix>/<appId>/...}.
 *
 * @param fs               file system holding the aggregated logs
 * @param remoteRootLogDir root of the remote aggregated-log tree
 * @param suffix           per-user log sub-directory name
 * @param workingDir       scratch directory recorded on each seeded AppInfo
 * @throws IOException if listing the root directory itself fails
 *         (per-user and per-app listing failures are logged and skipped)
 */
@VisibleForTesting
void checkFilesAndSeedApps(FileSystem fs, Path remoteRootLogDir, String suffix, Path workingDir) throws IOException {
    for (RemoteIterator<FileStatus> userIt = fs.listStatusIterator(remoteRootLogDir); userIt.hasNext();) {
        Path userLogPath = userIt.next().getPath();
        try {
            for (RemoteIterator<FileStatus> appIt = fs.listStatusIterator(new Path(userLogPath, suffix)); appIt.hasNext();) {
                Path appLogPath = appIt.next().getPath();
                try {
                    FileStatus[] files = fs.listStatus(appLogPath);
                    if (files.length >= minNumLogFiles) {
                        boolean eligible = true;
                        long totalFileSize = 0L;
                        for (FileStatus file : files) {
                            // An existing "<appId>.har" means the app was already archived.
                            if (file.getPath().getName().equals(appLogPath.getName() + ".har")) {
                                eligible = false;
                                if (verbose) {
                                    LOG.info("Skipping " + appLogPath.getName() + " due to existing .har file");
                                }
                                break;
                            }
                            totalFileSize += file.getLen();
                            if (totalFileSize > maxTotalLogsSize) {
                                eligible = false;
                                if (verbose) {
                                    LOG.info("Skipping " + appLogPath.getName() + " due to "
                                        + "total file size being too large (" + totalFileSize + " > "
                                        + maxTotalLogsSize + ")");
                                }
                                break;
                            }
                        }
                        if (eligible) {
                            if (verbose) {
                                LOG.info("Adding " + appLogPath.getName() + " for user " + userLogPath.getName());
                            }
                            AppInfo context = new AppInfo();
                            context.setAppId(appLogPath.getName());
                            context.setUser(userLogPath.getName());
                            context.setSuffix(suffix);
                            context.setRemoteRootLogDir(remoteRootLogDir);
                            context.setWorkingDir(workingDir);
                            eligibleApplications.add(context);
                        }
                    } else {
                        if (verbose) {
                            LOG.info("Skipping " + appLogPath.getName() + " due to not "
                                + "having enough log files (" + files.length + " < "
                                + minNumLogFiles + ")");
                        }
                    }
                } catch (IOException ioe) {
                    // Ignore any apps we can't read
                    if (verbose) {
                        LOG.info("Skipping logs under " + appLogPath + " due to " + ioe.getMessage());
                    }
                }
            }
        } catch (IOException ioe) {
            // Ignore any apps we can't read
            if (verbose) {
                LOG.info("Skipping all logs under " + userLogPath + " due to " + ioe.getMessage());
            }
        }
    }
}
/**
 * Verifies that checkFilesAndSeedApps seeds only the application that meets
 * all eligibility criteria: enough files, no existing .har, and total size
 * within the limit.
 */
@Test(timeout = 10000)
public void testCheckFilesAndSeedApps() throws Exception {
    Configuration conf = new Configuration();
    HadoopArchiveLogs hal = new HadoopArchiveLogs(conf);
    FileSystem fs = FileSystem.getLocal(conf);
    Path rootLogDir = new Path("target", "logs");
    String suffix = "logs";
    Path logDir = new Path(rootLogDir, new Path(USER, suffix));
    fs.delete(logDir, true);
    Assert.assertFalse(fs.exists(logDir));
    fs.mkdirs(logDir);

    // no files found
    ApplicationId appId1 = ApplicationId.newInstance(CLUSTER_TIMESTAMP, 1);
    Path app1Path = new Path(logDir, appId1.toString());
    fs.mkdirs(app1Path);

    // too few files
    ApplicationId appId2 = ApplicationId.newInstance(CLUSTER_TIMESTAMP, 2);
    Path app2Path = new Path(logDir, appId2.toString());
    fs.mkdirs(app2Path);
    createFile(fs, new Path(app2Path, "file1"), 1);
    hal.minNumLogFiles = 2;

    // too large
    ApplicationId appId3 = ApplicationId.newInstance(CLUSTER_TIMESTAMP, 3);
    Path app3Path = new Path(logDir, appId3.toString());
    fs.mkdirs(app3Path);
    createFile(fs, new Path(app3Path, "file1"), 2);
    createFile(fs, new Path(app3Path, "file2"), 5);
    hal.maxTotalLogsSize = FILE_SIZE_INCREMENT * 6;

    // has har already
    ApplicationId appId4 = ApplicationId.newInstance(CLUSTER_TIMESTAMP, 4);
    Path app4Path = new Path(logDir, appId4.toString());
    fs.mkdirs(app4Path);
    createFile(fs, new Path(app4Path, appId4 + ".har"), 1);

    // just right
    ApplicationId appId5 = ApplicationId.newInstance(CLUSTER_TIMESTAMP, 5);
    Path app5Path = new Path(logDir, appId5.toString());
    fs.mkdirs(app5Path);
    createFile(fs, new Path(app5Path, "file1"), 2);
    createFile(fs, new Path(app5Path, "file2"), 3);

    Assert.assertEquals(0, hal.eligibleApplications.size());
    hal.checkFilesAndSeedApps(fs, rootLogDir, suffix, new Path(rootLogDir, "archive-logs-work"));
    // Only app5 satisfies every eligibility rule.
    Assert.assertEquals(1, hal.eligibleApplications.size());
    Assert.assertEquals(appId5.toString(), hal.eligibleApplications.iterator().next().getAppId());
}
/**
 * Copies all entries of {@code src} into {@code dest} while bypassing the
 * normal per-entry checks (checks are performed only via assertions).
 *
 * @param dest destination checked map
 * @param src  entries to copy
 * @param <K>  key type
 * @param <V>  value type
 */
public static <K, V> void putAllWithoutChecking(CheckedMap<K, V> dest, Map<K, V> src) {
    dest.putAllWithAssertedChecking(src);
}
/**
 * Verifies that inserting a map that contains itself (a reference cycle)
 * trips the assertion-based check and raises an AssertionError.
 */
@Test(expectedExceptions = AssertionError.class)
public void testPutAllCycleWithAssertChecking() {
    final DataMap map = new DataMap();
    final Map<String, Object> cycleMap = new HashMap<>();
    // The value refers back to the destination map, forming a cycle.
    cycleMap.put("cycle", map);
    CheckedUtil.putAllWithoutChecking(map, cycleMap);
}
/**
 * Reads the main class configured for the maven-jar-plugin, if any.
 *
 * <p>Looks up {@code <archive><manifest><mainClass>} in the plugin
 * configuration of {@code org.apache.maven.plugins:maven-jar-plugin}.
 *
 * @return the configured main class, or {@code null} when the plugin or the
 *         configuration entry is absent
 */
@Nullable
@Override
public String getMainClassFromJarPlugin() {
    Plugin mavenJarPlugin = project.getPlugin("org.apache.maven.plugins:maven-jar-plugin");
    if (mavenJarPlugin != null) {
        return getChildValue(
                (Xpp3Dom) mavenJarPlugin.getConfiguration(), "archive", "manifest", "mainClass")
            .orElse(null);
    }
    return null;
}
/**
 * Verifies that a jar-plugin configuration containing an {@code archive}
 * node but no {@code manifest}/{@code mainClass} resolves to {@code null}.
 */
@Test
public void testGetMainClassFromJar_missingManifest() {
    when(mockMavenProject.getPlugin("org.apache.maven.plugins:maven-jar-plugin"))
        .thenReturn(mockPlugin);
    when(mockPlugin.getConfiguration()).thenReturn(pluginConfiguration);
    // "archive" exists but has no "manifest" child.
    pluginConfiguration.addChild(new Xpp3Dom("archive"));

    assertThat(mavenProjectProperties.getMainClassFromJarPlugin()).isNull();
}
/**
 * Calculates candidate user ids by evaluating a Flowable expression.
 *
 * @param execution current Flowable execution used as expression context
 * @param param     expression string to evaluate
 * @return the expression result converted to a set of user ids
 */
@Override
public Set<Long> calculateUsers(DelegateExecution execution, String param) {
    Object result = FlowableUtils.getExpressionValue(execution, param);
    // Convert.toSet handles collections/arrays/single values uniformly.
    return Convert.toSet(Long.class, result);
}
/**
 * Verifies that calculateUsers returns the expression result converted to a
 * set of Long user ids.
 */
@Test
public void testCalculateUsers() {
    try (MockedStatic<FlowableUtils> flowableUtilMockedStatic = mockStatic(FlowableUtils.class)) {
        // Prepare parameters.
        String param = "1,2";
        DelegateExecution execution = mock(DelegateExecution.class);
        // Mock the static expression evaluation.
        flowableUtilMockedStatic.when(() -> FlowableUtils.getExpressionValue(same(execution), eq(param)))
                .thenReturn(asSet(1L, 2L));

        // Invoke.
        Set<Long> results = strategy.calculateUsers(execution, param);
        // Assert.
        assertEquals(asSet(1L, 2L), results);
    }
}
/**
 * Releases the MQTT connection on node shutdown.
 *
 * <p>Safe to call when no client was ever created: a {@code null} client is
 * simply ignored.
 */
@Override
public void destroy() {
    if (this.mqttClient != null) {
        this.mqttClient.disconnect();
    }
}
/**
 * Verifies that destroy() is a no-op when the MQTT client was never created:
 * no disconnect (or any other call) must reach the client mock.
 */
@Test
public void givenMqttClientIsNull_whenDestroy_thenShouldHaveNoInteractions() {
    // Force the private field to null to simulate an uninitialized client.
    ReflectionTestUtils.setField(mqttNode, "mqttClient", null);

    mqttNode.destroy();

    then(mqttClientMock).shouldHaveNoInteractions();
}
/**
 * Sorts components according to the sort fields requested in {@code wsRequest}.
 *
 * <p>The first sort parameter drives the primary ordering; any further
 * parameters are compounded as tie-breakers, and component name (ascending)
 * is always appended as the final tie-breaker. When no sort parameter is
 * given, the input list is returned unchanged.
 *
 * <p>NOTE(review): an unrecognized sort parameter would make
 * {@code orderingsBySortField.get(...)} return null and fail with an NPE on
 * the subsequent call — presumably the request is validated upstream; confirm.
 *
 * @param components                       components to sort
 * @param wsRequest                        request carrying sort fields and direction
 * @param metrics                          metrics referenced by metric-based sorts
 * @param measuresByComponentUuidAndMetric measures used for metric orderings
 * @return an immutable sorted copy (or the original list when unsorted)
 */
public static List<ComponentDto> sortComponents(List<ComponentDto> components, ComponentTreeRequest wsRequest, List<MetricDto> metrics,
    Table<String, MetricDto, ComponentTreeData.Measure> measuresByComponentUuidAndMetric) {
    List<String> sortParameters = wsRequest.getSort();
    if (sortParameters == null || sortParameters.isEmpty()) {
        return components;
    }
    boolean isAscending = wsRequest.getAsc();
    // One pre-built ordering per supported sort field.
    Map<String, Ordering<ComponentDto>> orderingsBySortField = ImmutableMap.<String, Ordering<ComponentDto>>builder()
        .put(NAME_SORT, componentNameOrdering(isAscending))
        .put(QUALIFIER_SORT, componentQualifierOrdering(isAscending))
        .put(PATH_SORT, componentPathOrdering(isAscending))
        .put(METRIC_SORT, metricValueOrdering(wsRequest, metrics, measuresByComponentUuidAndMetric))
        .put(METRIC_PERIOD_SORT, metricPeriodOrdering(wsRequest, metrics, measuresByComponentUuidAndMetric))
        .build();

    String firstSortParameter = sortParameters.get(0);
    Ordering<ComponentDto> primaryOrdering = orderingsBySortField.get(firstSortParameter);
    if (sortParameters.size() > 1) {
        // Compound the remaining sort fields as secondary orderings.
        for (int i = 1; i < sortParameters.size(); i++) {
            String secondarySortParameter = sortParameters.get(i);
            Ordering<ComponentDto> secondaryOrdering = orderingsBySortField.get(secondarySortParameter);
            primaryOrdering = primaryOrdering.compound(secondaryOrdering);
        }
    }
    // Stable final tie-breaker: component name, ascending.
    primaryOrdering = primaryOrdering.compound(componentNameOrdering(true));
    return primaryOrdering.immutableSortedCopy(components);
}
/**
 * Verifies that sorting descending on an impact (DATA) metric orders by the
 * impact total, and that components without a measure sort last.
 */
@Test
void sortComponent_whenMetricIsImpactDataType_shouldOrderByTotalDescending() {
    // Extra component with no measure: it must end up at the tail.
    components.add(newComponentWithoutSnapshotId("name-without-measure", "qualifier-without-measure", "path-without-measure"));
    ComponentTreeRequest wsRequest = newRequest(singletonList(METRIC_SORT), false, DATA_IMPACT_METRIC_KEY);

    List<ComponentDto> result = sortComponents(wsRequest);

    assertThat(result).extracting("path")
        .containsExactly("path-9", "path-8", "path-7", "path-6", "path-5", "path-4", "path-3", "path-2", "path-1",
            "path-without-measure");
}
/**
 * Converts an array of annotations into a DataMap representation.
 *
 * @param as annotations to convert
 * @return the annotation data map (delegates to annotationsToData with
 *         the "top-level" flag set to true)
 */
public static DataMap getAnnotationsMap(Annotation[] as) {
    return annotationsToData(as, true);
}
/**
 * Verifies that converting an annotation with an unsupported char[] member
 * fails with a NullPointerException rather than producing a partial map.
 */
@Test(description = "Unsafe call: RestSpecAnnotation annotation with char array member", expectedExceptions = NullPointerException.class)
public void failsOnRestSpecAnnotationCharArrayMember() {
    // Local class annotated with the unsupported-member annotation.
    @UnsupportedCharArray
    class LocalClass {
    }

    final Annotation[] annotations = LocalClass.class.getAnnotations();
    ResourceModelAnnotation.getAnnotationsMap(annotations);
    Assert.fail("Should fail throwing a NullPointerException");
}
/**
 * Parses a resource-configuration qualifier string into {@code out},
 * applying the default folding behavior (third argument {@code true}).
 *
 * @param str qualifier string, e.g. "nokeys"
 * @param out configuration to populate
 * @return whether parsing succeeded
 */
public static boolean parse(final String str, ResTable_config out) {
    return parse(str, out, true);
}
/**
 * Verifies that the "nokeys" qualifier sets the keyboard field to
 * KEYBOARD_NOKEYS.
 */
@Test
public void parse_keyboard_nokeys() {
    ResTable_config config = new ResTable_config();
    ConfigDescription.parse("nokeys", config);
    assertThat(config.keyboard).isEqualTo(KEYBOARD_NOKEYS);
}
/**
 * Builds a proxy context from an HTTP request by extracting the
 * "host:port" target from the request's Host header.
 *
 * @param request incoming HTTP request carrying the target host header
 * @param ssl     whether the upstream connection uses TLS (affects default port)
 */
public ProxyContext(HttpRequest request, boolean ssl) {
    this(getHostColonPortFromHeader(request), ssl);
}
/**
 * Verifies host/port extraction for a variety of target forms: with and
 * without scheme, path, explicit port, and with SSL defaults (443 vs 80).
 */
@Test
void testProxyContext() {
    test("http://localhost:8080", false, "localhost", 8080);
    test("http://localhost:8080/foo", false, "localhost", 8080);
    test("localhost:8080", false, "localhost", 8080);
    test("localhost:8080/foo", false, "localhost", 8080);
    // No port: default depends on the ssl flag.
    test("localhost", false, "localhost", 80);
    test("localhost/foo", false, "localhost", 80);
    test("http://localhost", false, "localhost", 80);
    test("http://localhost/foo", false, "localhost", 80);
    test("httpbin.org:443", false, "httpbin.org", 443);
    test("httpbin.org:443", true, "httpbin.org", 443);
    test("httpbin.org:443/foo", true, "httpbin.org", 443);
    test("httpbin.org", true, "httpbin.org", 443);
    test("httpbin.org/foo", true, "httpbin.org", 443);
}
/**
 * Sums the given integers, delegating the accumulation to the new source
 * implementation (strangler-pattern migration).
 *
 * @param nums values to add; may be empty
 * @return the sum of all values (0 for an empty argument list)
 */
public int sum(int... nums) {
    LOGGER.info("Arithmetic sum {}", VERSION);
    return newSource.accumulateSum(nums);
}
/**
 * Verifies that the sum of -1, 0 and 1 is 0.
 */
@Test
void testSum() {
    assertEquals(0, arithmetic.sum(-1, 0, 1));
}
/**
 * Compresses the given bytes with the BZip2 algorithm.
 *
 * @param bytes data to compress; must not be {@code null}
 * @return the BZip2-compressed bytes
 * @throws NullPointerException if {@code bytes} is {@code null}
 * @throws RuntimeException if an I/O error occurs during compression
 */
public static byte[] compress(byte[] bytes) {
    // Idiomatic null guard; same NullPointerException and message as the
    // previous hand-written check. Fully qualified to avoid touching imports.
    java.util.Objects.requireNonNull(bytes, "bytes is null");
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    // try-with-resources closes the BZip2 stream even on failure.
    try (CBZip2OutputStream bzip2 = new CBZip2OutputStream(bos)) {
        bzip2.write(bytes);
        bzip2.finish();
        return bos.toByteArray();
    } catch (IOException e) {
        throw new RuntimeException("BZip2 compress error", e);
    }
}
/**
 * Verifies that compressing a null byte array throws NullPointerException.
 */
@Test
public void test_compress() {
    Assertions.assertThrows(NullPointerException.class, () -> {
        BZip2Util.compress(null);
    });
}
public String toString() { StringBuilder sb = new StringBuilder(); sb.append("actual decoding window:=") .append(getWindow()) .append(System.getProperty("line.separator")); sb.append("window size (32Kb through 2Mb):=") .append(getWindowSize()) .append(System.getProperty("line.separator")); sb.append("current offset within the window:=") .append(getWindowPosition()) .append(System.getProperty("line.separator")); sb.append("number of main tree elements:=") .append(getMainTreeElements()) .append(System.getProperty("line.separator")); sb.append("have we started decoding at all yet?:=") .append(getHadStarted()) .append(System.getProperty("line.separator")); sb.append("type of this block:=") .append(getBlockType()) .append(System.getProperty("line.separator")); sb.append("uncompressed length of this block:=") .append(getBlockLength()) .append(System.getProperty("line.separator")); sb.append("uncompressed bytes still left to decode:=") .append(getBlockRemaining()) .append(System.getProperty("line.separator")); sb.append("the number of CFDATA blocks processed:=") .append(getFramesRead()) .append(System.getProperty("line.separator")); sb.append("magic header value used for transform:=") .append(getIntelFileSize()) .append(System.getProperty("line.separator")); sb.append("current offset in transform space:=") .append(getIntelCurrentPossition()) .append(System.getProperty("line.separator")); sb.append("have we seen any translatable data yet?:=") .append(getIntelState()) .append(System.getProperty("line.separator")); sb.append("R0 for the LRU offset system:=") .append(getR0()) .append(System.getProperty("line.separator")); sb.append("R1 for the LRU offset system:=") .append(getR1()) .append(System.getProperty("line.separator")); sb.append("R2 for the LRU offset system:=") .append(getR2()) .append(System.getProperty("line.separator")); sb.append("main tree length:=") .append(getMainTreeLengtsTable().length) .append(System.getProperty("line.separator")); sb.append("secondary 
tree length:=") .append(getLengthTreeLengtsTable().length) .append(System.getProperty("line.separator")); return sb.toString(); }
/**
 * Smoke-tests the state dump: after construction it must render a
 * non-trivial (longer than 20 chars) string.
 */
@Test
public void testToString() throws TikaException {
    // Lazily build the fixture if the constructor test has not run yet.
    if (chmLzxState == null) {
        testChmLzxStateConstructor();
    }
    assertTrue(chmLzxState.toString().length() > 20);
}
/**
 * Applies the configured server-side-encryption settings to a PutObject
 * request by delegating to the setter-based overload with the builder's
 * method references.
 *
 * @param s3FileIOProperties encryption configuration source
 * @param requestBuilder     PutObject request builder to configure
 */
static void configureEncryption(
    S3FileIOProperties s3FileIOProperties, PutObjectRequest.Builder requestBuilder) {
    configureEncryption(
        s3FileIOProperties,
        requestBuilder::serverSideEncryption,
        requestBuilder::ssekmsKeyId,
        requestBuilder::sseCustomerAlgorithm,
        requestBuilder::sseCustomerKey,
        requestBuilder::sseCustomerKeyMD5);
}
/**
 * Verifies that SSE-S3 configuration sets AES256 server-side encryption and
 * leaves all KMS/customer-key fields untouched.
 */
@Test
public void testConfigureServerSideS3Encryption() {
    S3FileIOProperties s3FileIOProperties = new S3FileIOProperties();
    s3FileIOProperties.setSseType(S3FileIOProperties.SSE_TYPE_S3);
    S3RequestUtil.configureEncryption(
        s3FileIOProperties,
        this::setServerSideEncryption,
        this::setKmsKeyId,
        this::setCustomAlgorithm,
        this::setCustomKey,
        this::setCustomMd5);
    assertThat(serverSideEncryption).isEqualTo(ServerSideEncryption.AES256);
    // SSE-S3 must not touch KMS or customer-provided key settings.
    assertThat(kmsKeyId).isNull();
    assertThat(customAlgorithm).isNull();
    assertThat(customKey).isNull();
    assertThat(customMd5).isNull();
}
public int getPrefixMatchLength(ElementPath p) { if (p == null) { return 0; } int lSize = this.partList.size(); int rSize = p.partList.size(); // no match possible for empty sets if ((lSize == 0) || (rSize == 0)) { return 0; } int minLen = (lSize <= rSize) ? lSize : rSize; int match = 0; for (int i = 0; i < minLen; i++) { String l = this.partList.get(i); String r = p.partList.get(i); if (equalityCheck(l, r)) { match++; } else { break; } } return match; }
/**
 * Verifies prefix-match lengths for various selector/path pairs, including
 * case-insensitive matching and the zero-length "/*" selector.
 */
@Test
public void testPrefixMatch() {
    {
        // Completely different roots: no common prefix.
        ElementPath p = new ElementPath("/a/b");
        ElementSelector ruleElementSelector = new ElementSelector("/x/*");
        assertEquals(0, ruleElementSelector.getPrefixMatchLength(p));
    }
    {
        ElementPath p = new ElementPath("/a");
        ElementSelector ruleElementSelector = new ElementSelector("/x/*");
        assertEquals(0, ruleElementSelector.getPrefixMatchLength(p));
    }
    {
        ElementPath p = new ElementPath("/a/b");
        ElementSelector ruleElementSelector = new ElementSelector("/a/*");
        assertEquals(1, ruleElementSelector.getPrefixMatchLength(p));
    }
    {
        // Matching is case-insensitive in both directions.
        ElementPath p = new ElementPath("/a/b");
        ElementSelector ruleElementSelector = new ElementSelector("/A/*");
        assertEquals(1, ruleElementSelector.getPrefixMatchLength(p));
    }
    {
        ElementPath p = new ElementPath("/A/b");
        ElementSelector ruleElementSelector = new ElementSelector("/a/*");
        assertEquals(1, ruleElementSelector.getPrefixMatchLength(p));
    }
    {
        ElementPath p = new ElementPath("/a/b");
        ElementSelector ruleElementSelector = new ElementSelector("/a/b/*");
        assertEquals(2, ruleElementSelector.getPrefixMatchLength(p));
    }
    {
        // Bare "/*" selector has no concrete parts to match.
        ElementPath p = new ElementPath("/a/b");
        ElementSelector ruleElementSelector = new ElementSelector("/*");
        assertEquals(0, ruleElementSelector.getPrefixMatchLength(p));
    }
}
/**
 * Returns the single SWT transfer type id supported by this transfer.
 *
 * @return a one-element array containing {@code TYPEID}
 */
@Override
protected int[] getTypeIds() {
    return new int[] { TYPEID };
}
/**
 * Verifies that the transfer reports exactly its own TYPEID.
 */
@Test
public void getTypeIds() {
    assertArrayEquals(
        new int[]{ ElementTransfer.TYPEID },
        elementTransfer.getTypeIds()
    );
}
/**
 * Persists this setting back to the file it was loaded from.
 *
 * @throws IllegalArgumentException (via Assert) if the setting has no
 *         backing URL to write to
 */
public void store() {
    final URL resourceUrl = getSettingUrl();
    Assert.notNull(resourceUrl, "Setting path must be not null !");
    store(FileUtil.file(resourceUrl));
}
/**
 * Smoke-tests writing a modified setting back to its source file.
 */
@Test
public void storeTest() {
    Setting setting = new Setting("test.setting");
    setting.set("testKey", "testValue");
    setting.store();
}
/**
 * Updates a Kubernetes port resource from a JSON payload.
 *
 * <p>If the payload carries a {@code portId}, it must match the path id.
 *
 * @param id    port identifier from the URL path
 * @param input JSON-encoded port definition
 * @return 200 OK on success
 * @throws IllegalArgumentException on id mismatch or malformed JSON
 */
@PUT
@Path("{id}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response updatePort(@PathParam("id") String id, InputStream input) {
    log.trace(String.format(MESSAGE, "UPDATED"));

    try {
        ObjectNode jsonTree = readTreeFromStream(mapper(), input);
        JsonNode specifiedPortId = jsonTree.get("portId");

        // Reject payloads whose embedded id contradicts the path id.
        if (specifiedPortId != null && !specifiedPortId.asText().equals(id)) {
            throw new IllegalArgumentException(PORT_INVALID);
        }

        final K8sPort port = codec(K8sPort.class).decode(jsonTree, this);
        adminService.updatePort(port);
    } catch (IOException e) {
        throw new IllegalArgumentException(e);
    }

    return Response.ok().build();
}
/**
 * Verifies that a PUT with a valid port JSON returns 200 and forwards the
 * decoded port to the admin service.
 */
@Test
public void testUpdatePortWithModifyOperation() {
    mockAdminService.updatePort(anyObject());
    replay(mockAdminService);

    String location = PATH + "/port-1";
    final WebTarget wt = target();
    InputStream jsonStream = K8sPortWebResourceTest.class
        .getResourceAsStream("k8s-port.json");
    Response response = wt.path(location)
        .request(MediaType.APPLICATION_JSON_TYPE)
        .put(Entity.json(jsonStream));
    final int status = response.getStatus();

    assertThat(status, is(200));
    verify(mockAdminService);
}
/**
 * Returns {@code true} when the given collection is {@code null} or contains
 * no elements.
 *
 * @param collection collection to check; may be {@code null}
 * @param <T>        element type
 * @return {@code true} if {@code collection} is null or empty
 */
public static <T> boolean isNullOrEmpty(Collection<T> collection) {
    // Single short-circuit expression: isEmpty() is only reached when non-null.
    return collection == null || collection.isEmpty();
}
/**
 * Verifies that a non-empty array is not reported as null-or-empty.
 *
 * <p>NOTE(review): this passes a String[] — presumably an array overload of
 * {@code isNullOrEmpty} exists alongside the Collection one; confirm.
 */
@Test
void isNullOrEmptyIsFalseForNotEmptyArray() {
    assertThat(isNullOrEmpty(new String[]{"item1", "item2"})).isFalse();
}
/**
 * Resolves the given property and parses its string value as an int.
 *
 * @param property property to resolve (its default applies when unset)
 * @return the property value as an int
 * @throws NumberFormatException if the resolved value is not a valid integer
 */
public int getInteger(HazelcastProperty property) {
    return Integer.parseInt(getString(property));
}
/**
 * Verifies that an unset property resolves to its integer default (3).
 */
@Test
public void getInteger() {
    HazelcastProperty property = new HazelcastProperty("key", 3);
    int ioThreadCount = defaultProperties.getInteger(property);
    assertEquals(3, ioThreadCount);
}
/**
 * Evaluates this condition against a single row.
 *
 * <p>Atomic conditions resolve their left/right operands from the row (or a
 * stored exact value) and apply the configured comparison function; composite
 * conditions combine their sub-conditions with AND/OR/XOR (and negated
 * variants), left to right. Either kind may finally be negated.
 *
 * @param rowMeta metadata describing the row layout
 * @param r       the row values
 * @return the boolean outcome of the condition for this row
 * @throws RuntimeException wrapping any evaluation failure
 */
public boolean evaluate( RowMetaInterface rowMeta, Object[] r ) {
    // Start of evaluate
    boolean retval = false;

    // If we have 0 items in the list, evaluate the current condition
    // Otherwise, evaluate all sub-conditions
    //
    try {
      if ( isAtomic() ) {

        if ( function == FUNC_TRUE ) {
          return !negate;
        }

        // Get fieldnrs left value
        //
        // Check out the fieldnrs if we don't have them...
        if ( leftValuename != null && leftValuename.length() > 0 ) {
          leftFieldnr = rowMeta.indexOfValue( leftValuename );
        }

        // Get fieldnrs right value
        //
        if ( rightValuename != null && rightValuename.length() > 0 ) {
          rightFieldnr = rowMeta.indexOfValue( rightValuename );
        }

        // Get fieldnrs left field
        ValueMetaInterface fieldMeta = null;
        Object field = null;
        if ( leftFieldnr >= 0 ) {
          fieldMeta = rowMeta.getValueMeta( leftFieldnr );
          field = r[ leftFieldnr ];
        } else {
          return false; // no fields to evaluate
        }

        // Get fieldnrs right exact
        // An exact (constant) right-hand value takes precedence; otherwise the
        // right-hand operand is looked up in the row.
        ValueMetaInterface fieldMeta2 = rightExact != null ? rightExact.getValueMeta() : null;
        Object field2 = rightExact != null ? rightExact.getValueData() : null;
        if ( field2 == null && rightFieldnr >= 0 ) {
          fieldMeta2 = rowMeta.getValueMeta( rightFieldnr );
          field2 = r[ rightFieldnr ];
        }

        // Evaluate
        switch ( function ) {
          case FUNC_EQUAL:
            retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) == 0 );
            break;
          case FUNC_NOT_EQUAL:
            retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) != 0 );
            break;
          case FUNC_SMALLER:
            // Added this if/else to accommodate for CUST-270
            if ( "Y".equalsIgnoreCase( System.getProperty( Const.KETTLE_FILTER_TREAT_NULLS_AS_NOT_ZERO, "N" ) )
              && fieldMeta.isNull( field ) ) {
              retval = false;
            } else {
              retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) < 0 );
            }
            break;
          case FUNC_SMALLER_EQUAL:
            // Added this if/else to accommodate for CUST-270
            if ( "Y".equalsIgnoreCase( System.getProperty( Const.KETTLE_FILTER_TREAT_NULLS_AS_NOT_ZERO, "N" ) )
              && fieldMeta.isNull( field ) ) {
              retval = false;
            } else {
              retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) <= 0 );
            }
            break;
          case FUNC_LARGER:
            retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) > 0 );
            break;
          case FUNC_LARGER_EQUAL:
            retval = ( fieldMeta.compare( field, fieldMeta2, field2 ) >= 0 );
            break;
          case FUNC_REGEXP:
            if ( fieldMeta.isNull( field ) || field2 == null ) {
              retval = false;
            } else {
              retval = Pattern
                .matches( fieldMeta2.getCompatibleString( field2 ), fieldMeta.getCompatibleString( field ) );
            }
            break;
          case FUNC_NULL:
            retval = ( fieldMeta.isNull( field ) );
            break;
          case FUNC_NOT_NULL:
            retval = ( !fieldMeta.isNull( field ) );
            break;
          case FUNC_IN_LIST:
            // performance reason: create the array first or again when it is against a field and not a constant
            //
            if ( inList == null || rightFieldnr >= 0 ) {
              inList = Const.splitString( fieldMeta2.getString( field2 ), ';', true );
              for ( int i = 0; i < inList.length; i++ ) {
                inList[i] = inList[i] == null ? null : inList[i].replace( "\\", "" );
              }
              // Sorted so that the membership test below can use binary search.
              Arrays.sort( inList );
            }
            String searchString = fieldMeta.getCompatibleString( field );
            int inIndex = -1;
            if ( searchString != null ) {
              inIndex = Arrays.binarySearch( inList, searchString );
            }
            retval = inIndex >= 0;
            break;
          case FUNC_CONTAINS:
            String fm2CompatibleContains = fieldMeta2.getCompatibleString( field2 );
            retval = Optional.ofNullable( fieldMeta.getCompatibleString( field ) )
              .filter( s -> s.contains( fm2CompatibleContains ) ).isPresent();
            break;
          case FUNC_STARTS_WITH:
            String fm2CompatibleStarts = fieldMeta2.getCompatibleString( field2 );
            retval = Optional.ofNullable( fieldMeta.getCompatibleString( field ) )
              .filter( s -> s.startsWith( fm2CompatibleStarts ) ).isPresent();
            break;
          case FUNC_ENDS_WITH:
            String string = fieldMeta.getCompatibleString( field );
            if ( !Utils.isEmpty( string ) ) {
              if ( rightString == null && field2 != null ) {
                rightString = fieldMeta2.getCompatibleString( field2 );
              }
              if ( rightString != null ) {
                retval = string.endsWith( fieldMeta2.getCompatibleString( field2 ) );
              } else {
                retval = false;
              }
            } else {
              retval = false;
            }
            break;
          case FUNC_LIKE:
            // Converts to a regular expression
            // TODO: optimize the patterns and String replacements
            //
            if ( fieldMeta.isNull( field ) || field2 == null ) {
              retval = false;
            } else {
              String regex = fieldMeta2.getCompatibleString( field2 );
              regex = regex.replace( "%", ".*" );
              regex = regex.replace( "?", "." );
              retval = Pattern.matches( regex, fieldMeta.getCompatibleString( field ) );
            }
            break;
          default:
            break;
        }

        // Only NOT makes sense, the rest doesn't, so ignore!!!!
        // Optionally negate
        //
        if ( isNegated() ) {
          retval = !retval;
        }
      } else {
        // Composite : get first
        Condition cb0 = list.get( 0 );
        retval = cb0.evaluate( rowMeta, r );

        // Loop over the conditions listed below.
        //
        for ( int i = 1; i < list.size(); i++ ) {
          // Composite : #i
          // Get right hand condition
          Condition cb = list.get( i );

          // Evaluate the right hand side of the condition cb.evaluate() within
          // the switch statement
          // because the condition may be short-circuited due to the left hand
          // side (retval)
          switch ( cb.getOperator() ) {
            case Condition.OPERATOR_OR:
              retval = retval || cb.evaluate( rowMeta, r );
              break;
            case Condition.OPERATOR_AND:
              retval = retval && cb.evaluate( rowMeta, r );
              break;
            case Condition.OPERATOR_OR_NOT:
              retval = retval || ( !cb.evaluate( rowMeta, r ) );
              break;
            case Condition.OPERATOR_AND_NOT:
              retval = retval && ( !cb.evaluate( rowMeta, r ) );
              break;
            case Condition.OPERATOR_XOR:
              retval = retval ^ cb.evaluate( rowMeta, r );
              break;
            default:
              break;
          }
        }

        // Composite: optionally negate
        if ( isNegated() ) {
          retval = !retval;
        }
      }
    } catch ( Exception e ) {
      throw new RuntimeException( "Unexpected error evaluation condition [" + toString() + "]", e );
    }

    return retval;
}
/**
 * Regression test for PDI-13227: field resolution must follow field names,
 * not positions, so the same condition works on rows with reordered columns.
 */
@Test
public void testPdi13227() throws Exception {
    RowMetaInterface rowMeta1 = new RowMeta();
    rowMeta1.addValueMeta( new ValueMetaNumber( "name1" ) );
    rowMeta1.addValueMeta( new ValueMetaNumber( "name2" ) );
    rowMeta1.addValueMeta( new ValueMetaNumber( "name3" ) );

    // Same fields, different column order.
    RowMetaInterface rowMeta2 = new RowMeta();
    rowMeta2.addValueMeta( new ValueMetaNumber( "name2" ) );
    rowMeta2.addValueMeta( new ValueMetaNumber( "name1" ) );
    rowMeta2.addValueMeta( new ValueMetaNumber( "name3" ) );

    String left = "name1";
    String right = "name3";
    Condition condition = new Condition( left, Condition.FUNC_EQUAL, right, null );

    // name1 == name3 must hold under both layouts.
    assertTrue( condition.evaluate( rowMeta1, new Object[] { 1.0, 2.0, 1.0} ) );
    assertTrue( condition.evaluate( rowMeta2, new Object[] { 2.0, 1.0, 1.0} ) );
}
/**
 * Validates that the given value is non-negative and returns it unchanged.
 *
 * @param i    value to validate
 * @param name parameter name used in the error message
 * @return {@code i} when it is zero or positive
 * @throws IllegalArgumentException if {@code i} is negative
 */
public static int checkPositiveOrZero(int i, String name) {
    // Success-first: non-negative values pass straight through.
    if (i >= INT_ZERO) {
        return i;
    }
    throw new IllegalArgumentException(name + " : " + i + " (expected: >= 0)");
}
/**
 * Verifies checkPositiveOrZero for positive, zero and negative inputs:
 * the first two pass silently, the last throws IllegalArgumentException.
 */
@Test
public void testCheckPositiveOrZeroLongString() {
    Exception actualEx = null;
    try {
        ObjectUtil.checkPositiveOrZero(POS_ONE_LONG, NUM_POS_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNull(actualEx, TEST_RESULT_NULLEX_NOK);

    actualEx = null;
    try {
        ObjectUtil.checkPositiveOrZero(ZERO_LONG, NUM_ZERO_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNull(actualEx, TEST_RESULT_NULLEX_NOK);

    actualEx = null;
    try {
        ObjectUtil.checkPositiveOrZero(NEG_ONE_LONG, NUM_NEG_NAME);
    } catch (Exception e) {
        actualEx = e;
    }
    assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
    assertTrue(actualEx instanceof IllegalArgumentException, TEST_RESULT_EXTYPE_NOK);
}
/**
 * Atomically recomputes the value for {@code key} in this eventually
 * consistent map.
 *
 * <p>The recompute function receives the current raw value (or null). If the
 * result is unchanged, nothing is stored and no events fire. Otherwise a new
 * timestamped value is stored when it is newer than the existing one; peers
 * are then notified and listeners receive a PUT (or REMOVE, when the new
 * value is a tombstone) event.
 *
 * @param key               key to recompute; must not be null
 * @param recomputeFunction function mapping (key, currentValue) to new value;
 *                          must not be null
 * @return the raw value now associated with the key (possibly null)
 */
@Override
public V compute(K key, BiFunction<K, V, V> recomputeFunction) {
    checkState(!destroyed, destroyedMessage);
    checkNotNull(key, ERROR_NULL_KEY);
    checkNotNull(recomputeFunction, "Recompute function cannot be null");

    AtomicBoolean updated = new AtomicBoolean(false);
    AtomicReference<MapValue<V>> previousValue = new AtomicReference<>();
    MapValue<V> computedValue = items.compute(serializer.copy(key), (k, mv) -> {
        previousValue.set(mv);
        V newRawValue = recomputeFunction.apply(key, mv == null ? null : mv.get());
        if (mv != null && Objects.equals(newRawValue, mv.get())) {
            // value was not updated
            return mv;
        }
        MapValue<V> newValue = new MapValue<>(newRawValue, timestampProvider.apply(key, newRawValue));
        if (mv == null || newValue.isNewerThan(mv)) {
            updated.set(true);
            // We return a copy to ensure updates to peers can be serialized.
            // This prevents replica divergence due to serialization failures.
            return serializer.copy(newValue);
        } else {
            return mv;
        }
    });
    if (updated.get()) {
        notifyPeers(new UpdateEntry<>(key, computedValue), peerUpdateFunction.apply(key, computedValue.get()));
        EventuallyConsistentMapEvent.Type updateType = computedValue.isTombstone() ? REMOVE : PUT;
        // For removals, report the value that was removed (when known).
        V value = computedValue.isTombstone()
                ? previousValue.get() == null ? null : previousValue.get().get()
                : computedValue.get();
        if (value != null) {
            notifyListeners(new EventuallyConsistentMapEvent<>(mapName, updateType, key, value));
        }
    }
    return computedValue.get();
}
/**
 * Verifies compute() end to end: puts and removes fire the expected cluster
 * messages and listener events, tombstones keep advancing on repeated
 * removes, and a remove with an older timestamp than the stored value is a
 * silent no-op.
 */
@Test
public void testCompute() throws Exception {
    // Set up expectations of external events to be sent to listeners during
    // the test. These don't use timestamps so we can set them all up at once.
    EventuallyConsistentMapListener<String, String> listener = getListener();
    listener.event(new EventuallyConsistentMapEvent<>(
            MAP_NAME, EventuallyConsistentMapEvent.Type.PUT, KEY1, VALUE1));
    listener.event(new EventuallyConsistentMapEvent<>(
            MAP_NAME, EventuallyConsistentMapEvent.Type.REMOVE, KEY1, VALUE1));
    listener.event(new EventuallyConsistentMapEvent<>(
            MAP_NAME, EventuallyConsistentMapEvent.Type.PUT, KEY2, VALUE2));
    replay(listener);

    ecMap.addListener(listener);

    // Put in an initial value
    expectPeerMessage(clusterCommunicator);
    ecMap.compute(KEY1, (k, v) -> VALUE1);
    assertEquals(VALUE1, ecMap.get(KEY1));

    // Remove the value and check the correct internal cluster messages
    // are sent
    expectSpecificMulticastMessage(generateRemoveMessage(KEY1, clockService.peekAtNextTimestamp()),
            UPDATE_MESSAGE_SUBJECT, clusterCommunicator);

    ecMap.compute(KEY1, (k, v) -> null);
    assertNull(ecMap.get(KEY1));

    verify(clusterCommunicator);

    // Remove the same value again. Even though the value is no longer in
    // the map, we expect that the tombstone is updated and another remove
    // event is sent to the cluster and external listeners.
    expectSpecificMulticastMessage(generateRemoveMessage(KEY1, clockService.peekAtNextTimestamp()),
            UPDATE_MESSAGE_SUBJECT, clusterCommunicator);

    ecMap.compute(KEY1, (k, v) -> null);
    assertNull(ecMap.get(KEY1));

    verify(clusterCommunicator);

    // Put in a new value for us to try and remove
    expectPeerMessage(clusterCommunicator);

    ecMap.compute(KEY2, (k, v) -> VALUE2);

    clockService.turnBackTime();

    // Remove should have no effect, since it has an older timestamp than
    // the put. Expect no notifications to be sent out
    reset(clusterCommunicator);
    replay(clusterCommunicator);

    ecMap.compute(KEY2, (k, v) -> null);

    verify(clusterCommunicator);

    // Check that our listener received the correct events during the test
    verify(listener);
}
/**
 * Stores a configuration property, replacing any previous value for the key.
 *
 * @param key   property name
 * @param value raw string value (typed getters parse it on read)
 */
public void addProperty(String key, String value) {
    store.put(key, value);
}
/**
 * Verifies that reading a non-numeric property through the integer getters
 * fails with IllegalStateException rather than returning the default.
 */
@Test
void testIllegalType() {
    memConfig.addProperty("it", "aaa");
    Assertions.assertThrows(IllegalStateException.class, () -> memConfig.getInteger("it", 1));
    Assertions.assertThrows(IllegalStateException.class, () -> memConfig.getInt("it", 1));
    Assertions.assertThrows(IllegalStateException.class, () -> memConfig.getInt("it"));
}
/**
 * Imports all Java classes found at the given URLs.
 *
 * @param urls class locations (e.g. file or jar URLs)
 * @return the imported classes
 */
@PublicAPI(usage = ACCESS)
public JavaClasses importUrls(Collection<URL> urls) {
    return importLocations(Locations.of(urls));
}
/**
 * Verifies that classes can be imported from jar:-protocol URLs, using the
 * JUnit annotation classes (which normally reside in a JAR) as fixtures.
 */
@Test
public void imports_urls_of_jars() {
    Set<URL> urls = newHashSet(urlOf(Test.class), urlOf(RunWith.class));
    // Skip if the classes happen not to be loaded from a JAR in this run.
    assumeTrue("We can't completely ensure that this will always be taken from a JAR file, though it's very likely",
        "jar".equals(urls.iterator().next().getProtocol()));

    JavaClasses classes = new ClassFileImporter().importUrls(urls)
        .that(DescribedPredicate.not(type(Annotation.class))); // NOTE @Test and @RunWith implement Annotation.class

    assertThat(classes).as("Number of classes at the given URLs").hasSize(2);
}
/**
 * Ensures the given iterator has at least one more element, returning it
 * unchanged so the call can be chained.
 *
 * @param iterator iterator to check
 * @param message  message for the exception when the iterator is exhausted
 * @param <T>      element type
 * @return the same iterator instance
 * @throws NoSuchElementException if the iterator has no more elements
 */
public static <T> Iterator<T> checkHasNext(Iterator<T> iterator, String message) throws NoSuchElementException {
    // Success-first: hand the iterator straight back when it has elements.
    if (iterator.hasNext()) {
        return iterator;
    }
    throw new NoSuchElementException(message);
}
/**
 * Verifies that checkHasNext returns the same iterator instance when the
 * iterator still has elements.
 */
@Test
public void test_hasNextReturnsIterator_whenNonEmptyIteratorGiven() {
    Iterator<Integer> iterator = Arrays.asList(1, 2).iterator();

    assertEquals(iterator, checkHasNext(iterator, ""));
}
/**
 * Normalizes the input value by piecewise linear interpolation.
 *
 * <p>Selects the pair of LinearNorm breakpoints bracketing the input and
 * interpolates between them.
 *
 * @param input value to normalize
 * @return the interpolated (normalized) value
 */
Number evaluateExpectedValue(final Number input) {
    KiePMMLLinearNorm[] limitLinearNorms = getLimitExpectedValue(input);
    return evaluate(input, limitLinearNorms);
}
// Verifies that evaluateExpectedValue performs linear interpolation between the
// correct pair of LinearNorms for inputs across all segments. The seven
// original copy-pasted stanzas are collapsed into a single assertion helper.
@Test
void evaluateExpectedValue() {
    KiePMMLNormContinuous kiePMMLNormContinuous = getKiePMMLNormContinuous(null, null, null);
    // input -> index of the lower LinearNorm expected to bracket it
    assertInterpolatedValue(kiePMMLNormContinuous, 24, 0);
    assertInterpolatedValue(kiePMMLNormContinuous, 28, 0);
    assertInterpolatedValue(kiePMMLNormContinuous, 30, 1);
    assertInterpolatedValue(kiePMMLNormContinuous, 31, 1);
    assertInterpolatedValue(kiePMMLNormContinuous, 36, 2);
    assertInterpolatedValue(kiePMMLNormContinuous, 37, 2);
    assertInterpolatedValue(kiePMMLNormContinuous, 40, 2);
}

/**
 * Asserts that {@code evaluateExpectedValue(input)} equals the linear
 * interpolation between the LinearNorms at {@code lowerIndex} and
 * {@code lowerIndex + 1}: norm_lo + ((x - orig_lo) / (orig_hi - orig_lo)) * (norm_hi - norm_lo).
 */
private static void assertInterpolatedValue(KiePMMLNormContinuous kiePMMLNormContinuous, Number input, int lowerIndex) {
    Number retrieved = kiePMMLNormContinuous.evaluateExpectedValue(input);
    Number expected = kiePMMLNormContinuous.linearNorms.get(lowerIndex).getNorm()
            + ((input.doubleValue() - kiePMMLNormContinuous.linearNorms.get(lowerIndex).getOrig())
                    / (kiePMMLNormContinuous.linearNorms.get(lowerIndex + 1).getOrig()
                            - kiePMMLNormContinuous.linearNorms.get(lowerIndex).getOrig()))
            * (kiePMMLNormContinuous.linearNorms.get(lowerIndex + 1).getNorm()
                    - kiePMMLNormContinuous.linearNorms.get(lowerIndex).getNorm());
    assertThat(retrieved).isEqualTo(expected);
}
/**
 * Constructs a {@link LegacyAddress} from its base58 form.
 *
 * @param params expected network this address is valid for, or null if the
 *               network should be derived from the base58 string itself
 * @param base58 base58-encoded textual form of the address
 * @throws AddressFormatException if the given base58 doesn't parse or the checksum is invalid
 * @throws AddressFormatException.WrongNetwork if the given address is valid but for a different chain
 * @deprecated delegates to {@link AddressParser}; use that parser directly instead
 */
@Deprecated
public static LegacyAddress fromBase58(@Nullable NetworkParameters params, String base58)
        throws AddressFormatException, AddressFormatException.WrongNetwork {
    return (LegacyAddress) AddressParser.getLegacy(params).parseAddress(base58);
}
@Test public void errorPaths() { // Check what happens if we try and decode garbage. try { LegacyAddress.fromBase58("this is not a valid address!", TESTNET); fail(); } catch (AddressFormatException.WrongNetwork e) { fail(); } catch (AddressFormatException e) { // Success. } // Check the empty case. try { LegacyAddress.fromBase58("", TESTNET); fail(); } catch (AddressFormatException.WrongNetwork e) { fail(); } catch (AddressFormatException e) { // Success. } // Check the case of a mismatched network. try { LegacyAddress.fromBase58("17kzeh4N8g49GFvdDzSf8PjaPfyoD1MndL", TESTNET); fail(); } catch (AddressFormatException.WrongNetwork e) { // Success. } catch (AddressFormatException e) { fail(); } }
/**
 * Applies the procedure to each element of the iterable in parallel, using
 * the shared {@code FJIterate.FORK_JOIN_POOL}.
 *
 * @param iterable  the elements to iterate over
 * @param procedure the procedure to execute for each element
 */
public static <T> void forEach(Iterable<T> iterable, Procedure<? super T> procedure) {
    FJIterate.forEach(iterable, procedure, FJIterate.FORK_JOIN_POOL);
}
// Verifies FJIterate.forEach over immutable lists of several sizes, both with
// default batching and with an explicit (minForkSize=1, taskCount=size/2)
// configuration. The seven copy-pasted stanzas are collapsed into a helper.
@Test
public void testForEachImmutable() {
    assertForkJoinSum(Lists.immutable.ofAll(FJIterateTest.createIntegerList(16)), 16, true);
    assertForkJoinSum(Lists.immutable.ofAll(FJIterateTest.createIntegerList(7)), 7, false);
    assertForkJoinSum(Lists.immutable.ofAll(FJIterateTest.createIntegerList(15)), 15, true);
    ImmutableList<Integer> list4 = Lists.immutable.ofAll(FJIterateTest.createIntegerList(35));
    assertForkJoinSum(list4, 35, false);
    // A FastList-backed copy must behave the same as the original list.
    assertForkJoinSum(FastList.newList(list4).toImmutable(), 35, false);
    ImmutableList<Integer> list6 = Lists.immutable.ofAll(FJIterateTest.createIntegerList(40));
    assertForkJoinSum(list6, 40, true);
    assertForkJoinSum(FastList.newList(list6).toImmutable(), 40, true);
}

/**
 * Sums {@code list} via FJIterate.forEach and asserts the result.
 *
 * @param explicitBatching when true, uses minForkSize=1 and taskCount=list.size()/2;
 *                         otherwise uses the default batching parameters
 */
private static void assertForkJoinSum(ImmutableList<Integer> list, int expectedSum, boolean explicitBatching) {
    IntegerSum sum = new IntegerSum(0);
    if (explicitBatching) {
        FJIterate.forEach(list, new SumProcedure(sum), new SumCombiner(sum), 1, list.size() / 2);
    } else {
        FJIterate.forEach(list, new SumProcedure(sum), new SumCombiner(sum));
    }
    assertEquals(expectedSum, sum.getSum());
}
/**
 * Parses a comma-separated list of {@code key=value} pairs into a {@link Labels}
 * instance. A null or empty string yields empty labels. Keys and values are trimmed.
 *
 * @param stringLabels labels in the form {@code "key1=value1,key2=value2"}
 * @return the parsed labels
 * @throws IllegalArgumentException if any entry is not a single {@code key=value} pair
 */
public static Labels fromString(String stringLabels) throws IllegalArgumentException {
    Map<String, String> labels = new HashMap<>();
    try {
        if (stringLabels != null && !stringLabels.isEmpty()) {
            String[] labelsArray = stringLabels.split(",");
            for (String label : labelsArray) {
                String[] fields = label.split("=");
                // Explicitly reject entries with a missing value ("key=") or
                // multiple separators ("a=b=c"); previously a missing value
                // only failed via ArrayIndexOutOfBoundsException and extra
                // separators were silently dropped.
                if (fields.length != 2) {
                    throw new IllegalArgumentException("Label " + label + " is not a single key=value pair");
                }
                labels.put(fields[0].trim(), fields[1].trim());
            }
        }
    } catch (Exception e) {
        // Wrap everything (including the validation above) so callers always
        // see an IllegalArgumentException mentioning the full input.
        throw new IllegalArgumentException("Failed to parse labels from string " + stringLabels, e);
    }
    return new Labels(labels);
}
// A label entry with a key but no value ("key2=") must be rejected.
@Test
public void testParseInvalidLabels2() {
    final String invalidLabels = "key1=value1,key2=";
    assertThrows(IllegalArgumentException.class, () -> Labels.fromString(invalidLabels));
}
// Looks up data influences for one entity field (paged) and converts the
// domain objects to DTOs.
@Override
public List<ApolloAuditLogDataInfluenceDTO> queryDataInfluencesByField(String entityName, String entityId,
    String fieldName, int page, int size) {
  List<ApolloAuditLogDataInfluence> influences =
      dataInfluenceService.findByEntityNameAndEntityIdAndFieldName(entityName, entityId, fieldName, page, size);
  return ApolloAuditUtil.dataInfluenceListToDTOList(influences);
}
// The API should delegate to the service exactly once and map every returned
// influence to a DTO.
@Test
public void testQueryDataInfluencesByField() {
    final String entityName = "App";
    final String entityId = "1";
    final String fieldName = "xxx";
    List<ApolloAuditLogDataInfluence> mockedInfluences = MockBeanFactory.mockDataInfluenceListByLength(size);
    Mockito.when(dataInfluenceService.findByEntityNameAndEntityIdAndFieldName(Mockito.eq(entityName),
            Mockito.eq(entityId), Mockito.eq(fieldName), Mockito.eq(page), Mockito.eq(size)))
        .thenReturn(mockedInfluences);

    List<ApolloAuditLogDataInfluenceDTO> dtoList =
        api.queryDataInfluencesByField(entityName, entityId, fieldName, page, size);

    Mockito.verify(dataInfluenceService, Mockito.times(1))
        .findByEntityNameAndEntityIdAndFieldName(Mockito.eq(entityName), Mockito.eq(entityId),
            Mockito.eq(fieldName), Mockito.eq(page), Mockito.eq(size));
    assertEquals(size, dtoList.size());
}
/**
 * Creates the context for a share fetch request, either starting/ending a share
 * session (full request) or updating an already existing one (subsequent request).
 *
 * @param groupId the consumer group of the request
 * @param shareFetchData requested partitions with their fetch parameters
 * @param toForget partitions the client no longer wants tracked in the session
 * @param reqMetadata member id and epoch of the request
 * @param isAcknowledgeDataPresent whether the request carries acknowledgement data
 *        (not allowed on an initial request)
 * @return a FinalContext for FINAL_EPOCH requests, otherwise a ShareSessionContext
 */
public ShareFetchContext newContext(String groupId, Map<TopicIdPartition, ShareFetchRequest.SharePartitionData> shareFetchData,
                                    List<TopicIdPartition> toForget, ShareRequestMetadata reqMetadata, Boolean isAcknowledgeDataPresent) {
    ShareFetchContext context;
    // TopicPartition with maxBytes as 0 should not be added in the cachedPartitions
    Map<TopicIdPartition, ShareFetchRequest.SharePartitionData> shareFetchDataWithMaxBytes = new HashMap<>();
    shareFetchData.forEach((tp, sharePartitionData) -> {
        if (sharePartitionData.maxBytes > 0) shareFetchDataWithMaxBytes.put(tp, sharePartitionData);
    });
    // If the request's epoch is FINAL_EPOCH or INITIAL_EPOCH, we should remove the existing sessions. Also, start a
    // new session in case it is INITIAL_EPOCH. Hence, we need to treat them as special cases.
    if (reqMetadata.isFull()) {
        ShareSessionKey key = shareSessionKey(groupId, reqMetadata.memberId());
        if (reqMetadata.epoch() == ShareRequestMetadata.FINAL_EPOCH) {
            // If the epoch is FINAL_EPOCH, don't try to create a new session.
            // A final request must not ask for any partition data.
            if (!shareFetchDataWithMaxBytes.isEmpty()) {
                throw Errors.INVALID_REQUEST.exception();
            }
            if (cache.remove(key) == null) {
                log.error("Share session error for {}: no such share session found", key);
                throw Errors.SHARE_SESSION_NOT_FOUND.exception();
            } else {
                log.debug("Removed share session with key " + key);
            }
            context = new FinalContext();
        } else {
            // INITIAL_EPOCH: acknowledgements are only valid once a session exists.
            if (isAcknowledgeDataPresent) {
                log.error("Acknowledge data present in Initial Fetch Request for group {} member {}", groupId, reqMetadata.memberId());
                throw Errors.INVALID_REQUEST.exception();
            }
            // Drop any stale session left over for this member before starting fresh.
            if (cache.remove(key) != null) {
                log.debug("Removed share session with key {}", key);
            }
            ImplicitLinkedHashCollection<CachedSharePartition> cachedSharePartitions = new
                ImplicitLinkedHashCollection<>(shareFetchDataWithMaxBytes.size());
            shareFetchDataWithMaxBytes.forEach((topicIdPartition, reqData) ->
                cachedSharePartitions.mustAdd(new CachedSharePartition(topicIdPartition, reqData, false)));
            ShareSessionKey responseShareSessionKey = cache.maybeCreateSession(groupId, reqMetadata.memberId(),
                time.milliseconds(), cachedSharePartitions);
            if (responseShareSessionKey == null) {
                log.error("Could not create a share session for group {} member {}", groupId, reqMetadata.memberId());
                throw Errors.SHARE_SESSION_NOT_FOUND.exception();
            }
            context = new ShareSessionContext(reqMetadata, shareFetchDataWithMaxBytes);
            log.debug("Created a new ShareSessionContext with key {} isSubsequent {} returning {}. A new share " +
                "session will be started.", responseShareSessionKey, false,
                partitionsToLogString(shareFetchDataWithMaxBytes.keySet()));
        }
    } else {
        // We update the already existing share session.
        synchronized (cache) {
            ShareSessionKey key = shareSessionKey(groupId, reqMetadata.memberId());
            ShareSession shareSession = cache.get(key);
            if (shareSession == null) {
                log.error("Share session error for {}: no such share session found", key);
                throw Errors.SHARE_SESSION_NOT_FOUND.exception();
            }
            // The client must present the epoch the broker expects next.
            if (shareSession.epoch != reqMetadata.epoch()) {
                log.debug("Share session error for {}: expected epoch {}, but got {} instead", key,
                    shareSession.epoch, reqMetadata.epoch());
                throw Errors.INVALID_SHARE_SESSION_EPOCH.exception();
            }
            // Apply the requested additions/updates/removals to the cached session.
            Map<ShareSession.ModifiedTopicIdPartitionType, List<TopicIdPartition>> modifiedTopicIdPartitions = shareSession.update(
                shareFetchDataWithMaxBytes, toForget);
            cache.touch(shareSession, time.milliseconds());
            shareSession.epoch = ShareRequestMetadata.nextEpoch(shareSession.epoch);
            log.debug("Created a new ShareSessionContext for session key {}, epoch {}: " +
                "added {}, updated {}, removed {}", shareSession.key(), shareSession.epoch,
                partitionsToLogString(modifiedTopicIdPartitions.get(
                    ShareSession.ModifiedTopicIdPartitionType.ADDED)),
                partitionsToLogString(modifiedTopicIdPartitions.get(ShareSession.ModifiedTopicIdPartitionType.UPDATED)),
                partitionsToLogString(modifiedTopicIdPartitions.get(ShareSession.ModifiedTopicIdPartitionType.REMOVED))
            );
            context = new ShareSessionContext(reqMetadata, shareSession);
        }
    }
    return context;
}
// End-to-end check of a share session across two requests: an initial fetch
// for foo-0/foo-1, then a subsequent fetch that forgets foo-0 and adds bar-0.
@Test
public void testSubsequentShareSession() {
    SharePartitionManager sharePartitionManager = SharePartitionManagerBuilder.builder().build();
    Map<Uuid, String> topicNames = new HashMap<>();
    Uuid fooId = Uuid.randomUuid();
    Uuid barId = Uuid.randomUuid();
    topicNames.put(fooId, "foo");
    topicNames.put(barId, "bar");
    TopicIdPartition tp0 = new TopicIdPartition(fooId, new TopicPartition("foo", 0));
    TopicIdPartition tp1 = new TopicIdPartition(fooId, new TopicPartition("foo", 1));
    TopicIdPartition tp2 = new TopicIdPartition(barId, new TopicPartition("bar", 0));

    // Create a new share session with foo-0 and foo-1
    Map<TopicIdPartition, ShareFetchRequest.SharePartitionData> reqData1 = new LinkedHashMap<>();
    reqData1.put(tp0, new ShareFetchRequest.SharePartitionData(tp0.topicId(), 100));
    reqData1.put(tp1, new ShareFetchRequest.SharePartitionData(tp1.topicId(), 100));

    String groupId = "grp";
    ShareRequestMetadata reqMetadata1 = new ShareRequestMetadata(Uuid.randomUuid(), ShareRequestMetadata.INITIAL_EPOCH);
    ShareFetchContext context1 = sharePartitionManager.newContext(groupId, reqData1, EMPTY_PART_LIST, reqMetadata1, false);
    assertEquals(ShareSessionContext.class, context1.getClass());
    LinkedHashMap<TopicIdPartition, ShareFetchResponseData.PartitionData> respData1 = new LinkedHashMap<>();
    respData1.put(tp0, new ShareFetchResponseData.PartitionData().setPartitionIndex(tp0.partition()));
    respData1.put(tp1, new ShareFetchResponseData.PartitionData().setPartitionIndex(tp1.partition()));
    // The initial response carries data for both requested partitions.
    ShareFetchResponse resp1 = context1.updateAndGenerateResponseData(groupId, reqMetadata1.memberId(), respData1);
    assertEquals(Errors.NONE, resp1.error());
    assertEquals(2, resp1.responseData(topicNames).size());

    // Create a subsequent fetch request that removes foo-0 and adds bar-0
    Map<TopicIdPartition, ShareFetchRequest.SharePartitionData> reqData2 = Collections.singletonMap(
        tp2, new ShareFetchRequest.SharePartitionData(tp2.topicId(), 100));
    List<TopicIdPartition> removed2 = new ArrayList<>();
    removed2.add(tp0);
    ShareFetchContext context2 = sharePartitionManager.newContext(groupId, reqData2, removed2,
        new ShareRequestMetadata(reqMetadata1.memberId(), 1), true);
    assertEquals(ShareSessionContext.class, context2.getClass());

    // The cached session should now track exactly foo-1 and bar-0.
    Set<TopicIdPartition> expectedTopicIdPartitions2 = new HashSet<>();
    expectedTopicIdPartitions2.add(tp1);
    expectedTopicIdPartitions2.add(tp2);
    Set<TopicIdPartition> actualTopicIdPartitions2 = new HashSet<>();
    ShareSessionContext shareSessionContext = (ShareSessionContext) context2;
    shareSessionContext.session().partitionMap().forEach(cachedSharePartition -> {
        TopicIdPartition topicIdPartition = new TopicIdPartition(cachedSharePartition.topicId(), new
            TopicPartition(cachedSharePartition.topic(), cachedSharePartition.partition()));
        actualTopicIdPartitions2.add(topicIdPartition);
    });
    assertEquals(expectedTopicIdPartitions2, actualTopicIdPartitions2);

    // Only bar-0 is new for this session, so the subsequent response body
    // contains a single topic entry (bar) with one partition.
    LinkedHashMap<TopicIdPartition, ShareFetchResponseData.PartitionData> respData2 = new LinkedHashMap<>();
    respData2.put(tp1, new ShareFetchResponseData.PartitionData().setPartitionIndex(tp1.partition()));
    respData2.put(tp2, new ShareFetchResponseData.PartitionData().setPartitionIndex(tp2.partition()));
    ShareFetchResponse resp2 = context2.updateAndGenerateResponseData(groupId, reqMetadata1.memberId(), respData2);
    assertEquals(Errors.NONE, resp2.error());
    assertEquals(1, resp2.data().responses().size());
    assertEquals(barId, resp2.data().responses().get(0).topicId());
    assertEquals(1, resp2.data().responses().get(0).partitions().size());
    assertEquals(0, resp2.data().responses().get(0).partitions().get(0).partitionIndex());
    assertEquals(1, resp2.responseData(topicNames).size());
}
// Returns the URLs that make up the user classpath.
// NOTE(review): this exposes the internal collection directly — callers could
// mutate it; confirm whether an unmodifiable view is expected here.
protected Collection<URL> getUserClassPaths() {
    return userClassPaths;
}
// The retriever's user classpath should contain a URL (relative to the
// current working directory) for every prepared test file, in any order.
@Test
public void testGetUserClassPath() throws IOException {
    final File jobDirectory = temporaryFolder.newFolder("_test_job");
    final Collection<Path> preparedFiles = FileUtilsTest.prepareTestFiles(jobDirectory.toPath());
    final Path workingDirectory = FileUtils.getCurrentWorkingDirectory();
    final TestJobGraphRetriever retriever = new TestJobGraphRetriever(jobDirectory);

    final Object[] expectedUrls =
            preparedFiles.stream()
                    .map(file -> FileUtils.relativizePath(workingDirectory, file))
                    .map(FunctionUtils.uncheckedFunction(FileUtils::toURL))
                    .toArray();

    assertThat(retriever.getUserClassPaths(), containsInAnyOrder(expectedUrls));
}
/**
 * Returns the {@link Timer} registered under the given name, building a
 * {@link MetricName} from the plain string.
 *
 * @param name the name of the metric
 * @return a new or pre-existing {@link Timer}
 */
public Timer timer(String name) {
    return timer(MetricName.build(name));
}
// Requesting the same timer name twice must register it once and return the
// identical instance on subsequent lookups.
@Test
public void accessingATimerRegistersAndReusesIt() throws Exception {
    final Timer first = registry.timer(THING);
    final Timer second = registry.timer(THING);

    assertThat(first).isSameAs(second);
    verify(listener).onTimerAdded(THING, first);
}
// Runs the demo scenario against the two preconfigured environments.
// NOTE(review): the meaning of the second argument (1 for jenkins, 0 for
// travis) is not visible here — confirm against scenario()'s definition.
public static void main(String[] args) {
    scenario(jenkins, 1);
    scenario(travis, 0);
}
// Smoke test: running the application entry point with no arguments must
// complete without throwing.
@Test
void shouldExecuteWithoutException() {
    assertDoesNotThrow(() -> App.main(new String[0]));
}
/**
 * Shows or updates the request-pipelining setting.
 *
 * <p>With no argument, prints the current setting. With one argument ("ON" or
 * "OFF", case-insensitive), updates the setting; any other value prints usage
 * guidance and leaves the setting unchanged.
 *
 * @param args     zero or one argument
 * @param terminal destination for user-facing output
 */
@Override
public void execute(final List<String> args, final PrintWriter terminal) {
    CliCmdUtil.ensureArgCountBounds(args, 0, 1, HELP);
    if (args.isEmpty()) {
        final String setting = requestPipeliningSupplier.get() ? "ON" : "OFF";
        terminal.printf("Current %s configuration: %s%n", NAME, setting);
        return;
    }
    final String newSetting = args.get(0);
    // Normalize once, with a fixed locale so locale-specific casing rules
    // (e.g. Turkish dotted/dotless 'i') cannot affect parsing; the original
    // also called toUpperCase() twice on the valid path.
    final String normalized = newSetting.toUpperCase(java.util.Locale.ROOT);
    switch (normalized) {
        case "ON":
            requestPipeliningConsumer.accept(true);
            break;
        case "OFF":
            requestPipeliningConsumer.accept(false);
            break;
        default:
            terminal.printf("Invalid %s setting: %s. ", NAME, newSetting);
            terminal.println("Valid options are 'ON' and 'OFF'.");
            return;
    }
    terminal.println(NAME + " configuration is now " + normalized);
}
// A lowercase "on" argument must be accepted and enable the setting.
@Test
public void shouldUpdateSettingToOn() {
    // When:
    requestPipeliningCommand.execute(ImmutableList.of("on"), terminal);

    // Then:
    verify(settingConsumer).accept(true);
}
/**
 * Registers (or deletes) a recurring job for a {@link Recurring}-annotated method.
 *
 * <p>Exactly one of the annotation's cron or interval attributes must be set.
 * If the attribute equals the disabled placeholder, the existing recurring job
 * with the configured id is deleted instead of scheduled.
 *
 * @param method the annotated target method
 * @throws IllegalStateException    if the method takes parameters other than a single JobContext
 * @throws IllegalArgumentException if neither or both of cron/interval are provided
 */
public void schedule(ExecutableMethod<?, ?> method) {
    // Recurring job methods may take no arguments, or only a JobContext.
    if (hasParametersOutsideOfJobContext(method.getTargetMethod())) {
        throw new IllegalStateException("Methods annotated with " + Recurring.class.getName() + " can only have zero parameters or a single parameter of type JobContext.");
    }
    String id = getId(method);
    String cron = getCron(method);
    String interval = getInterval(method);

    // Exactly one of cron / interval must be provided.
    if (StringUtils.isNullOrEmpty(cron) && StringUtils.isNullOrEmpty(interval))
        throw new IllegalArgumentException("Either cron or interval attribute is required.");
    if (isNotNullOrEmpty(cron) && isNotNullOrEmpty(interval))
        throw new IllegalArgumentException("Both cron and interval attribute provided. Only one is allowed.");

    if (Recurring.RECURRING_JOB_DISABLED.equals(cron) || Recurring.RECURRING_JOB_DISABLED.equals(interval)) {
        // The disabled placeholder means any previously scheduled job must be
        // removed — which is only possible when an id was configured.
        if (id == null) {
            LOGGER.warn("You are trying to disable a recurring job using placeholders but did not define an id.");
        } else {
            jobScheduler.deleteRecurringJob(id);
        }
    } else {
        JobDetails jobDetails = getJobDetails(method);
        ZoneId zoneId = getZoneId(method);
        if (isNotNullOrEmpty(cron)) {
            jobScheduler.scheduleRecurrently(id, jobDetails, CronExpression.create(cron), zoneId);
        } else {
            jobScheduler.scheduleRecurrently(id, jobDetails, new Interval(interval), zoneId);
        }
    }
}
// An interval of "-" (the disabled placeholder) combined with a configured id
// must delete the existing recurring job instead of scheduling one.
@Test
void beansWithMethodsAnnotatedWithDisabledRecurringIntervalAnnotationWillAutomaticallyBeDeleted() {
    final ExecutableMethod recurringMethod = mock(ExecutableMethod.class);
    final Method targetMethod = getRequiredMethod(MyServiceWithRecurringJob.class, "myRecurringMethod");
    when(recurringMethod.getTargetMethod()).thenReturn(targetMethod);
    when(recurringMethod.stringValue(Recurring.class, "id")).thenReturn(Optional.of("my-recurring-job"));
    when(recurringMethod.stringValue(Recurring.class, "cron")).thenReturn(Optional.empty());
    when(recurringMethod.stringValue(Recurring.class, "interval")).thenReturn(Optional.of("-"));

    jobRunrRecurringJobScheduler.schedule(recurringMethod);

    verify(jobScheduler).deleteRecurringJob("my-recurring-job");
}
// Removes and returns the most recently sent in-flight request for the node,
// decrementing the global in-flight counter.
public NetworkClient.InFlightRequest completeLastSent(String node) {
    final NetworkClient.InFlightRequest request = requestQueue(node).pollFirst();
    inFlightRequestCount.decrementAndGet();
    return request;
}
// Completing the most recently sent request for a node with no in-flight
// requests must fail fast with IllegalStateException.
@Test
public void testCompleteLastSentThrowsIfNoInFlights() {
    assertThrows(IllegalStateException.class, () -> inFlightRequests.completeLastSent(dest));
}
@Override public boolean alterOffsets(Map<String, String> connectorConfig, Map<Map<String, ?>, Map<String, ?>> offsets) { for (Map.Entry<Map<String, ?>, Map<String, ?>> offsetEntry : offsets.entrySet()) { Map<String, ?> sourceOffset = offsetEntry.getValue(); if (sourceOffset == null) { // We allow tombstones for anything; if there's garbage in the offsets for the connector, we don't // want to prevent users from being able to clean it up using the REST API continue; } Map<String, ?> sourcePartition = offsetEntry.getKey(); if (sourcePartition == null) { throw new ConnectException("Source partitions may not be null"); } MirrorUtils.validateSourcePartitionString(sourcePartition, SOURCE_CLUSTER_KEY); MirrorUtils.validateSourcePartitionString(sourcePartition, TOPIC_KEY); MirrorUtils.validateSourcePartitionPartition(sourcePartition); MirrorUtils.validateSourceOffset(sourcePartition, sourceOffset, false); } // We never commit offsets with our source consumer, so no additional effort is required beyond just validating // the format of the user-supplied offsets return true; }
// Valid offsets for two partitions of the same topic must be accepted.
@Test
public void testAlterOffsetsMultiplePartitions() {
    MirrorSourceConnector connector = new MirrorSourceConnector();

    Map<Map<String, ?>, Map<String, ?>> offsets = new HashMap<>();
    offsets.put(sourcePartition("t1", 0, "primary"), MirrorUtils.wrapOffset(50));
    offsets.put(sourcePartition("t1", 1, "primary"), MirrorUtils.wrapOffset(100));

    assertTrue(connector.alterOffsets(null, offsets));
}
/**
 * Converts a metric into a serializable map with its full name, short name,
 * type label and type-specific representation.
 *
 * @throws IllegalArgumentException for metric classes this method doesn't know
 */
public static Map<String, Object> map(String metricName, Metric metric) {
    // Resolve the type label and representation first; the instanceof order
    // is preserved from the original implementation.
    final String type;
    final Object value;
    if (metric instanceof Timer) {
        type = "timer";
        value = buildTimerMap((Timer) metric);
    } else if (metric instanceof Meter) {
        type = "meter";
        value = buildMeterMap((Meter) metric);
    } else if (metric instanceof Histogram) {
        type = "histogram";
        value = buildHistogramMap((Histogram) metric);
    } else if (metric instanceof Counter) {
        type = "counter";
        value = metric;
    } else if (metric instanceof Gauge) {
        type = "gauge";
        value = metric;
    } else {
        throw new IllegalArgumentException("Unknown metric type " + metric.getClass());
    }

    final Map<String, Object> metricMap = Maps.newHashMap();
    metricMap.put("full_name", metricName);
    metricMap.put("name", metricName.substring(metricName.lastIndexOf(".") + 1));
    metricMap.put("metric", value);
    metricMap.put("type", type);
    return metricMap;
}
// A Meter with one marked event must map to type "meter" whose metric body
// reports a total rate count of 1.
@Test
public void mapSupportsMeter() {
    final Meter meter = new Meter();
    meter.mark();
    final Map<String, Object> map = MetricUtils.map("metric", meter);
    assertThat(map)
        .containsEntry("type", "meter")
        .extracting("metric")
        .extracting("rate")
        .extracting("total")
        .isEqualTo(1L);
}