focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Replaces user and password placeholders in the given code with the credentials
 * registered under the captured key, recording every injected password so it can
 * later be hidden from output.
 *
 * @param code text possibly containing credential placeholders; may be null
 * @return the text with all resolvable placeholders substituted, or null when code was null
 */
public String replaceCredentials(String code) {
    if (code == null) {
        return null;
    }
    String replaced = replaceForPattern(userpattern, code, false);
    return replaceForPattern(passwordpattern, replaced, true);
}

/**
 * Repeatedly replaces the first match of {@code pattern} whose captured key (group 1)
 * resolves to a known credential. After each substitution the string is rescanned from
 * the start (mirrors the original loop); placeholders with unknown keys are skipped.
 *
 * @param isPassword when true, substitutes the password and records it in {@code passwords};
 *                   otherwise substitutes the username
 */
private String replaceForPattern(Pattern pattern, String input, boolean isPassword) {
    String replaced = input;
    Matcher matcher = pattern.matcher(replaced);
    while (matcher.find()) {
        String key = matcher.group(1);
        UsernamePassword usernamePassword = creds.getUsernamePassword(key);
        if (usernamePassword != null) {
            String value = isPassword ? usernamePassword.getPassword() : usernamePassword.getUsername();
            if (isPassword) {
                passwords.add(value);
            }
            // quoteReplacement so "$" and "\" in credentials are inserted literally.
            replaced = matcher.replaceFirst(Matcher.quoteReplacement(value));
            // Re-create the matcher: offsets of the old matcher are invalid after replacement.
            matcher = pattern.matcher(replaced);
        }
    }
    return replaced;
}
@Test
void replaceCredentials() {
    // Given a credential store that resolves the "mysql" key.
    UserCredentials credentialStore = mock(UserCredentials.class);
    when(credentialStore.getUsernamePassword("mysql"))
            .thenReturn(new UsernamePassword("username", "pwd"));
    CredentialInjector injector = new CredentialInjector(credentialStore);

    // When placeholders are replaced, the expected substitution is produced.
    String replaced = injector.replaceCredentials(TEMPLATE);
    assertEquals(CORRECT_REPLACED, replaced);

    // And the injected password is masked in the interpreter output.
    InterpreterResult result = new InterpreterResult(Code.SUCCESS, ANSWER);
    InterpreterResult masked = injector.hidePasswords(result);
    assertEquals(1, masked.message().size());
    assertEquals(HIDDEN, masked.message().get(0).getData());
}
/**
 * Queries historic variable instances using a JSON request body instead of URL
 * parameters, which allows more advanced querying and avoids over-long request URIs.
 *
 * @param queryRequest the query, using the same fields as the GET collection endpoint
 * @param allRequestParams raw request parameters (hidden from the API docs)
 * @return the matching variable instances as a paged data response
 */
@ApiOperation(value = "Query for variable instances", tags = { "History", "Query" }, notes = "All supported JSON parameter fields allowed are exactly the same as the parameters found for getting a collection of variable instances,"
        + " but passed in as JSON-body arguments rather than URL-parameters to allow for more advanced querying and preventing errors with request-uri’s that are too long.")
@ApiResponses(value = {
        // Fixed copy-pasted message: this endpoint returns variable instances, not tasks.
        @ApiResponse(code = 200, message = "Indicates request was successful and the variable instances are returned"),
        @ApiResponse(code = 400, message = "Indicates a parameter was passed in the wrong format. The status-message contains additional information.") })
@PostMapping(value = "/query/variable-instances", produces = "application/json")
public DataResponse<VariableInstanceResponse> queryVariableInstances(@RequestBody VariableInstanceQueryRequest queryRequest,
        @ApiParam(hidden = true) @RequestParam Map<String, String> allRequestParams) {
    return getQueryResponse(queryRequest, allRequestParams);
}
@Test
@Deployment
public void testQueryVariableInstances() throws Exception {
    // Fixture: two process instances with the same three process variables,
    // plus one local task variable on the second task of the first instance.
    HashMap<String, Object> processVariables = new HashMap<>();
    processVariables.put("stringVar", "Azerty");
    processVariables.put("intVar", 67890);
    processVariables.put("booleanVar", false);
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("oneTaskProcess", processVariables);
    Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
    taskService.complete(task.getId());
    task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult();
    taskService.setVariableLocal(task.getId(), "taskVariable", "test");
    ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKey("oneTaskProcess", processVariables);
    String url = RestUrls.createRelativeResourceUrl(RestUrls.URL_VARIABLE_INSTANCE_QUERY);
    // Query by variable name: matches once per process instance.
    ObjectNode requestNode = objectMapper.createObjectNode();
    requestNode.put("variableName", "stringVar");
    assertResultsPresentInDataResponse(url, requestNode, 2, "stringVar", "Azerty");
    requestNode = objectMapper.createObjectNode();
    requestNode.put("variableName", "booleanVar");
    assertResultsPresentInDataResponse(url, requestNode, 2, "booleanVar", false);
    // Unknown variable name yields no results.
    requestNode = objectMapper.createObjectNode();
    requestNode.put("variableName", "booleanVar2");
    assertResultsPresentInDataResponse(url, requestNode, 0, null, null);
    // By process instance id: 3 process variables + 1 task-local variable = 4.
    requestNode = objectMapper.createObjectNode();
    requestNode.put("processInstanceId", processInstance.getId());
    assertResultsPresentInDataResponse(url, requestNode, 4, "taskVariable", "test");
    // Excluding task variables drops the task-local one.
    requestNode = objectMapper.createObjectNode();
    requestNode.put("processInstanceId", processInstance.getId());
    requestNode.put("excludeTaskVariables", true);
    assertResultsPresentInDataResponse(url, requestNode, 3, "intVar", 67890);
    // Second instance has no task-local variable, so only the 3 process variables.
    requestNode = objectMapper.createObjectNode();
    requestNode.put("processInstanceId", processInstance2.getId());
    assertResultsPresentInDataResponse(url, requestNode, 3, "stringVar", "Azerty");
    // By task id: only the task-local variable is visible.
    requestNode = objectMapper.createObjectNode();
    requestNode.put("taskId", task.getId());
    assertResultsPresentInDataResponse(url, requestNode, 1, "taskVariable", "test");
    requestNode = objectMapper.createObjectNode();
    requestNode.put("taskId", task.getId());
    requestNode.put("variableName", "booleanVar");
    assertResultsPresentInDataResponse(url, requestNode, 0, null, null);
    // Name LIKE queries: "%Var" matches all 6 process variables across both instances.
    requestNode = objectMapper.createObjectNode();
    requestNode.put("variableNameLike", "%Var");
    assertResultsPresentInDataResponse(url, requestNode, 6, "stringVar", "Azerty");
    requestNode = objectMapper.createObjectNode();
    requestNode.put("variableNameLike", "%Var2");
    assertResultsPresentInDataResponse(url, requestNode, 0, null, null);
    // Value-based queries through the "variables" array.
    requestNode = objectMapper.createObjectNode();
    ArrayNode variableArray = objectMapper.createArrayNode();
    ObjectNode variableNode = objectMapper.createObjectNode();
    variableArray.add(variableNode);
    requestNode.set("variables", variableArray);
    variableNode.put("name", "stringVar");
    variableNode.put("value", "Azerty");
    variableNode.put("operation", "equals");
    assertResultsPresentInDataResponse(url, requestNode, 2, "stringVar", "Azerty");
    variableNode.removeAll();
    requestNode.set("variables", variableArray);
    variableNode.put("name", "taskVariable");
    variableNode.put("value", "test");
    variableNode.put("operation", "equals");
    assertResultsPresentInDataResponse(url, requestNode, 1, "taskVariable", "test");
    // "notEquals" is not a supported operation for this query -> 400.
    variableNode.removeAll();
    requestNode.set("variables", variableArray);
    variableNode.put("name", "taskVariable");
    variableNode.put("value", "test");
    variableNode.put("operation", "notEquals");
    assertErrorResult(url, requestNode, HttpStatus.SC_BAD_REQUEST);
}
/**
 * Computes the Cholesky decomposition of this matrix.
 * Delegates to {@code cholesky(boolean)} with {@code false}
 * (presumably "do not overwrite in place" — confirm against the overload's contract).
 *
 * @return the Cholesky decomposition
 */
public Cholesky cholesky() {
    final boolean overwrite = false;
    return cholesky(overwrite);
}
@Test
public void testCholesky() {
    System.out.println("Cholesky");
    // Symmetric positive-definite input matrix A and its expected lower factor L.
    double[][] A = { {0.9000, 0.4000, 0.7000f}, {0.4000, 0.5000, 0.3000f}, {0.7000, 0.3000, 0.8000f} };
    double[][] L = { {0.9486833, 0.00000000, 0.0000000f}, {0.4216370, 0.56764621, 0.0000000f}, {0.7378648, -0.01957401, 0.5051459f} };
    BigMatrix a = BigMatrix.of(A);
    a.uplo(UPLO.LOWER);
    BigMatrix.Cholesky cholesky = a.cholesky();
    // Compare only the lower triangle; absolute values, since the factor's
    // column signs may differ between implementations.
    for (int i = 0; i < a.nrow(); i++) {
        for (int j = 0; j <= i; j++) {
            assertEquals(Math.abs(L[i][j]), Math.abs(cholesky.lu.get(i, j)), 1E-7);
        }
    }
    // Solve A x = b against a precomputed reference solution.
    double[] b = {0.5, 0.5, 0.5f};
    double[] x = {-0.2027027, 0.8783784, 0.4729730f};
    double[] x2 = cholesky.solve(b);
    assertEquals(x.length, x2.length);
    for (int i = 0; i < x.length; i++) {
        assertEquals(x[i], x2[i], 1E-7);
    }
    // Multi-right-hand-side solve: A X = B, solved in place into X2.
    double[][] B = { {0.5, 0.2f}, {0.5, 0.8f}, {0.5, 0.3f} };
    double[][] X = { {-0.2027027, -1.2837838f}, { 0.8783784, 2.2297297f}, { 0.4729730, 0.6621622f} };
    BigMatrix X2 = BigMatrix.of(B);
    cholesky.solve(X2);
    assertEquals(X.length, X2.nrow());
    assertEquals(X[0].length, X2.ncol());
    for (int i = 0; i < X.length; i++) {
        for (int j = 0; j < X[i].length; j++) {
            assertEquals(X[i][j], X2.get(i, j), 1E-6);
        }
    }
}
/**
 * Runs a user-supplied Groovy smart filter against a synthetic message built from
 * the supplied test data, returning either the boolean filter result or an error
 * (compilation or execution) as a result DTO.
 */
public static SmartFilterTestExecutionResultDTO execSmartFilterTest(SmartFilterTestExecutionDTO execData) {
    // Compile the filter first; compilation problems are reported as an error result.
    final Predicate<TopicMessageDTO> filter;
    try {
        filter = MessageFilters.createMsgFilter(
            execData.getFilterCode(),
            MessageFilterTypeDTO.GROOVY_SCRIPT
        );
    } catch (Exception e) {
        log.info("Smart filter '{}' compilation error", execData.getFilterCode(), e);
        return new SmartFilterTestExecutionResultDTO()
            .error("Compilation error : " + e.getMessage());
    }
    // Build the test message and evaluate the filter; any failure here is an execution error.
    try {
        TopicMessageDTO message = new TopicMessageDTO()
            .key(execData.getKey())
            .content(execData.getValue())
            .headers(execData.getHeaders())
            .offset(execData.getOffset())
            .partition(execData.getPartition())
            .timestamp(
                Optional.ofNullable(execData.getTimestampMs())
                    .map(ts -> OffsetDateTime.ofInstant(Instant.ofEpochMilli(ts), ZoneOffset.UTC))
                    .orElse(null));
        boolean passed = filter.test(message);
        return new SmartFilterTestExecutionResultDTO()
            .result(passed);
    } catch (Exception e) {
        log.info("Smart filter {} execution error", execData, e);
        return new SmartFilterTestExecutionResultDTO()
            .error("Execution error : " + e.getMessage());
    }
}
@Test
void execSmartFilterTestReturnsErrorOnFilterCompilationError() {
    // Groovy code that cannot compile must yield an error result, not a boolean.
    var execution = new SmartFilterTestExecutionDTO()
        .filterCode("this is invalid groovy syntax = 1");
    var result = execSmartFilterTest(execution);
    assertThat(result.getResult()).isNull();
    assertThat(result.getError()).containsIgnoringCase("Compilation error");
}
/**
 * Generates code files for the given table configuration.
 *
 * @param tableId id of the table configuration to generate from
 * @return map of generated file path to file content
 */
@Override
public Map<String, String> generationCodes(Long tableId) {
    // Validate the table configuration exists
    CodegenTableDO table = codegenTableMapper.selectById(tableId);
    if (table == null) {
        throw exception(CODEGEN_TABLE_NOT_EXISTS);
    }
    // Validate the table has column configurations
    List<CodegenColumnDO> columns = codegenColumnMapper.selectListByTableId(tableId);
    if (CollUtil.isEmpty(columns)) {
        throw exception(CODEGEN_COLUMN_NOT_EXISTS);
    }
    // For a master-sub template, load the corresponding sub-table information
    List<CodegenTableDO> subTables = null;
    List<List<CodegenColumnDO>> subColumnsList = null;
    if (CodegenTemplateTypeEnum.isMaster(table.getTemplateType())) {
        // Validate that sub tables exist
        subTables = codegenTableMapper.selectListByTemplateTypeAndMasterTableId(
                CodegenTemplateTypeEnum.SUB.getType(), tableId);
        if (CollUtil.isEmpty(subTables)) {
            throw exception(CODEGEN_MASTER_GENERATION_FAIL_NO_SUB_TABLE);
        }
        // Validate each sub table's join column exists among its columns
        subColumnsList = new ArrayList<>();
        for (CodegenTableDO subTable : subTables) {
            List<CodegenColumnDO> subColumns = codegenColumnMapper.selectListByTableId(subTable.getId());
            if (CollUtil.findOne(subColumns, column -> column.getId().equals(subTable.getSubJoinColumnId())) == null) {
                throw exception(CODEGEN_SUB_COLUMN_NOT_EXISTS, subTable.getId());
            }
            subColumnsList.add(subColumns);
        }
    }
    // Perform the generation
    return codegenEngine.execute(table, columns, subTables, subColumnsList);
}
@Test
public void testGenerationCodes_columnNotExists() {
    // mock data (CodegenTableDO): a master table inserted without any column rows
    CodegenTableDO table = randomPojo(CodegenTableDO.class, o -> o.setScene(CodegenSceneEnum.ADMIN.getScene())
            .setTemplateType(CodegenTemplateTypeEnum.MASTER_NORMAL.getType()));
    codegenTableMapper.insert(table);
    // prepare the parameter
    Long tableId = table.getId();
    // call, and assert the expected service exception
    assertServiceException(() -> codegenService.generationCodes(tableId), CODEGEN_COLUMN_NOT_EXISTS);
}
/**
 * Lists the contents of the given directory, delegating to the delimiter-aware
 * overload with the default path delimiter.
 */
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    final String delimiter = String.valueOf(Path.DELIMITER);
    return this.list(directory, listener, delimiter);
}
@Test
public void testDetermineCorrectRegionFrom400Reply() throws Exception {
    // Create a bucket pinned to eu-central-1.
    final Path bucket = new Path(new DefaultHomeFinderService(session).find(), new AsciiRandomStringService(30).random(), EnumSet.of(Path.Type.directory, Path.Type.volume));
    final S3AccessControlListFeature acl = new S3AccessControlListFeature(session);
    final S3DirectoryFeature feature = new S3DirectoryFeature(session, new S3WriteFeature(session, acl), acl);
    feature.mkdir(bucket, new TransferStatus().withRegion("eu-central-1"));
    // Populate incorrect region in cache
    final RegionEndpointCache cache = session.getClient().getRegionEndpointCache();
    assertEquals("eu-central-1", cache.getRegionForBucketName(bucket.getName()));
    cache.putRegionForBucketName(bucket.getName(), "eu-west-1");
    // Listing with the wrong cached region should trigger the 400-based redirect
    // and still succeed, correcting the cache entry back to eu-central-1.
    assertNotSame(AttributedList.emptyList(), new S3ObjectListService(session, acl).list(bucket, new DisabledListProgressListener()));
    assertEquals("eu-central-1", cache.getRegionForBucketName(bucket.getName()));
    // DNS-style bucket addressing must remain enabled after the retry.
    assertFalse(session.getClient().getConfiguration().getBoolProperty("s3service.disable-dns-buckets", true));
    // Cleanup.
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(bucket), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Compresses a URL into the Eddystone-URL byte encoding: a one-byte protocol
 * prefix code, the hostname bytes, an optional one-byte encoded TLD(+slash),
 * then any remaining path bytes verbatim.
 *
 * @param urlString the URL to compress; must match EDDYSTONE_URL_REGEX
 * @return the compressed bytes, sized exactly to the encoded length
 * @throws MalformedURLException when urlString is null or does not match the regex
 */
public static byte[] compress(String urlString) throws MalformedURLException {
    byte[] compressedBytes = null;
    if (urlString != null) {
        // Figure the compressed bytes can't be longer than the original string.
        byte[] byteBuffer = new byte[urlString.length()];
        int byteBufferIndex = 0;
        Arrays.fill(byteBuffer, (byte) 0x00);
        Pattern urlPattern = Pattern.compile(EDDYSTONE_URL_REGEX);
        Matcher urlMatcher = urlPattern.matcher(urlString);
        if (urlMatcher.matches()) {
            // www. — presence of this group selects the "...://www." prefix codes.
            String wwwdot = urlMatcher.group(EDDYSTONE_URL_WWW_GROUP);
            boolean haswww = (wwwdot != null);
            // Protocol. NOTE(review): anything that is not http is encoded as https —
            // presumably the regex only admits http/https; confirm against EDDYSTONE_URL_REGEX.
            String rawProtocol = urlMatcher.group(EDDYSTONE_URL_PROTOCOL_GROUP);
            String protocol = rawProtocol.toLowerCase();
            if (protocol.equalsIgnoreCase(URL_PROTOCOL_HTTP)) {
                byteBuffer[byteBufferIndex] = (haswww ? EDDYSTONE_URL_PROTOCOL_HTTP_WWW : EDDYSTONE_URL_PROTOCOL_HTTP);
            } else {
                byteBuffer[byteBufferIndex] = (haswww ? EDDYSTONE_URL_PROTOCOL_HTTPS_WWW : EDDYSTONE_URL_PROTOCOL_HTTPS);
            }
            byteBufferIndex++;
            // Fully-qualified domain name (FQDN). This includes the hostname and any other components after the dots
            // but BEFORE the first single slash in the URL.
            byte[] hostnameBytes = urlMatcher.group(EDDYSTONE_URL_FQDN_GROUP).getBytes();
            String rawHostname = new String(hostnameBytes);
            String hostname = rawHostname.toLowerCase();
            String[] domains = hostname.split(Pattern.quote("."));
            boolean consumedSlash = false;
            if (domains != null) {
                // Write the hostname/subdomains prior to the last one. If there's only one (e. g. http://localhost)
                // then that's the only thing to write out.
                byte[] periodBytes = {'.'};
                int writableDomainsCount = (domains.length == 1 ? 1 : domains.length - 1);
                for (int domainIndex = 0; domainIndex < writableDomainsCount; domainIndex++) {
                    // Write out leading period, if necessary.
                    if (domainIndex > 0) {
                        System.arraycopy(periodBytes, 0, byteBuffer, byteBufferIndex, periodBytes.length);
                        byteBufferIndex += periodBytes.length;
                    }
                    byte[] domainBytes = domains[domainIndex].getBytes();
                    int domainLength = domainBytes.length;
                    System.arraycopy(domainBytes, 0, byteBuffer, byteBufferIndex, domainLength);
                    byteBufferIndex += domainLength;
                }
                // Is the TLD one that we can encode? Try ".tld/" first so the slash can be
                // folded into the single TLD byte; otherwise write the TLD out literally.
                if (domains.length > 1) {
                    String tld = "." + domains[domains.length - 1];
                    String slash = urlMatcher.group(EDDYSTONE_URL_SLASH_GROUP);
                    String encodableTLDCandidate = (slash == null ? tld : tld + slash);
                    byte encodedTLDByte = encodedByteForTopLevelDomain(encodableTLDCandidate);
                    if (encodedTLDByte != TLD_NOT_ENCODABLE) {
                        byteBuffer[byteBufferIndex++] = encodedTLDByte;
                        consumedSlash = (slash != null);
                    } else {
                        byte[] tldBytes = tld.getBytes();
                        int tldLength = tldBytes.length;
                        System.arraycopy(tldBytes, 0, byteBuffer, byteBufferIndex, tldLength);
                        byteBufferIndex += tldLength;
                    }
                }
            }
            // Optional slash — only written when it wasn't folded into the TLD byte above.
            if (! consumedSlash) {
                String slash = urlMatcher.group(EDDYSTONE_URL_SLASH_GROUP);
                if (slash != null) {
                    int slashLength = slash.length();
                    System.arraycopy(slash.getBytes(), 0, byteBuffer, byteBufferIndex, slashLength);
                    byteBufferIndex += slashLength;
                }
            }
            // Path — copied verbatim, uncompressed.
            String path = urlMatcher.group(EDDYSTONE_URL_PATH_GROUP);
            if (path != null) {
                int pathLength = path.length();
                System.arraycopy(path.getBytes(), 0, byteBuffer, byteBufferIndex, pathLength);
                byteBufferIndex += pathLength;
            }
            // Copy the result into a right-sized array.
            compressedBytes = new byte[byteBufferIndex];
            System.arraycopy(byteBuffer, 0, compressedBytes, 0, compressedBytes.length);
        } else {
            throw new MalformedURLException();
        }
    } else {
        throw new MalformedURLException();
    }
    return compressedBytes;
}
@Test
public void testCompressWithDotInfoTLD() throws MalformedURLException {
    // Per the Eddystone encoding used here: 0x02 is the "http://" prefix
    // and 0x0b encodes the ".info" TLD.
    String url = "http://google.info";
    byte[] expected = {0x02, 'g', 'o', 'o', 'g', 'l', 'e', 0x0b};
    byte[] actual = UrlBeaconUrlCompressor.compress(url);
    assertTrue(Arrays.equals(expected, actual));
}
@Override public String execute(CommandContext commandContext, String[] args) { int sleepMilliseconds = 0; if (args != null && args.length > 0) { if (args.length == 2 && "-t".equals(args[0]) && StringUtils.isNumber(args[1])) { sleepMilliseconds = Integer.parseInt(args[1]); } else { return "Invalid parameter,please input like shutdown -t 10000"; } } long start = System.currentTimeMillis(); if (sleepMilliseconds > 0) { try { Thread.sleep(sleepMilliseconds); } catch (InterruptedException e) { return "Failed to invoke shutdown command, cause: " + e.getMessage(); } } StringBuilder buf = new StringBuilder(); List<ApplicationModel> applicationModels = frameworkModel.getApplicationModels(); for (ApplicationModel applicationModel : new ArrayList<>(applicationModels)) { applicationModel.destroy(); } // TODO change to ApplicationDeployer.destroy() or ApplicationModel.destroy() // DubboShutdownHook.getDubboShutdownHook().unregister(); // DubboShutdownHook.getDubboShutdownHook().doDestroy(); long end = System.currentTimeMillis(); buf.append("Application has shutdown successfully"); buf.append("\r\nelapsed: "); buf.append(end - start); buf.append(" ms."); return buf.toString(); }
@Test
void testInvoke() throws RemotingException {
    // Executing shutdown with no arguments should report success.
    String output = shutdown.execute(mockCommandContext, new String[0]);
    assertTrue(output.contains("Application has shutdown successfully"));
}
/**
 * Looks up the file data recorded for the given path.
 *
 * @param path the path key as stored in the map
 * @return the file data, or null when the path is unknown
 */
@CheckForNull
public FileData fileData(String path) {
    final FileData data = fileDataByPath.get(path);
    return data;
}
@Test
public void test_file_data_when_file_exists() {
    // A known path resolves to its recorded hash and revision.
    FileData data = repository.fileData("/Abc.java");
    assertNotNull(data);
    assertThat(data.hash()).isEqualTo("123");
    assertThat(data.revision()).isEqualTo("456");
}
/**
 * Ensures a notice with the given id exists.
 * A null id is treated as "nothing to validate" and passes silently.
 *
 * @param id the notice id, may be null
 * @throws the NOTICE_NOT_FOUND service exception when no such notice exists
 */
@VisibleForTesting
public void validateNoticeExists(Long id) {
    if (id == null) {
        return;
    }
    // A missing row means the id is stale or invalid.
    if (noticeMapper.selectById(id) == null) {
        throw exception(NOTICE_NOT_FOUND);
    }
}
@Test
public void testValidateNoticeExists_noExists() {
    // A random id that was never inserted must trigger NOTICE_NOT_FOUND.
    Long missingId = randomLongId();
    assertServiceException(() -> noticeService.validateNoticeExists(missingId), NOTICE_NOT_FOUND);
}
/**
 * Persists this step's configuration to the repository: scalar options first,
 * then the per-file and per-field indexed attributes, then the "XML in field"
 * options and the additional-output-field names.
 *
 * @throws KettleException when any attribute fails to save (original cause attached)
 */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    // Scalar step options.
    rep.saveStepAttribute( id_transformation, id_step, TAG_INCLUDE, includeFilename );
    rep.saveStepAttribute( id_transformation, id_step, TAG_INCLUDE_FIELD, filenameField );
    rep.saveStepAttribute( id_transformation, id_step, TAG_ADD_RESULT_FILE, addResultFile );
    rep.saveStepAttribute( id_transformation, id_step, TAG_NAME_SPACE_AWARE, nameSpaceAware );
    rep.saveStepAttribute( id_transformation, id_step, TAG_IGNORE_COMMENTS, ignorecomments );
    rep.saveStepAttribute( id_transformation, id_step, TAG_READ_URL, readurl );
    rep.saveStepAttribute( id_transformation, id_step, TAG_VALIDATING, validating );
    rep.saveStepAttribute( id_transformation, id_step, TAG_USE_TOKEN, usetoken );
    rep.saveStepAttribute( id_transformation, id_step, TAG_IS_IGNORE_EMPTY_FILE, IsIgnoreEmptyFile );
    rep.saveStepAttribute( id_transformation, id_step, TAG_DO_NOT_FAIL_IF_NO_FILE, doNotFailIfNoFile );
    rep.saveStepAttribute( id_transformation, id_step, TAG_ROW_NUM, includeRowNumber );
    rep.saveStepAttribute( id_transformation, id_step, TAG_ROW_NUM_FIELD, rowNumberField );
    rep.saveStepAttribute( id_transformation, id_step, TAG_LIMIT, rowLimit );
    rep.saveStepAttribute( id_transformation, id_step, TAG_LOOPXPATH, loopxpath );
    rep.saveStepAttribute( id_transformation, id_step, TAG_ENCODING, encoding );
    // One indexed attribute set per configured input file.
    for ( int i = 0; i < fileName.length; i++ ) {
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FILE_NAME, fileName[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FILE_MASK, fileMask[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_EXCLUDE_FILE_MASK, excludeFileMask[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FILE_REQUIRED, fileRequired[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_INCLUDE_SUBFOLDERS, includeSubFolders[i] );
    }
    // One indexed attribute set per configured output field.
    for ( int i = 0; i < inputFields.length; i++ ) {
      GetXMLDataField field = inputFields[i];
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FIELD_NAME, field.getName() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FIELD_XPATH, field.getXPath() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_ELEMENT_TYPE, field.getElementTypeCode() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_RESULT_TYPE, field.getResultTypeCode() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FIELD_TYPE, field.getTypeDesc() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FIELD_FORMAT, field.getFormat() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FIELD_CURRENCY, field.getCurrencySymbol() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FIELD_DECIMAL, field.getDecimalSymbol() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FIELD_GROUP, field.getGroupSymbol() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FIELD_LENGTH, field.getLength() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FIELD_PRECISION, field.getPrecision() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FIELD_TRIM_TYPE, field.getTrimTypeCode() );
      rep.saveStepAttribute( id_transformation, id_step, i, TAG_FIELD_REPEAT, field.isRepeated() );
    }
    // "XML source is in a field" options.
    rep.saveStepAttribute( id_transformation, id_step, TAG_IS_IN_FIELDS, inFields );
    rep.saveStepAttribute( id_transformation, id_step, TAG_IS_A_FILE, IsAFile );
    rep.saveStepAttribute( id_transformation, id_step, TAG_XML_FIELD, xmlField );
    rep.saveStepAttribute( id_transformation, id_step, TAG_PRUNE_PATH, prunePath );
    // Additional output field names.
    rep.saveStepAttribute( id_transformation, id_step, TAG_SHORT_FILE_FIELD_NAME, shortFileFieldName );
    rep.saveStepAttribute( id_transformation, id_step, TAG_EXTENSION_FIELD_NAME, extensionFieldName );
    rep.saveStepAttribute( id_transformation, id_step, TAG_PATH_FIELD_NAME, pathFieldName );
    rep.saveStepAttribute( id_transformation, id_step, TAG_SIZE_FIELD_NAME, sizeFieldName );
    rep.saveStepAttribute( id_transformation, id_step, TAG_HIDDEN_FIELD_NAME, hiddenFieldName );
    rep.saveStepAttribute( id_transformation, id_step, TAG_LAST_MODIFICATION_TIME_FIELD_NAME, lastModificationTimeFieldName );
    rep.saveStepAttribute( id_transformation, id_step, TAG_URI_NAME_FIELD_NAME, uriNameFieldName );
    rep.saveStepAttribute( id_transformation, id_step, TAG_ROOT_URI_NAME_FIELD_NAME, rootUriNameFieldName );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG, "GetXMLDataMeta.Exception.ErrorSavingToRepository", "" + id_step ), e );
  }
}
@Test
public void testSaveRepOfAdditionalOutputFields() throws KettleException {
    // Empty file/field arrays so only the scalar attributes are exercised.
    GetXMLDataMeta getXMLDataMeta = new GetXMLDataMeta();
    String[] fileName = new String[] {};
    GetXMLDataField[] inputFields = new GetXMLDataField[] {};
    ReflectionTestUtils.setField( getXMLDataMeta, "fileName", fileName );
    ReflectionTestUtils.setField( getXMLDataMeta, "inputFields", inputFields );
    Repository rep = mock( Repository.class );
    IMetaStore metaStore = mock( IMetaStore.class );
    ObjectId idTransformation = mock( ObjectId.class );
    ObjectId idStep = mock( ObjectId.class );
    // Populate each additional-output-field name with a unique random value
    // so each saveStepAttribute call can be verified unambiguously.
    String shortFileFieldName = UUID.randomUUID().toString();
    ReflectionTestUtils.setField( getXMLDataMeta, "shortFileFieldName", shortFileFieldName );
    String extensionFieldName = UUID.randomUUID().toString();
    ReflectionTestUtils.setField( getXMLDataMeta, "extensionFieldName", extensionFieldName );
    String pathFieldName = UUID.randomUUID().toString();
    ReflectionTestUtils.setField( getXMLDataMeta, "pathFieldName", pathFieldName );
    String sizeFieldName = UUID.randomUUID().toString();
    ReflectionTestUtils.setField( getXMLDataMeta, "sizeFieldName", sizeFieldName );
    String hiddenFieldName = UUID.randomUUID().toString();
    ReflectionTestUtils.setField( getXMLDataMeta, "hiddenFieldName", hiddenFieldName );
    String lastModificationTimeFieldName = UUID.randomUUID().toString();
    ReflectionTestUtils.setField( getXMLDataMeta, "lastModificationTimeFieldName", lastModificationTimeFieldName );
    String uriNameFieldName = UUID.randomUUID().toString();
    ReflectionTestUtils.setField( getXMLDataMeta, "uriNameFieldName", uriNameFieldName );
    String rootUriNameFieldName = UUID.randomUUID().toString();
    ReflectionTestUtils.setField( getXMLDataMeta, "rootUriNameFieldName", rootUriNameFieldName );
    getXMLDataMeta.saveRep( rep, metaStore, idTransformation, idStep );
    // Each additional field must be saved exactly once under its repository tag.
    verify( rep, times( 1 ) ).saveStepAttribute( idTransformation, idStep, "shortFileFieldName", shortFileFieldName );
    verify( rep, times( 1 ) ).saveStepAttribute( idTransformation, idStep, "extensionFieldName", extensionFieldName );
    verify( rep, times( 1 ) ).saveStepAttribute( idTransformation, idStep, "pathFieldName", pathFieldName );
    verify( rep, times( 1 ) ).saveStepAttribute( idTransformation, idStep, "sizeFieldName", sizeFieldName );
    verify( rep, times( 1 ) ).saveStepAttribute( idTransformation, idStep, "hiddenFieldName", hiddenFieldName );
    verify( rep, times( 1 ) ).saveStepAttribute( idTransformation, idStep, "lastModificationTimeFieldName", lastModificationTimeFieldName );
    verify( rep, times( 1 ) ).saveStepAttribute( idTransformation, idStep, "uriNameFieldName", uriNameFieldName );
    verify( rep, times( 1 ) ).saveStepAttribute( idTransformation, idStep, "rootUriNameFieldName", rootUriNameFieldName );
}
/**
 * Builds a payload serializer for the proto class named in the table parameters.
 * Serialization converts a Row to proto bytes; deserialization converts proto
 * bytes to a Row and casts it to the requested schema.
 */
@Override
public PayloadSerializer getSerializer(Schema schema, Map<String, Object> tableParams) {
    // Resolve the proto class from the table parameters and check it matches the schema.
    Class<? extends Message> protoClass = getClass(tableParams);
    inferAndVerifySchema(protoClass, schema);
    SimpleFunction<byte[], Row> bytesToRow = ProtoMessageSchema.getProtoBytesToRowFn(protoClass);
    return PayloadSerializer.of(
        ProtoMessageSchema.getRowToProtoBytesFn(protoClass),
        bytes -> {
            Row raw = bytesToRow.apply(bytes);
            return castRow(raw, raw.getSchema(), schema);
        });
}
@Test
public void deserialize() {
    // Deserializing the serialized message through a shuffled schema must still yield ROW.
    PayloadSerializer serializer = provider.getSerializer(
        SHUFFLED_SCHEMA,
        ImmutableMap.of("protoClass", PayloadMessages.TestMessage.class.getName()));
    Row row = serializer.deserialize(MESSAGE.toByteArray());
    assertEquals(ROW, row);
}
/**
 * Records one output element: bumps the element counter and, when sampled (or
 * when byte sizing is cheap), registers the element's byte size with the byte
 * and mean-byte observers.
 */
@Override
public void update(Object elem) throws Exception {
    // Increment object counter.
    if (objectCount != null) {
        objectCount.addValue(1L);
    }
    // Increment byte counter. Only pay the byte-sizing cost when this element is
    // sampled, or when sizing it is cheap anyway.
    if ((byteCountObserver != null || meanByteCountObserver != null)
        && (sampleElement() || elementByteSizeObservable.isRegisterByteSizeObserverCheap(elem))) {
        if (byteCountObserver != null) {
            // Scale the observed size up to compensate for sampling — presumably
            // samplingToken/SAMPLING_CUTOFF tracks the current sampling rate; confirm
            // against sampleElement()'s implementation.
            byteCountObserver.setScalingFactor(
                Math.max(samplingToken, SAMPLING_CUTOFF) / (double) SAMPLING_CUTOFF);
            elementByteSizeObservable.registerByteSizeObserver(elem, byteCountObserver);
        }
        if (meanByteCountObserver != null) {
            elementByteSizeObservable.registerByteSizeObserver(elem, meanByteCountObserver);
        }
        // Non-lazy observers are finalized immediately; lazy ones advance elsewhere.
        if (byteCountObserver != null && !byteCountObserver.getIsLazy()) {
            byteCountObserver.advance();
        }
        if (meanByteCountObserver != null && !meanByteCountObserver.getIsLazy()) {
            meanByteCountObserver.advance();
        }
    }
}
@Test
public void testIncorrectType() throws Exception {
    // Feeding a plain Integer where the counter expects another element
    // type must surface a ClassCastException.
    TestOutputCounter counter =
        new TestOutputCounter(NameContextsForTests.nameContextForTest());
    thrown.expect(ClassCastException.class);
    counter.update(5);
}
/**
 * Enlists the given XA resource in the transaction currently associated with
 * the calling thread. SystemException and RollbackException from the
 * transaction manager are rethrown unchecked via {@code @SneakyThrows}.
 */
@SneakyThrows({SystemException.class, RollbackException.class})
@Override
public void enlistResource(final SingleXAResource xaResource) {
    transactionManager.getTransaction().enlistResource(xaResource);
}
@Test
void assertEnListResource() throws SystemException, RollbackException {
    // The provider must delegate enlistment to the currently active transaction.
    Transaction transaction = mock(Transaction.class);
    when(userTransactionManager.getTransaction()).thenReturn(transaction);
    SingleXAResource xaResource = mock(SingleXAResource.class);
    transactionManagerProvider.enlistResource(xaResource);
    verify(transaction).enlistResource(xaResource);
}
/**
 * Parses a raw CSV cell into the Java value matching the field's schema type.
 *
 * @param cell the raw cell text
 * @param field the schema field describing the target type
 * @return the parsed value (String, Short, Integer, Long, Boolean, Byte,
 *         BigDecimal, Double, Float, or Instant)
 * @throws UnsupportedOperationException for schema types with no built-in parser
 * @throws IllegalArgumentException when the cell does not parse as the field's type
 */
static Object parseCell(String cell, Schema.Field field) {
  Schema.FieldType fieldType = field.getType();
  try {
    switch (fieldType.getTypeName()) {
      case STRING:
        return cell;
      case INT16:
        return Short.parseShort(cell);
      case INT32:
        return Integer.parseInt(cell);
      case INT64:
        return Long.parseLong(cell);
      case BOOLEAN:
        return Boolean.parseBoolean(cell);
      case BYTE:
        return Byte.parseByte(cell);
      case DECIMAL:
        return new BigDecimal(cell);
      case DOUBLE:
        return Double.parseDouble(cell);
      case FLOAT:
        return Float.parseFloat(cell);
      case DATETIME:
        return Instant.parse(cell);
      default:
        throw new UnsupportedOperationException(
            "Unsupported type: " + fieldType + ", consider using withCustomRecordParsing");
    }
  } catch (IllegalArgumentException e) {
    // Attach the original exception as the cause instead of discarding it,
    // so the underlying parse failure remains visible in stack traces.
    throw new IllegalArgumentException(
        e.getMessage() + " field " + field.getName() + " was received -- type mismatch", e);
  }
}
@Test
public void ignoresCaseFormat() {
    // Boolean parsing is case-insensitive, so all-caps "TRUE" maps to true.
    Schema schema = Schema.builder().addBooleanField("a_boolean").build();
    Object parsed = CsvIOParseHelpers.parseCell("TRUE", schema.getField("a_boolean"));
    assertEquals(true, parsed);
}
/**
 * Client-side request hook: advertises accepted response encodings when the
 * operation qualifies, and — unless compression is forced off — streams the
 * request body through a threshold reader that compresses it only when the
 * entity exceeds the configured size.
 */
public void onStreamRequest(StreamRequest req, final RequestContext requestContext, final Map<String, String> wireAttrs,
                            final NextFilter<StreamRequest, StreamResponse> nextFilter) {
    //Set accepted encoding for compressed response
    String operation = (String) requestContext.getLocalAttr(R2Constants.OPERATION);
    if (!_acceptEncodingHeader.isEmpty() && _helper.shouldCompressResponseForOperation(operation)) {
        CompressionOption responseCompressionOverride =
            (CompressionOption) requestContext.getLocalAttr(R2Constants.RESPONSE_COMPRESSION_OVERRIDE);
        req = addResponseCompressionHeaders(responseCompressionOverride, req);
    }
    if (_requestContentEncoding != StreamEncodingType.IDENTITY) {
        final StreamRequest request = req;
        final StreamingCompressor compressor = _requestContentEncoding.getCompressor(_executor);
        CompressionOption option = (CompressionOption) requestContext.getLocalAttr(R2Constants.REQUEST_COMPRESSION_OVERRIDE);
        if (option == null || option != CompressionOption.FORCE_OFF) {
            // FORCE_ON compresses regardless of size (threshold 0); otherwise use the configured threshold.
            final int threshold = option == CompressionOption.FORCE_ON ? 0 : _requestCompressionConfig.getCompressionThreshold();
            // PartialReader buffers up to `threshold` bytes: one resulting stream means the
            // entity fit under the threshold (send uncompressed); two means there is more
            // data, so the concatenated streams are compressed.
            PartialReader reader = new PartialReader(threshold, new Callback<EntityStream[]>() {
                @Override
                public void onError(Throwable ex) {
                    nextFilter.onError(ex, requestContext, wireAttrs);
                }

                @Override
                public void onSuccess(EntityStream[] result) {
                    if (result.length == 1) {
                        // Entity under threshold: forward as-is.
                        StreamRequest uncompressedRequest = request.builder().build(result[0]);
                        nextFilter.onRequest(uncompressedRequest, requestContext, wireAttrs);
                    } else {
                        // Entity over threshold: compress the recombined stream and
                        // drop Content-Length (no longer valid for the compressed body).
                        StreamRequestBuilder builder = request.builder();
                        EntityStream compressedStream = compressor.deflate(EntityStreams.newEntityStream(new CompositeWriter(result)));
                        Map<String, String> headers = stripHeaders(builder.getHeaders(), HttpConstants.CONTENT_LENGTH);
                        StreamRequest compressedRequest = builder.setHeaders(headers)
                            .setHeader(HttpConstants.CONTENT_ENCODING, compressor.getContentEncodingName())
                            .build(compressedStream);
                        nextFilter.onRequest(compressedRequest, requestContext, wireAttrs);
                    }
                }
            });
            req.getEntityStream().setReader(reader);
            // The reader's callback continues the filter chain asynchronously.
            return;
        }
    }
    nextFilter.onRequest(req, requestContext, wireAttrs);
}
@Test(dataProvider = "requestData")
public void testRequestCompressionRules(CompressionConfig requestCompressionConfig,
    CompressionOption requestCompressionOverride, boolean headerShouldBePresent, String operation)
    throws CompressionException, URISyntaxException, InterruptedException, ExecutionException, TimeoutException {
    Executor executor = Executors.newCachedThreadPool();
    // Filter under test: GZIP request encoding with the config/override combination
    // supplied by the data provider.
    ClientStreamCompressionFilter clientCompressionFilter = new ClientStreamCompressionFilter(
        StreamEncodingType.GZIP.getHttpName(),
        requestCompressionConfig,
        ACCEPT_COMPRESSIONS,
        new CompressionConfig(Integer.MAX_VALUE),
        Arrays.asList(ClientCompressionHelper.COMPRESS_ALL_RESPONSES_INDICATOR),
        executor);
    // The entity should be compressible for this test.
    int original = 100;
    byte[] entity = new byte[original];
    Arrays.fill(entity, (byte)'A');
    StreamRequest streamRequest = new StreamRequestBuilder(new URI(URI))
        .setMethod(RestMethod.POST)
        .build(EntityStreams.newEntityStream(new ByteStringWriter(ByteString.copy(entity))));
    // Precompute the expected compressed size so the entity length can be asserted.
    int compressed = EncodingType.GZIP.getCompressor().deflate(new ByteArrayInputStream(entity)).length;
    RequestContext context = new RequestContext();
    if (operation != null) {
        context.putLocalAttr(R2Constants.OPERATION, operation);
    }
    context.putLocalAttr(R2Constants.REQUEST_COMPRESSION_OVERRIDE, requestCompressionOverride);
    // When compression applies, the forwarded entity is the compressed one.
    int entityLength = headerShouldBePresent ? compressed : original;
    FutureCallback<ByteString> callback = new FutureCallback<>();
    FullEntityReader reader = new FullEntityReader(callback);
    // Captures the Content-Encoding header downstream and checks presence + entity length.
    HeaderCaptureFilter captureFilter = new HeaderCaptureFilter(HttpConstants.CONTENT_ENCODING, headerShouldBePresent, entityLength, reader);
    clientCompressionFilter.onStreamRequest(streamRequest, context, Collections.<String, String>emptyMap(), captureFilter);
    ByteString entityRead = callback.get(10, TimeUnit.SECONDS);
    Assert.assertEquals(entityRead.length(), entityLength);
}
public static ClusterOperatorConfig buildFromMap(Map<String, String> map) { warningsForRemovedEndVars(map); KafkaVersion.Lookup lookup = parseKafkaVersions(map.get(STRIMZI_KAFKA_IMAGES), map.get(STRIMZI_KAFKA_CONNECT_IMAGES), map.get(STRIMZI_KAFKA_MIRROR_MAKER_IMAGES), map.get(STRIMZI_KAFKA_MIRROR_MAKER_2_IMAGES)); return buildFromMap(map, lookup); }
@Test public void testConfigParsingWithAllVersionEnvVars() { Map<String, String> envVars = new HashMap<>(5); envVars.put(ClusterOperatorConfig.STRIMZI_KAFKA_IMAGES, KafkaVersionTestUtils.getKafkaImagesEnvVarString()); envVars.put(ClusterOperatorConfig.STRIMZI_KAFKA_CONNECT_IMAGES, KafkaVersionTestUtils.getKafkaConnectImagesEnvVarString()); envVars.put(ClusterOperatorConfig.STRIMZI_KAFKA_MIRROR_MAKER_IMAGES, KafkaVersionTestUtils.getKafkaMirrorMakerImagesEnvVarString()); envVars.put(ClusterOperatorConfig.STRIMZI_KAFKA_MIRROR_MAKER_2_IMAGES, KafkaVersionTestUtils.getKafkaMirrorMaker2ImagesEnvVarString()); assertDoesNotThrow(() -> ClusterOperatorConfig.buildFromMap(envVars)); }
public int length() { return (this.appendable != null ? this.appendable.toString().length() + StrUtil.length(suffix) : null == this.emptyResult ? -1 : emptyResult.length()); }
@Test public void lengthTest(){ StrJoiner joiner = StrJoiner.of(",", "[", "]"); assertEquals(joiner.toString().length(), joiner.length()); joiner.append("123"); assertEquals(joiner.toString().length(), joiner.length()); }
public static boolean sizeIsEmpty(Object object) { if (object instanceof Collection) { return ((Collection) object).isEmpty(); } else if (object instanceof Map) { return ((Map) object).isEmpty(); } else if (object instanceof Object[]) { return ((Object[]) object).length == 0; } else if (object instanceof Iterator) { return ((Iterator) object).hasNext() == false; } else if (object instanceof Enumeration) { return ((Enumeration) object).hasMoreElements() == false; } else if (object == null) { throw new IllegalArgumentException("Unsupported object type: null"); } else { try { return Array.getLength(object) == 0; } catch (IllegalArgumentException ex) { throw new IllegalArgumentException("Unsupported object type: " + object.getClass().getName()); } } }
@Test void testSizeIsEmpty2() { assertThrows(IllegalArgumentException.class, () -> { CollectionUtils.sizeIsEmpty("string"); }); }
@Override public long getBytes(PropertyKey key) { checkArgument(key.getType() == PropertyKey.PropertyType.DATASIZE); return FormatUtils.parseSpaceSize((String) get(key)); }
@Test public void getBytes() { mConfiguration.set(PropertyKey.USER_CLIENT_CACHE_PAGE_SIZE, "10b"); assertEquals(10, mConfiguration.getBytes(PropertyKey.USER_CLIENT_CACHE_PAGE_SIZE)); }
public Optional<Object> evaluate(final Map<String, Object> columnPairsMap, final String outputColumn, final String regexField) { boolean matching = true; boolean isRegex = regexField != null && columnValues.containsKey(regexField) && (boolean) columnValues.get(regexField); for (Map.Entry<String, Object> columnPairEntry : columnPairsMap.entrySet()) { Object value = columnValues.get(columnPairEntry.getKey()); matching = isRegex ? isRegexMatching(value.toString(), (String) columnPairEntry.getValue()) : isMatching(value, columnPairEntry.getValue()); if (!matching) { break; } } return matching ? Optional.ofNullable(columnValues.get(outputColumn)) : Optional.empty(); }
@Test void evaluateKeyNotFound() { KiePMMLRow kiePMMLRow = new KiePMMLRow(COLUMN_VALUES); Optional<Object> retrieved = kiePMMLRow.evaluate(Collections.singletonMap("NOT-KEY", 0), "KEY-0", null); assertThat(retrieved).isNotPresent(); }
public static int max(int a, int b, int c) { return Math.max(Math.max(a, b), c); }
@Test public void testMax_doubleArr() { System.out.println("max"); double[] x = {-2.1968219, -0.9559913, -0.0431738, 1.0567679, 0.3853515}; assertEquals(1.0567679, MathEx.max(x), 1E-7); }
public static SqlTypeName toCalciteType(QueryDataType type) { return toCalciteType(type.getTypeFamily()); }
@Test public void testHazelcastToCalcite() { assertSame(SqlTypeName.VARCHAR, HazelcastTypeUtils.toCalciteType(QueryDataType.VARCHAR)); assertSame(SqlTypeName.BOOLEAN, HazelcastTypeUtils.toCalciteType(QueryDataType.BOOLEAN)); assertSame(SqlTypeName.TINYINT, HazelcastTypeUtils.toCalciteType(QueryDataType.TINYINT)); assertSame(SqlTypeName.SMALLINT, HazelcastTypeUtils.toCalciteType(QueryDataType.SMALLINT)); assertSame(SqlTypeName.INTEGER, HazelcastTypeUtils.toCalciteType(QueryDataType.INT)); assertSame(SqlTypeName.BIGINT, HazelcastTypeUtils.toCalciteType(QueryDataType.BIGINT)); assertSame(SqlTypeName.DECIMAL, HazelcastTypeUtils.toCalciteType(QueryDataType.DECIMAL)); assertSame(SqlTypeName.REAL, HazelcastTypeUtils.toCalciteType(QueryDataType.REAL)); assertSame(SqlTypeName.DOUBLE, HazelcastTypeUtils.toCalciteType(QueryDataType.DOUBLE)); assertSame(SqlTypeName.DATE, HazelcastTypeUtils.toCalciteType(QueryDataType.DATE)); assertSame(SqlTypeName.TIME, HazelcastTypeUtils.toCalciteType(QueryDataType.TIME)); assertSame(SqlTypeName.TIMESTAMP, HazelcastTypeUtils.toCalciteType(QueryDataType.TIMESTAMP)); assertSame(SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE, HazelcastTypeUtils.toCalciteType(QueryDataType.TIMESTAMP_WITH_TZ_OFFSET_DATE_TIME)); }
@Override public KsMaterializedQueryResult<WindowedRow> get( final GenericKey key, final int partition, final Range<Instant> windowStartBounds, final Range<Instant> windowEndBounds, final Optional<Position> position ) { try { final ReadOnlyWindowStore<GenericKey, ValueAndTimestamp<GenericRow>> store = stateStore .store(QueryableStoreTypes.timestampedWindowStore(), partition); final Instant lower = calculateLowerBound(windowStartBounds, windowEndBounds); final Instant upper = calculateUpperBound(windowStartBounds, windowEndBounds); try (WindowStoreIterator<ValueAndTimestamp<GenericRow>> it = cacheBypassFetcher.fetch(store, key, lower, upper)) { final Builder<WindowedRow> builder = ImmutableList.builder(); while (it.hasNext()) { final KeyValue<Long, ValueAndTimestamp<GenericRow>> next = it.next(); final Instant windowStart = Instant.ofEpochMilli(next.key); if (!windowStartBounds.contains(windowStart)) { continue; } final Instant windowEnd = windowStart.plus(windowSize); if (!windowEndBounds.contains(windowEnd)) { continue; } final TimeWindow window = new TimeWindow(windowStart.toEpochMilli(), windowEnd.toEpochMilli()); final WindowedRow row = WindowedRow.of( stateStore.schema(), new Windowed<>(key, window), next.value.value(), next.value.timestamp() ); builder.add(row); } return KsMaterializedQueryResult.rowIterator(builder.build().iterator()); } } catch (final Exception e) { throw new MaterializationException("Failed to get value from materialized table", e); } }
@Test public void shouldFetchWithStartUpperBoundIfLowest() { // Given: final Range<Instant> startBounds = Range.closed( NOW, NOW.plusSeconds(10) ); final Range<Instant> endBounds = Range.closed( NOW.plusSeconds(5).plus(WINDOW_SIZE), NOW.plusSeconds(15).plus(WINDOW_SIZE) ); // When: table.get(A_KEY, PARTITION, startBounds, endBounds); // Then: verify(cacheBypassFetcher).fetch(eq(tableStore), any(), any(), eq(startBounds.upperEndpoint())); }
public static boolean isEditionBundled(Plugin plugin) { return SONARSOURCE_ORGANIZATION.equalsIgnoreCase(plugin.getOrganization()) && Arrays.stream(SONARSOURCE_COMMERCIAL_LICENSES).anyMatch(s -> s.equalsIgnoreCase(plugin.getLicense())); }
@Test public void isEditionBundled_on_PluginInfo_returns_false_for_license_SonarSource_and_non_SonarSource_organization() { PluginInfo pluginInfo = newPluginInfo(randomAlphanumeric(3), randomizeCase("SonarSource")); assertThat(EditionBundledPlugins.isEditionBundled(pluginInfo)).isFalse(); }
@Override public ApplicationStatisticsInfo getAppStatistics(HttpServletRequest hsr, Set<String> stateQueries, Set<String> typeQueries) { try { long startTime = clock.getTime(); Collection<SubClusterInfo> subClustersActive = federationFacade.getActiveSubClusters(); final HttpServletRequest hsrCopy = clone(hsr); Class[] argsClasses = new Class[]{HttpServletRequest.class, Set.class, Set.class}; Object[] args = new Object[]{hsrCopy, stateQueries, typeQueries}; ClientMethod remoteMethod = new ClientMethod("getAppStatistics", argsClasses, args); Map<SubClusterInfo, ApplicationStatisticsInfo> appStatisticsMap = invokeConcurrent( subClustersActive, remoteMethod, ApplicationStatisticsInfo.class); ApplicationStatisticsInfo applicationStatisticsInfo = RouterWebServiceUtil.mergeApplicationStatisticsInfo(appStatisticsMap.values()); if (applicationStatisticsInfo != null) { long stopTime = clock.getTime(); routerMetrics.succeededGetAppStatisticsRetrieved(stopTime - startTime); RouterAuditLogger.logSuccess(getUser().getShortUserName(), GET_APPSTATISTICS, TARGET_WEB_SERVICE); return applicationStatisticsInfo; } } catch (NotFoundException e) { routerMetrics.incrGetAppStatisticsFailedRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_APPSTATISTICS, UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage()); RouterServerUtil.logAndThrowRunTimeException("get all active sub cluster(s) error.", e); } catch (IOException e) { routerMetrics.incrGetAppStatisticsFailedRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_APPSTATISTICS, UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage()); RouterServerUtil.logAndThrowRunTimeException(e, "getAppStatistics error by stateQueries = %s, typeQueries = %s with io error.", StringUtils.join(stateQueries, ","), StringUtils.join(typeQueries, ",")); } catch (YarnException e) { routerMetrics.incrGetAppStatisticsFailedRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_APPSTATISTICS, 
UNKNOWN, TARGET_WEB_SERVICE, e.getLocalizedMessage()); RouterServerUtil.logAndThrowRunTimeException(e, "getAppStatistics by stateQueries = %s, typeQueries = %s with yarn error.", StringUtils.join(stateQueries, ","), StringUtils.join(typeQueries, ",")); } routerMetrics.incrGetAppStatisticsFailedRetrieved(); RouterAuditLogger.logFailure(getUser().getShortUserName(), GET_APPSTATISTICS, UNKNOWN, TARGET_WEB_SERVICE, "getAppStatistics Failed."); throw RouterServerUtil.logAndReturnRunTimeException( "getAppStatistics by stateQueries = %s, typeQueries = %s Failed.", StringUtils.join(stateQueries, ","), StringUtils.join(typeQueries, ",")); }
@Test public void testGetAppStatistics() throws IOException, InterruptedException, YarnException { // Submit application to multiSubCluster ApplicationId appId = ApplicationId.newInstance(Time.now(), 1); ApplicationSubmissionContextInfo context = new ApplicationSubmissionContextInfo(); context.setApplicationId(appId.toString()); context.setApplicationType("MapReduce"); context.setQueue("queue"); Assert.assertNotNull(interceptor.submitApplication(context, null)); GetApplicationHomeSubClusterRequest request = GetApplicationHomeSubClusterRequest.newInstance(appId); GetApplicationHomeSubClusterResponse response = stateStore.getApplicationHomeSubCluster(request); Assert.assertNotNull(response); ApplicationHomeSubCluster homeSubCluster = response.getApplicationHomeSubCluster(); DefaultRequestInterceptorREST interceptorREST = interceptor.getInterceptorForSubCluster(homeSubCluster.getHomeSubCluster()); MockDefaultRequestInterceptorREST mockInterceptorREST = (MockDefaultRequestInterceptorREST) interceptorREST; mockInterceptorREST.updateApplicationState(YarnApplicationState.RUNNING, appId.toString()); Set<String> stateQueries = new HashSet<>(); stateQueries.add(YarnApplicationState.RUNNING.name()); Set<String> typeQueries = new HashSet<>(); typeQueries.add("MapReduce"); ApplicationStatisticsInfo response2 = interceptor.getAppStatistics(null, stateQueries, typeQueries); Assert.assertNotNull(response2); Assert.assertFalse(response2.getStatItems().isEmpty()); StatisticsItemInfo result = response2.getStatItems().get(0); Assert.assertEquals(1, result.getCount()); Assert.assertEquals(YarnApplicationState.RUNNING, result.getState()); Assert.assertEquals("MapReduce", result.getType()); }
@Override public Path copy(final Path source, final Path target, final TransferStatus status, final ConnectionCallback callback, final StreamListener listener) throws BackgroundException { try { final B2FileResponse response = session.getClient().copyFile(fileid.getVersionId(source), fileid.getVersionId(containerService.getContainer(target)), containerService.getKey(target)); listener.sent(status.getLength()); fileid.cache(target, response.getFileId()); return target.withAttributes(new B2AttributesFinderFeature(session, fileid).toAttributes(response)); } catch(B2ApiException e) { throw new B2ExceptionMappingService(fileid).map("Cannot copy {0}", e, source); } catch(IOException e) { throw new DefaultIOExceptionMappingService().map("Cannot copy {0}", e, source); } }
@Test public void testCopyToExistingFile() throws Exception { final B2VersionIdProvider fileid = new B2VersionIdProvider(session); final Path container = new Path("test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path folder = new B2DirectoryFeature(session, fileid).mkdir(new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); final String name = new AlphanumericRandomStringService().random(); final Path test = new B2TouchFeature(session, fileid).touch(new Path(folder, name, EnumSet.of(Path.Type.file)), new TransferStatus()); final Path copy = new B2TouchFeature(session, fileid).touch(new Path(folder, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); assertTrue(new B2FindFeature(session, fileid).find(new Path(folder, name, EnumSet.of(Path.Type.file)))); assertTrue(new B2FindFeature(session, fileid).find(copy)); new B2CopyFeature(session, fileid).copy(test, copy, new TransferStatus().exists(true), new DisabledConnectionCallback(), new DisabledStreamListener()); final Find find = new DefaultFindFeature(session); assertTrue(find.find(test)); assertTrue(find.find(copy)); new B2DeleteFeature(session, fileid).delete(Arrays.asList(test, copy), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
public void addMimeMapping(String extension, String type) { handler.getMimeTypes().addMimeMapping(extension, type); }
@Test void addsMimeMapping() { environment.addMimeMapping("foo", "example/foo"); assertThat(handler.getMimeTypes().getMimeMap()).containsEntry("foo", "example/foo"); }
public <T0> DataSource<Tuple1<T0>> types(Class<T0> type0) { TupleTypeInfo<Tuple1<T0>> types = TupleTypeInfo.getBasicAndBasicValueTupleTypeInfo(type0); CsvInputFormat<Tuple1<T0>> inputFormat = new TupleCsvInputFormat<Tuple1<T0>>(path, types, this.includedMask); configureInputFormat(inputFormat); return new DataSource<Tuple1<T0>>( executionContext, inputFormat, types, Utils.getCallLocationName()); }
@Test void testWithInvalidValueType2() { CsvReader reader = getCsvReader(); // CsvReader doesn't support custom Value type assertThatThrownBy(() -> reader.types(ValueItem.class)) .isInstanceOf(IllegalArgumentException.class); }
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException { if (stream == null) { throw new NullPointerException("null stream"); } Throwable t; boolean alive = false; ForkClient client = acquireClient(); try { ContentHandler tee = (handler instanceof AbstractRecursiveParserWrapperHandler) ? handler : new TeeContentHandler(handler, new MetadataContentHandler(metadata)); t = client.call("parse", stream, tee, metadata, context); alive = true; } catch (TikaException te) { // Problem occurred on our side alive = true; throw te; } catch (IOException e) { // Problem occurred on the other side throw new TikaException("Failed to communicate with a forked parser process." + " The process has most likely crashed due to some error" + " like running out of memory. A new process will be" + " started for the next parsing request.", e); } finally { releaseClient(client, alive); } if (t instanceof IOException) { throw (IOException) t; } else if (t instanceof SAXException) { throw (SAXException) t; } else if (t instanceof TikaException) { throw (TikaException) t; } else if (t != null) { throw new TikaException("Unexpected error in forked server process", t); } }
@Test public void testRPWWithEmbeddedNPE() throws Exception { Parser parser = new AutoDetectParser(); RecursiveParserWrapper wrapper = new RecursiveParserWrapper(parser); RecursiveParserWrapperHandler handler = new RecursiveParserWrapperHandler( new BasicContentHandlerFactory(BasicContentHandlerFactory.HANDLER_TYPE.TEXT, 20000)); try (ForkParser fork = new ForkParser(ForkParserTest.class.getClassLoader(), wrapper); InputStream is = getResourceAsStream("/test-documents/embedded_with_npe.xml")) { Metadata metadata = new Metadata(); ParseContext context = new ParseContext(); fork.parse(is, handler, metadata, context); } List<Metadata> metadataList = handler.getMetadataList(); Metadata m0 = metadataList.get(0); assertEquals("Nikolai Lobachevsky", m0.get(TikaCoreProperties.CREATOR)); assertContains("main_content", m0.get(TikaCoreProperties.TIKA_CONTENT)); assertContains("embed1.xml", m0.get(TikaCoreProperties.TIKA_CONTENT)); Metadata m1 = metadataList.get(1); assertEquals("embeddedAuthor", m1.get(TikaCoreProperties.CREATOR)); assertContains("some_embedded_content", m1.get(TikaCoreProperties.TIKA_CONTENT)); assertEquals("/embed1.xml", m1.get(TikaCoreProperties.EMBEDDED_RESOURCE_PATH)); assertContains("another null pointer exception", m1.get(TikaCoreProperties.EMBEDDED_EXCEPTION)); }
@Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof ZoneTime that)) { return false; } return Objects.equals(offsetTime, that.offsetTime) && Objects.equals(zoneId, that.zoneId); }
@Test void testEquals() { ZoneTime toCompare = ZoneTime.of(DateTimeFormatter.ISO_TIME.parse("09:34:31", LocalTime::from), zoneId, false); assertFalse(zoneTime.equals(toCompare)); toCompare = ZoneTime.of(localTime, zoneId, false); assertTrue(zoneTime.equals(toCompare)); }
public int getIntHeader(String name) { String value = getHeader(name); if (value == null) { return -1; } else { return Integer.parseInt(value); } }
@Test void testGetIntHeader() { URI uri = URI.create("http://localhost:8080/test"); HttpRequest httpReq = newRequest(uri, HttpRequest.Method.GET, HttpRequest.Version.HTTP_1_1); DiscFilterRequest request = new DiscFilterRequest(httpReq); assertEquals(-1, request.getIntHeader("int_header")); request.addHeader("int_header", String.valueOf(5)); assertEquals(5, request.getIntHeader("int_header")); }
@Override public String toString() { return getFormattedText() + ": duration " + super.toString(); }
@Test public void testDurationInfoCreation() throws Exception { DurationInfo info = new DurationInfo(log, "test"); Assert.assertTrue(info.value() >= 0); Thread.sleep(1000); info.finished(); Assert.assertTrue(info.value() > 0); info = new DurationInfo(log, true, "test format %s", "value"); Assert.assertEquals("test format value: duration 0:00.000s", info.toString()); info = new DurationInfo(log, false, "test format %s", "value"); Assert.assertEquals("test format value: duration 0:00.000s", info.toString()); }
@Override public void transform(Message message, DataType fromType, DataType toType) { if (message.getHeaders().containsKey(Ddb2Constants.ITEM) || message.getHeaders().containsKey(Ddb2Constants.KEY)) { return; } JsonNode jsonBody = getBodyAsJsonNode(message); String operation = Optional.ofNullable(jsonBody.get("operation")).map(JsonNode::asText).orElse(Ddb2Operations.PutItem.name()); if (message.getExchange().hasProperties() && message.getExchange().getProperty("operation", String.class) != null) { operation = message.getExchange().getProperty("operation", String.class); } if (message.getHeaders().containsKey(Ddb2Constants.OPERATION)) { operation = message.getHeader(Ddb2Constants.OPERATION, Ddb2Operations.class).name(); } JsonNode key = jsonBody.get("key"); JsonNode item = jsonBody.get("item"); Map<String, Object> keyProps; if (key != null) { keyProps = dataFormat.getObjectMapper().convertValue(key, new TypeReference<>() { }); } else { keyProps = dataFormat.getObjectMapper().convertValue(jsonBody, new TypeReference<>() { }); } Map<String, Object> itemProps; if (item != null) { itemProps = dataFormat.getObjectMapper().convertValue(item, new TypeReference<>() { }); } else { itemProps = keyProps; } final Map<String, AttributeValue> keyMap = getAttributeValueMap(keyProps); switch (Ddb2Operations.valueOf(operation)) { case PutItem: message.setHeader(Ddb2Constants.OPERATION, Ddb2Operations.PutItem); message.setHeader(Ddb2Constants.ITEM, getAttributeValueMap(itemProps)); setHeaderIfNotPresent(Ddb2Constants.RETURN_VALUES, ReturnValue.ALL_OLD.toString(), message); break; case UpdateItem: message.setHeader(Ddb2Constants.OPERATION, Ddb2Operations.UpdateItem); message.setHeader(Ddb2Constants.KEY, keyMap); message.setHeader(Ddb2Constants.UPDATE_VALUES, getAttributeValueUpdateMap(itemProps)); setHeaderIfNotPresent(Ddb2Constants.RETURN_VALUES, ReturnValue.ALL_NEW.toString(), message); break; case DeleteItem: message.setHeader(Ddb2Constants.OPERATION, Ddb2Operations.DeleteItem); 
message.setHeader(Ddb2Constants.KEY, keyMap); setHeaderIfNotPresent(Ddb2Constants.RETURN_VALUES, ReturnValue.ALL_OLD.toString(), message); break; default: throw new UnsupportedOperationException(String.format("Unsupported operation '%s'", operation)); } }
@Test @SuppressWarnings("unchecked") void shouldMapNestedObjects() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody(Json.mapper().readTree("{\"user\":" + itemJson + "}")); exchange.setProperty("operation", Ddb2Operations.PutItem.name()); transformer.transform(exchange.getMessage(), DataType.ANY, new DataType(AWS_2_DDB_APPLICATION_JSON_TRANSFORMER)); Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES)); Map<String, AttributeValue> attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.ITEM, Map.class); Assertions.assertEquals(1L, attributeValueMap.size()); Assertions.assertEquals("AttributeValue(M={name=AttributeValue(S=Rajesh Koothrappali), " + "age=AttributeValue(N=29), " + "super-heroes=AttributeValue(SS=[batman, spiderman, wonderwoman]), " + "issues=AttributeValue(NS=[5, 3, 9, 1]), " + "girlfriend=AttributeValue(NUL=true), " + "doctorate=AttributeValue(BOOL=true)})", attributeValueMap.get("user").toString()); }
@Override public ValidationResult validate(final TaskConfig configuration) { return pluginRequestHelper.submitRequest(pluginId, TaskExtension.VALIDATION_REQUEST, new DefaultPluginInteractionCallback<>() { @Override public String requestBody(String resolvedExtensionVersion) { return handlerMap.get(resolvedExtensionVersion).convertTaskConfigToJson(configuration); } @Override public ValidationResult onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) { return handlerMap.get(resolvedExtensionVersion).toValidationResult(responseBody); } }); }
@Test public void shouldValidateTaskConfig() { String jsonResponse = "{\"errors\":{\"key1\":\"err1\",\"key2\":\"err3\"}}"; String config = "{\"URL\":{\"secure\":false,\"value\":\"http://foo\",\"required\":true}}"; when(goPluginApiResponse.responseBody()).thenReturn(jsonResponse); TaskConfig configuration = new TaskConfig(); final TaskConfigProperty property = new TaskConfigProperty("URL", "http://foo"); property.with(Property.SECURE, false); property.with(Property.REQUIRED, true); configuration.add(property); ValidationResult result = task.validate(configuration); assertThat(result.isSuccessful(), is(false)); assertThat(result.getErrors().get(0).getKey(), is("key1")); assertThat(result.getErrors().get(0).getMessage(), is("err1")); assertThat(result.getErrors().get(1).getKey(), is("key2")); assertThat(result.getErrors().get(1).getMessage(), is("err3")); ArgumentCaptor<GoPluginApiRequest> argument = ArgumentCaptor.forClass(GoPluginApiRequest.class); verify(pluginManager).submitTo(eq(pluginId), eq(PLUGGABLE_TASK_EXTENSION), argument.capture()); assertThat(argument.getValue().requestBody(), is(config)); MatcherAssert.assertThat(argument.getValue().extension(), Matchers.is(PLUGGABLE_TASK_EXTENSION)); MatcherAssert.assertThat(argument.getValue().extensionVersion(), Matchers.is(JsonBasedTaskExtensionHandler_V1.VERSION)); MatcherAssert.assertThat(argument.getValue().requestName(), Matchers.is(TaskExtension.VALIDATION_REQUEST)); }
public String generatePublicKey() { PublicKey publicKey = x509cert.getPublicKey(); if (publicKey instanceof RSAPublicKey) { return generateRSAPublicKey(); } if (publicKey instanceof DSAPublicKey) { return generateDSAPublicKey(); } return ""; }
@Test public void decodeRSAPubKey() { assertThat(certificateManagerRSA.generatePublicKey()) .contains("RSA") .contains("65537") .contains("1681953129031804462554643735709908030601939275292568895111488068832920121318010916" + "889038430576806710152191447376363866950356097752126932858298006033288814768019331823126004318941179" + "4465899645633586173494259691101582064441956032924396850221679489313043628562082670183392670094163371" + "8586841184804093747497905514737738452134274762361473284344272721776230189352829291523087538543142199" + "8761760403746876947208990209024335828599173964217021197086277312193991177728010193707324300633538463" + "6193260583579409760790138329893534549366882523130765297472656435892831796545149793228897111760122091" + "442123535919361963075454640516520743"); }
public void addProperty(String key, String value) { store.put(key, value); }
@Test void testGetInt() { memConfig.addProperty("a", "1"); Assertions.assertEquals(1, memConfig.getInt("a")); Assertions.assertEquals(Integer.valueOf(1), memConfig.getInteger("a", 2)); Assertions.assertEquals(2, memConfig.getInt("b", 2)); }
public ClientAuth getClientAuth() { String clientAuth = getString(SSL_CLIENT_AUTHENTICATION_CONFIG); if (originals().containsKey(SSL_CLIENT_AUTH_CONFIG)) { if (originals().containsKey(SSL_CLIENT_AUTHENTICATION_CONFIG)) { log.warn( "The {} configuration is deprecated. Since a value has been supplied for the {} " + "configuration, that will be used instead", SSL_CLIENT_AUTH_CONFIG, SSL_CLIENT_AUTHENTICATION_CONFIG ); } else { log.warn( "The configuration {} is deprecated and should be replaced with {}", SSL_CLIENT_AUTH_CONFIG, SSL_CLIENT_AUTHENTICATION_CONFIG ); clientAuth = getBoolean(SSL_CLIENT_AUTH_CONFIG) ? SSL_CLIENT_AUTHENTICATION_REQUIRED : SSL_CLIENT_AUTHENTICATION_NONE; } } return getClientAuth(clientAuth); }
@Test public void shouldResolveClientAuthenticationRequired() { // Given: final KsqlRestConfig config = new KsqlRestConfig(ImmutableMap.<String, Object>builder() .put(KsqlRestConfig.SSL_CLIENT_AUTHENTICATION_CONFIG, KsqlRestConfig.SSL_CLIENT_AUTHENTICATION_REQUIRED) .build() ); // When: final ClientAuth clientAuth = config.getClientAuth(); // Then: assertThat(clientAuth, is(ClientAuth.REQUIRED)); }
public static SlidingWindows ofTimeDifferenceAndGrace(final Duration timeDifference, final Duration afterWindowEnd) throws IllegalArgumentException { final String timeDifferenceMsgPrefix = prepareMillisCheckFailMsgPrefix(timeDifference, "timeDifference"); final long timeDifferenceMs = validateMillisecondDuration(timeDifference, timeDifferenceMsgPrefix); final String afterWindowEndMsgPrefix = prepareMillisCheckFailMsgPrefix(afterWindowEnd, "afterWindowEnd"); final long afterWindowEndMs = validateMillisecondDuration(afterWindowEnd, afterWindowEndMsgPrefix); return new SlidingWindows(timeDifferenceMs, afterWindowEndMs); }
@Test public void timeDifferenceMustNotBeNegative() { assertThrows(IllegalArgumentException.class, () -> SlidingWindows.ofTimeDifferenceAndGrace(ofMillis(-1), ofMillis(5))); }
public static String getLocalHost() { return LOCALHOST; }
@Test public void getLocalHost() { Assert.assertNotNull(SystemInfo.getLocalHost()); }
public String abbreviate(String fqClassName) { if (fqClassName == null) { throw new IllegalArgumentException("Class name may not be null"); } int inLen = fqClassName.length(); if (inLen < targetLength) { return fqClassName; } StringBuilder buf = new StringBuilder(inLen); int rightMostDotIndex = fqClassName.lastIndexOf(DOT); if (rightMostDotIndex == -1) return fqClassName; // length of last segment including the dot int lastSegmentLength = inLen - rightMostDotIndex; int leftSegments_TargetLen = targetLength - lastSegmentLength; if (leftSegments_TargetLen < 0) leftSegments_TargetLen = 0; int leftSegmentsLen = inLen - lastSegmentLength; // maxPossibleTrim denotes the maximum number of characters we aim to trim // the actual number of character trimmed may be higher since segments, when // reduced, are reduced to just one character int maxPossibleTrim = leftSegmentsLen - leftSegments_TargetLen; int trimmed = 0; boolean inDotState = true; int i = 0; for (; i < rightMostDotIndex; i++) { char c = fqClassName.charAt(i); if (c == DOT) { // if trimmed too many characters, let us stop if (trimmed >= maxPossibleTrim) break; buf.append(c); inDotState = true; } else { if (inDotState) { buf.append(c); inDotState = false; } else { trimmed++; } } } // append from the position of i which may include the last seen DOT buf.append(fqClassName.substring(i)); return buf.toString(); }
@Test public void testXDot() { { TargetLengthBasedClassNameAbbreviator abbreviator = new TargetLengthBasedClassNameAbbreviator(21); String name = "com.logback.wombat.alligator.Foobar"; assertEquals("c.l.w.a.Foobar", abbreviator.abbreviate(name)); } { TargetLengthBasedClassNameAbbreviator abbreviator = new TargetLengthBasedClassNameAbbreviator(22); String name = "com.logback.wombat.alligator.Foobar"; assertEquals("c.l.w.alligator.Foobar", abbreviator.abbreviate(name)); } { TargetLengthBasedClassNameAbbreviator abbreviator = new TargetLengthBasedClassNameAbbreviator(1); String name = "com.logback.wombat.alligator.tomato.Foobar"; assertEquals("c.l.w.a.t.Foobar", abbreviator.abbreviate(name)); } { TargetLengthBasedClassNameAbbreviator abbreviator = new TargetLengthBasedClassNameAbbreviator(21); String name = "com.logback.wombat.alligator.tomato.Foobar"; assertEquals("c.l.w.a.tomato.Foobar", abbreviator.abbreviate(name)); } { TargetLengthBasedClassNameAbbreviator abbreviator = new TargetLengthBasedClassNameAbbreviator(29); String name = "com.logback.wombat.alligator.tomato.Foobar"; assertEquals("c.l.w.alligator.tomato.Foobar", abbreviator.abbreviate(name)); } }
public static boolean fullyDeleteContents(final File dir) { return fullyDeleteContents(dir, false); }
@Test (timeout = 30000) public void testFailFullyDeleteContents() throws IOException { // Windows Dir.setWritable(false) does not work for directories assumeNotWindows(); LOG.info("Running test to verify failure of fullyDeleteContents()"); setupDirsAndNonWritablePermissions(); boolean ret = FileUtil.fullyDeleteContents(new MyFile(del)); validateAndSetWritablePermissions(true, ret); }
@Override public T addShort(K name, short value) { throw new UnsupportedOperationException("read only"); }
@Test public void testAddShort() { assertThrows(UnsupportedOperationException.class, new Executable() { @Override public void execute() { HEADERS.addShort("name", (short) 0); } }); }
LatencyTrackingReducingState( String stateName, InternalReducingState<K, N, T> original, LatencyTrackingStateConfig latencyTrackingStateConfig) { super( original, new ReducingStateLatencyMetrics( stateName, latencyTrackingStateConfig.getMetricGroup(), latencyTrackingStateConfig.getSampleInterval(), latencyTrackingStateConfig.getHistorySize(), latencyTrackingStateConfig.isStateNameAsVariable())); }
@Test @SuppressWarnings({"unchecked", "rawtypes"}) void testLatencyTrackingReducingState() throws Exception { AbstractKeyedStateBackend<Integer> keyedBackend = createKeyedBackend(getKeySerializer()); try { LatencyTrackingReducingState<Integer, VoidNamespace, Long> latencyTrackingState = (LatencyTrackingReducingState) createLatencyTrackingState(keyedBackend, getStateDescriptor()); latencyTrackingState.setCurrentNamespace(VoidNamespace.INSTANCE); LatencyTrackingReducingState.ReducingStateLatencyMetrics latencyTrackingStateMetric = latencyTrackingState.getLatencyTrackingStateMetric(); assertThat(latencyTrackingStateMetric.getAddCount()).isZero(); assertThat(latencyTrackingStateMetric.getGetCount()).isZero(); assertThat(latencyTrackingStateMetric.getMergeNamespaceCount()).isZero(); setCurrentKey(keyedBackend); ThreadLocalRandom random = ThreadLocalRandom.current(); for (int index = 1; index <= SAMPLE_INTERVAL; index++) { int expectedResult = index == SAMPLE_INTERVAL ? 0 : index; latencyTrackingState.add(random.nextLong()); assertThat(latencyTrackingStateMetric.getAddCount()).isEqualTo(expectedResult); latencyTrackingState.get(); assertThat(latencyTrackingStateMetric.getGetCount()).isEqualTo(expectedResult); latencyTrackingState.mergeNamespaces( VoidNamespace.INSTANCE, Collections.emptyList()); assertThat(latencyTrackingStateMetric.getMergeNamespaceCount()) .isEqualTo(expectedResult); } } finally { if (keyedBackend != null) { keyedBackend.close(); keyedBackend.dispose(); } } }
// Static factory: starts a FactoryBuilder that decorates the given propagation factory.
public static FactoryBuilder newFactoryBuilder(Propagation.Factory delegate) { return new FactoryBuilder(delegate); }
// With no explicit key names, only the remote baggage field (sessionId) is injected, using its
// lower-cased default key; the local field (userId) must not appear in the carrier.
@Test void inject_no_key_names() { BaggageField userId = BaggageField.create("userId"); BaggageField sessionId = BaggageField.create("sessionId"); factory = newFactoryBuilder(B3SinglePropagation.FACTORY) .add(SingleBaggageField.local(userId)) .add(SingleBaggageField.remote(sessionId)) .build(); initialize(); userId.updateValue(context, "bob"); sessionId.updateValue(context, "12345"); injector.inject(context, request); assertThat(request) .doesNotContainKey("userid") .containsEntry("sessionid", "12345"); }
// Convenience overload: delegates to the full run(...) with a null generated-source root.
public static GeneratorResult run(String resolverPath, String defaultPackage, final boolean generateImported, final boolean generateDataTemplates, RestliVersion version, RestliVersion deprecatedByVersion, String targetDirectoryPath, String[] sources) throws IOException { return run(resolverPath, defaultPackage, null, generateImported, generateDataTemplates, version, deprecatedByVersion, targetDirectoryPath, sources); }
// Generates builders from two restspec files and checks both output files exist and embed their source idl path.
// NOTE(review): the FileInputStream instances passed to IOUtils.toString are never closed — acceptable in a test, but a try-with-resources would be cleaner.
@Test(dataProvider = "arrayDuplicateDataProvider") public void testGeneration(RestliVersion version, String ABuildersName, String BBuildersName) throws Exception { final String pegasusDir = moduleDir + FS + RESOURCES_DIR + FS + "pegasus"; final String outPath = outdir.getPath(); RestRequestBuilderGenerator.run(pegasusDir, null, moduleDir, true, false, version, null, outPath, new String[] { moduleDir + FS + RESOURCES_DIR + FS + "idls" + FS + "arrayDuplicateA.restspec.json" }); RestRequestBuilderGenerator.run(pegasusDir, null, moduleDir, true, false, version, null, outPath, new String[] { moduleDir + FS + RESOURCES_DIR + FS + "idls" + FS + "arrayDuplicateB.restspec.json" }); final File aBuilderFile = new File(outPath + FS + ABuildersName); final File bBuilderFile = new File(outPath + FS + BBuildersName); Assert.assertTrue(aBuilderFile.exists()); Assert.assertTrue(bBuilderFile.exists()); final String aBuilderFileContent = IOUtils.toString(new FileInputStream(aBuilderFile)); Assert.assertTrue(aBuilderFileContent.contains("Generated from " + RESOURCES_DIR + FS + "idls" + FS + "arrayDuplicateA.restspec.json")); final String bBuilderFileContent = IOUtils.toString(new FileInputStream(bBuilderFile)); Assert.assertTrue(bBuilderFileContent.contains("Generated from " + RESOURCES_DIR + FS + "idls" + FS + "arrayDuplicateB.restspec.json")); }
// Collects all base/log file paths written by a COMPLETED instant; rejects incomplete instants,
// returns empty Option for actions it does not understand (e.g. compaction).
// Commit/delta-commit and replace/clustering branches parse different metadata classes, hence the duplication.
public static Option<Set<String>> getBaseAndLogFilePathsFromTimeline( HoodieTimeline timeline, HoodieInstant instant) throws IOException { if (!instant.isCompleted()) { throw new HoodieException("Cannot get base and log file paths from " + "instant not completed: " + instant.getTimestamp()); } switch (instant.getAction()) { case COMMIT_ACTION: case DELTA_COMMIT_ACTION: final HoodieCommitMetadata commitMetadata = HoodieCommitMetadata.fromBytes( timeline.getInstantDetails(instant).get(), HoodieCommitMetadata.class); return Option.of(commitMetadata.getPartitionToWriteStats().values().stream().flatMap(List::stream) .map(HoodieWriteStat::getPath).collect(Collectors.toSet())); case REPLACE_COMMIT_ACTION: case CLUSTERING_ACTION: final HoodieReplaceCommitMetadata replaceCommitMetadata = HoodieReplaceCommitMetadata.fromBytes( timeline.getInstantDetails(instant).get(), HoodieReplaceCommitMetadata.class); return Option.of(replaceCommitMetadata.getPartitionToWriteStats().values().stream().flatMap(List::stream) .map(HoodieWriteStat::getPath).collect(Collectors.toSet())); default: return Option.empty(); } }
// Covers the three focal-method outcomes: paths for a completed commit, HoodieException for an
// inflight instant, and Option.empty() for an unsupported (compaction) action.
@Test public void testGetBaseAndLogFilePathsFromTimeline() throws IOException { setupTimelineInFS(); HoodieTimeline timeline = metaClient.getActiveTimeline(); HoodieInstant commitInstant = new HoodieInstant( HoodieInstant.State.COMPLETED, HoodieTimeline.COMMIT_ACTION, "001"); HoodieInstant inflightInstant = new HoodieInstant( HoodieInstant.State.INFLIGHT, HoodieTimeline.COMMIT_ACTION, "005"); HoodieInstant compactionInstant = new HoodieInstant( HoodieInstant.State.COMPLETED, HoodieTimeline.COMPACTION_ACTION, "006"); Map<String, List<Pair<String, String>>> partitionToFileIdAndNameMap = instantInfoMap.get(commitInstant.getTimestamp()); Set<String> expectedPaths = partitionToFileIdAndNameMap.entrySet().stream() .flatMap(entry -> entry.getValue().stream() .map(fileInfo -> new Path(entry.getKey(), fileInfo.getValue()).toString()) .collect(Collectors.toList()) .stream() ).collect(Collectors.toSet()); assertEquals(Option.of(expectedPaths), RepairUtils.getBaseAndLogFilePathsFromTimeline(timeline, commitInstant)); assertThrows(HoodieException.class, () -> RepairUtils.getBaseAndLogFilePathsFromTimeline(timeline, inflightInstant)); assertEquals(Option.empty(), RepairUtils.getBaseAndLogFilePathsFromTimeline(timeline, compactionInstant)); }
// Delegates to the chunked list(...) overload using the configured "s3.listing.chunksize" preference.
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { return this.list(directory, listener, new HostPreferences(session.getHost()).getInteger("s3.listing.chunksize")); }
// Integration test: a "." placeholder directory created inside a bucket must show up in the listing.
@Test public void testListPlaceholderDot() throws Exception { final Path container = new SpectraDirectoryFeature(session, new SpectraWriteFeature(session)).mkdir( new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus()); final Path placeholder = new SpectraDirectoryFeature(session, new SpectraWriteFeature(session)).mkdir( new Path(container, ".", EnumSet.of(Path.Type.directory)), new TransferStatus()); assertTrue(new SpectraObjectListService(session).list(container, new DisabledListProgressListener()).contains(placeholder)); new SpectraDeleteFeature(session).delete(Collections.singletonList(container), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
/**
 * Runs every task synchronously on the calling thread and returns already-completed futures.
 * A task's exception is captured in its future rather than propagated.
 *
 * @throws java.util.concurrent.RejectedExecutionException if this executor is shut down
 */
@Override
@Nonnull
public <T> List<Future<T>> invokeAll(@Nonnull Collection<? extends Callable<T>> tasks) {
    throwRejectedExecutionExceptionIfShutdown();
    final List<Future<T>> futures = new ArrayList<>(tasks.size());
    for (final Callable<T> task : tasks) {
        Future<T> future;
        try {
            // Execute inline; success is wrapped with a null throwable.
            future = new CompletedFuture<>(task.call(), null);
        } catch (Exception e) {
            // Failure is wrapped with a null value and the thrown exception.
            future = new CompletedFuture<>(null, e);
        }
        futures.add(future);
    }
    return futures;
}
// Confirms invokeAll executes the callable inline: the future completes with the *current* thread.
@Test void testInvokeAllWithNoopShutdown() { final CompletableFuture<Thread> future = new CompletableFuture<>(); testWithNoopShutdown( testInstance -> testInstance.invokeAll(callableCollectionFromFuture(future))); assertThat(future).isCompletedWithValue(Thread.currentThread()); }
// Straight delegation to the wrapped SLF4J logger.
@Override public void warn(String msg) { logger.warn(msg); }
// Verifies the adapter forwards warn(msg, throwable) unchanged to the underlying SLF4J logger.
@Test public void testWarnWithException() { Logger mockLogger = mock(Logger.class); when(mockLogger.getName()).thenReturn("foo"); InternalLogger logger = new Slf4JLogger(mockLogger); logger.warn("a", e); verify(mockLogger).getName(); verify(mockLogger).warn("a", e); }
// CLI entry point: validates and loads the migration config, then delegates to the testable
// command(...) overload with real collaborators (ksql client factory, migrations dir, system clock).
// Returns 1 on any configuration failure.
@Override protected int command() { if (!validateConfigFilePresent()) { return 1; } final MigrationConfig config; try { config = MigrationConfig.load(getConfigFile()); } catch (KsqlException | MigrationException e) { LOGGER.error(e.getMessage()); return 1; } return command( config, MigrationsUtil::getKsqlClient, getMigrationsDir(getConfigFile(), config), Clock.systemDefaultZone() ); }
// An invalid "-a" variable argument must make the command fail with exit code 1.
@Test public void shouldFailOnInvalidArgumentVariable() throws Exception { // Given: command = PARSER.parse("-a", "-d", "woooo"); createMigrationFile(1, NAME, migrationsDir, "INSERT INTO FOO VALUES ('${name}');"); when(versionQueryResult.get()).thenReturn(ImmutableList.of()); // When: final int result = command.command(config, (cfg, headers) -> ksqlClient, migrationsDir, Clock.fixed( Instant.ofEpochMilli(1000), ZoneId.systemDefault())); // Then: assertThat(result, is(1)); }
// Returns a lazy iterable over up to sampleCount random entries; negative counts are rejected,
// and a zero count or an empty map short-circuits to an empty list.
public <E extends SamplingEntry> Iterable<E> getRandomSamples(int sampleCount) { if (sampleCount < 0) { throw new IllegalArgumentException("Sample count cannot be a negative value."); } if (sampleCount == 0 || size() == 0) { return Collections.emptyList(); } return new LazySamplingEntryIterableIterator<>(sampleCount); }
// Negative sample counts must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class) public void test_getRandomSamples_whenSampleCountIsNegative() { map = new SampleableConcurrentHashMap<>(10); map.getRandomSamples(-1); }
// Simple accessor for the disable-recursion-desired flag.
public boolean isDisableRecursionDesired() { return disableRecursionDesired; }
// Flag defaults to false and is reflected after being set on the builder.
@Test void disableRecursionDesired() { assertThat(builder.build().isDisableRecursionDesired()).isFalse(); builder.disableRecursionDesired(true); assertThat(builder.build().isDisableRecursionDesired()).isTrue(); }
public BeamFnApi.InstructionResponse.Builder processBundle(BeamFnApi.InstructionRequest request) throws Exception { BeamFnApi.ProcessBundleResponse.Builder response = BeamFnApi.ProcessBundleResponse.newBuilder(); BundleProcessor bundleProcessor = bundleProcessorCache.get( request, () -> { try { return createBundleProcessor( request.getProcessBundle().getProcessBundleDescriptorId(), request.getProcessBundle()); } catch (IOException e) { throw new RuntimeException(e); } }); try { PTransformFunctionRegistry startFunctionRegistry = bundleProcessor.getStartFunctionRegistry(); PTransformFunctionRegistry finishFunctionRegistry = bundleProcessor.getFinishFunctionRegistry(); ExecutionStateTracker stateTracker = bundleProcessor.getStateTracker(); try (HandleStateCallsForBundle beamFnStateClient = bundleProcessor.getBeamFnStateClient()) { stateTracker.start(request.getInstructionId()); try { // Already in reverse topological order so we don't need to do anything. for (ThrowingRunnable startFunction : startFunctionRegistry.getFunctions()) { LOG.debug("Starting function {}", startFunction); startFunction.run(); } if (request.getProcessBundle().hasElements()) { boolean inputFinished = bundleProcessor .getInboundObserver() .multiplexElements(request.getProcessBundle().getElements()); if (!inputFinished) { throw new RuntimeException( "Elements embedded in ProcessBundleRequest do not contain stream terminators for " + "all data and timer inputs. 
Unterminated endpoints: " + bundleProcessor.getInboundObserver().getUnfinishedEndpoints()); } } else if (!bundleProcessor.getInboundEndpointApiServiceDescriptors().isEmpty()) { BeamFnDataInboundObserver observer = bundleProcessor.getInboundObserver(); beamFnDataClient.registerReceiver( request.getInstructionId(), bundleProcessor.getInboundEndpointApiServiceDescriptors(), observer); observer.awaitCompletion(); beamFnDataClient.unregisterReceiver( request.getInstructionId(), bundleProcessor.getInboundEndpointApiServiceDescriptors()); } // Need to reverse this since we want to call finish in topological order. for (ThrowingRunnable finishFunction : Lists.reverse(finishFunctionRegistry.getFunctions())) { LOG.debug("Finishing function {}", finishFunction); finishFunction.run(); } // If bundleProcessor has not flushed any elements, embed them in response. embedOutboundElementsIfApplicable(response, bundleProcessor); // Add all checkpointed residuals to the response. response.addAllResidualRoots(bundleProcessor.getSplitListener().getResidualRoots()); // Add all metrics to the response. bundleProcessor.getProgressRequestLock().lock(); Map<String, ByteString> monitoringData = finalMonitoringData(bundleProcessor); if (runnerAcceptsShortIds) { response.putAllMonitoringData(monitoringData); } else { for (Map.Entry<String, ByteString> metric : monitoringData.entrySet()) { response.addMonitoringInfos( shortIds.get(metric.getKey()).toBuilder().setPayload(metric.getValue())); } } if (!bundleProcessor.getBundleFinalizationCallbackRegistrations().isEmpty()) { finalizeBundleHandler.registerCallbacks( bundleProcessor.getInstructionId(), ImmutableList.copyOf(bundleProcessor.getBundleFinalizationCallbackRegistrations())); response.setRequiresFinalization(true); } } finally { // We specifically deactivate state tracking while we are holding the progress request and // sampling locks. stateTracker.reset(); } } // Mark the bundle processor as re-usable. 
bundleProcessorCache.release( request.getProcessBundle().getProcessBundleDescriptorId(), bundleProcessor); return BeamFnApi.InstructionResponse.newBuilder().setProcessBundle(response); } catch (Exception e) { // Make sure we clean-up from the active set of bundle processors. bundleProcessorCache.discard(bundleProcessor); throw e; } }
// A transform that registers a bundle-finalization callback must cause the response to set
// requiresFinalization and must forward the callback (with its expiry) to the FinalizeBundleHandler.
@Test public void testBundleFinalizationIsPropagated() throws Exception { BeamFnApi.ProcessBundleDescriptor processBundleDescriptor = BeamFnApi.ProcessBundleDescriptor.newBuilder() .putTransforms( "2L", RunnerApi.PTransform.newBuilder() .setSpec(RunnerApi.FunctionSpec.newBuilder().setUrn(DATA_INPUT_URN).build()) .build()) .build(); Map<String, BeamFnApi.ProcessBundleDescriptor> fnApiRegistry = ImmutableMap.of("1L", processBundleDescriptor); FinalizeBundleHandler mockFinalizeBundleHandler = mock(FinalizeBundleHandler.class); BundleFinalizer.Callback mockCallback = mock(BundleFinalizer.Callback.class); ProcessBundleHandler handler = new ProcessBundleHandler( PipelineOptionsFactory.create(), Collections.emptySet(), fnApiRegistry::get, beamFnDataClient, null /* beamFnStateGrpcClientCache */, mockFinalizeBundleHandler, new ShortIdMap(), executionStateSampler, ImmutableMap.of( DATA_INPUT_URN, (PTransformRunnerFactory<Object>) (context) -> { BundleFinalizer bundleFinalizer = context.getBundleFinalizer(); context.addStartBundleFunction( () -> bundleFinalizer.afterBundleCommit( Instant.ofEpochMilli(42L), mockCallback)); return null; }), Caches.noop(), new BundleProcessorCache(), null /* dataSampler */); BeamFnApi.InstructionResponse.Builder response = handler.processBundle( BeamFnApi.InstructionRequest.newBuilder() .setInstructionId("2L") .setProcessBundle( BeamFnApi.ProcessBundleRequest.newBuilder().setProcessBundleDescriptorId("1L")) .build()); assertTrue(response.getProcessBundle().getRequiresFinalization()); verify(mockFinalizeBundleHandler) .registerCallbacks( eq("2L"), argThat( (Collection<CallbackRegistration> arg) -> { CallbackRegistration registration = Iterables.getOnlyElement(arg); assertEquals(Instant.ofEpochMilli(42L), registration.getExpiryTime()); assertSame(mockCallback, registration.getCallback()); return true; })); }
/**
 * Resolves the first related entity of {@code originator} matching {@code relationsQuery}.
 *
 * <p>For direction FROM the related entity is the relation's "to" end; for TO it is the "from"
 * end. Completes with {@code null} when no relation matches, and fails with
 * {@link IllegalStateException} for an unknown direction.
 *
 * <p>Refactored: the FROM/TO branches previously duplicated the whole transform; only the
 * endpoint extraction differs, so it is now selected once up front.
 */
public static ListenableFuture<EntityId> findEntityAsync(
        TbContext ctx, EntityId originator, RelationsQuery relationsQuery
) {
    var relationService = ctx.getRelationService();
    var query = buildQuery(originator, relationsQuery);
    var relationListFuture = relationService.findByQuery(ctx.getTenantId(), query);
    var direction = relationsQuery.getDirection();
    if (direction != EntitySearchDirection.FROM && direction != EntitySearchDirection.TO) {
        return Futures.immediateFailedFuture(new IllegalStateException("Unknown direction"));
    }
    // FROM -> take the relation's target ("to"); TO -> take its source ("from").
    final boolean searchFrom = direction == EntitySearchDirection.FROM;
    return Futures.transformAsync(relationListFuture, relationList ->
            CollectionUtils.isNotEmpty(relationList)
                    ? Futures.immediateFuture(
                            searchFrom ? relationList.get(0).getTo() : relationList.get(0).getFrom())
                    : Futures.immediateFuture(null),
            ctx.getDbCallbackExecutor());
}
// With one matching relation stubbed, findEntityAsync must resolve to that relation's entity id.
@Test public void givenRelationQuery_whenFindEntityAsync_thenOK() { // GIVEN List<EntityRelation> entityRelations = new ArrayList<>(); entityRelations.add(createEntityRelation(TENANT_ID, ASSET_ORIGINATOR_ID)); when(relationServiceMock.findByQuery(ArgumentMatchers.any(), ArgumentMatchers.any())).thenReturn(Futures.immediateFuture(entityRelations)); // WHEN ListenableFuture<EntityId> entityIdFuture = EntitiesRelatedEntityIdAsyncLoader.findEntityAsync(ctxMock, TENANT_ID, relationsQuery); // THEN verifyEntityIdFuture(entityIdFuture, ASSET_ORIGINATOR_ID); }
/**
 * Broadcasts the given lifecycle event to every registered plugin that exposes a
 * non-null lifecycle listener.
 */
public void notifyLifecycleListeners( SpoonLifecycleListener.SpoonLifeCycleEvent evt ) {
  plugins.values().stream()
    .map( SpoonPluginInterface::getLifecycleListener )
    .filter( listener -> listener != null )
    .forEach( listener -> listener.onEvent( evt ) );
}
// Both registered plugins must receive the STARTUP event exactly once (2 notifications total).
@Test public void testNotifyLifecycleListeners() throws Exception { spoonPluginManager.pluginAdded( plugin1 ); spoonPluginManager.pluginAdded( plugin2 ); spoonPluginManager.notifyLifecycleListeners( SpoonLifecycleListener.SpoonLifeCycleEvent.STARTUP ); assertEquals( 2, notifications.get() ); }
public static double getContainerProcessCpuLoad(double cpuUtil) throws IOException { int logicalProcessorsOfNode = getAvailableProcessors(); double cpuQuota = getCpuQuota(); if (cpuQuota == NO_CPU_QUOTA) { return cpuUtil; } // Get the number of CPUs of a node that can be used by the operating environment double cpuLimit = cpuQuota / getCpuPeriod(); // Get the minimal number of CPUs needed to achieve the reported CPU utilization double cpus = cpuUtil * logicalProcessorsOfNode; /* Calculate the CPU utilization of a JVM process with respect to the operating environment. * Since the operating environment will only use the CPU resources allocated by CGroups, * it will always be that: cpuLimit >= cpus and the result is in the [0.0,1.0] interval. */ return cpus / cpuLimit; }
// Table-driven check of (cpuUtil * processors) / (cpuQuota / cpuPeriod), including the
// no-quota passthrough case in the last row.
@Test public void testGetContainerProcessCpuLoad() throws Exception { /* * expectedContainerProcessCpuLoad = (cpuUtil * processors) / (cpuQuota / cpuPeriod) */ mockGetContainerProcessCpuLoad(1, 100000.0, 1.0, 1.0); mockGetContainerProcessCpuLoad(1, 100000.0, 0.5, 0.5); mockGetContainerProcessCpuLoad(1, 50000.0, 0.5, 1.0); mockGetContainerProcessCpuLoad(1, 75000.0, 0.5, 0.66); mockGetContainerProcessCpuLoad(2, 100000.0, 0.5, 1.0); mockGetContainerProcessCpuLoad(2, 200000.0, 1.0, 1.0); mockGetContainerProcessCpuLoad(2, 25000.0, 0.125, 1.0); mockGetContainerProcessCpuLoad(2, 2500.0, 0.0125, 1.0); mockGetContainerProcessCpuLoad(2, ContainerMetricUtils.NO_CPU_QUOTA, 0.125, 0.125); }
// Appends @RequestParam values to the request URL's query string. A Map argument contributes all
// of its entries; a MultipartFile contributes nothing; any other value is added under the
// annotation's name. The unchecked Map cast mirrors Feign-style parameter expansion.
@Override public boolean processArgument(final ShenyuRequest shenyuRequest, final Annotation annotation, final Object arg) { RequestTemplate requestTemplate = shenyuRequest.getRequestTemplate(); RequestParam requestParam = ANNOTATION.cast(annotation); String name = requestParam.value(); checkState(emptyToNull(name) != null || arg instanceof Map, "RequestParam.value() was empty on parameter %s#%s", requestTemplate.getMethod().getDeclaringClass().getSimpleName(), requestTemplate.getMethod().getName()); StringBuilder pathResult = new StringBuilder(requestTemplate.getPath()); Map<Object, Object> params = Maps.newHashMap(); if (!(arg instanceof Map) && !(arg instanceof MultipartFile)) { params.put(name, arg); } else if (arg instanceof Map) { params = (Map<Object, Object>) arg; } params.forEach((key, value) -> { if (pathResult.indexOf("?") > 0) { pathResult.append("&"); } else { pathResult.append("?"); } pathResult.append(key).append("=").append(value); }); shenyuRequest.setUrl(requestTemplate.getUrl() + pathResult); return true; }
// A Map argument must expand into query parameters for every entry (order-independent contains checks).
@Test public void processArgumentMapTest() { RequestTemplate template = new RequestTemplate(Void.class, method2, "method1", "/dev/url/param", "", "/path", ShenyuRequest.HttpMethod.GET, null, null, null); this.request = ShenyuRequest.create(ShenyuRequest.HttpMethod.POST, "", Maps.newHashMap(), "", "test", template); final RequestParam param = spy(RequestParam.class); Map<String, Object> params = Maps.newHashMap(); params.put("id", Integer.SIZE); params.put("name", "nameValue"); params.put("price", BigDecimal.ONE); processor.processArgument(request, param, params); assertTrue(request.getUrl().contains("id=32"), "param resolve failed."); assertTrue(request.getUrl().contains("name=nameValue"), "param resolve failed."); assertTrue(request.getUrl().contains("price=1"), "param resolve failed."); }
// Simple accessor for the owning plugin name.
public String getPlugin() { return plugin; }
// A property built without an explicit plugin must report "Openfire" as its owner.
@Test public void theDefaultPluginIsOpenfire() { final SystemProperty<Long> longProperty = SystemProperty.Builder.ofType(Long.class) .setKey("an-openfire-property") .setDefaultValue(42L) .setDynamic(false) .build(); assertThat(longProperty.getPlugin(), is("Openfire")); }
// Convenience overload: resolves credentials from the default ~/.docker/config.json for this registry.
public CredentialRetriever dockerConfig() { return dockerConfig( DockerConfigCredentialRetriever.create( imageReference.getRegistry(), Paths.get(System.getProperty("user.home"), ".docker", "config.json"))); }
// Credentials found in the Docker config must be returned and a lifecycle log line emitted
// naming both the config file and the registry/repo.
@Test public void testDockerConfig() throws IOException, CredentialRetrievalException { CredentialRetrieverFactory credentialRetrieverFactory = createCredentialRetrieverFactory("registry", "repo"); Path dockerConfig = Paths.get("/foo/config.json"); DockerConfigCredentialRetriever dockerConfigCredentialRetriever = Mockito.mock(DockerConfigCredentialRetriever.class); Mockito.when(dockerConfigCredentialRetriever.retrieve(mockLogger)) .thenReturn(Optional.of(FAKE_CREDENTIALS)); Mockito.when(dockerConfigCredentialRetriever.getDockerConfigFile()).thenReturn(dockerConfig); Assert.assertEquals( Optional.of(FAKE_CREDENTIALS), credentialRetrieverFactory.dockerConfig(dockerConfigCredentialRetriever).retrieve()); Mockito.verify(mockLogger) .accept( LogEvent.lifecycle( "Using credentials from Docker config (" + dockerConfig + ") for registry/repo")); }
/**
 * Two definitions are equal when they carry the same base plugin key.
 * Uses exact-class comparison (not instanceof) to keep symmetry with subclasses.
 */
@Override
public boolean equals(@Nullable Object o) {
    if (o == this) {
        return true;
    }
    if (o == null || o.getClass() != getClass()) {
        return false;
    }
    return basePluginKey.equals(((PluginClassLoaderDef) o).basePluginKey);
}
// Checks reflexivity, key-based equality/hashCode agreement, inequality for a different key,
// a different type, and null.
@Test public void test_equals_and_hashCode() { PluginClassLoaderDef one = new PluginClassLoaderDef("one"); PluginClassLoaderDef oneBis = new PluginClassLoaderDef("one"); PluginClassLoaderDef two = new PluginClassLoaderDef("two"); assertThat(one.equals(one)).isTrue(); assertThat(one.equals(oneBis)).isTrue(); assertThat(one) .hasSameHashCodeAs(one) .hasSameHashCodeAs(oneBis); assertThat(one.equals(two)).isFalse(); assertThat(one.equals("one")).isFalse(); assertThat(one.equals(null)).isFalse(); }
// Queries Elasticsearch's _cat/shards API (with retries) for the given indices and converts each
// JSON row into a Shard, resolving node ids to HTTP addresses first. IOException is wrapped in JetException.
@SuppressFBWarnings("RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE") public List<Shard> shards(String... indices) { Map<String, String> idToAddress = nodes().stream().collect(toMap(Node::getId, Node::getHttpAddress)); try { Request r = new Request("GET", "/_cat/shards/" + String.join(",", indices)); r.addParameter("format", "json"); r.addParameter("h", "id,index,shard,prirep,docs,state,ip,node"); Response res = withRetry(() -> client.performRequest(r), retries); try (InputStreamReader reader = new InputStreamReader(res.getEntity().getContent(), UTF_8)) { JsonArray array = Json.parse(reader).asArray(); List<Shard> shards = new ArrayList<>(array.size()); for (JsonValue value : array) { Optional<Shard> shard = convertToShard(value, idToAddress); shard.ifPresent(shards::add); } LOG.log(FINE, "Shards " + shards); return shards; } } catch (IOException e) { throw new JetException("Could not get ES shards", e); } }
// When every request attempt throws, shards(...) must surface a JetException whose root cause
// is the original IOException.
@Test public void shouldFailAfterFiveAttemptsShards() throws IOException { ElasticCatClient catClient = new ElasticCatClient(restClient, 2); when(restClient.performRequest(any())) .thenThrow(new IOException("Could not connect")); assertThatThrownBy(() -> catClient.shards("my-index")) .isInstanceOf(JetException.class) .hasRootCauseInstanceOf(IOException.class) .hasRootCauseMessage("Could not connect"); }
/**
 * Builds a ProxyProvider from the standard JVM proxy properties.
 * HTTP/HTTPS settings take precedence over SOCKS; returns null when no proxy is configured.
 */
@Nullable
static ProxyProvider createFrom(Properties properties) {
    Objects.requireNonNull(properties, "properties");
    final boolean hasHttpProxy =
            properties.containsKey(HTTP_PROXY_HOST) || properties.containsKey(HTTPS_PROXY_HOST);
    if (hasHttpProxy) {
        return createHttpProxyFrom(properties);
    }
    return properties.containsKey(SOCKS_PROXY_HOST) ? createSocksProxyFrom(properties) : null;
}
// An unsupported SOCKS version property must fail with a descriptive IllegalArgumentException.
@Test void proxyFromSystemProperties_errorWhenSocksVersionInvalid() { Properties properties = new Properties(); properties.setProperty(ProxyProvider.SOCKS_PROXY_HOST, "host"); properties.setProperty(ProxyProvider.SOCKS_VERSION, "42"); assertThatIllegalArgumentException() .isThrownBy(() -> ProxyProvider.createFrom(properties)) .withMessage("only socks versions 4 and 5 supported but got 42"); }
// Evaluates the expression against a file's column metrics; true means the file may contain matches.
public boolean eval(ContentFile<?> file) { // TODO: detect the case where a column is missing from the file using file's max field id. return new MetricsEvalVisitor().eval(file); }
// With case sensitivity enabled, referencing "ID" (wrong case) must fail field binding.
@Test public void testCaseSensitiveIntegerNotEqRewritten() { assertThatThrownBy( () -> new InclusiveMetricsEvaluator(SCHEMA, not(equal("ID", 5)), true).eval(FILE)) .isInstanceOf(ValidationException.class) .hasMessageContaining("Cannot find field 'ID'"); }
// Converts a PMML SimpleSetPredicate into its KiePMML counterpart, mapping array type,
// boolean operator, extensions and values through the builder.
static KiePMMLSimpleSetPredicate getKiePMMLSimpleSetPredicate(final SimpleSetPredicate simpleSetPredicate) { return KiePMMLSimpleSetPredicate.builder(simpleSetPredicate.getField(), getKiePMMLExtensions(simpleSetPredicate.getExtensions()), ARRAY_TYPE.byName(simpleSetPredicate.getArray().getType().value()), IN_NOTIN.byName(simpleSetPredicate.getBooleanOperator().value())) .withValues(getObjectsFromArray(simpleSetPredicate.getArray())) .build(); }
// Round-trip check: a random SimpleSetPredicate converts into an equivalent KiePMML predicate.
@Test void getKiePMMLSimpleSetPredicate() { final SimpleSetPredicate toConvert = getRandomSimpleSetPredicate(); final KiePMMLSimpleSetPredicate retrieved = KiePMMLSimpleSetPredicateInstanceFactory.getKiePMMLSimpleSetPredicate(toConvert); commonVerifyKiePMMLSimpleSetPredicate(retrieved, toConvert); }
// Switches this processor context to active mode: rejects non-ACTIVE state managers, swaps in the
// task/collector/cache, then re-registers flush listeners on the new cache (order matters).
@Override public void transitionToActive(final StreamTask streamTask, final RecordCollector recordCollector, final ThreadCache newCache) { if (stateManager.taskType() != TaskType.ACTIVE) { throw new IllegalStateException("Tried to transition processor context to active but the state manager's " + "type was " + stateManager.taskType()); } this.streamTask = streamTask; this.collector = recordCollector; this.cache = newCache; addAllFlushListenersToNewCache(); }
// A global timestamped KV store obtained through an active context must be read-only:
// all mutating operations throw UnsupportedOperationException while reads pass through.
@Test public void globalTimestampedKeyValueStoreShouldBeReadOnly() { foreachSetUp(); when(stateManager.taskType()).thenReturn(TaskType.ACTIVE); when(stateManager.getGlobalStore(anyString())).thenReturn(null); final TimestampedKeyValueStore<String, Long> timestampedKeyValueStoreMock = mock(TimestampedKeyValueStore.class); when(stateManager.getGlobalStore("GlobalTimestampedKeyValueStore")).thenAnswer(answer -> timestampedKeyValueStoreMock(timestampedKeyValueStoreMock)); context = buildProcessorContextImpl(streamsConfig, stateManager); final StreamTask task = mock(StreamTask.class); context.transitionToActive(task, null, null); mockProcessorNodeWithLocalKeyValueStore(); doTest("GlobalTimestampedKeyValueStore", (Consumer<TimestampedKeyValueStore<String, Long>>) store -> { verifyStoreCannotBeInitializedOrClosed(store); checkThrowsUnsupportedOperation(store::flush, "flush()"); checkThrowsUnsupportedOperation(() -> store.put("1", ValueAndTimestamp.make(1L, 2L)), "put()"); checkThrowsUnsupportedOperation(() -> store.putIfAbsent("1", ValueAndTimestamp.make(1L, 2L)), "putIfAbsent()"); checkThrowsUnsupportedOperation(() -> store.putAll(Collections.emptyList()), "putAll()"); checkThrowsUnsupportedOperation(() -> store.delete("1"), "delete()"); assertEquals(VALUE_AND_TIMESTAMP, store.get(KEY)); assertEquals(timestampedRangeIter, store.range("one", "two")); assertEquals(timestampedAllIter, store.all()); assertEquals(VALUE, store.approximateNumEntries()); }); }
// Looks up the jar's entry class: the assembler-class manifest attribute wins over Main-Class.
@VisibleForTesting static Optional<String> findEntryClass(File jarFile) throws IOException { return findFirstManifestAttribute( jarFile, PackagedProgram.MANIFEST_ATTRIBUTE_ASSEMBLER_CLASS, PackagedProgram.MANIFEST_ATTRIBUTE_MAIN_CLASS); }
// A jar whose manifest only declares Main-Class must resolve to that class name.
@Test void testFindEntryClassMainClass() throws IOException { File jarFile = createJarFileWithManifest( ImmutableMap.of( PackagedProgram.MANIFEST_ATTRIBUTE_MAIN_CLASS, "MainClass")); Optional<String> entry = JarManifestParser.findEntryClass(jarFile); assertThat(entry).get().isEqualTo("MainClass"); }
// Cancels the source: clears the running flag and closes the current socket so a thread
// blocked in socketStream.read() wakes up (interrupt alone would not unblock it).
@Override public void cancel() { isRunning = false; // we need to close the socket as well, because the Thread.interrupt() function will // not wake the thread in the socketStream.read() method when blocked. Socket theSocket = this.currentSocket; if (theSocket != null) { IOUtils.closeSocket(theSocket); } }
// End-to-end check: two newline-delimited lines written to the socket are emitted as two
// elements, after which the source can be cancelled and joined cleanly.
@Test void testSocketSourceSimpleOutput() throws Exception { ServerSocket server = new ServerSocket(0); Socket channel = null; try { SocketTextStreamFunction source = new SocketTextStreamFunction(LOCALHOST, server.getLocalPort(), "\n", 0); SocketSourceThread runner = new SocketSourceThread(source, "test1", "check"); runner.start(); channel = NetUtils.acceptWithoutTimeout(server); OutputStreamWriter writer = new OutputStreamWriter(channel.getOutputStream()); writer.write("test1\n"); writer.write("check\n"); writer.flush(); runner.waitForNumElements(2); runner.cancel(); runner.interrupt(); runner.waitUntilDone(); channel.close(); } finally { if (channel != null) { IOUtils.closeQuietly(channel); } IOUtils.closeQuietly(server); } }
public static Object typeConvert(String tableName ,String columnName, String value, int sqlType, String mysqlType) { if (value == null || (value.equals("") && !(isText(mysqlType) || sqlType == Types.CHAR || sqlType == Types.VARCHAR || sqlType == Types.LONGVARCHAR))) { return null; } try { Object res; switch (sqlType) { case Types.INTEGER: res = Integer.parseInt(value); break; case Types.SMALLINT: res = Short.parseShort(value); break; case Types.BIT: case Types.TINYINT: res = Byte.parseByte(value); break; case Types.BIGINT: if (mysqlType.startsWith("bigint") && mysqlType.endsWith("unsigned")) { res = new BigInteger(value); } else { res = Long.parseLong(value); } break; // case Types.BIT: case Types.BOOLEAN: res = !"0".equals(value); break; case Types.DOUBLE: case Types.FLOAT: res = Double.parseDouble(value); break; case Types.REAL: res = Float.parseFloat(value); break; case Types.DECIMAL: case Types.NUMERIC: res = new BigDecimal(value); break; case Types.BINARY: case Types.VARBINARY: case Types.LONGVARBINARY: case Types.BLOB: res = value.getBytes("ISO-8859-1"); break; case Types.DATE: if (!value.startsWith("0000-00-00")) { java.util.Date date = Util.parseDate(value); if (date != null) { res = new Date(date.getTime()); } else { res = null; } } else { res = null; } break; case Types.TIME: { java.util.Date date = Util.parseDate(value); if (date != null) { res = new Time(date.getTime()); } else { res = null; } break; } case Types.TIMESTAMP: if (!value.startsWith("0000-00-00")) { java.util.Date date = Util.parseDate(value); if (date != null) { res = new Timestamp(date.getTime()); } else { res = null; } } else { res = null; } break; case Types.CLOB: default: res = value; break; } return res; } catch (Exception e) { logger.error("table: {} column: {}, failed convert type {} to {}", tableName, columnName, value, sqlType); return value; } }
// sqlType 5 is Types.SMALLINT, so "0" must convert to the Short value 0 regardless of the mysqlType string.
@Test public void typeConvertInputNotNullNotNullNotNullPositiveNotNullOutputZero3() { // Arrange final String tableName = "?????????"; final String columnName = "?"; final String value = "0"; final int sqlType = 5; final String mysqlType = "bigintiunsigned"; // Act final Object actual = JdbcTypeUtil.typeConvert(tableName, columnName, value, sqlType, mysqlType); // Assert result Assert.assertEquals((short)0, actual); }
/**
 * Hash derived from the qualifying names and the textual form of the result type;
 * seed 3 and multiplier 97 match the paired equals contract.
 */
@Override
public int hashCode() {
    int result = 3;
    result = 97 * result + (qualifyingNames == null ? 0 : qualifyingNames.hashCode());
    result = 97 * result + (resultType == null ? 0 : resultType.toString().hashCode());
    return result;
}
// With every component null the hash collapses to seed*97*97 (3 * 97 * 97).
@Test public void testHashCodeWithAllNulls() { SelectionParameters params = new SelectionParameters( null, null, null, null ); assertThat( params.hashCode() ).as( "All nulls hashCode" ).isEqualTo( 3 * 97 * 97 ); }
// Delegates to the mapping-aware overload using the default Java-to-argument-type table.
public static ParamType getSchemaFromType(final Type type) { return getSchemaFromType(type, JAVA_TO_ARG_TYPE); }
// Primitive int must map to the INTEGER param type.
@Test public void shouldGetIntegerSchemaForIntPrimitiveClass() { assertThat( UdfUtil.getSchemaFromType(int.class), equalTo(ParamTypes.INTEGER) ); }
// Removes the listener for the given URL after null-guarding both arguments; also drops any
// cached notifications for that URL so a later re-subscribe starts clean.
@Override public void unsubscribe(URL url, NotifyListener listener) { if (url == null) { throw new IllegalArgumentException("unsubscribe url == null"); } if (listener == null) { throw new IllegalArgumentException("unsubscribe listener == null"); } if (logger.isInfoEnabled()) { logger.info("Unsubscribe: " + url); } Set<NotifyListener> listeners = subscribed.get(url); if (listeners != null) { listeners.remove(listener); } // do not forget remove notified notified.remove(url); }
// A null listener must be rejected with IllegalArgumentException before any state changes.
@Test void testUnsubscribeIfNotifyNull() { Assertions.assertThrows(IllegalArgumentException.class, () -> { final AtomicReference<Boolean> notified = new AtomicReference<Boolean>(false); URL url = new ServiceConfigURL("dubbo", "192.168.0.1", 2200); abstractRegistry.unsubscribe(url, null); Assertions.fail("unsubscribe listener == null"); }); }
/**
 * Extracts the text between the last '[' and the last ']' of an attribute name.
 *
 * @return the argument text, or {@code null} when no brackets are present
 * @throws IllegalArgumentException when brackets are unbalanced or reversed
 */
public static String extractArgumentsFromAttributeName(String attributeNameWithArguments) {
    int openingIndex = StringUtil.lastIndexOf(attributeNameWithArguments, '[');
    int closingIndex = StringUtil.lastIndexOf(attributeNameWithArguments, ']');
    // A valid argument needs '[' after position 0 and ']' strictly after it.
    if (openingIndex > 0 && closingIndex > openingIndex) {
        return attributeNameWithArguments.substring(openingIndex + 1, closingIndex);
    }
    // Neither bracket present: the name simply carries no arguments.
    if (openingIndex < 0 && closingIndex < 0) {
        return null;
    }
    throw new IllegalArgumentException("Wrong argument input passed " + attributeNameWithArguments);
}
// A closing bracket without a matching opening bracket is invalid input.
@Test(expected = IllegalArgumentException.class)
public void extractArgument_wrongArguments_noArgument_noOpening() {
    extractArgumentsFromAttributeName("car.wheel]");
}
/**
 * Creates (or updates) a plugin handle. Requires the add permission.
 *
 * @param pluginHandleDTO the validated plugin handle payload
 * @return success result carrying the number of affected rows
 */
@PostMapping("")
@RequiresPermissions("system:pluginHandler:add")
public ShenyuAdminResult createPluginHandle(@Valid @RequestBody final PluginHandleDTO pluginHandleDTO) {
    Integer createCount = pluginHandleService.createOrUpdate(pluginHandleDTO);
    return ShenyuAdminResult.success(ShenyuResultMessage.CREATE_SUCCESS, createCount);
}
// POSTing a valid plugin-handle payload returns the create-success message.
@Test
public void testCreatePluginHandle() throws Exception {
    PluginHandleDTO pluginHandleDTO = new PluginHandleDTO();
    pluginHandleDTO.setPluginId("1213");
    pluginHandleDTO.setDataType(1);
    pluginHandleDTO.setField("f");
    pluginHandleDTO.setType(1);
    pluginHandleDTO.setSort(1);
    // Stub the service layer so only the controller wiring is under test.
    given(this.pluginHandleService.createOrUpdate(pluginHandleDTO)).willReturn(1);
    this.mockMvc.perform(MockMvcRequestBuilders.post("/plugin-handle/")
            .contentType(MediaType.APPLICATION_JSON)
            .content(GsonUtils.getInstance().toJson(pluginHandleDTO)))
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.message", is(ShenyuResultMessage.CREATE_SUCCESS)))
        .andReturn();
}
/** Signals end of input by delegating to the user function with the shared non-partitioned context. */
@Override
public void endInput() throws Exception {
    userFunction.endInput(nonPartitionedContext);
}
// endInput must invoke the user function's endInput callback, and records
// emitted via applyToAllPartitions must reach the operator output.
@Test
void testEndInput() throws Exception {
    AtomicInteger counter = new AtomicInteger();
    ProcessOperator<Integer, String> processOperator =
        new ProcessOperator<>(
            new OneInputStreamProcessFunction<Integer, String>() {
                @Override
                public void processRecord(
                    Integer record, Collector<String> output, PartitionedContext ctx) {
                    // do nothing.
                }

                @Override
                public void endInput(NonPartitionedContext<String> ctx) {
                    try {
                        ctx.applyToAllPartitions(
                            (out, context) -> {
                                counter.incrementAndGet();
                                out.collect("end");
                            });
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            });

    try (OneInputStreamOperatorTestHarness<Integer, String> testHarness =
            new OneInputStreamOperatorTestHarness<>(processOperator)) {
        testHarness.open();
        testHarness.endInput();
        Collection<StreamRecord<String>> recordOutput = testHarness.getRecordOutput();
        // One partition expected, so exactly one "end" record and one callback invocation.
        assertThat(recordOutput).containsExactly(new StreamRecord<>("end"));
        assertThat(counter).hasValue(1);
    }
}
/** Renders the trade as {@code Trade{type=..., index=..., price=..., amount=...}}. */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("Trade{");
    sb.append("type=").append(type)
      .append(", index=").append(index)
      .append(", price=").append(pricePerAsset)
      .append(", amount=").append(amount)
      .append('}');
    return sb.toString();
}
// toString must agree for equal trades and differ for unequal ones.
@Test
public void overrideToString() {
    assertEquals(opEquals1.toString(), opEquals2.toString());
    assertNotEquals(opEquals1.toString(), opNotEquals1.toString());
    assertNotEquals(opEquals1.toString(), opNotEquals2.toString());
}
/**
 * Votes for the item if it is already in the list; otherwise adds it as a new
 * weighted entry (reorganizing/evicting as needed).
 *
 * @return the existing or newly created weighted entry for {@code item}
 */
public WeightedItem<T> addOrVote(T item) {
    // Linear scan: vote for an existing entry when found.
    for (WeightedItem<T> candidate : list) {
        if (candidate.item.equals(item)) {
            voteFor(candidate);
            return candidate;
        }
    }
    // Not present: organize the list and append the new item.
    return organizeAndAdd(item);
}
// Adding the same item twice must vote for it rather than duplicate it.
@Test
public void testAddDoesNotDuplicate() {
    WeightedEvictableList<String> list = new WeightedEvictableList<>(3, 3);
    list.addOrVote("a");
    list.addOrVote("a");
    assertItemsInOrder(list, "a");
}
/**
 * Reads the JobManager CPU limit factor from the Flink configuration.
 *
 * @return the limit factor; guaranteed to be &gt;= 1
 * @throws IllegalArgumentException if the configured factor is below 1
 */
public double getJobManagerCPULimitFactor() {
    final double limitFactor = flinkConfig.get(KubernetesConfigOptions.JOB_MANAGER_CPU_LIMIT_FACTOR);
    checkArgument(
        limitFactor >= 1,
        "%s should be greater or equal to 1.",
        KubernetesConfigOptions.JOB_MANAGER_CPU_LIMIT_FACTOR.key());
    return limitFactor;
}
// The configured CPU limit factor is returned within floating-point tolerance.
@Test
void testGetJobManagerCPULimitFactor() {
    flinkConfig.set(
        KubernetesConfigOptions.JOB_MANAGER_CPU_LIMIT_FACTOR, JOB_MANAGER_CPU_LIMIT_FACTOR);
    assertThat(kubernetesJobManagerParameters.getJobManagerCPULimitFactor())
        .isEqualTo(JOB_MANAGER_CPU_LIMIT_FACTOR, within(0.00001));
}
/**
 * Updates the instance config's hostname and/or port if they differ from the
 * given values.
 *
 * @return {@code true} if either field was changed
 */
public static boolean updateHostnamePort(InstanceConfig instanceConfig, String hostname, int port) {
    boolean changed = false;

    if (!hostname.equals(instanceConfig.getHostName())) {
        LOGGER.info("Updating instance: {} with hostname: {}", instanceConfig.getId(), hostname);
        instanceConfig.setHostName(hostname);
        changed = true;
    }

    // Ports are stored as strings on the instance config.
    String newPort = Integer.toString(port);
    if (!newPort.equals(instanceConfig.getPort())) {
        LOGGER.info("Updating instance: {} with port: {}", instanceConfig.getId(), port);
        instanceConfig.setPort(newPort);
        changed = true;
    }

    return changed;
}
// updateHostnamePort must report true only when the hostname or port actually changes.
@Test
public void testUpdateHostName() {
    String instanceId = "Server_myInstance";
    InstanceConfig instanceConfig = new InstanceConfig(instanceId);
    assertEquals(instanceConfig.getInstanceName(), instanceId);
    assertNull(instanceConfig.getHostName());
    assertNull(instanceConfig.getPort());

    // First update: both fields change.
    assertTrue(HelixHelper.updateHostnamePort(instanceConfig, "myHost", 1234));
    assertEquals(instanceConfig.getInstanceName(), instanceId);
    assertEquals(instanceConfig.getHostName(), "myHost");
    assertEquals(instanceConfig.getPort(), "1234");

    // Hostname-only change.
    assertTrue(HelixHelper.updateHostnamePort(instanceConfig, "myHost2", 1234));
    assertEquals(instanceConfig.getInstanceName(), instanceId);
    assertEquals(instanceConfig.getHostName(), "myHost2");
    assertEquals(instanceConfig.getPort(), "1234");

    // Port-only change.
    assertTrue(HelixHelper.updateHostnamePort(instanceConfig, "myHost2", 2345));
    assertEquals(instanceConfig.getInstanceName(), instanceId);
    assertEquals(instanceConfig.getHostName(), "myHost2");
    assertEquals(instanceConfig.getPort(), "2345");

    // No-op update: must report false and leave values untouched.
    assertFalse(HelixHelper.updateHostnamePort(instanceConfig, "myHost2", 2345));
    assertEquals(instanceConfig.getInstanceName(), instanceId);
    assertEquals(instanceConfig.getHostName(), "myHost2");
    assertEquals(instanceConfig.getPort(), "2345");
}
/**
 * Compresses a URL into the Eddystone-URL compact byte encoding: a one-byte
 * protocol/www prefix, the hostname bytes, a single-byte code for well-known
 * TLDs (falling back to literal bytes), then any remaining slash and path.
 *
 * @param urlString the URL to compress; must match the Eddystone URL pattern
 * @return the compressed bytes, sized exactly to the encoded length
 * @throws MalformedURLException if {@code urlString} is null or does not match
 */
public static byte[] compress(String urlString) throws MalformedURLException {
    byte[] compressedBytes = null;
    if (urlString != null) {
        // Figure the compressed bytes can't be longer than the original string.
        byte[] byteBuffer = new byte[urlString.length()];
        int byteBufferIndex = 0;
        Arrays.fill(byteBuffer, (byte) 0x00);

        Pattern urlPattern = Pattern.compile(EDDYSTONE_URL_REGEX);
        Matcher urlMatcher = urlPattern.matcher(urlString);
        if (urlMatcher.matches()) {
            // www.
            String wwwdot = urlMatcher.group(EDDYSTONE_URL_WWW_GROUP);
            boolean haswww = (wwwdot != null);

            // Protocol.
            String rawProtocol = urlMatcher.group(EDDYSTONE_URL_PROTOCOL_GROUP);
            String protocol = rawProtocol.toLowerCase();
            if (protocol.equalsIgnoreCase(URL_PROTOCOL_HTTP)) {
                byteBuffer[byteBufferIndex] = (haswww ? EDDYSTONE_URL_PROTOCOL_HTTP_WWW : EDDYSTONE_URL_PROTOCOL_HTTP);
            } else {
                byteBuffer[byteBufferIndex] = (haswww ? EDDYSTONE_URL_PROTOCOL_HTTPS_WWW : EDDYSTONE_URL_PROTOCOL_HTTPS);
            }
            byteBufferIndex++;

            // Fully-qualified domain name (FQDN). This includes the hostname and any other components after the dots
            // but BEFORE the first single slash in the URL.
            // NOTE(review): getBytes() here uses the platform default charset — presumably
            // only ASCII hostnames are expected; confirm.
            byte[] hostnameBytes = urlMatcher.group(EDDYSTONE_URL_FQDN_GROUP).getBytes();
            String rawHostname = new String(hostnameBytes);
            String hostname = rawHostname.toLowerCase();
            String[] domains = hostname.split(Pattern.quote("."));
            boolean consumedSlash = false;
            if (domains != null) {
                // Write the hostname/subdomains prior to the last one. If there's only one (e. g. http://localhost)
                // then that's the only thing to write out.
                byte[] periodBytes = {'.'};
                int writableDomainsCount = (domains.length == 1 ? 1 : domains.length - 1);
                for (int domainIndex = 0; domainIndex < writableDomainsCount; domainIndex++) {
                    // Write out leading period, if necessary.
                    if (domainIndex > 0) {
                        System.arraycopy(periodBytes, 0, byteBuffer, byteBufferIndex, periodBytes.length);
                        byteBufferIndex += periodBytes.length;
                    }
                    byte[] domainBytes = domains[domainIndex].getBytes();
                    int domainLength = domainBytes.length;
                    System.arraycopy(domainBytes, 0, byteBuffer, byteBufferIndex, domainLength);
                    byteBufferIndex += domainLength;
                }

                // Is the TLD one that we can encode?
                if (domains.length > 1) {
                    String tld = "." + domains[domains.length - 1];
                    String slash = urlMatcher.group(EDDYSTONE_URL_SLASH_GROUP);
                    // Some TLD codes include the trailing slash (e.g. ".com/"), so try both forms.
                    String encodableTLDCandidate = (slash == null ? tld : tld + slash);
                    byte encodedTLDByte = encodedByteForTopLevelDomain(encodableTLDCandidate);
                    if (encodedTLDByte != TLD_NOT_ENCODABLE) {
                        byteBuffer[byteBufferIndex++] = encodedTLDByte;
                        // The slash was folded into the TLD code; don't emit it again below.
                        consumedSlash = (slash != null);
                    } else {
                        byte[] tldBytes = tld.getBytes();
                        int tldLength = tldBytes.length;
                        System.arraycopy(tldBytes, 0, byteBuffer, byteBufferIndex, tldLength);
                        byteBufferIndex += tldLength;
                    }
                }
            }

            // Optional slash.
            if (! consumedSlash) {
                String slash = urlMatcher.group(EDDYSTONE_URL_SLASH_GROUP);
                if (slash != null) {
                    int slashLength = slash.length();
                    System.arraycopy(slash.getBytes(), 0, byteBuffer, byteBufferIndex, slashLength);
                    byteBufferIndex += slashLength;
                }
            }

            // Path.
            String path = urlMatcher.group(EDDYSTONE_URL_PATH_GROUP);
            if (path != null) {
                int pathLength = path.length();
                System.arraycopy(path.getBytes(), 0, byteBuffer, byteBufferIndex, pathLength);
                byteBufferIndex += pathLength;
            }

            // Copy the result.
            compressedBytes = new byte[byteBufferIndex];
            System.arraycopy(byteBuffer, 0, compressedBytes, 0, compressedBytes.length);
        } else {
            throw new MalformedURLException();
        }
    } else {
        throw new MalformedURLException();
    }
    return compressedBytes;
}
// ".ca" is not an encodable TLD, so it is written out byte-for-byte after the 0x02 ("http://") prefix.
@Test
public void testCompressWithDotCaTLD() throws MalformedURLException {
    String testURL = "http://google.ca";
    byte[] expectedBytes = {0x02, 'g', 'o', 'o', 'g', 'l', 'e', '.', 'c', 'a'};
    assertTrue(Arrays.equals(expectedBytes, UrlBeaconUrlCompressor.compress(testURL)));
}
/**
 * Validates a new capacity-scheduler configuration by spinning up a throwaway
 * CapacityScheduler initialized with the old config and reinitializing it (in
 * validation mode) with the new one. An invalid config surfaces as an
 * IOException from {@code reinitialize}.
 *
 * @param oldConfParam the currently active configuration (deep-copied, not mutated)
 * @param newConf      the candidate configuration to validate
 * @param rmContext    the live RM context, used for scheduler nodes and wiring
 * @return {@code true} if the new configuration is valid
 * @throws IOException if the new configuration fails validation
 */
public static boolean validateCSConfiguration(
    final Configuration oldConfParam, final Configuration newConf,
    final RMContext rmContext) throws IOException {
  // ensure that the oldConf is deep copied
  Configuration oldConf = new Configuration(oldConfParam);
  // Validation mode keeps the throwaway scheduler's metrics out of the
  // default metrics system and queue-metrics cache.
  QueueMetrics.setConfigurationValidation(oldConf, true);
  QueueMetrics.setConfigurationValidation(newConf, true);
  CapacityScheduler liveScheduler = (CapacityScheduler) rmContext.getScheduler();
  CapacityScheduler newCs = new CapacityScheduler();
  try {
    //TODO: extract all the validation steps and replace reinitialize with
    //the specific validation steps
    newCs.setConf(oldConf);
    newCs.setRMContext(rmContext);
    newCs.init(oldConf);
    newCs.addNodes(liveScheduler.getAllNodes());
    newCs.reinitialize(newConf, rmContext, true);
    return true;
  } finally {
    // Always stop the throwaway scheduler so it releases its resources.
    newCs.stop();
  }
}
@Test public void testValidateDoesNotModifyTheDefaultMetricsSystem() throws Exception { try { YarnConfiguration conf = new YarnConfiguration(CapacitySchedulerConfigGeneratorForTest .createBasicCSConfiguration()); conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class, ResourceScheduler.class); mockRM = new MockRM(conf); cs = (CapacityScheduler) mockRM.getResourceScheduler(); mockRM.start(); cs.start(); RMContext rmContext = mockRM.getRMContext(); Configuration oldConfig = cs.getConfig(); final Map<String, QueueMetrics> cache = QueueMetrics.getQueueMetrics(); final MetricsSystem ms = DefaultMetricsSystem.instance(); QueueMetrics origQM1 = cache.get("root.test1"); QueueMetrics origQM2 = cache.get("root.test2"); Assert.assertNotNull("Original queues should be found in the cache", origQM1); Assert.assertNotNull("Original queues should be found in the cache", origQM2); QueueMetrics origPQM1 = cache.get("default.root.test1"); QueueMetrics origPQM2 = cache.get("default.root.test2"); Assert.assertNotNull("Original queues should be found in the cache (PartitionQueueMetrics)", origPQM1); Assert.assertNotNull("Original queues should be found in the cache (PartitionQueueMetrics)", origPQM2); MetricsSource origMS1 = ms.getSource("QueueMetrics,q0=root,q1=test1"); MetricsSource origMS2 = ms.getSource("QueueMetrics,q0=root,q1=test2"); Assert.assertNotNull("Original queues should be found in the Metrics System", origMS1); Assert.assertNotNull("Original queues should be found in the Metrics System", origMS2); MetricsSource origPMS1 = ms .getSource("PartitionQueueMetrics,partition=,q0=root,q1=test1"); MetricsSource origPMS2 = ms .getSource("PartitionQueueMetrics,partition=,q0=root,q1=test2"); Assert.assertNotNull( "Original queues should be found in Metrics System (PartitionQueueMetrics)", origPMS1); Assert.assertNotNull( "Original queues should be found in Metrics System (PartitionQueueMetrics)", origPMS2); Configuration newConfig = new Configuration(oldConfig); 
newConfig .set("yarn.scheduler.capacity.root.queues", "test1, test2, test3"); newConfig .set("yarn.scheduler.capacity.root.test3.state", "RUNNING"); newConfig .set("yarn.scheduler.capacity.root.test3.capacity", "30"); newConfig .set("yarn.scheduler.capacity.root.test1.capacity", "20"); boolean isValidConfig = CapacitySchedulerConfigValidator .validateCSConfiguration(oldConfig, newConfig, rmContext); Assert.assertTrue(isValidConfig); Assert.assertFalse("Validated new queue should not be in the cache", cache.containsKey("root.test3")); Assert.assertFalse("Validated new queue should not be in the cache (PartitionQueueMetrics)", cache.containsKey("default.root.test3")); Assert.assertNull("Validated new queue should not be in the Metrics System", ms.getSource("QueueMetrics,q0=root,q1=test3")); Assert.assertNull( "Validated new queue should not be in Metrics System (PartitionQueueMetrics)", ms .getSource("PartitionQueueMetrics,partition=,q0=root,q1=test3")); // Config validation should not change the existing // objects in the cache and the metrics system Assert.assertEquals(origQM1, cache.get("root.test1")); Assert.assertEquals(origQM2, cache.get("root.test2")); Assert.assertEquals(origPQM1, cache.get("default.root.test1")); Assert.assertEquals(origPQM1, cache.get("default.root.test1")); Assert.assertEquals(origMS1, ms.getSource("QueueMetrics,q0=root,q1=test1")); Assert.assertEquals(origMS2, ms.getSource("QueueMetrics,q0=root,q1=test2")); Assert.assertEquals(origPMS1, ms.getSource("PartitionQueueMetrics,partition=,q0=root,q1=test1")); Assert.assertEquals(origPMS2, ms.getSource("PartitionQueueMetrics,partition=,q0=root,q1=test2")); } finally { mockRM.stop(); } }
/**
 * Compares two three-part version strings ("x.y.z", optionally "x.y.z-suffix")
 * component by component using STRING_COMPARATOR; any "-suffix" on the patch
 * component is ignored.
 *
 * @throws IllegalArgumentException if either version does not have exactly three parts
 */
public static int compareVersion(final String versionA, final String versionB) {
    final String[] partsA = versionA.split("\\.");
    final String[] partsB = versionB.split("\\.");
    final int expectedParts = 3;
    if (partsA.length != expectedParts || partsB.length != expectedParts) {
        throw new IllegalArgumentException("version must be like x.y.z(-beta)");
    }
    // Major and minor components compare directly.
    for (int i = 0; i < 2; i++) {
        int cmp = Objects.compare(partsA[i], partsB[i], STRING_COMPARATOR);
        if (cmp != 0) {
            return cmp;
        }
    }
    // Patch component may carry a "-suffix" (e.g. "1-beta"); compare only the part before it.
    return Objects.compare(partsA[2].split("-")[0], partsB[2].split("-")[0], STRING_COMPARATOR);
}
// Versions with four components are rejected even when both sides are identical.
@Test
void testVersionCompareVersionNotValid1() {
    assertThrows(IllegalArgumentException.class, () -> {
        VersionUtils.compareVersion("1.2.1.1", "1.2.1.1");
    });
}
@Deprecated public static boolean parseBoolean(final String text, final boolean defaultValue) { // This methods expects |text| is not null. final String textTrimmed = text.trim(); if ("true".equals(textTrimmed) || "True".equals(textTrimmed)) { return true; } if ("false".equals(textTrimmed) || "False".equals(textTrimmed)) { return false; } if (textTrimmed.length() == 0) { return defaultValue; } if ("null".equals(textTrimmed)) { return defaultValue; } throw new IllegalArgumentException("Only \"true\" or \"false\" is recognized."); }
/**
 * Exercises parseBoolean across the recognized spellings, whitespace, empty
 * input, and "null".
 *
 * <p>Fix: the capitalized-false cases were accidentally duplicated lowercase —
 * "false" appeared twice (and " false " twice) where "False" / " False " were
 * clearly intended, leaving the capitalized spelling untested.
 */
@Test
public void testParseBoolean() {
    assertBoolean("true", true, false);
    assertBoolean("True", true, false);
    assertBoolean("false", false, false);
    assertBoolean("False", false, false);
    assertBoolean(" true", true, true);
    assertBoolean(" True", true, true);
    assertBoolean(" false ", false, true);
    assertBoolean(" False ", false, true);
    // Empty and blank input falls back to the default value.
    assertBoolean("", false, false);
    assertBoolean("", true, true);
    assertBoolean(" ", false, false);
    assertBoolean(" ", true, true);
    // "null" (with or without surrounding whitespace) also falls back to the default.
    assertBoolean("null", false, false);
    assertBoolean("null", true, true);
    assertBoolean(" null ", false, false);
    assertBoolean(" null ", true, true);
}
/**
 * Returns a byte buffer of at least {@code size} bytes, reusing the cached
 * decoding buffer when it is large enough (the cache slot is cleared so the
 * buffer is not handed out twice).
 */
public byte[] allocDecodeBuffer(int size) {
    byte[] cached = decodingBuffer;
    if (cached != null && cached.length >= size) {
        // Hand out the cached buffer and clear the slot to prevent sharing.
        decodingBuffer = null;
        return cached;
    }
    // No usable cached buffer: allocate fresh.
    return new byte[size];
}
// A freshly allocated decode buffer has exactly the requested size; release returns it to the recycler.
@Test
public void allocDecodeBuffer() {
    byte[] b = BufferRecycler.instance().allocDecodeBuffer(10);
    Assert.assertEquals(10, b.length);
    BufferRecycler.instance().releaseDecodeBuffer(b);
}
/**
 * Validates a collection of scope values: each entry is validated via
 * {@code validateString}, duplicates are rejected, and the result is returned
 * as an unmodifiable snapshot independent of the caller's collection.
 *
 * @throws ValidateException if {@code scopes} is null or contains a duplicate
 */
public static Set<String> validateScopes(String scopeClaimName, Collection<String> scopes) throws ValidateException {
    if (scopes == null) {
        throw new ValidateException(String.format("%s value must be non-null", scopeClaimName));
    }

    Set<String> validated = new HashSet<>();
    for (String rawScope : scopes) {
        String scope = validateString(scopeClaimName, rawScope);
        // add() returns false when the element was already present.
        if (!validated.add(scope)) {
            throw new ValidateException(String.format("%s value must not contain duplicates - %s already present", scopeClaimName, scope));
        }
    }

    return Collections.unmodifiableSet(validated);
}
// The returned scope set must be a snapshot: later mutations of the caller's
// collection must not be reflected, and the result itself is unmodifiable.
@Test
public void testValidateScopesResultIsImmutable() {
    SortedSet<String> callerSet = new TreeSet<>(Arrays.asList("a", "b", "c"));
    Set<String> scopes = ClaimValidationUtils.validateScopes("scope", callerSet);
    assertEquals(3, scopes.size());

    // Adding to the caller's set does not affect the validated set.
    callerSet.add("d");
    assertEquals(4, callerSet.size());
    assertTrue(callerSet.contains("d"));
    assertEquals(3, scopes.size());
    assertFalse(scopes.contains("d"));

    // Removing from the caller's set does not affect the validated set.
    callerSet.remove("c");
    assertEquals(3, callerSet.size());
    assertFalse(callerSet.contains("c"));
    assertEquals(3, scopes.size());
    assertTrue(scopes.contains("c"));

    // Even clearing the caller's set leaves the validated set intact.
    callerSet.clear();
    assertEquals(0, callerSet.size());
    assertEquals(3, scopes.size());
}
/** @return the metric registry backing this endpoint */
public MetricRegistry getRegistry() {
    return registry;
}
// The endpoint must expose the registry it was constructed with.
@Test
public void testGetRegistry() {
    assertThat(endpoint.getRegistry(), is(registry));
}
/**
 * Returns the step metadata classes this analyzer supports.
 *
 * @return a mutable set containing only {@link ExcelInputMeta}
 */
@Override
public Set<Class<? extends BaseStepMeta>> getSupportedSteps() {
  // Avoid double-brace initialization: it creates an anonymous HashSet subclass
  // that retains a hidden reference to the enclosing analyzer instance.
  Set<Class<? extends BaseStepMeta>> supportedSteps = new HashSet<>();
  supportedSteps.add( ExcelInputMeta.class );
  return supportedSteps;
}
/** The analyzer must support exactly the ExcelInputMeta step type. */
@Test
public void testGetSupportedSteps() {
    ExcelInputStepAnalyzer analyzer = new ExcelInputStepAnalyzer();
    Set<Class<? extends BaseStepMeta>> types = analyzer.getSupportedSteps();
    assertNotNull( types );
    // Fix: JUnit's assertEquals takes (expected, actual) — the original had them reversed,
    // which produces a misleading failure message.
    assertEquals( 1, types.size() );
    assertTrue( types.contains( ExcelInputMeta.class ) );
}
/**
 * Parses a raw User-Agent header string into a structured {@link UserAgent}.
 *
 * @param userAgentString the raw User-Agent header value
 * @return the parsed user-agent information
 */
public static UserAgent parse(String userAgentString) {
    return UserAgentParser.parse(userAgentString);
}
// A Chromium-based Edge UA string must resolve to MSEdge with Webkit engine and Windows 10.
@Test
public void parseEdgeTest() {
    final String uaStr = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.69 Safari/537.36 Edg/81.0.416.34";
    final UserAgent ua = UserAgentUtil.parse(uaStr);
    assertEquals("MSEdge", ua.getBrowser().toString());
    assertEquals("81.0.416.34", ua.getVersion());
    assertEquals("Webkit", ua.getEngine().toString());
    assertEquals("537.36", ua.getEngineVersion());
    assertEquals("Windows 10 or Windows Server 2016", ua.getOs().toString());
    assertEquals("10.0", ua.getOsVersion());
    assertEquals("Windows", ua.getPlatform().toString());
    assertFalse(ua.isMobile());
}
/**
 * Sets the predicate configuration for this query cache.
 *
 * @param predicateConfig the predicate config; must not be null
 * @return this config instance, for chaining
 * @throws NullPointerException if {@code predicateConfig} is null
 */
public QueryCacheConfig setPredicateConfig(PredicateConfig predicateConfig) {
    this.predicateConfig = checkNotNull(predicateConfig, "predicateConfig can not be null");
    return this;
}
// Null predicate configs are rejected eagerly with NullPointerException.
@Test(expected = NullPointerException.class)
public void testSetPredicate_throwsException_whenPredicateNull() {
    QueryCacheConfig config = new QueryCacheConfig();
    config.setPredicateConfig(null);
}
/** @return an empty autoscaling result carrying an empty status message */
public static Autoscaling empty() {
    return empty("");
}
// With group size pinned to 1, high CPU load must scale node count up while
// memory and disk per node are scaled down simultaneously.
@Test
public void test_autoscaling_group_size_1() {
    var min = new ClusterResources( 2, 2, new NodeResources(1, 1, 1, 1));
    var now = new ClusterResources(5, 5, new NodeResources(3, 100, 100, 1));
    var max = new ClusterResources(20, 20, new NodeResources(10, 1000, 1000, 1));
    var fixture = DynamicProvisioningTester.fixture()
                                           .awsProdSetup(true)
                                           .initialResources(Optional.of(now))
                                           .capacity(Capacity.from(min, max, IntRange.of(1), false, true, Optional.empty(), ClusterInfo.empty()))
                                           .build();
    fixture.tester().clock().advance(Duration.ofDays(2));
    fixture.setScalingDuration(Duration.ofHours(6));
    // Sustained 90% CPU load over 120 measurements triggers the upscale.
    fixture.loader().applyCpuLoad(0.9, 120);
    fixture.tester().assertResources("Scaling up to 2 nodes, scaling memory and disk down at the same time",
                                     8, 8, 8.6, 78.6, 235.8,
                                     fixture.autoscale());
}
/**
 * Applies this visitor to the two schemas, wrapping each in a row field type
 * and starting from the empty context.
 */
public final T apply(Schema left, Schema right) {
    return visit(this, Context.EMPTY, FieldType.row(left), FieldType.row(right));
}
// LEFT and RIGHT schemas share exactly six fields.
@Test
public void testCountCommonFields() {
    assertEquals(6, new CountCommonFields().apply(LEFT, RIGHT).intValue());
}
/**
 * Resolves the application's main class with a three-stage strategy:
 * <ol>
 *   <li>an explicitly configured main class (must be a valid Java class name),</li>
 *   <li>the main class declared by the project's JAR plugin, if valid,</li>
 *   <li>scanning all class files for {@code main} methods via MainClassFinder.</li>
 * </ol>
 *
 * @param configuredMainClass the user-configured main class, or null to infer
 * @param projectProperties   project accessors for logging, plugin info, and class files
 * @return the resolved fully-qualified main class name
 * @throws MainClassInferenceException if the configured class is invalid, no
 *         main class is found, or multiple candidates are found
 * @throws IOException on failure reading class files
 */
public static String resolveMainClass(
    @Nullable String configuredMainClass, ProjectProperties projectProperties)
    throws MainClassInferenceException, IOException {
  // Stage 1: an explicit configuration wins, but must be a valid class name.
  if (configuredMainClass != null) {
    if (isValidJavaClass(configuredMainClass)) {
      return configuredMainClass;
    }
    throw new MainClassInferenceException(
        HelpfulSuggestions.forMainClassNotFound(
            "'mainClass' configured in "
                + projectProperties.getPluginName()
                + " is not a valid Java class: "
                + configuredMainClass,
            projectProperties.getPluginName()));
  }

  projectProperties.log(
      LogEvent.info(
          "Searching for main class... Add a 'mainClass' configuration to '"
              + projectProperties.getPluginName()
              + "' to improve build speed."));

  // Stage 2: fall back to the JAR plugin's declared main class when valid.
  String mainClassFromJarPlugin = projectProperties.getMainClassFromJarPlugin();
  if (mainClassFromJarPlugin != null && isValidJavaClass(mainClassFromJarPlugin)) {
    return mainClassFromJarPlugin;
  }

  if (mainClassFromJarPlugin != null) {
    projectProperties.log(
        LogEvent.warn(
            "'mainClass' configured in "
                + projectProperties.getJarPluginName()
                + " is not a valid Java class: "
                + mainClassFromJarPlugin));
  }

  projectProperties.log(
      LogEvent.info(
          "Could not find a valid main class from "
              + projectProperties.getJarPluginName()
              + "; looking into all class files to infer main class."));

  // Stage 3: scan all class files; exactly one main class must be found.
  MainClassFinder.Result mainClassFinderResult =
      MainClassFinder.find(projectProperties.getClassFiles(), projectProperties::log);
  switch (mainClassFinderResult.getType()) {
    case MAIN_CLASS_FOUND:
      return mainClassFinderResult.getFoundMainClass();

    case MAIN_CLASS_NOT_FOUND:
      throw new MainClassInferenceException(
          HelpfulSuggestions.forMainClassNotFound(
              "Main class was not found", projectProperties.getPluginName()));

    case MULTIPLE_MAIN_CLASSES:
      throw new MainClassInferenceException(
          HelpfulSuggestions.forMainClassNotFound(
              "Multiple valid main classes were found: "
                  + String.join(", ", mainClassFinderResult.getFoundMainClasses()),
              projectProperties.getPluginName()));

    default:
      // Result.Type is exhaustive above; this is unreachable.
      throw new IllegalStateException("Cannot reach here");
  }
}
// A valid configured main class is returned as-is, with no inference logging.
@Test
public void testResolveMainClass_validMainClassConfigured()
    throws MainClassInferenceException, IOException {
  Assert.assertEquals(
      "configured.main.class",
      MainClassResolver.resolveMainClass("configured.main.class", mockProjectProperties));
  Mockito.verify(mockProjectProperties, Mockito.never()).log(Mockito.any());
}
/**
 * Applies this SSL configuration to the given configurable: resolves and sets
 * the enabled protocols and cipher suites, then applies the optional
 * client-auth and hostname-verification settings.
 */
public void configure(SSLConfigurable socket) {
    // Resolve enabled protocols/ciphers from the supported and default sets.
    String[] protocols = enabledProtocols(
        socket.getSupportedProtocols(), socket.getDefaultProtocols());
    socket.setEnabledProtocols(protocols);

    String[] cipherSuites = enabledCipherSuites(
        socket.getSupportedCipherSuites(), socket.getDefaultCipherSuites());
    socket.setEnabledCipherSuites(cipherSuites);

    // Tri-state settings: only apply when explicitly configured (non-null).
    if (isNeedClientAuth() != null) {
        socket.setNeedClientAuth(isNeedClientAuth());
    }
    if (isWantClientAuth() != null) {
        socket.setWantClientAuth(isWantClientAuth());
    }
    if (hostnameVerification != null) {
        addInfo("hostnameVerification=" + hostnameVerification);
        socket.setHostnameVerification(hostnameVerification);
    }
}
// When no cipher suites are explicitly configured, the defaults are passed through unchanged.
@Test
public void testPassDefaultCipherSuites() throws Exception {
    final String[] cipherSuites = new String[] { "A" };
    configurable.setDefaultCipherSuites(cipherSuites);
    configuration.configure(configurable);
    assertTrue(Arrays.equals(cipherSuites, configurable.getEnabledCipherSuites()));
}
/**
 * Returns the cached UFS locations for the block, or {@code null} when the
 * block is not present in the cache (no loading is triggered).
 */
@Override
public List<String> get(long blockId) {
    return mCache.getIfPresent(blockId);
}
/**
 * Cache miss returns null; the loading overload populates the cache; a later
 * plain get hits the cache; invalidation empties it again.
 *
 * <p>Fix: removed a leftover debug loop that printed every UFS location to
 * stdout — it asserted nothing and only polluted test output.
 */
@Test
public void get() throws Exception {
    final long blockId = IdUtils.getRandomNonNegativeLong();
    final AlluxioURI fileUri = new AlluxioURI("/mnt/file");
    final String localFilePath = new AlluxioURI(mLocalUfsPath).join("file").getPath();
    mLocalUfs.create(localFilePath);
    final List<String> ufsLocations = mLocalUfs.getFileLocations(localFilePath);

    // Plain get on a cold cache is a miss.
    Assert.assertNull(mUfsBlockLocationCache.get(blockId));
    // Loading get populates the cache and returns the UFS locations.
    List<String> locations = mUfsBlockLocationCache.get(blockId, fileUri, 0);
    Assert.assertArrayEquals(ufsLocations.toArray(), locations.toArray());
    // Subsequent plain get hits the cache.
    locations = mUfsBlockLocationCache.get(blockId);
    Assert.assertArrayEquals(ufsLocations.toArray(), locations.toArray());
    // Invalidation restores the miss behavior.
    mUfsBlockLocationCache.invalidate(blockId);
    Assert.assertNull(mUfsBlockLocationCache.get(blockId));
}
/**
 * Rounds a long value. Longs have no fractional part, so the value is
 * returned unchanged (autoboxed).
 */
@Udf
public Long round(@UdfParameter final long val) {
    return val;
}
/**
 * round must return null when the decimal-places argument is null, for both
 * double and BigDecimal inputs.
 *
 * <p>Fix: corrected the typo in the test method name ("shoulld" -&gt; "should").
 */
@Test
public void shouldHandleNullDecimalPlaces() {
    assertThat(udf.round(1.75d, null), is(nullValue()));
    assertThat(udf.round(new BigDecimal("1.75"), null), is(nullValue()));
}
/**
 * Merges the given properties into this configuration, tagging each with the
 * given source. Unrecognized keys are registered as custom PropertyKeys.
 *
 * @param properties the properties to merge; null or empty is a no-op
 * @param source the provenance recorded for each merged property
 */
public void merge(Map<?, ?> properties, Source source) {
    if (properties == null || properties.isEmpty()) {
        return;
    }
    // merge the properties
    for (Map.Entry<?, ?> entry : properties.entrySet()) {
        String key = entry.getKey().toString().trim();
        String value = entry.getValue() == null ? null : entry.getValue().toString().trim();
        PropertyKey propertyKey;
        if (PropertyKey.isValid(key)) {
            propertyKey = PropertyKey.fromString(key);
        } else {
            // Add unrecognized properties
            LOG.debug("Property {} from source {} is unrecognized", key, source);
            // Workaround for issue https://alluxio.atlassian.net/browse/ALLUXIO-3108
            // This will register the key as a valid PropertyKey
            // TODO(adit): Do not add properties unrecognized by Ufs extensions when Configuration
            // is made dynamic
            propertyKey = PropertyKey.getOrBuildCustom(key);
        }
        put(propertyKey, propertyKey.parseValue(value), source);
    }
    // The stored configuration hash no longer reflects the contents.
    mHash.markOutdated();
}
// Merged keys take the merged source and value; untouched keys keep their defaults.
@Test
public void merge() {
    PropertyKey newKey = stringBuilder("mergeNew").setDefaultValue("value3").build();
    Properties sysProp = new Properties();
    sysProp.put(mKeyWithValue, "value1");
    sysProp.put(mKeyWithoutValue, "value2");
    mProperties.merge(sysProp, Source.SYSTEM_PROPERTY);
    // Merged keys are now attributed to the system-property source.
    assertEquals(Source.SYSTEM_PROPERTY, mProperties.getSource(mKeyWithValue));
    assertEquals(Source.SYSTEM_PROPERTY, mProperties.getSource(mKeyWithoutValue));
    // A key not present in the merged properties keeps its default source/value.
    assertEquals(Source.DEFAULT, mProperties.getSource(newKey));
    assertEquals("value1", mProperties.get(mKeyWithValue));
    assertEquals("value2", mProperties.get(mKeyWithoutValue));
    assertEquals("value3", mProperties.get(newKey));
}
/**
 * Creates mutation info from a numeric sequence number, stored internally as
 * its hexadecimal string representation.
 *
 * @param sequenceNumber must be non-negative
 * @throws IllegalArgumentException if {@code sequenceNumber} is negative
 * @deprecated prefer the non-deprecated factory overload
 */
@Deprecated
public static RowMutationInformation of(MutationType mutationType, long sequenceNumber) {
    checkArgument(sequenceNumber >= 0, "sequenceNumber must be non-negative");
    return new AutoValue_RowMutationInformation(
        mutationType, null, Long.toHexString(sequenceNumber));
}
// A negative sequence number is rejected with a descriptive message.
@Test
public void givenLong_SQL_LT_Zero_throws() {
    IllegalArgumentException error =
        assertThrows(
            IllegalArgumentException.class,
            () -> RowMutationInformation.of(RowMutationInformation.MutationType.UPSERT, -1L));
    assertEquals("sequenceNumber must be non-negative", error.getMessage());
}
/**
 * Normalizes query option keys case-insensitively: keys recognized by
 * CONFIG_RESOLVER (matched on their lower-cased form) are replaced with their
 * canonical spelling; unrecognized keys are kept verbatim.
 *
 * @throws the recorded class-load error if option resolution failed to initialize
 */
public static Map<String, String> resolveCaseInsensitiveOptions(Map<String, String> queryOptions) {
    if (CLASS_LOAD_ERROR != null) {
        throw CLASS_LOAD_ERROR;
    }
    Map<String, String> resolved = new HashMap<>();
    for (Map.Entry<String, String> entry : queryOptions.entrySet()) {
        String rawKey = entry.getKey();
        // Use the canonical option name when the lower-cased key is known;
        // otherwise keep the caller-supplied key untouched.
        String canonicalKey = CONFIG_RESOLVER.getOrDefault(rawKey.toLowerCase(), rawKey);
        resolved.put(canonicalKey, entry.getValue());
    }
    return resolved;
}
// Mixed-case option keys must be mapped to their canonical names with values preserved.
@Test
public void shouldConvertCaseInsensitiveMapToUseCorrectValues() {
    // Given:
    Map<String, String> configs = ImmutableMap.of(
        "ENABLENullHandling", "true",
        "useMULTISTAGEEngine", "false"
    );

    // When:
    Map<String, String> resolved = QueryOptionsUtils.resolveCaseInsensitiveOptions(configs);

    // Then:
    Assert.assertEquals(resolved.get(CommonConstants.Broker.Request.QueryOptionKey.ENABLE_NULL_HANDLING), "true");
    Assert.assertEquals(resolved.get(CommonConstants.Broker.Request.QueryOptionKey.USE_MULTISTAGE_ENGINE), "false");
}
@Override public void appendDataInfluence(String entityName, String entityId, String fieldName, String fieldCurrentValue) { // might be if (traceContext.tracer() == null) { return; } if (traceContext.tracer().getActiveSpan() == null) { return; } String spanId = traceContext.tracer().getActiveSpan().spanId(); OpType type = traceContext.tracer().getActiveSpan().getOpType(); ApolloAuditLogDataInfluence.Builder builder = ApolloAuditLogDataInfluence.builder().spanId(spanId) .entityName(entityName).entityId(entityId).fieldName(fieldName); if (type == null) { return; } switch (type) { case CREATE: case UPDATE: builder.newVal(fieldCurrentValue); break; case DELETE: builder.oldVal(fieldCurrentValue); } dataInfluenceService.save(builder.build()); }
// For a CREATE span, the field value must be recorded as the NEW value
// (old value null) and carry the active span's id.
@Test
public void testAppendDataInfluenceCaseCreateOrUpdate() {
    {
        // Arrange an active span of type CREATE.
        ApolloAuditSpan span = Mockito.mock(ApolloAuditSpan.class);
        Mockito.when(tracer.getActiveSpan()).thenReturn(span);
        Mockito.when(span.spanId()).thenReturn(spanId);
        Mockito.when(span.getOpType()).thenReturn(create);
    }
    api.appendDataInfluence(entityName, entityId, fieldName, fieldCurrentValue);

    Mockito.verify(dataInfluenceService, Mockito.times(1)).save(influenceCaptor.capture());
    ApolloAuditLogDataInfluence capturedInfluence = influenceCaptor.getValue();

    assertEquals(entityId, capturedInfluence.getInfluenceEntityId());
    assertEquals(entityName, capturedInfluence.getInfluenceEntityName());
    assertEquals(fieldName, capturedInfluence.getFieldName());
    assertNull(capturedInfluence.getFieldOldValue());
    assertEquals(fieldCurrentValue, capturedInfluence.getFieldNewValue());
    assertEquals(spanId, capturedInfluence.getSpanId());
}
/**
 * Compiles the parsed node tree into its final string form, resolving
 * variable references recursively.
 *
 * @throws ScanException if the node tree cannot be compiled
 */
public String transform() throws ScanException {
    StringBuilder stringBuilder = new StringBuilder();
    compileNode(node, stringBuilder, new Stack<Node>());
    return stringBuilder.toString();
}
// An undefined variable whose default is an empty-valued variable resolves to the empty string.
@Test
public void emptyDefault() throws ScanException {
    propertyContainer0.putProperty("empty", "");
    String input = "a=${undef:-${empty}}";
    Node node = makeNode(input);
    NodeToStringTransformer nodeToStringTransformer = new NodeToStringTransformer(node, propertyContainer0);
    Assertions.assertEquals("a=", nodeToStringTransformer.transform());
}