focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Serializes the given object into its JSON string representation.
 *
 * @param object the object to serialize; handling of {@code null} is delegated
 * @return the JSON text produced by {@link JSONSerializer#serialize}
 */
public static String toJSONString(Object object) {
    final String json = JSONSerializer.serialize(object);
    return json;
}
@Test
public void testBeanWithMapSerialization() {
    // Serializing a bean whose map holds a bare Object must not replace or
    // mutate the map contents: every stored value stays a plain Object.
    TestJsonBean bean = new TestJsonBean();
    bean.setName("xxxx");
    Map<String, Object> map = new HashMap<String, Object>();
    map.put("key", new Object());
    bean.setMap(map);
    String jsonString = JSON.toJSONString(bean, true);
    for (Object value : bean.getMap().values()) {
        Assert.assertEquals(value.getClass(), Object.class);
    }
}
/**
 * Flattens a list of load assignments into the union of their service instances.
 * Null entries in the input list are silently skipped.
 *
 * @param loadAssignments load assignments to convert; elements may be null
 * @return the deduplicated set of service instances across all assignments
 */
public static Set<ServiceInstance> getServiceInstances(
        List<ClusterLoadAssignment> loadAssignments) {
    return loadAssignments.stream()
            .filter(assignment -> assignment != null)
            .flatMap(assignment ->
                    getServiceInstancesFromLoadAssignment(assignment).stream())
            .collect(Collectors.toSet());
}
@Test
public void testGetServiceInstances() {
    // One null assignment plus three endpoints; only the two subset-qualified
    // cluster names resolve to instances, all under service "serviceB".
    List<ClusterLoadAssignment> assignments = Arrays.asList(
            null,
            createLoadAssignment("outbound|8080|subset1|serviceB.default.svc.cluster.local"),
            createLoadAssignment("outbound|8080|subset2|serviceB.default.svc.cluster.local"),
            createLoadAssignment("outbound|8080|serviceB.default.svc.cluster.local"));
    Set<ServiceInstance> result = XdsProtocolTransformer.getServiceInstances(assignments);
    Assert.assertEquals(2, result.size());
    for (ServiceInstance instance : result) {
        Assert.assertEquals("serviceB", instance.getServiceName());
    }
}
/**
 * Registers a service instance under the default cluster.
 * Convenience overload that delegates to the cluster-aware variant with
 * {@code Constants.DEFAULT_CLUSTER_NAME}.
 *
 * @param serviceName service to register the instance under
 * @param ip          instance IP address
 * @param port        instance port
 * @throws NacosException if the underlying registration fails
 */
@Override public void registerInstance(String serviceName, String ip, int port) throws NacosException { registerInstance(serviceName, ip, port, Constants.DEFAULT_CLUSTER_NAME); }
// Verifies that registerInstance(service, group, ip, port, cluster) calls the naming
// proxy exactly once with an Instance carrying the given ip/port/cluster and the
// default weight of 1.0 (compared with a tolerance since weight is a double).
@Test void testRegisterInstance4() throws NacosException { //given String serviceName = "service1"; String groupName = "group1"; String clusterName = "cluster1"; String ip = "1.1.1.1"; int port = 10000; //when client.registerInstance(serviceName, groupName, ip, port, clusterName); //then verify(proxy, times(1)).registerService(eq(serviceName), eq(groupName), argThat(instance -> instance.getIp().equals(ip) && instance.getPort() == port && Math.abs(instance.getWeight() - 1.0) < 0.01f && instance.getClusterName() .equals(clusterName))); }
/**
 * Fails unless the subject fully matches the given regex.
 * When the match fails, the message is tailored: if the regex is literally equal
 * to the subject it suggests isEqualTo(); if the regex matches a substring it
 * suggests containsMatch(); otherwise a plain mismatch failure is reported.
 * A null subject fails with "expected a string that matches".
 *
 * @param regex the regular expression the whole string must match; must not be null
 */
public void matches(@Nullable String regex) { checkNotNull(regex); if (actual == null) { failWithActual("expected a string that matches", regex); } else if (!actual.matches(regex)) { if (regex.equals(actual)) { failWithoutActual( fact("expected to match", regex), fact("but was", actual), simpleFact("Looks like you want to use .isEqualTo() for an exact equality assertion.")); } else if (Platform.containsMatch(actual, regex)) { failWithoutActual( fact("expected to match", regex), fact("but was", actual), simpleFact("Did you mean to call containsMatch() instead of match()?")); } else { failWithActual("expected to match", regex); } } }
@Test
public void stringMatchesString() {
    // Wildcards on both sides of "aaa" should make the full-string match succeed.
    String subject = "abcaaadev";
    assertThat(subject).matches(".*aaa.*");
}
/**
 * Sets the service thread count on this builder.
 *
 * @param threads number of threads to configure
 * @return this builder, for chaining
 */
public ProviderBuilder threads(Integer threads) { this.threads = threads; return getThis(); }
@Test
void threads() {
    // The configured thread count must survive the build() round trip.
    ProviderBuilder builder = ProviderBuilder.newBuilder().threads(20);
    Assertions.assertEquals(20, builder.build().getThreads());
}
/**
 * Returns the configured value for the given key.
 *
 * @param key configuration key to look up
 * @return the mapped value from the configuration data map, or {@code null} if absent
 */
public String getConfigurationValue(String key) { return getConfigurationDataMap().get(key); }
@Test
public void shouldGetValueForConfigurationKey() {
    // "k1" was seeded into the plugin configuration; expect its mapped value back.
    String value = plugin.getConfigurationValue("k1");
    assertThat(value).isEqualTo("v1");
}
/**
 * Exports the selected configurations as a ZIP archive (V2 format: per-config
 * content files plus a YAML metadata entry).
 *
 * @param dataId  optional dataId filter
 * @param group   optional group filter
 * @param appName optional app name filter
 * @param tenant  tenant/namespace, normalized via NamespaceUtil
 * @param ids     optional explicit config ids; may be null when omitted
 * @return HTTP 200 with the zipped export as an attachment
 */
@GetMapping(params = "exportV2=true")
@Secured(action = ActionTypes.READ, signType = SignType.CONFIG)
public ResponseEntity<byte[]> exportConfigV2(@RequestParam(value = "dataId", required = false) String dataId,
        @RequestParam(value = "group", required = false) String group,
        @RequestParam(value = "appName", required = false) String appName,
        @RequestParam(value = "tenant", required = false, defaultValue = StringUtils.EMPTY) String tenant,
        @RequestParam(value = "ids", required = false) List<Long> ids) {
    // BUGFIX: "ids" is declared required=false, so it is null when the request
    // omits the parameter; the unguarded removeAll previously threw an NPE.
    if (ids != null) {
        ids.removeAll(Collections.singleton(null));
    }
    tenant = NamespaceUtil.processNamespaceParameter(tenant);
    List<ConfigAllInfo> dataList = configInfoPersistService.findAllConfigInfo4Export(dataId, group, tenant, appName, ids);
    List<ZipUtils.ZipItem> zipItemList = new ArrayList<>();
    List<ConfigMetadata.ConfigExportItem> configMetadataItems = new ArrayList<>();
    for (ConfigAllInfo ci : dataList) {
        // Collect per-config metadata for the export manifest.
        ConfigMetadata.ConfigExportItem configMetadataItem = new ConfigMetadata.ConfigExportItem();
        configMetadataItem.setAppName(ci.getAppName());
        configMetadataItem.setDataId(ci.getDataId());
        configMetadataItem.setDesc(ci.getDesc());
        configMetadataItem.setGroup(ci.getGroup());
        configMetadataItem.setType(ci.getType());
        configMetadataItems.add(configMetadataItem);
        // Decrypt content before export so the archive holds plain text.
        Pair<String, String> pair = EncryptionHandler.decryptHandler(ci.getDataId(), ci.getEncryptedDataKey(), ci.getContent());
        String itemName = ci.getGroup() + Constants.CONFIG_EXPORT_ITEM_FILE_SEPARATOR + ci.getDataId();
        zipItemList.add(new ZipUtils.ZipItem(itemName, pair.getSecond()));
    }
    ConfigMetadata configMetadata = new ConfigMetadata();
    configMetadata.setMetadata(configMetadataItems);
    zipItemList.add(
            new ZipUtils.ZipItem(Constants.CONFIG_EXPORT_METADATA_NEW, YamlParserUtil.dumpObject(configMetadata)));
    HttpHeaders headers = new HttpHeaders();
    String fileName = EXPORT_CONFIG_FILE_NAME + DateFormatUtils.format(new Date(), EXPORT_CONFIG_FILE_NAME_DATE_FORMAT)
            + EXPORT_CONFIG_FILE_NAME_EXT;
    headers.add("Content-Disposition", "attachment;filename=" + fileName);
    return new ResponseEntity<>(ZipUtils.zip(zipItemList), headers, HttpStatus.OK);
}
// Exercises the exportV2 endpoint end-to-end through MockMvc: stubs the persist
// service to return one ConfigAllInfo for ids [1, 2] and asserts an HTTP 200 status.
@Test void testExportConfigV2() throws Exception { String dataId = "dataId2.json"; String group = "group2"; String tenant = "tenant234"; String appname = "appname2"; ConfigAllInfo configAllInfo = new ConfigAllInfo(); configAllInfo.setDataId(dataId); configAllInfo.setGroup(group); configAllInfo.setTenant(tenant); configAllInfo.setAppName(appname); configAllInfo.setContent("content1234"); List<ConfigAllInfo> dataList = new ArrayList<>(); dataList.add(configAllInfo); Mockito.when(configInfoPersistService.findAllConfigInfo4Export(eq(dataId), eq(group), eq(tenant), eq(appname), eq(Arrays.asList(1L, 2L)))).thenReturn(dataList); MockHttpServletRequestBuilder builder = MockMvcRequestBuilders.get(Constants.CONFIG_CONTROLLER_PATH).param("exportV2", "true") .param("dataId", dataId).param("group", group).param("tenant", tenant).param("appName", appname).param("ids", "1,2"); int actualValue = mockmvc.perform(builder).andReturn().getResponse().getStatus(); assertEquals(200, actualValue); }
/**
 * Reads metadata for every file and merges it into a single key -&gt; value view.
 * A key maps to its value only when every file carries that key with exactly one
 * distinct value; otherwise the key is kept with a null value (partial or mixed).
 * The reducing collector returns null as soon as a second distinct value appears.
 *
 * @param session session used to obtain the Metadata feature
 * @return merged metadata; null values mark keys that are missing on some files or differ
 * @throws BackgroundException if reading metadata for a file fails
 */
@Override public Map<String, String> run(final Session<?> session) throws BackgroundException { final Metadata feature = session.getFeature(Metadata.class); if(log.isDebugEnabled()) { log.debug(String.format("Run with feature %s", feature)); } // Map for metadata entry key > File & Metadata Values final Map<String, Map<Path, String>> graphMetadata = new HashMap<>(); for(Path next : files) { // Read online metadata next.attributes().setMetadata(feature.getMetadata(next)); // take every entry of current metadata and store it in metaGraph for(Map.Entry<String, String> entry : next.attributes().getMetadata().entrySet()) { if(graphMetadata.containsKey(entry.getKey())) { // if existing, get map, put value graphMetadata.get(entry.getKey()).put(next, entry.getValue()); } else { // if not existent create hashmap and put it back Map<Path, String> map = new HashMap<>(); graphMetadata.put(entry.getKey(), map); map.put(next, entry.getValue()); } } } // Store result metadata in hashmap Map<String, String> metadata = new HashMap<>(); for(Map.Entry<String, Map<Path, String>> entry : graphMetadata.entrySet()) { if(entry.getValue().size() != files.size()) { metadata.put(entry.getKey(), null); } else { // single use of streams, reason: distinct is easier in Streams than it would be writing it manually Stream<String> values = entry.getValue().values().stream().distinct(); // Use reducing collector, that returns null on non-unique values final String value = values.collect(Collectors.reducing((a, v) -> null)).orElse(null); // store it metadata.put(entry.getKey(), value); } } return metadata; }
// Three files with partial/disjoint metadata: "key1" exists on one file only and
// "key2" on two of three, so the merged map must contain both keys mapped to null.
// cleanup() must not be invoked when run() is called directly, hence fail() there.
@Test public void testDifferent() throws Exception { final List<Path> files = new ArrayList<>(); files.add(new Path("a", EnumSet.of(Path.Type.file))); files.add(new Path("b", EnumSet.of(Path.Type.file))); files.add(new Path("c", EnumSet.of(Path.Type.file))); ReadMetadataWorker worker = new ReadMetadataWorker(files) { @Override public void cleanup(final Map<String, String> result) { fail(); } }; final Map<String, String> map = worker.run(new NullSession(new Host(new TestProtocol())) { @Override @SuppressWarnings("unchecked") public <T> T _getFeature(final Class<T> type) { if(type == Metadata.class) { return (T) new Metadata() { @Override public Map<String, String> getDefault(final Local local) { return Collections.emptyMap(); } @Override public Map<String, String> getMetadata(final Path file) { switch(file.getName()) { case "a": return Collections.singletonMap("key1", "value1"); case "b": return Collections.singletonMap("key2", "value2"); case "c": return Collections.singletonMap("key2", "value2"); default: fail(); break; } throw new UnsupportedOperationException(); } @Override public void setMetadata(final Path file, final TransferStatus status) { throw new UnsupportedOperationException(); } }; } return super._getFeature(type); } }); assertTrue(map.containsKey("key1")); assertTrue(map.containsKey("key2")); assertNull(map.get("key1")); assertNull(map.get("key2")); }
/**
 * Populates SOFA-specific routing fields of the context from the given metadata
 * and marks the RPC type as SOFA.
 *
 * @param shenyuContext context to decorate (mutated in place)
 * @param metaData      source of app name, service name and context path
 * @return the same, now-decorated context
 */
@Override
public ShenyuContext decorator(final ShenyuContext shenyuContext, final MetaData metaData) {
    // The setters are independent, so order is irrelevant; mark the RPC type first.
    shenyuContext.setRpcType(RpcTypeEnum.SOFA.getName());
    shenyuContext.setContextPath(metaData.getContextPath());
    shenyuContext.setModule(metaData.getAppName());
    shenyuContext.setMethod(metaData.getServiceName());
    return shenyuContext;
}
/**
 * Verifies that decorator() copies the metadata context path onto the context.
 */
@Test
public void decoratorTest() {
    final MetaData metaData = MetaData.builder().contextPath("path").build();
    final ShenyuContext shenyuContext = new ShenyuContext();
    final ShenyuContext context = sofaShenyuContextDecorator.decorator(shenyuContext, metaData);
    // BUGFIX: JUnit's assertEquals takes (expected, actual); the original call
    // had the arguments reversed, which yields a misleading failure message.
    assertEquals("path", context.getContextPath());
}
@Nullable public Span currentSpan() { TraceContext context = currentTraceContext.get(); if (context == null) return null; // Returns a lazy span to reduce overhead when tracer.currentSpan() is invoked just to see if // one exists, or when the result is never used. return new LazySpan(this, context); }
// A context built with shared(true) must keep its shared flag visible through
// tracer.currentSpan() while the scope is open (i.e. the lazy span preserves it).
@Test void currentSpan_retainsSharedFlag() { TraceContext context = TraceContext.newBuilder().traceId(1L).spanId(2L).shared(true).sampled(true).build(); try (Scope scope = currentTraceContext.newScope(context)) { assertThat(tracer.currentSpan().context().shared()).isTrue(); } }
/**
 * Returns the rewrite rule that pulls eligible expressions out of lambdas in ProjectNodes.
 *
 * @return a fresh PullUpExpressionInLambdaProjectNodeRule instance
 */
public Rule<ProjectNode> projectNodeRule() { return new PullUpExpressionInLambdaProjectNodeRule(); }
// Verifies that the lambda-independent CASE condition "arr2 is null" is pulled up
// into its own projection (expr_0) while the parts that depend on the lambda
// variable x (contains(arr2, x), concat) stay inside the transform call.
@Test public void testSwitchWhenExpression() { tester().assertThat(new PullUpExpressionInLambdaRules(getFunctionManager()).projectNodeRule()) .setSystemProperty(PULL_EXPRESSION_FROM_LAMBDA_ENABLED, "true") .on(p -> { p.variable("arr", new ArrayType(VARCHAR)); p.variable("arr2", new ArrayType(VARCHAR)); return p.project( Assignments.builder().put(p.variable("expr", VARCHAR), p.rowExpression( "transform(arr, x -> concat(case when arr2 is null then '*' when contains(arr2, x) then '+' else ' ' end, x))")).build(), p.values(p.variable("arr", new ArrayType(VARCHAR)), p.variable("arr2", new ArrayType(VARCHAR)))); }).matches( project( ImmutableMap.of("expr", expression("transform(arr, x -> concat(case when expr_0 then '*' when contains(arr2, x) then '+' else ' ' end, x))")), project(ImmutableMap.of("expr_0", expression("arr2 is null")), values("arr", "arr2")))); }
/**
 * Converts a number to its Chinese monetary (amount-in-words) representation.
 * A {@code null} input is treated as zero.
 *
 * @param n the number to convert; may be {@code null}
 * @return the Chinese currency string produced by NumberChineseFormatter
 */
public static String digitToChinese(Number n) {
    // Avoid reassigning the parameter: substitute zero for null via a local.
    final Number value = (null == n) ? 0 : n;
    return NumberChineseFormatter.format(value.doubleValue(), true, true);
}
@Test
public void issue3662Test() {
    // Both an explicit zero and a null input must render as the zero amount string.
    assertEquals("零元整", Convert.digitToChinese(0));
    assertEquals("零元整", Convert.digitToChinese(null));
}
/**
 * Parses the configured node-attributes string into a validated attribute set.
 * Expected format: "NAME,TYPE,VALUE" triplets joined by the provider delimiter;
 * names must not carry a prefix (the distributed prefix is applied automatically).
 *
 * @param config raw configuration value; null/empty yields an empty set
 * @return parsed attributes, each under NodeAttribute.PREFIX_DISTRIBUTED
 * @throws IOException on malformed triplets, prefixed names, unknown attribute
 *         types, duplicate attributes, or failed NodeLabelUtil validation
 */
@VisibleForTesting public Set<NodeAttribute> parseAttributes(String config) throws IOException { if (Strings.isNullOrEmpty(config)) { return ImmutableSet.of(); } Set<NodeAttribute> attributeSet = new HashSet<>(); // Configuration value should be in one line, format: // "ATTRIBUTE_NAME,ATTRIBUTE_TYPE,ATTRIBUTE_VALUE", // multiple node-attributes are delimited by ":". // Each attribute str should not container any space. String[] attributeStrs = config.split(NODE_ATTRIBUTES_DELIMITER); for (String attributeStr : attributeStrs) { String[] fields = attributeStr.split(NODE_ATTRIBUTE_DELIMITER); if (fields.length != 3) { throw new IOException("Invalid value for " + YarnConfiguration.NM_PROVIDER_CONFIGURED_NODE_ATTRIBUTES + "=" + config); } // We don't allow user config to overwrite our dist prefix, // so disallow any prefix set in the configuration. if (fields[0].contains("/")) { throw new IOException("Node attribute set in " + YarnConfiguration.NM_PROVIDER_CONFIGURED_NODE_ATTRIBUTES + " should not contain any prefix."); } // Make sure attribute type is valid. if (!EnumUtils.isValidEnum(NodeAttributeType.class, fields[1])) { throw new IOException("Invalid node attribute type: " + fields[1] + ", valid values are " + Arrays.asList(NodeAttributeType.values())); } // Automatically setup prefix for collected attributes NodeAttribute na = NodeAttribute.newInstance( NodeAttribute.PREFIX_DISTRIBUTED, fields[0], NodeAttributeType.valueOf(fields[1]), fields[2]); // Since a NodeAttribute is identical with another one as long as // their prefix and name are same, to avoid attributes getting // overwritten by ambiguous attribute, make sure it fails in such // case. 
if (!attributeSet.add(na)) { throw new IOException("Ambiguous node attribute is found: " + na.toString() + ", a same attribute already exists"); } } // Before updating the attributes to the provider, // verify if they are valid try { NodeLabelUtil.validateNodeAttributes(attributeSet); } catch (IOException e) { throw new IOException("Node attributes set by configuration property: " + YarnConfiguration.NM_PROVIDER_CONFIGURED_NODE_ATTRIBUTES + " is not valid. Detail message: " + e.getMessage()); } return attributeSet; }
// Covers the happy path (two STRING attributes gain the distributed prefix) plus the
// documented failure modes: a missing type field, a user-supplied prefix, and an
// invalid attribute-type enum value — each must raise IOException with its message.
@Test public void testParseConfiguration() throws IOException { // ATTRIBUTE_NAME,ATTRIBUTE_TYPE,ATTRIBUTE_VALUE String attributesStr = "hostname,STRING,host1234:uptime,STRING,321543"; Set<NodeAttribute> attributes = nodeAttributesProvider .parseAttributes(attributesStr); Assert.assertEquals(2, attributes.size()); Iterator<NodeAttribute> ait = attributes.iterator(); while(ait.hasNext()) { NodeAttribute attr = ait.next(); NodeAttributeKey at = attr.getAttributeKey(); if (at.getAttributeName().equals("hostname")) { Assert.assertEquals("hostname", at.getAttributeName()); Assert.assertEquals(NodeAttribute.PREFIX_DISTRIBUTED, at.getAttributePrefix()); Assert.assertEquals(NodeAttributeType.STRING, attr.getAttributeType()); Assert.assertEquals("host1234", attr.getAttributeValue()); } else if (at.getAttributeName().equals("uptime")) { Assert.assertEquals("uptime", at.getAttributeName()); Assert.assertEquals(NodeAttribute.PREFIX_DISTRIBUTED, at.getAttributePrefix()); Assert.assertEquals(NodeAttributeType.STRING, attr.getAttributeType()); Assert.assertEquals("321543", attr.getAttributeValue()); } else { Assert.fail("Unexpected attribute"); } } // Missing type attributesStr = "hostname,host1234"; try { nodeAttributesProvider.parseAttributes(attributesStr); Assert.fail("Expecting a parsing failure"); } catch (IOException e) { Assert.assertNotNull(e); Assert.assertTrue(e.getMessage().contains("Invalid value")); } // Extra prefix attributesStr = "prefix/hostname,STRING,host1234"; try { nodeAttributesProvider.parseAttributes(attributesStr); Assert.fail("Expecting a parsing failure"); } catch (IOException e) { Assert.assertNotNull(e); Assert.assertTrue(e.getMessage() .contains("should not contain any prefix.")); } // Invalid type attributesStr = "hostname,T,host1234"; try { nodeAttributesProvider.parseAttributes(attributesStr); Assert.fail("Expecting a parsing failure"); } catch (IOException e) { e.printStackTrace(); Assert.assertNotNull(e); Assert.assertTrue(e.getMessage() 
.contains("Invalid node attribute type")); } }
/**
 * Ensures the given collection is empty; otherwise throws the supplied exception.
 *
 * @param values the collection expected to be empty
 * @param exceptionSupplierIfUnexpected factory for the exception thrown on a non-empty collection
 * @param <T> exception type
 * @throws T when {@code values} contains at least one element
 */
public static <T extends Throwable> void checkMustEmpty(final Collection<?> values,
                                                        final Supplier<T> exceptionSupplierIfUnexpected) throws T {
    // Guard-clause form: bail out early on the expected (empty) case.
    if (values.isEmpty()) {
        return;
    }
    throw exceptionSupplierIfUnexpected.get();
}
// A single-element collection must make checkMustEmpty throw the supplied SQLException.
@Test void assertCheckMustEmptyWithCollectionToThrowsException() { assertThrows(SQLException.class, () -> ShardingSpherePreconditions.checkMustEmpty(Collections.singleton("foo"), SQLException::new)); }
/**
 * Applies a binary boolean operator against another SelType operand.
 * NULL right-hand operands for EQUAL/NOT_EQUAL are delegated to the NULL value's
 * own comparison logic; every other operator requires a matching boolean type.
 * Note the NOT case ignores the right-hand operand and negates this value.
 *
 * @param op  operator to apply
 * @param rhs right-hand operand
 * @return the boolean result
 * @throws UnsupportedOperationException for operators outside the supported set
 */
@Override public SelBoolean binaryOps(SelOp op, SelType rhs) { if (rhs.type() == SelTypes.NULL && (op == SelOp.EQUAL || op == SelOp.NOT_EQUAL)) { return (SelBoolean) rhs.binaryOps(op, this); } SelTypeUtil.checkTypeMatch(this.type(), rhs.type()); boolean another = ((SelBoolean) rhs).booleanVal(); switch (op) { case AND: return SelBoolean.of(val && another); case OR: return SelBoolean.of(val || another); case EQUAL: return SelBoolean.of(val == another); case NOT_EQUAL: return SelBoolean.of(val != another); case NOT: return SelBoolean.of(!val); default: throw new UnsupportedOperationException( this.type() + " DO NOT support expression operation " + op); } }
// From the assertions, "one" is true and "another" is false (NOT one == false).
// Exercises every supported operator plus the NULL-aware EQUAL/NOT_EQUAL delegation.
@Test public void testBinaryOps() { assertFalse(one.binaryOps(SelOp.AND, another).booleanVal()); assertTrue(one.binaryOps(SelOp.OR, another).booleanVal()); assertFalse(one.binaryOps(SelOp.EQUAL, another).booleanVal()); assertTrue(one.binaryOps(SelOp.NOT_EQUAL, another).booleanVal()); assertFalse(one.binaryOps(SelOp.NOT, another).booleanVal()); assertFalse(one.binaryOps(SelOp.EQUAL, SelType.NULL).booleanVal()); assertTrue(one.binaryOps(SelOp.NOT_EQUAL, SelType.NULL).booleanVal()); }
/**
 * Converts a Flink filter expression into an Iceberg Expression where supported.
 * Handles null checks, comparisons (with the operator mirrored for literal-first
 * operand order), NaN-aware (not-)equality, NOT/AND/OR composition, and LIKE-based
 * startsWith. Anything that is not a recognized CallExpression yields empty.
 *
 * @param flinkExpression the Flink expression; only CallExpressions are convertible
 * @return the converted Iceberg expression, or Optional.empty() when unsupported
 */
public static Optional<Expression> convert( org.apache.flink.table.expressions.Expression flinkExpression) { if (!(flinkExpression instanceof CallExpression)) { return Optional.empty(); } CallExpression call = (CallExpression) flinkExpression; Operation op = FILTERS.get(call.getFunctionDefinition()); if (op != null) { switch (op) { case IS_NULL: return onlyChildAs(call, FieldReferenceExpression.class) .map(FieldReferenceExpression::getName) .map(Expressions::isNull); case NOT_NULL: return onlyChildAs(call, FieldReferenceExpression.class) .map(FieldReferenceExpression::getName) .map(Expressions::notNull); case LT: return convertFieldAndLiteral(Expressions::lessThan, Expressions::greaterThan, call); case LT_EQ: return convertFieldAndLiteral( Expressions::lessThanOrEqual, Expressions::greaterThanOrEqual, call); case GT: return convertFieldAndLiteral(Expressions::greaterThan, Expressions::lessThan, call); case GT_EQ: return convertFieldAndLiteral( Expressions::greaterThanOrEqual, Expressions::lessThanOrEqual, call); case EQ: return convertFieldAndLiteral( (ref, lit) -> { if (NaNUtil.isNaN(lit)) { return Expressions.isNaN(ref); } else { return Expressions.equal(ref, lit); } }, call); case NOT_EQ: return convertFieldAndLiteral( (ref, lit) -> { if (NaNUtil.isNaN(lit)) { return Expressions.notNaN(ref); } else { return Expressions.notEqual(ref, lit); } }, call); case NOT: return onlyChildAs(call, CallExpression.class) .flatMap(FlinkFilters::convert) .map(Expressions::not); case AND: return convertLogicExpression(Expressions::and, call); case OR: return convertLogicExpression(Expressions::or, call); case STARTS_WITH: return convertLike(call); } } return Optional.empty(); }
// field = NaN and NaN = field must both convert to an isNaN predicate instead of a
// regular equality, regardless of which side of the comparison holds the literal.
@Test public void testEqualsNaN() { UnboundPredicate<Float> expected = org.apache.iceberg.expressions.Expressions.isNaN("field3"); Optional<org.apache.iceberg.expressions.Expression> actual = FlinkFilters.convert(resolve(Expressions.$("field3").isEqual(Expressions.lit(Float.NaN)))); assertThat(actual).isPresent(); assertPredicatesMatch(expected, actual.get()); Optional<org.apache.iceberg.expressions.Expression> actual1 = FlinkFilters.convert(resolve(Expressions.lit(Float.NaN).isEqual(Expressions.$("field3")))); assertThat(actual1).isPresent(); assertPredicatesMatch(expected, actual1.get()); }
/**
 * Validates that a NATIVE in-memory-format Near Cache has native memory enabled.
 * The check only applies on enterprise members with NATIVE format; open-source
 * members and non-NATIVE formats always pass.
 *
 * @param inMemoryFormat     the configured Near Cache in-memory format
 * @param nativeMemoryConfig the native memory configuration; may be null
 * @param isEnterprise       whether this member runs the enterprise edition
 * @throws InvalidConfigurationException when NATIVE is requested on enterprise
 *         but native memory is absent or disabled
 */
static void checkNearCacheNativeMemoryConfig(InMemoryFormat inMemoryFormat, NativeMemoryConfig nativeMemoryConfig,
                                             boolean isEnterprise) {
    boolean checkApplies = isEnterprise && inMemoryFormat == NATIVE;
    boolean nativeMemoryEnabled = nativeMemoryConfig != null && nativeMemoryConfig.isEnabled();
    if (checkApplies && !nativeMemoryEnabled) {
        throw new InvalidConfigurationException("Enable native memory config to use NATIVE in-memory-format for Near Cache");
    }
}
// On non-enterprise (open source) members the NATIVE/native-memory check is a no-op,
// so a missing NativeMemoryConfig must not throw even with NATIVE format.
@Test public void checkNearCacheNativeMemoryConfig_shouldNotThrowExceptionWithoutNativeMemoryConfig_NATIVE_onOS() { checkNearCacheNativeMemoryConfig(NATIVE, null, false); }
/**
 * Renders the value as a string compatible with legacy (2.x) formatting.
 * Dates, numbers and integers each get dedicated conversions per storage type
 * (normal, binary-string, indexed); any other type falls back to getString().
 * For binary-stored integers a ClassCastException during binary decoding falls
 * back to treating the raw object itself as a Long (see the nested try/catch).
 *
 * @param object the value in this meta's storage representation
 * @return the compatible string form, or null for null indexed/number/integer values
 * @throws KettleValueException on unknown storage types or data-type mismatches
 */
@Override public String getCompatibleString( Object object ) throws KettleValueException { try { String string; switch ( type ) { case TYPE_DATE: switch ( storageType ) { case STORAGE_TYPE_NORMAL: string = convertDateToCompatibleString( (Date) object ); break; case STORAGE_TYPE_BINARY_STRING: string = convertDateToCompatibleString( (Date) convertBinaryStringToNativeType( (byte[]) object ) ); break; case STORAGE_TYPE_INDEXED: if ( object == null ) { string = null; } else { string = convertDateToCompatibleString( (Date) index[( (Integer) object ).intValue()] ); } break; default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } break; case TYPE_NUMBER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: string = convertNumberToCompatibleString( (Double) object ); break; case STORAGE_TYPE_BINARY_STRING: string = convertNumberToCompatibleString( (Double) convertBinaryStringToNativeType( (byte[]) object ) ); break; case STORAGE_TYPE_INDEXED: string = object == null ? null : convertNumberToCompatibleString( (Double) index[( (Integer) object ) .intValue()] ); break; default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } break; case TYPE_INTEGER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: string = convertIntegerToCompatibleString( (Long) object ); break; case STORAGE_TYPE_BINARY_STRING: try { string = convertIntegerToCompatibleString( (Long) convertBinaryStringToNativeType( (byte[]) object ) ); } catch ( ClassCastException e ) { string = convertIntegerToCompatibleString( (Long) object ); } break; case STORAGE_TYPE_INDEXED: string = object == null ? null : convertIntegerToCompatibleString( (Long) index[( (Integer) object ) .intValue()] ); break; default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." 
); } break; default: return getString( object ); } return string; } catch ( ClassCastException e ) { throw new KettleValueException( toString() + " : There was a data type error: the data type of " + object.getClass().getName() + " object [" + object + "] does not correspond to value meta [" + toStringMeta() + "]" ); } }
@Test public void testGetCompatibleString() throws KettleValueException { ValueMetaInteger valueMetaInteger = new ValueMetaInteger( "INTEGER" ); valueMetaInteger.setType( 5 ); // Integer valueMetaInteger.setStorageType( 1 ); // STORAGE_TYPE_BINARY_STRING assertEquals( "2", valueMetaInteger.getCompatibleString( 2L ) ); //BACKLOG-15750 }
public List<MappingField> resolveAndValidateFields( List<MappingField> userFields, Map<String, String> options, NodeEngine nodeEngine ) { final InternalSerializationService serializationService = (InternalSerializationService) nodeEngine .getSerializationService(); final AbstractRelationsStorage relationsStorage = ((CalciteSqlOptimizer) nodeEngine.getSqlService().getOptimizer()) .relationsStorage(); // normalize and validate the names and external names for (MappingField field : userFields) { String name = field.name(); String externalName = field.externalName(); if (externalName == null) { if (name.equals(KEY) || name.equals(VALUE)) { externalName = name; } else { externalName = VALUE_PREFIX + name; } field.setExternalName(name); } if ((name.equals(KEY) && !externalName.equals(KEY)) || (name.equals(VALUE) && !externalName.equals(VALUE))) { throw QueryException.error("Cannot rename field: '" + name + '\''); } if (!EXT_NAME_PATTERN.matcher(externalName).matches()) { throw QueryException.error("Invalid external name: " + externalName); } } Stream<MappingField> keyFields = resolveAndValidateFields(true, userFields, options, serializationService, relationsStorage); Stream<MappingField> valueFields = resolveAndValidateFields(false, userFields, options, serializationService, relationsStorage); Map<String, MappingField> fields = Stream.concat(keyFields, valueFields) .collect(LinkedHashMap::new, (map, field) -> map.putIfAbsent(field.name(), field), Map::putAll); if (fields.isEmpty()) { throw QueryException.error("The resolved field list is empty"); } return new ArrayList<>(fields.values()); }
// Stubs key and value resolution to one field each and checks they are concatenated
// (key field first) into the resolved mapping field list.
@Test public void test_resolveAndValidateFields() { Map<String, String> options = ImmutableMap.of( OPTION_KEY_FORMAT, JAVA_FORMAT, OPTION_VALUE_FORMAT, JAVA_FORMAT ); given(resolver.resolveAndValidateFields(eq(true), eq(emptyList()), eq(options), eq(ss))) .willReturn(Stream.of(field("__key", QueryDataType.INT))); given(resolver.resolveAndValidateFields(eq(false), eq(emptyList()), eq(options), eq(ss))) .willReturn(Stream.of(field("this", QueryDataType.VARCHAR))); List<MappingField> fields = resolvers.resolveAndValidateFields(emptyList(), options, nodeEngine); assertThat(fields).containsExactly( field("__key", QueryDataType.INT), field("this", QueryDataType.VARCHAR) ); }
/**
 * Looks up a managed ThreadPoolPluginSupport by thread-pool id.
 *
 * @param threadPoolId id of the thread pool
 * @return the registered support instance, or {@code null} if none is registered
 */
@Nullable @Override public ThreadPoolPluginSupport getManagedThreadPoolPluginSupport(String threadPoolId) { return managedThreadPoolPluginSupports.get(threadPoolId); }
@Test
public void testGetManagedThreadPoolPluginSupport() {
    // Each registered support must be retrievable by its own thread-pool id.
    GlobalThreadPoolPluginManager manager = new DefaultGlobalThreadPoolPluginManager();
    for (String id : new String[] {"1", "2"}) {
        TestSupport support = new TestSupport(id);
        manager.registerThreadPoolPluginSupport(support);
        Assert.assertSame(support, manager.getManagedThreadPoolPluginSupport(support.getThreadPoolId()));
    }
}
/**
 * Runs the formatter CLI: prints the version when requested, raises usage for
 * --help, then formats stdin ("-") or the listed files using AOSP or Google
 * style with the configured Javadoc handling.
 *
 * @param args command-line arguments
 * @return process exit code (0 on success)
 * @throws UsageException when help is requested or arguments are invalid
 */
public int format(String... args) throws UsageException { CommandLineOptions parameters = processArgs(args); if (parameters.version()) { errWriter.println(versionString()); return 0; } if (parameters.help()) { throw new UsageException(); } JavaFormatterOptions options = JavaFormatterOptions.builder() .style(parameters.aosp() ? Style.AOSP : Style.GOOGLE) .formatJavadoc(parameters.formatJavadoc()) .build(); if (parameters.stdin()) { return formatStdin(parameters, options); } else { return formatFiles(parameters, options); } }
// package-info-style input (annotations before the package declaration) must be a
// formatting fixed point: stdin formatting exits 0 and the output equals the input.
@Test public void packageInfo() throws Exception { String[] input = { "@CheckReturnValue", "@ParametersAreNonnullByDefault", "package com.google.common.labs.base;", "", "import com.google.errorprone.annotations.CheckReturnValue;", "import javax.annotation.ParametersAreNonnullByDefault;", "", }; StringWriter out = new StringWriter(); StringWriter err = new StringWriter(); Main main = new Main( new PrintWriter(out, true), new PrintWriter(err, true), new ByteArrayInputStream(joiner.join(input).getBytes(UTF_8))); assertThat(main.format("-")).isEqualTo(0); assertThat(out.toString()).isEqualTo(joiner.join(input)); }
/**
 * Hyperbolic tangent scalar function.
 *
 * @param num the input value
 * @return {@code Math.tanh(num)}; per Math.tanh, NaN input yields NaN
 */
@Description("hyperbolic tangent") @ScalarFunction @SqlType(StandardTypes.DOUBLE) public static double tanh(@SqlType(StandardTypes.DOUBLE) double num) { return Math.tanh(num); }
// tanh over the shared DOUBLE_VALUES fixtures, the REAL widening path (expected value
// computed via the float cast), and NULL propagation to a NULL result.
@Test public void testTanh() { for (double doubleValue : DOUBLE_VALUES) { assertFunction("tanh(" + doubleValue + ")", DOUBLE, Math.tanh(doubleValue)); assertFunction("tanh(REAL '" + doubleValue + "')", DOUBLE, Math.tanh((float) doubleValue)); } assertFunction("tanh(NULL)", DOUBLE, null); }
/**
 * Serializes a JobMeta into the repository DataNode tree: private databases,
 * notes, job entry copies (with entry/copy attribute maps and per-entry custom
 * save nodes), hops, named parameters, and finally the job details.
 *
 * NOTE(review): PROP_NR_PARAMETERS guards against a null listParameters() result,
 * but the parameter loop below dereferences paramKeys.length unguarded — confirm
 * listParameters() can never return null here.
 *
 * @param element the repository element; must be a JobMeta
 * @return the populated root DataNode
 * @throws KettleException if saving any entry or detail fails
 */
public DataNode elementToDataNode( final RepositoryElementInterface element ) throws KettleException { JobMeta jobMeta = (JobMeta) element; DataNode rootNode = new DataNode( NODE_JOB ); if ( jobMeta.getPrivateDatabases() != null ) { // save all private database names http://jira.pentaho.com/browse/PPP-3413 String privateDatabaseNames = StringUtils.join( jobMeta.getPrivateDatabases(), JOB_PRIVATE_DATABASE_DELIMITER ); DataNode privateDatabaseNode = rootNode.addNode( NODE_JOB_PRIVATE_DATABASES ); privateDatabaseNode.setProperty( PROP_JOB_PRIVATE_DATABASE_NAMES, privateDatabaseNames ); } // Save the notes // DataNode notesNode = rootNode.addNode( NODE_NOTES ); notesNode.setProperty( PROP_NR_NOTES, jobMeta.nrNotes() ); for ( int i = 0; i < jobMeta.nrNotes(); i++ ) { NotePadMeta note = jobMeta.getNote( i ); DataNode noteNode = notesNode.addNode( NOTE_PREFIX + i ); noteNode.setProperty( PROP_XML, note.getXML() ); } // // Save the job entry copies // if ( log.isDetailed() ) { log.logDetailed( toString(), "Saving " + jobMeta.nrJobEntries() + " Job entry copies to repository..." ); //$NON-NLS-1$ //$NON-NLS-2$ } DataNode entriesNode = rootNode.addNode( NODE_ENTRIES ); entriesNode.setProperty( PROP_NR_JOB_ENTRY_COPIES, jobMeta.nrJobEntries() ); for ( int i = 0; i < jobMeta.nrJobEntries(); i++ ) { JobEntryCopy copy = jobMeta.getJobEntry( i ); JobEntryInterface entry = copy.getEntry(); // Create a new node for each entry... 
// DataNode copyNode = entriesNode.addNode( sanitizeNodeName( copy.getName() ) + "_" + ( i + 1 ) //$NON-NLS-1$ + EXT_JOB_ENTRY_COPY ); copyNode.setProperty( PROP_NAME, copy.getName() ); copyNode.setProperty( PROP_DESCRIPTION, copy.getDescription() ); copyNode.setProperty( PROP_NR, copy.getNr() ); copyNode.setProperty( PROP_GUI_LOCATION_X, copy.getLocation().x ); copyNode.setProperty( PROP_GUI_LOCATION_Y, copy.getLocation().y ); copyNode.setProperty( PROP_GUI_DRAW, copy.isDrawn() ); copyNode.setProperty( PROP_PARALLEL, copy.isLaunchingInParallel() ); // Save the job entry group attributes map if ( entry instanceof JobEntryBase ) { AttributesMapUtil.saveAttributesMap( copyNode, (JobEntryBase) entry ); } // And save the job entry copy group attributes map AttributesMapUtil.saveAttributesMap( copyNode, copy, PROP_ATTRIBUTES_JOB_ENTRY_COPY ); // Save the entry information here as well, for completeness. // TODO: since this slightly stores duplicate information, figure out how to store this separately. 
// copyNode.setProperty( PROP_JOBENTRY_TYPE, entry.getPluginId() ); DataNode customNode = new DataNode( NODE_CUSTOM ); RepositoryProxy proxy = new RepositoryProxy( customNode ); entry.saveRep( proxy, MetaStoreConst.getDefaultMetastore(), null ); compatibleEntrySaveRep( entry, proxy, null ); copyNode.addNode( customNode ); } // Finally, save the hops // DataNode hopsNode = rootNode.addNode( NODE_HOPS ); hopsNode.setProperty( PROP_NR_HOPS, jobMeta.nrJobHops() ); for ( int i = 0; i < jobMeta.nrJobHops(); i++ ) { JobHopMeta hop = jobMeta.getJobHop( i ); DataNode hopNode = hopsNode.addNode( JOB_HOP_PREFIX + i ); hopNode.setProperty( JOB_HOP_FROM, hop.getFromEntry().getName() ); hopNode.setProperty( JOB_HOP_FROM_NR, hop.getFromEntry().getNr() ); hopNode.setProperty( JOB_HOP_TO, hop.getToEntry().getName() ); hopNode.setProperty( JOB_HOP_TO_NR, hop.getToEntry().getNr() ); hopNode.setProperty( JOB_HOP_ENABLED, hop.isEnabled() ); hopNode.setProperty( JOB_HOP_EVALUATION, hop.getEvaluation() ); hopNode.setProperty( JOB_HOP_UNCONDITIONAL, hop.isUnconditional() ); } String[] paramKeys = jobMeta.listParameters(); DataNode paramsNode = rootNode.addNode( NODE_PARAMETERS ); paramsNode.setProperty( PROP_NR_PARAMETERS, paramKeys == null ? 0 : paramKeys.length ); for ( int idx = 0; idx < paramKeys.length; idx++ ) { DataNode paramNode = paramsNode.addNode( PARAM_PREFIX + idx ); String key = paramKeys[idx]; String description = jobMeta.getParameterDescription( paramKeys[idx] ); String defaultValue = jobMeta.getParameterDefault( paramKeys[idx] ); paramNode.setProperty( PARAM_KEY, key != null ? key : "" ); //$NON-NLS-1$ paramNode.setProperty( PARAM_DEFAULT, defaultValue != null ? defaultValue : "" ); //$NON-NLS-1$ paramNode.setProperty( PARAM_DESC, description != null ? description : "" ); //$NON-NLS-1$ } // Let's not forget to save the details of the transformation itself. // This includes logging information, parameters, etc. // saveJobDetails( rootNode, jobMeta ); return rootNode; }
@Test
public void testElementToDataNodeSavesAttributes() {
  when( mockJobEntry.getAttributesMap() ).thenReturn( attributes );

  // Convert the job and walk down to the attribute-group node of the first entry copy.
  DataNode jobNode = jobDelegate.elementToDataNode( mockJobMeta );
  DataNode firstEntryCopy = jobNode.getNode( "entries" ).getNodes().iterator().next();
  DataNode attributeGroups = firstEntryCopy.getNode( AttributesMapUtil.NODE_ATTRIBUTE_GROUPS );

  // The mocked attribute group must have been persisted with its value intact.
  assertEquals( MOCK_VALUE, attributeGroups.getNode( MOCK_GROUP ).getProperty( MOCK_PROPERTY ).getString() );
}
public Matcher parse(String xpath) { if (xpath.equals("/text()")) { return TextMatcher.INSTANCE; } else if (xpath.equals("/node()")) { return NodeMatcher.INSTANCE; } else if (xpath.equals("/descendant::node()") || xpath.equals("/descendant:node()")) { // for compatibility return new CompositeMatcher(TextMatcher.INSTANCE, new ChildMatcher(new SubtreeMatcher(NodeMatcher.INSTANCE))); } else if (xpath.equals("/@*")) { return AttributeMatcher.INSTANCE; } else if (xpath.length() == 0) { return ElementMatcher.INSTANCE; } else if (xpath.startsWith("/@")) { String name = xpath.substring(2); String prefix = null; int colon = name.indexOf(':'); if (colon != -1) { prefix = name.substring(0, colon); name = name.substring(colon + 1); } if (prefixes.containsKey(prefix)) { return new NamedAttributeMatcher(prefixes.get(prefix), name); } else { return Matcher.FAIL; } } else if (xpath.startsWith("/*")) { return new ChildMatcher(parse(xpath.substring(2))); } else if (xpath.startsWith("///")) { return Matcher.FAIL; } else if (xpath.startsWith("//")) { return new SubtreeMatcher(parse(xpath.substring(1))); } else if (xpath.startsWith("/")) { int slash = xpath.indexOf('/', 1); if (slash == -1) { slash = xpath.length(); } String name = xpath.substring(1, slash); String prefix = null; int colon = name.indexOf(':'); if (colon != -1) { prefix = name.substring(0, colon); name = name.substring(colon + 1); } if (prefixes.containsKey(prefix)) { return new NamedElementMatcher(prefixes.get(prefix), name, parse(xpath.substring(slash))); } else { return Matcher.FAIL; } } else { return Matcher.FAIL; } }
@Test
public void testPrefixedAttribute() {
    Matcher m = parser.parse("/@prefix:name");

    // A named-attribute matcher matches nothing except the exact namespaced attribute.
    assertFalse(m.matchesText());
    assertFalse(m.matchesElement());
    assertFalse(m.matchesAttribute(null, "name"));
    assertTrue(m.matchesAttribute(NS, "name"));
    assertFalse(m.matchesAttribute(NS, "eman"));

    // Attributes are leaves: descending from one always fails.
    assertEquals(Matcher.FAIL, m.descend(NS, "name"));
}
/** Connect timeout from the client config, or the pool-wide default when unset. */
@Override
public int getConnectTimeout() {
    return clientConfig.getPropertyAsInteger(
            IClientConfigKey.Keys.ConnectTimeout, DEFAULT_CONNECT_TIMEOUT);
}
@Test
void testGetConnectTimeout() {
    // With no explicit config property, the implementation default applies.
    int timeout = connectionPoolConfig.getConnectTimeout();
    assertEquals(ConnectionPoolConfigImpl.DEFAULT_CONNECT_TIMEOUT, timeout);
}
/**
 * Completely replaces the ACL entries of every scope (access and/or default)
 * that appears in the given ACL spec, keeping existing entries of any scope
 * the spec does not mention.
 *
 * @param existingAcl the current ACL entries
 * @param inAclSpec the requested replacement entries
 * @return the new, validated ACL entry list
 * @throws AclException if the spec or the resulting ACL is invalid
 */
public static List<AclEntry> replaceAclEntries(List<AclEntry> existingAcl,
    List<AclEntry> inAclSpec) throws AclException {
  ValidatedAclSpec aclSpec = new ValidatedAclSpec(inAclSpec);
  ArrayList<AclEntry> aclBuilder = Lists.newArrayListWithCapacity(MAX_ENTRIES);
  // Replacement is done separately for each scope: access and default.
  EnumMap<AclEntryScope, AclEntry> providedMask = Maps.newEnumMap(AclEntryScope.class);
  EnumSet<AclEntryScope> maskDirty = EnumSet.noneOf(AclEntryScope.class);
  EnumSet<AclEntryScope> scopeDirty = EnumSet.noneOf(AclEntryScope.class);
  for (AclEntry aclSpecEntry: aclSpec) {
    // Any scope touched by the spec is fully replaced ("dirty").
    scopeDirty.add(aclSpecEntry.getScope());
    if (aclSpecEntry.getType() == MASK) {
      // Mask entries are tracked apart from the builder; masks may be
      // recomputed later by calculateMasks.
      providedMask.put(aclSpecEntry.getScope(), aclSpecEntry);
      maskDirty.add(aclSpecEntry.getScope());
    } else {
      aclBuilder.add(aclSpecEntry);
    }
  }
  // Copy existing entries if the scope was not replaced.
  for (AclEntry existingEntry: existingAcl) {
    if (!scopeDirty.contains(existingEntry.getScope())) {
      if (existingEntry.getType() == MASK) {
        providedMask.put(existingEntry.getScope(), existingEntry);
      } else {
        aclBuilder.add(existingEntry);
      }
    }
  }
  // Helpers (defined elsewhere in this class) fill in implied default entries
  // and compute masks for scopes whose mask was not explicitly provided.
  copyDefaultsIfNeeded(aclBuilder);
  calculateMasks(aclBuilder, providedMask, maskDirty, scopeDirty);
  return buildAndValidateAcl(aclBuilder);
}
@Test
public void testReplaceAclEntriesAutomaticDefaultUser() throws AclException {
  List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
    .add(aclEntry(ACCESS, USER, ALL))
    .add(aclEntry(ACCESS, GROUP, READ))
    .add(aclEntry(ACCESS, OTHER, NONE))
    .build();
  // Spec replaces the default scope but omits the default owner-user entry.
  List<AclEntry> aclSpec = Lists.newArrayList(
    aclEntry(ACCESS, USER, ALL),
    aclEntry(ACCESS, GROUP, READ),
    aclEntry(ACCESS, OTHER, NONE),
    aclEntry(DEFAULT, USER, "bruce", READ),
    aclEntry(DEFAULT, GROUP, READ_WRITE),
    aclEntry(DEFAULT, MASK, READ_WRITE),
    aclEntry(DEFAULT, OTHER, READ));
  // The missing DEFAULT USER entry is expected to be copied automatically
  // from the ACCESS USER entry (ALL).
  List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
    .add(aclEntry(ACCESS, USER, ALL))
    .add(aclEntry(ACCESS, GROUP, READ))
    .add(aclEntry(ACCESS, OTHER, NONE))
    .add(aclEntry(DEFAULT, USER, ALL))
    .add(aclEntry(DEFAULT, USER, "bruce", READ))
    .add(aclEntry(DEFAULT, GROUP, READ_WRITE))
    .add(aclEntry(DEFAULT, MASK, READ_WRITE))
    .add(aclEntry(DEFAULT, OTHER, READ))
    .build();
  assertEquals(expected, replaceAclEntries(existing, aclSpec));
}
@Override public Integer decode(ByteBuf source) { int size = source.readableBytes(); if (size == 1) { // Compatible with old version (< 1.7.0). return (int) source.readByte(); } if (size >= 4) { return source.readInt(); } return -1; }
@Test
public void testDecodePingResponseData() {
    PingResponseDataDecoder decoder = new PingResponseDataDecoder();
    ByteBuf buf = Unpooled.buffer();
    try {
        // Four readable bytes: decoded as a full int.
        buf.writeInt(Integer.MAX_VALUE);
        assertThat(decoder.decode(buf)).isEqualTo(Integer.MAX_VALUE);

        // A single readable byte: legacy (< 1.7.0) encoding.
        buf.writeByte((byte) 12);
        assertThat(decoder.decode(buf)).isEqualTo(12);
    } finally {
        buf.release();
    }
}
/**
 * Looks up a connection provider by key and narrows it for VFS callers.
 * The manager stores providers non-generically, hence the unchecked cast.
 */
@SuppressWarnings( "unchecked" )
@Nullable
public <T extends VFSConnectionDetails> VFSConnectionProvider<T> getProvider( @NonNull ConnectionManager manager,
                                                                              @Nullable String key ) {
  ConnectionProvider<?> provider = manager.getConnectionProvider( key );
  return (VFSConnectionProvider<T>) provider;
}
@Test
public void testGetProviderOfDetailsReturnsExistingCastedProviderFromManager() {
  String providerKey = "provider1Key";
  VFSConnectionDetails details = mock( VFSConnectionDetails.class );
  doReturn( providerKey ).when( details ).getType();

  // The manager hands back a non-generic provider; the helper must cast and return it as-is.
  ConnectionProvider<? extends ConnectionDetails> registeredProvider =
    (ConnectionProvider<? extends ConnectionDetails>) mock( VFSConnectionProvider.class );
  doReturn( registeredProvider ).when( connectionManager ).getConnectionProvider( providerKey );

  VFSConnectionProvider<VFSConnectionDetails> result =
    vfsConnectionManagerHelper.getProvider( connectionManager, details );

  assertSame( registeredProvider, result );
}
/**
 * Marks one SQL execution unit of the given process as finished and removes
 * its statement; a blank or unknown process id is silently ignored.
 */
public void completeSQLUnitExecution(final SQLExecutionUnit executionUnit, final String processId) {
    if (Strings.isNullOrEmpty(processId)) {
        return;
    }
    Process process = ProcessRegistry.getInstance().get(processId);
    if (null != process) {
        process.completeExecutionUnit();
        process.removeProcessStatement(executionUnit.getExecutionUnit());
    }
}
@Test
void assertCompleteSQLUnitExecution() {
    Process process = mock(Process.class);
    when(processRegistry.get("foo_id")).thenReturn(process);

    new ProcessEngine().completeSQLUnitExecution(mock(SQLExecutionUnit.class), "foo_id");

    // A non-blank id must trigger a registry lookup.
    verify(processRegistry).get("foo_id");
}
/**
 * Computes derived params and next-run-date information for the definition
 * and attaches them as its enriched extras.
 */
public void enrichWorkflowDefinition(WorkflowDefinition workflowDefinition) {
    WorkflowDefinitionExtras extras = new WorkflowDefinitionExtras();
    enrichParams(workflowDefinition, extras);
    enrichNextRunDate(workflowDefinition, extras);
    workflowDefinition.setEnrichedExtras(extras);
}
@Test
public void testEnrichWorkflowDefinitionCron() throws IOException {
    definition = loadObject(
        "fixtures/workflows/definition/sample-active-wf-with-cron-named-triggers.json",
        WorkflowDefinition.class);

    workflowEnrichmentHelper.enrichWorkflowDefinition(definition);

    // Enrichment must populate the extras with a next execution time, and the
    // fixture's two cron triggers each yield a next execution entry.
    WorkflowDefinitionExtras extras = definition.getEnrichedExtras();
    Assert.assertNotNull(extras);
    Assert.assertNotNull(extras.getNextExecutionTime());
    Assert.assertEquals(2, extras.getNextExecutionTimes().size());
}
/**
 * Convenience overload: truncates the throwable's stack trace before wrapping
 * it in the given {@link ProcessingException} template.
 */
public static ProcessingException getException(ProcessingException processingException, Throwable t) {
    String truncatedTrace = getTruncatedStackTrace(t);
    return getException(processingException, truncatedTrace);
}
@Test
public void testExceptionMessage() {
    // Each wrapped throwable contributes a truncated frame of 5 lines
    // (QueryException._maxLinesOfStackTracePerFrame).
    Exception exception = new UnsupportedOperationException("Caught exception.");
    ProcessingException processingException = QueryException.getException(QueryException.QUERY_EXECUTION_ERROR, exception);
    // there's one more 1 lines for the top level wrapper QUERY_EXECUTION_ERROR
    assertEquals(5 + 1, processingException.getMessage().split("\n").length);

    // Suppressed exceptions are included as an extra truncated frame.
    Exception withSuppressedException = new IllegalStateException("Suppressed exception");
    withSuppressedException.addSuppressed(processingException);
    ProcessingException withSuppressedProcessingException =
        QueryException.getException(QueryException.QUERY_EXECUTION_ERROR, withSuppressedException);
    // QueryException._maxLinesOfStackTracePerFrame * 2 + 1 line separator + 1 QUERY_EXECUTION_ERROR wrapper.
    assertEquals(5 * 2 + 1 + 1, withSuppressedProcessingException.getMessage().split("\n").length);

    // Nested causes likewise each add a truncated frame plus a separator line.
    Exception withNestedException = new IllegalStateException("Outer exception", withSuppressedProcessingException);
    ProcessingException withNestedProcessingException =
        QueryException.getException(QueryException.QUERY_EXECUTION_ERROR, withNestedException);
    // QueryException._maxLinesOfStackTracePerFrame * 3 + 2 line separators + 1 QUERY_EXECUTION_ERROR wrapper.
    assertEquals(5 * 3 + 2 + 1, withNestedProcessingException.getMessage().split("\n").length);
}
/**
 * The replica set as it was before the reassignment started: the current
 * assignment minus the replicas that are being added.
 */
List<Integer> originalReplicas() {
    List<Integer> original = new ArrayList<>(this.replicas);
    original.removeAll(adding);
    return original;
}
@Test
public void testOriginalReplicas() {
    // Target adds broker 3; the original set must still read [0, 1, 2].
    PartitionReassignmentReplicas replicas = new PartitionReassignmentReplicas(
        partitionAssignment(Arrays.asList(0, 1, 2)),
        partitionAssignment(Arrays.asList(0, 1, 3)));

    assertEquals(Arrays.asList(0, 1, 2), replicas.originalReplicas());
}
/**
 * Acquires the lock, blocking without a timeout.
 *
 * <p>{@link java.util.concurrent.locks.Lock#lock()} has no interruptible
 * contract, so an {@link InterruptedException} from the underlying
 * interruptible acquire is surfaced as an {@link IllegalStateException}.
 * The original code dropped the cause and the thread's interrupt status;
 * both are now preserved.
 */
@Override
public void lock() {
    try {
        lockInterruptibly(-1, null);
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can still observe it,
        // and keep the original exception as the cause for diagnosability.
        Thread.currentThread().interrupt();
        throw new IllegalStateException(e);
    }
}
@Test
public void testConcurrency_SingleInstance() throws InterruptedException {
    final int iterations = 15;
    final AtomicInteger timesLocked = new AtomicInteger();

    // Every task must acquire, count, and release exactly once.
    testSingleInstanceConcurrency(iterations, redisson -> {
        Lock lock = redisson.getSpinLock("testConcurrency_SingleInstance");
        lock.lock();
        timesLocked.incrementAndGet();
        lock.unlock();
    });

    Assertions.assertEquals(iterations, timesLocked.get());
}
/**
 * Loads the map stored under {@code configKey} from the configured Artifactory
 * location, returning a cached value if the last fetch is within the poll interval.
 *
 * @param configKey top-level key of the map to return
 * @return the map for the key, or null if no URI is configured, the fetch
 *         failed, or the key is absent
 */
@Override
public Map<String, Object> load(String configKey) {
    // No Artifactory URI configured for this loader: nothing to load.
    if (targetUri == null) {
        return null;
    }

    // Check for new file every so often
    int currentTimeSecs = Time.currentTimeSecs();
    if (lastReturnedValue != null && ((currentTimeSecs - lastReturnedTime) < artifactoryPollTimeSecs)) {
        LOG.debug("currentTimeSecs: {}; lastReturnedTime {}; artifactoryPollTimeSecs: {}. Returning our last map.",
                  currentTimeSecs, lastReturnedTime, artifactoryPollTimeSecs);
        return (Map<String, Object>) lastReturnedValue.get(configKey);
    }

    try {
        // NOTE(review): presumably loadFromUri refreshes lastReturnedValue/lastReturnedTime
        // as a side effect — confirm; otherwise the cache above would never update.
        Map<String, Object> raw = loadFromUri(targetUri);
        if (raw != null) {
            return (Map<String, Object>) raw.get(configKey);
        }
    } catch (Exception e) {
        // NOTE(review): the exception itself is not logged here; consider including it.
        LOG.error("Failed to load from uri {}", targetUri);
    }
    return null;
}
@Test
public void testInvalidConfig() {
    // A loader built from an empty config has no target URI, so load() yields null.
    ArtifactoryConfigLoaderMock loaderMock = new ArtifactoryConfigLoaderMock(new Config());

    Map<String, Object> result = loaderMock.load(DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS);

    assertNull(result, "Unexpectedly returned not null");
}
/**
 * Builds the content-pack excerpt for a pipeline: only id, type and title
 * are carried over; description and source are intentionally omitted.
 */
@Override
public EntityExcerpt createExcerpt(PipelineDao pipeline) {
    return EntityExcerpt.builder()
        .id(ModelId.of(pipeline.id()))
        .type(ModelTypes.PIPELINE_V1)
        .title(pipeline.title())
        .build();
}
@Test
public void createExcerpt() {
    final PipelineDao pipeline = PipelineDao.builder()
        .id("id")
        .title("title")
        .description("description")
        .source("pipeline \"Test\"\nstage 0 match either\nrule \"debug\"\nend")
        .build();

    final EntityExcerpt excerpt = facade.createExcerpt(pipeline);

    // Only id, type and title are expected to make it into the excerpt.
    assertThat(excerpt.id()).isEqualTo(ModelId.of("id"));
    assertThat(excerpt.type()).isEqualTo(ModelTypes.PIPELINE_V1);
    assertThat(excerpt.title()).isEqualTo("title");
}
/**
 * Computes per-sub-cluster routing weights from cluster metrics. All weights
 * start uniform; the top {@code maxEdit} most-loaded sub clusters (by pending
 * applications) are then scaled down toward {@code minWeight} according to the
 * configured scaling strategy.
 *
 * @param clusterMetrics metrics for each known sub cluster
 * @return validated weight per sub cluster
 */
@VisibleForTesting
protected Map<SubClusterIdInfo, Float> getTargetWeights(
    Map<SubClusterId, ClusterMetricsInfo> clusterMetrics) {
  Map<SubClusterIdInfo, Float> weights =
      GPGUtils.createUniformWeights(clusterMetrics.keySet());

  List<SubClusterId> scs = new ArrayList<>(clusterMetrics.keySet());

  // Sort the sub clusters into descending order based on pending load
  scs.sort(new SortByDescendingLoad(clusterMetrics));

  // Keep the top N loaded sub clusters
  scs = scs.subList(0, Math.min(maxEdit, scs.size()));

  for (SubClusterId sc : scs) {
    LOG.info("Updating weight for sub cluster {}", sc.toString());
    int pending = clusterMetrics.get(sc).getAppsPending();
    if (pending <= minPending) {
      // Load below the floor: leave the uniform weight untouched.
      LOG.info("Load ({}) is lower than minimum ({}), skipping", pending, minPending);
    } else if (pending < maxPending) {
      // The different scaling strategies should all map values from the
      // range min_pending+1 to max_pending to the range min_weight to 1.0f
      // so we pre-process and simplify the domain to some value [1, MAX-MIN)
      int val = pending - minPending;
      int maxVal = maxPending - minPending;

      // Scale the weights to respect the config minimum
      float weight = getWeightByScaling(maxVal, val);
      weight = weight * (1.0f - minWeight);
      weight += minWeight;
      weights.put(new SubClusterIdInfo(sc), weight);
      LOG.info("Load ({}) is within maximum ({}), setting weights via {} "
          + "scale to {}", pending, maxPending, scaling, weight);
    } else {
      // Load at or above the ceiling: clamp to the configured minimum weight.
      weights.put(new SubClusterIdInfo(sc), minWeight);
      LOG.info("Load ({}) exceeded maximum ({}), setting weight to minimum: {}",
          pending, maxPending, minWeight);
    }
  }
  validateWeights(weights);
  return weights;
}
@Test
public void testSimpleTargetWeights() {
    weights = policyGenerator.getTargetWeights(clusterMetricsInfos);

    // With no sub cluster over the pending threshold, all weights stay uniform at 1.0.
    assertEquals(3, weights.size());
    for (int i = 0; i < 3; i++) {
        assertEquals(1.0, getWeight(i), DELTA);
    }
}
/**
 * Acquires the lock, blocking without a timeout.
 *
 * <p>{@link java.util.concurrent.locks.Lock#lock()} has no interruptible
 * contract, so an {@link InterruptedException} from the underlying
 * interruptible acquire is surfaced as an {@link IllegalStateException}.
 * The original code dropped the cause and the thread's interrupt status;
 * both are now preserved.
 */
@Override
public void lock() {
    try {
        lockInterruptibly(-1, null);
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can still observe it,
        // and keep the original exception as the cause for diagnosability.
        Thread.currentThread().interrupt();
        throw new IllegalStateException(e);
    }
}
@Test
public void testExpire() throws InterruptedException {
    RLock lock = redisson.getSpinLock("lock");
    lock.lock(2, TimeUnit.SECONDS);

    final long startTime = System.currentTimeMillis();
    Thread contender = new Thread(() -> {
        RLock sameLock = redisson.getSpinLock("lock");
        sameLock.lock();
        // The 2s lease above must have expired for this acquisition to succeed,
        // so it cannot have taken much longer than the lease.
        long spendTime = System.currentTimeMillis() - startTime;
        Assertions.assertTrue(spendTime < 2020);
        sameLock.unlock();
    });
    contender.start();
    contender.join();

    // The lease expired, so this thread no longer holds the lock.
    assertThatThrownBy(lock::unlock).isInstanceOf(IllegalMonitorStateException.class);
}
/**
 * One duty-cycle iteration of the receiver agent: drains queued commands,
 * polls the data transports, services each connected publication image, and
 * performs periodic setup/re-resolution housekeeping.
 *
 * @return the amount of work done (command/image work plus bytes received)
 */
public int doWork()
{
    final long nowNs = nanoClock.nanoTime();
    cachedNanoClock.update(nowNs);
    dutyCycleTracker.measureAndUpdate(nowNs);

    int workCount = commandQueue.drain(CommandProxy.RUN_TASK, Configuration.COMMAND_DRAIN_LIMIT);

    final int bytesReceived = dataTransportPoller.pollTransports();
    totalBytesReceived.getAndAddOrdered(bytesReceived);

    // Iterate a local snapshot; disconnected images are removed from the
    // shared array in place (the loop walks backwards so removal is safe).
    final PublicationImage[] publicationImages = this.publicationImages;
    for (int lastIndex = publicationImages.length - 1, i = lastIndex; i >= 0; i--)
    {
        final PublicationImage image = publicationImages[i];
        if (image.isConnected(nowNs))
        {
            image.checkEosForDrainTransition(nowNs);
            workCount += image.sendPendingStatusMessage(nowNs);
            workCount += image.processPendingLoss();
            workCount += image.initiateAnyRttMeasurements(nowNs);
        }
        else
        {
            // Drop the image from the active set and release its resources.
            this.publicationImages = 1 == this.publicationImages.length ?
                EMPTY_IMAGES : ArrayUtil.remove(this.publicationImages, i);
            image.removeFromDispatcher();
            image.receiverRelease();
        }
    }

    checkPendingSetupMessages(nowNs);

    // Periodic endpoint re-resolution, gated by the configured interval
    // (deadline comparison is overflow-safe via subtraction).
    if (reResolutionCheckIntervalNs > 0 && (reResolutionDeadlineNs - nowNs) < 0)
    {
        reResolutionDeadlineNs = nowNs + reResolutionCheckIntervalNs;
        dataTransportPoller.checkForReResolutions(nowNs, conductorProxy);
    }

    return workCount + bytesReceived;
}
@Test
void shouldNotOverwriteDataFrameWithHeartbeat()
{
    receiverProxy.registerReceiveChannelEndpoint(receiveChannelEndpoint);
    receiverProxy.addSubscription(receiveChannelEndpoint, STREAM_ID);
    receiver.doWork();
    receiver.doWork();

    // Announce the stream via a setup frame so the conductor creates an image.
    fillSetupFrame(setupHeader);
    receiveChannelEndpoint.onSetupMessage(setupHeader, setupBuffer, SetupFlyweight.HEADER_LENGTH, senderAddress, 0);

    final int commandsRead = drainConductorQueue(
        (e) ->
        {
            final PublicationImage image = new PublicationImage(
                CORRELATION_ID,
                ctx,
                receiveChannelEndpoint,
                0,
                senderAddress,
                SESSION_ID,
                STREAM_ID,
                INITIAL_TERM_ID,
                ACTIVE_TERM_ID,
                INITIAL_TERM_OFFSET,
                (short)0,
                rawLog,
                mockFeedbackDelayGenerator,
                POSITIONS,
                mockHighestReceivedPosition,
                mockRebuildPosition,
                SOURCE_IDENTITY,
                congestionControl);
            receiverProxy.newPublicationImage(receiveChannelEndpoint, image);
        });

    assertThat(commandsRead, is(1));

    receiver.doWork();

    fillDataFrame(dataHeader, 0);  // initial data frame
    receiveChannelEndpoint.onDataPacket(dataHeader, dataBuffer, dataHeader.frameLength(), senderAddress, 0);

    // A heartbeat at the same term offset must NOT overwrite the data already written.
    fillDataFrame(dataHeader, 0);  // heartbeat with same term offset
    receiveChannelEndpoint.onDataPacket(dataHeader, dataBuffer, dataHeader.frameLength(), senderAddress, 0);

    // Read back the term: exactly one intact data frame is expected.
    final int readOutcome = TermReader.read(
        termBuffers[ACTIVE_INDEX],
        INITIAL_TERM_OFFSET,
        (buffer, offset, length, header) ->
        {
            assertThat(header.type(), is(HeaderFlyweight.HDR_TYPE_DATA));
            assertThat(header.termId(), is(ACTIVE_TERM_ID));
            assertThat(header.streamId(), is(STREAM_ID));
            assertThat(header.sessionId(), is(SESSION_ID));
            assertThat(header.termOffset(), is(0));
            assertThat(header.frameLength(), is(DataHeaderFlyweight.HEADER_LENGTH + FAKE_PAYLOAD.length));
        },
        Integer.MAX_VALUE,
        header,
        mockErrorHandler,
        0,
        mockSubscriberPosition);

    assertThat(readOutcome, is(1));
}
/**
 * Coerces a connect record value into a {@link LocalTime}.
 * Accepted forms: LocalTime (as-is), ISO-8601 String, Number of
 * milliseconds past midnight, or a java.util.Date whose epoch millis
 * encode milliseconds past midnight.
 *
 * @throws RuntimeException if the value has an unsupported type
 */
@SuppressWarnings("JavaUtilDate")
protected LocalTime convertTimeValue(Object value) {
    // The accepted classes are mutually exclusive, so check order is free.
    if (value instanceof LocalTime) {
        return (LocalTime) value;
    }
    if (value instanceof String) {
        return LocalTime.parse((String) value);
    }
    if (value instanceof Number) {
        long millis = ((Number) value).longValue();
        return DateTimeUtil.timeFromMicros(millis * 1000);
    }
    if (value instanceof Date) {
        long millis = ((Date) value).getTime();
        return DateTimeUtil.timeFromMicros(millis * 1000);
    }
    throw new RuntimeException("Cannot convert time: " + value);
}
@Test
public void testTimeConversion() {
    Table table = mock(Table.class);
    when(table.schema()).thenReturn(SIMPLE_SCHEMA);
    RecordConverter converter = new RecordConverter(table, config);

    LocalTime expected = LocalTime.of(7, 51, 30, 888_000_000);

    // Every supported representation (String, millis-past-midnight Number,
    // LocalTime, legacy Date) must convert to the same LocalTime.
    List<Object> inputList =
        ImmutableList.of(
            "07:51:30.888",
            expected.toNanoOfDay() / 1000 / 1000,
            expected,
            new Date(expected.toNanoOfDay() / 1000 / 1000));

    inputList.forEach(
        input -> {
          Temporal ts = converter.convertTimeValue(input);
          assertThat(ts).isEqualTo(expected);
        });
}
/**
 * PostgreSQL varchar values need no transformation; the raw text is
 * returned unchanged.
 */
@Override
public String parse(final String value) {
    return value;
}
@Test
void assertParse() {
    // The varchar parser is an identity transform over the raw text.
    String parsed = new PostgreSQLVarcharValueParser().parse("1");
    assertThat(parsed, is("1"));
}
/**
 * Parses an HL7 payload from the stream into a HAPI {@link Message} and
 * exposes selected MSH fields (per {@code HEADER_MAP}), the HAPI context and
 * the detected charset as headers on the out message.
 *
 * @throws Exception if conversion, charset handling or HL7 parsing fails
 */
@Override
public Object unmarshal(Exchange exchange, InputStream inputStream) throws Exception {
    byte[] body = ExchangeHelper.convertToMandatoryType(exchange, byte[].class, inputStream);
    // The payload itself may declare its charset; fall back to a guessed one.
    String charsetName = HL7Charset.getCharsetName(body, guessCharsetName(body, exchange));
    String bodyAsString = new String(body, charsetName);
    Message message = parser.parse(bodyAsString);

    // add MSH fields as message out headers
    Terser terser = new Terser(message);
    for (Map.Entry<String, String> entry : HEADER_MAP.entrySet()) {
        exchange.getOut().setHeader(entry.getKey(), terser.get(entry.getValue()));
    }
    exchange.getOut().setHeader(HL7_CONTEXT, hapiContext);
    exchange.getOut().setHeader(Exchange.CHARSET_NAME, charsetName);
    return message;
}
@Test
public void testUnmarshal() throws Exception {
    MockEndpoint mock = getMockEndpoint("mock:unmarshal");
    mock.expectedMessageCount(1);
    mock.message(0).body().isInstanceOf(Message.class);

    // Each expected header corresponds to an MSH segment field copied out by the data format.
    mock.expectedHeaderReceived(HL7Constants.HL7_SENDING_APPLICATION, "MYSENDER");
    mock.expectedHeaderReceived(HL7Constants.HL7_SENDING_FACILITY, "MYSENDERAPP");
    mock.expectedHeaderReceived(HL7Constants.HL7_RECEIVING_APPLICATION, "MYCLIENT");
    mock.expectedHeaderReceived(HL7Constants.HL7_RECEIVING_FACILITY, "MYCLIENTAPP");
    mock.expectedHeaderReceived(HL7Constants.HL7_TIMESTAMP, "200612211200");
    mock.expectedHeaderReceived(HL7Constants.HL7_SECURITY, null);
    mock.expectedHeaderReceived(HL7Constants.HL7_MESSAGE_TYPE, "QRY");
    mock.expectedHeaderReceived(HL7Constants.HL7_TRIGGER_EVENT, "A19");
    mock.expectedHeaderReceived(HL7Constants.HL7_MESSAGE_CONTROL, "1234");
    mock.expectedHeaderReceived(HL7Constants.HL7_PROCESSING_ID, "P");
    mock.expectedHeaderReceived(HL7Constants.HL7_VERSION_ID, "2.4");
    mock.expectedHeaderReceived(HL7Constants.HL7_CONTEXT, hl7.getHapiContext());
    mock.expectedHeaderReceived(HL7Constants.HL7_CHARSET, null);
    mock.expectedHeaderReceived(Exchange.CHARSET_NAME, "UTF-8");

    String body = createHL7AsString();
    template.sendBody("direct:unmarshal", body);

    MockEndpoint.assertIsSatisfied(context);

    // The parsed message must be navigable as a v2.4 QRY^A19.
    Message msg = mock.getExchanges().get(0).getIn().getBody(Message.class);
    assertEquals("2.4", msg.getVersion());
    QRD qrd = (QRD) msg.get("QRD");
    assertEquals("0101701234", qrd.getWhoSubjectFilter(0).getIDNumber().getValue());
}
/**
 * Suspends all node groups covering the given hosts, within a single
 * multi-application operation context.
 */
@Override
public void suspendAll(HostName parentHostname, List<HostName> hostNames)
        throws BatchHostStateChangeDeniedException, BatchHostNameNotFoundException, BatchInternalErrorException {
    try (OrchestratorContext context = OrchestratorContext.createContextForMultiAppOp(clock)) {
        final List<NodeGroup> orderedGroups;
        try {
            orderedGroups = nodeGroupsOrderedForSuspend(hostNames);
        } catch (HostNameNotFoundException e) {
            // Surface unknown hosts as a batch-level failure, preserving the cause.
            throw new BatchHostNameNotFoundException(parentHostname, hostNames, e);
        }
        // Two passes over the same ordered groups, first with the flag set and
        // then cleared — presumably a probe pass followed by the real suspend;
        // confirm against suspendAllNodeGroups.
        suspendAllNodeGroups(context, parentHostname, orderedGroups, true);
        suspendAllNodeGroups(context, parentHostname, orderedGroups, false);
    }
}
@Test
public void suspendAllWorks() {
    // A spy is preferential because suspendAll() relies on delegating the hard work to suspend() and resume().
    OrchestratorImpl orchestrator = spy(this.orchestrator);

    orchestrator.suspendAll(
            new HostName("parentHostname"),
            List.of(
                    DummyServiceMonitor.TEST1_HOST_NAME,
                    DummyServiceMonitor.TEST3_HOST_NAME,
                    DummyServiceMonitor.TEST6_HOST_NAME));

    // As of 2016-06-07 the order of the node groups are as follows:
    //   TEST3: mediasearch:imagesearch:default
    //   TEST6: tenant-id-3:application-instance-3:default
    //   TEST1: test-tenant-id:application:instance
    // suspendAll runs two passes (probe=true then probe=false) over that order.
    InOrder order = inOrder(orchestrator);
    verifySuspendGroup(order, orchestrator, DummyServiceMonitor.TEST3_NODE_GROUP, true);
    verifySuspendGroup(order, orchestrator, DummyServiceMonitor.TEST6_NODE_GROUP, true);
    verifySuspendGroup(order, orchestrator, DummyServiceMonitor.TEST1_NODE_GROUP, true);
    verifySuspendGroup(order, orchestrator, DummyServiceMonitor.TEST3_NODE_GROUP, false);
    verifySuspendGroup(order, orchestrator, DummyServiceMonitor.TEST6_NODE_GROUP, false);
    verifySuspendGroup(order, orchestrator, DummyServiceMonitor.TEST1_NODE_GROUP, false);
    order.verifyNoMoreInteractions();
}
/**
 * Discards the bytes before the reader index, compacting any readable bytes
 * to the start of the buffer and adjusting indices and marks accordingly.
 */
@Override
public ByteBuf discardReadBytes() {
    // Nothing read yet: only validate the buffer is still accessible.
    if (readerIndex == 0) {
        ensureAccessible();
        return this;
    }

    if (readerIndex != writerIndex) {
        // Shift the unread region [readerIndex, writerIndex) down to offset 0.
        setBytes(0, this, readerIndex, writerIndex - readerIndex);
        writerIndex -= readerIndex;
        adjustMarkers(readerIndex);
        readerIndex = 0;
    } else {
        // Fully consumed: no copy needed, just reset both indices.
        ensureAccessible();
        adjustMarkers(readerIndex);
        writerIndex = readerIndex = 0;
    }
    return this;
}
@Test
public void testDiscardReadBytesAfterRelease() {
    // Operating on a released buffer must fail the reference-count check.
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().discardReadBytes());
}
/**
 * Resolves TURN routing for the given account and client address. Routing is
 * best-effort: any failure falls back to the statically configured hostname
 * and a random URL selection.
 */
public TurnServerOptions getRoutingFor(
    @Nonnull final UUID aci,
    @Nonnull final Optional<InetAddress> clientAddress,
    final int instanceLimit
) {
  try {
    return getRoutingForInner(aci, clientAddress, instanceLimit);
  } catch (Exception e) {
    logger.error("Failed to perform routing", e);
    return new TurnServerOptions(
        this.configTurnRouter.getHostname(),
        null,
        this.configTurnRouter.randomUrls());
  }
}
@Test
public void testPrioritizesManualRecords() throws UnknownHostException {
    // Both tables return a datacenter; the manual table must win.
    when(performanceTable.getDatacentersFor(any(), any(), any(), any()))
        .thenReturn(List.of("dc-performance1"));
    when(manualTable.getDatacentersFor(any(), any(), any(), any()))
        .thenReturn(List.of("dc-manual"));

    // Returned URLs are the manual datacenter's addresses over UDP, TCP and TLS.
    assertThat(router().getRoutingFor(aci, Optional.of(InetAddress.getByName("0.0.0.1")), 10))
        .isEqualTo(optionsWithUrls(List.of(
            "turn:1.1.1.1",
            "turn:1.1.1.1:80?transport=tcp",
            "turns:1.1.1.1:443?transport=tcp",
            "turn:[2222:1111:0:dead:0:0:0:0]",
            "turn:[2222:1111:0:dead:0:0:0:0]:80?transport=tcp",
            "turns:[2222:1111:0:dead:0:0:0:0]:443?transport=tcp"
        )));
}
/**
 * Launches a Python subprocess configured from the given environment.
 *
 * <p>NOTE(review): this method mutates the caller's {@code commands} list by
 * inserting the interpreter executable at index 0 — callers reusing the list
 * should be aware of that side effect.
 *
 * @param pythonEnv interpreter path, PYTHONPATH, working dir and extra env vars
 * @param commands interpreter arguments; the executable is prepended in place
 * @param redirectToPipe pipe the child's stdout/stderr instead of inheriting
 * @return the started process
 * @throws IOException if the process cannot be started
 */
static Process startPythonProcess(
        PythonEnvironment pythonEnv, List<String> commands, boolean redirectToPipe)
        throws IOException {
    ProcessBuilder pythonProcessBuilder = new ProcessBuilder();
    Map<String, String> env = pythonProcessBuilder.environment();
    if (pythonEnv.pythonPath != null) {
        // Prepend the configured path to any PYTHONPATH inherited from this JVM.
        String defaultPythonPath = env.get("PYTHONPATH");
        if (Strings.isNullOrEmpty(defaultPythonPath)) {
            env.put("PYTHONPATH", pythonEnv.pythonPath);
        } else {
            env.put(
                    "PYTHONPATH",
                    String.join(File.pathSeparator, pythonEnv.pythonPath, defaultPythonPath));
        }
    }
    if (pythonEnv.archivesDirectory != null) {
        pythonProcessBuilder.directory(new File(pythonEnv.archivesDirectory));
    }
    pythonEnv.systemEnv.forEach(env::put);
    commands.add(0, pythonEnv.pythonExec);
    pythonProcessBuilder.command(commands);
    // redirect the stderr to stdout
    pythonProcessBuilder.redirectErrorStream(true);
    if (redirectToPipe) {
        pythonProcessBuilder.redirectOutput(ProcessBuilder.Redirect.PIPE);
    } else {
        // set the child process the output same as the parent process.
        pythonProcessBuilder.redirectOutput(ProcessBuilder.Redirect.INHERIT);
    }
    LOG.info(
            "Starting Python process with environment variables: {{}}, command: {}",
            env.entrySet().stream()
                    .map(e -> e.getKey() + "=" + e.getValue())
                    .collect(Collectors.joining(", ")),
            String.join(" ", commands));
    Process process = pythonProcessBuilder.start();
    // NOTE(review): isAlive() immediately after start() is inherently racy — a
    // fast-exiting process would be misreported as a start failure.
    if (!process.isAlive()) {
        throw new RuntimeException("Failed to start Python process. ");
    }
    return process;
}
@Test
void testStartPythonProcess() {
    PythonEnvUtils.PythonEnvironment pythonEnv = new PythonEnvUtils.PythonEnvironment();
    pythonEnv.tempDirectory = tmpDirPath;
    pythonEnv.pythonPath = tmpDirPath;
    List<String> commands = new ArrayList<>();
    String pyPath = String.join(File.separator, tmpDirPath, "verifier.py");
    try {
        // Generate a small script that writes its own working directory to a file.
        File pyFile = new File(pyPath);
        pyFile.createNewFile();
        pyFile.setExecutable(true);
        String pyProgram =
                "#!/usr/bin/python\n"
                        + "# -*- coding: UTF-8 -*-\n"
                        + "import os\n"
                        + "import sys\n"
                        + "\n"
                        + "if __name__=='__main__':\n"
                        + "\tfilename = sys.argv[1]\n"
                        + "\tfo = open(filename, \"w\")\n"
                        + "\tfo.write(os.getcwd())\n"
                        + "\tfo.close()";
        Files.write(pyFile.toPath(), pyProgram.getBytes(), StandardOpenOption.WRITE);
        String result = String.join(File.separator, tmpDirPath, "python_working_directory.txt");
        commands.add(pyPath);
        commands.add(result);
        Process pythonProcess = PythonEnvUtils.startPythonProcess(pythonEnv, commands, false);
        int exitCode = pythonProcess.waitFor();
        if (exitCode != 0) {
            throw new RuntimeException("Python process exits with code: " + exitCode);
        }
        String cmdResult = new String(Files.readAllBytes(new File(result).toPath()));
        // Check if the working directory of python process is the same as java process.
        assertThat(cmdResult).isEqualTo(System.getProperty("user.dir"));
        pythonProcess.destroyForcibly();
        pyFile.delete();
        new File(result).delete();
    } catch (IOException | InterruptedException e) {
        throw new RuntimeException("test start Python process failed " + e.getMessage());
    }
}
public static long getNumSector(String requestSize, String sectorSize) { Double memSize = Double.parseDouble(requestSize); Double sectorBytes = Double.parseDouble(sectorSize); Double nSectors = memSize / sectorBytes; Double memSizeKB = memSize / 1024; Double memSizeGB = memSize / (1024 * 1024 * 1024); Double memSize100GB = memSizeGB / 100; // allocation bitmap file: one bit per sector Double allocBitmapSize = nSectors / 8; // extend overflow file: 4MB, plus 4MB per 100GB Double extOverflowFileSize = memSize100GB * 1024 * 1024 * 4; // journal file: 8MB, plus 8MB per 100GB Double journalFileSize = memSize100GB * 1024 * 1024 * 8; // catalog file: 10bytes per KB Double catalogFileSize = memSizeKB * 10; // hot files: 5bytes per KB Double hotFileSize = memSizeKB * 5; // quota users file and quota groups file Double quotaUsersFileSize = (memSizeGB * 256 + 1) * 64; Double quotaGroupsFileSize = (memSizeGB * 32 + 1) * 64; Double metadataSize = allocBitmapSize + extOverflowFileSize + journalFileSize + catalogFileSize + hotFileSize + quotaUsersFileSize + quotaGroupsFileSize; Double allocSize = memSize + metadataSize; Double numSectors = allocSize / sectorBytes; System.out.println(numSectors.longValue() + 1); // round up return numSectors.longValue() + 1; }
@Test
public void getSectorTestMB() {
    String testRequestSize = "1048576"; // 1MB
    String testSectorSize = "512";
    long result = HFSUtils.getNumSector(testRequestSize, testSectorSize);
    // 1MB of data alone needs 1048576/512 = 2048 sectors; filesystem metadata
    // (allocation bitmap, journal, catalog, quota files, ...) plus the final
    // round-up brings the total to 2080.
    assertEquals(2080L, result);
}
/**
 * Returns true when the collection holds no elements.
 */
@Override
public boolean isEmpty() {
    // Delegates to size() so emptiness stays consistent with the size bookkeeping.
    return size() == 0;
}
@Test
public void testIsEmpty() {
    // A fresh set reports empty; adding any element flips the flag.
    assertTrue(set.isEmpty());
    long value = random.nextLong();
    set.add(value);
    assertFalse(set.isEmpty());
}
protected List<String> buildArgumentList() { // Use file.separator as a wild guess as to whether this is Windows final List<String> args = new ArrayList<>(); if (!StringUtils.isBlank(getSettings().getString(Settings.KEYS.ANALYZER_ASSEMBLY_DOTNET_PATH))) { args.add(getSettings().getString(Settings.KEYS.ANALYZER_ASSEMBLY_DOTNET_PATH)); } else if (isDotnetPath()) { args.add("dotnet"); } else { return null; } args.add(grokAssembly.getPath()); return args; }
@Test
public void testNonexistent() {
    // Skip when no dotnet runtime is available (buildArgumentList returns null).
    assumeNotNull(analyzer.buildArgumentList());

    // Tweak the log level so the warning doesn't show in the console
    String oldProp = System.getProperty(LOG_KEY, "info");
    File f = BaseTest.getResourceAsFile(this, "log4net.dll");
    File test = new File(f.getParent(), "nonexistent.dll");
    Dependency d = new Dependency(test);
    try {
        // Analyzing a dependency whose file does not exist must fail loudly.
        analyzer.analyze(d, null);
        fail("Expected an AnalysisException");
    } catch (AnalysisException ae) {
        assertTrue(ae.getMessage().contains("nonexistent.dll does not exist and cannot be analyzed by dependency-check"));
    } finally {
        System.setProperty(LOG_KEY, oldProp);
    }
}
/**
 * UDAF factory for the String-keyed histogram aggregate.
 */
@UdafFactory(description = "Build a value-to-count histogram of input Strings")
public static TableUdaf<String, Map<String, Long>, Map<String, Long>> histogramString() {
    // Type-specialized entry point; the generic histogram() does the real work.
    return histogram();
}
@Test
public void shouldCountStrings() {
    final TableUdaf<String, Map<String, Long>, Map<String, Long>> udaf = HistogramUdaf.histogramString();
    Map<String, Long> agg = udaf.initialize();

    // "foo" appears three times; "bar" and "baz" once each.
    for (final String value : new String[] {"foo", "bar", "foo", "foo", "baz"}) {
        agg = udaf.aggregate(value, agg);
    }

    assertThat(agg.entrySet(), hasSize(3));
    assertThat(agg, hasEntry("foo", 3L));
    assertThat(agg, hasEntry("bar", 1L));
    assertThat(agg, hasEntry("baz", 1L));
}
/**
 * Suspends the pull consumer if it subscribes to any prohibited topic;
 * otherwise ensures it is resumed.
 */
public static void disablePullConsumption(DefaultLitePullConsumerWrapper wrapper, Set<String> topics) {
    boolean subscribesProhibitedTopic =
            wrapper.getSubscribedTopics().stream().anyMatch(topics::contains);
    if (subscribesProhibitedTopic) {
        suspendPullConsumer(wrapper);
    } else {
        resumePullConsumer(wrapper);
    }
}
@Test
public void testDisablePullConsumptionNoTopic() {
    // A prohibited consumer with no subscriptions must be resumed, clearing the flag.
    pullConsumerWrapper.setProhibition(true);
    pullConsumerWrapper.setSubscribedTopics(new HashSet<>());
    pullConsumerWrapper.setSubscriptionType(SubscriptionType.SUBSCRIBE);

    RocketMqPullConsumerController.disablePullConsumption(pullConsumerWrapper, prohibitionTopics);

    Assert.assertFalse(pullConsumerWrapper.isProhibition());
}
/**
 * Returns the shared warning-level Android logger instance.
 */
public static Logger warningLogger() {
    return WARNING_ANDROID_LOGGER;
}
@Test
public void warningLoggerReturnsSameInstance() {
    // The warning logger is shared: repeated calls hand back the identical object.
    assertThat(Loggers.warningLogger(), sameInstance(Loggers.warningLogger()));
}
/**
 * Returns a defensive copy of the currently registered entries.
 *
 * @return a new mutable list holding all entries; callers may modify it freely
 */
public List<Entry> getEntries() {
    return new ArrayList<>(actions.values());
}
@Test
public void actions_with_multiple_document_types() {
    // Two refeed actions with the same validation id but different document types
    // must produce two distinct entries, preserved in insertion order.
    List<RefeedActions.Entry> entries = new ConfigChangeActionsBuilder().
            refeed(ValidationId.indexModeChange, CHANGE_MSG, DOC_TYPE, CLUSTER, SERVICE_NAME).
            refeed(ValidationId.indexModeChange, CHANGE_MSG, DOC_TYPE_2, CLUSTER, SERVICE_NAME).
            build().getRefeedActions().getEntries();
    assertThat(entries.size(), is(2));
    assertThat(toString(entries.get(0)), equalTo("book.foo:[baz][change]"));
    assertThat(toString(entries.get(1)), equalTo("music.foo:[baz][change]"));
}
/**
 * Tracks cannon state from varp changes: remaining cannonballs (with a
 * one-shot low-ammo notification), the cannon's world position, and its
 * placed/removed status.
 */
@Subscribe
public void onVarbitChanged(VarbitChanged varbitChanged) {
    if (varbitChanged.getVarpId() == VarPlayer.CANNON_AMMO) {
        int old = cballsLeft;
        cballsLeft = varbitChanged.getValue();

        // Ammo increased (reload): allow the low-ammo notification to fire again.
        if (cballsLeft > old) {
            cannonBallNotificationSent = false;
        }

        // Notify once when ammo drops to or below the configured threshold.
        if (!cannonBallNotificationSent && cballsLeft > 0 && config.lowWarningThreshold() >= cballsLeft) {
            notifier.notify(config.showCannonNotifications(), String.format("Your cannon has %d cannon balls remaining!", cballsLeft));
            cannonBallNotificationSent = true;
        }
    }
    else if (varbitChanged.getVarpId() == VarPlayer.CANNON_COORD) {
        // Decode the packed coordinate and rebuild the cannon's area of effect.
        WorldPoint c = WorldPoint.fromCoord(varbitChanged.getValue());
        cannonPosition = buildCannonWorldArea(c);
    }
    else if (varbitChanged.getVarpId() == VarPlayer.CANNON_STATE) {
        // State value 4 indicates a fully assembled cannon.
        cannonPlaced = varbitChanged.getValue() == 4;
        if (cannonPlaced) {
            addCounter();
        }
        else {
            removeCounter();
        }
    }
}
@Test
public void testCannonInfoBox() {
    when(config.showInfobox()).thenReturn(true);
    // Simulate the cannon-state varp reporting a fully placed cannon (state 4).
    VarbitChanged varbitChanged = new VarbitChanged();
    varbitChanged.setVarpId(VarPlayer.CANNON_STATE);
    varbitChanged.setValue(4);
    plugin.onVarbitChanged(varbitChanged);
    assertTrue(plugin.isCannonPlaced());
    // No ammo varp was fired, so the ball count stays at zero.
    assertEquals(0, plugin.getCballsLeft());
    // Placing the cannon must register the counter infobox.
    verify(infoBoxManager).addInfoBox(any(CannonCounter.class));
}
/**
 * Sends an SMS through the Aliyun Dysmsapi SendSms endpoint.
 *
 * @param sendLogId      local send-log id, passed through as the {@code OutId} correlation field
 * @param mobile         recipient phone number
 * @param apiTemplateId  Aliyun template code
 * @param templateParams template variables rendered into the message body
 * @return the parsed send result (success flag, serial number, request id, api code/message)
 * @throws Throwable if the HTTP request or response parsing fails
 */
@Override
public SmsSendRespDTO sendSms(Long sendLogId, String mobile, String apiTemplateId,
                              List<KeyValue<String, Object>> templateParams) throws Throwable {
    Assert.notBlank(properties.getSignature(), "短信签名不能为空");
    // 1. Execute the request.
    // Reference: https://api.aliyun.com/document/Dysmsapi/2017-05-25/SendSms
    TreeMap<String, Object> queryParam = new TreeMap<>();
    queryParam.put("PhoneNumbers", mobile);
    queryParam.put("SignName", properties.getSignature());
    queryParam.put("TemplateCode", apiTemplateId);
    queryParam.put("TemplateParam", JsonUtils.toJsonString(MapUtils.convertMap(templateParams)));
    queryParam.put("OutId", sendLogId);
    JSONObject response = request("SendSms", queryParam);
    // 2. Parse the response.
    return new SmsSendRespDTO()
            .setSuccess(Objects.equals(response.getStr("Code"), RESPONSE_CODE_SUCCESS))
            .setSerialNo(response.getStr("BizId"))
            .setApiRequestId(response.getStr("RequestId"))
            .setApiCode(response.getStr("Code"))
            .setApiMsg(response.getStr("Message"));
}
@Test public void tesSendSms_success() throws Throwable { try (MockedStatic<HttpUtils> httpUtilsMockedStatic = mockStatic(HttpUtils.class)) { // 准备参数 Long sendLogId = randomLongId(); String mobile = randomString(); String apiTemplateId = randomString(); List<KeyValue<String, Object>> templateParams = Lists.newArrayList( new KeyValue<>("code", 1234), new KeyValue<>("op", "login")); // mock 方法 httpUtilsMockedStatic.when(() -> HttpUtils.post(anyString(), anyMap(), anyString())) .thenReturn("{\"Message\":\"OK\",\"RequestId\":\"30067CE9-3710-5984-8881-909B21D8DB28\",\"Code\":\"OK\",\"BizId\":\"800025323183427988\"}"); // 调用 SmsSendRespDTO result = smsClient.sendSms(sendLogId, mobile, apiTemplateId, templateParams); // 断言 assertTrue(result.getSuccess()); assertEquals("30067CE9-3710-5984-8881-909B21D8DB28", result.getApiRequestId()); assertEquals("OK", result.getApiCode()); assertEquals("OK", result.getApiMsg()); assertEquals("800025323183427988", result.getSerialNo()); } }
/**
 * Merges sorted checkpoints and revives timed-out POP messages, advancing the
 * revive queue's committed offset past every checkpoint that was fully handled
 * or skipped.
 */
protected void mergeAndRevive(ConsumeReviveObj consumeReviveObj) throws Throwable {
    ArrayList<PopCheckPoint> sortList = consumeReviveObj.genSortList();
    POP_LOGGER.info("reviveQueueId={}, ck listSize={}", queueId, sortList.size());
    if (sortList.size() != 0) {
        POP_LOGGER.info("reviveQueueId={}, 1st ck, startOffset={}, reviveOffset={}; last ck, startOffset={}, reviveOffset={}", queueId,
            sortList.get(0).getStartOffset(), sortList.get(0).getReviveOffset(),
            sortList.get(sortList.size() - 1).getStartOffset(), sortList.get(sortList.size() - 1).getReviveOffset());
    }
    long newOffset = consumeReviveObj.oldOffset;
    for (PopCheckPoint popCheckPoint : sortList) {
        // A slave must not process checkpoints; bail out of the whole batch.
        if (!shouldRunPopRevive) {
            POP_LOGGER.info("slave skip ck process, revive topic={}, reviveQueueId={}", reviveTopic, queueId);
            break;
        }
        // Stop at the first checkpoint that is still too close to the scan end time.
        if (consumeReviveObj.endTime - popCheckPoint.getReviveTime() <= (PopAckConstants.ackTimeInterval + PopAckConstants.SECOND)) {
            break;
        }

        // check normal topic, skip ck , if normal topic is not exist
        String normalTopic = KeyBuilder.parseNormalTopic(popCheckPoint.getTopic(), popCheckPoint.getCId());
        if (brokerController.getTopicConfigManager().selectTopicConfig(normalTopic) == null) {
            POP_LOGGER.warn("reviveQueueId={}, can not get normal topic {}, then continue", queueId, popCheckPoint.getTopic());
            newOffset = popCheckPoint.getReviveOffset();
            continue;
        }
        // Skip checkpoints whose subscription group no longer exists.
        if (null == brokerController.getSubscriptionGroupManager().findSubscriptionGroupConfig(popCheckPoint.getCId())) {
            POP_LOGGER.warn("reviveQueueId={}, can not get cid {}, then continue", queueId, popCheckPoint.getCId());
            newOffset = popCheckPoint.getReviveOffset();
            continue;
        }

        // Back-pressure: while more than 3 revive requests are in flight, wait and
        // evict requests that have been stuck unfinished for over 30 seconds.
        while (inflightReviveRequestMap.size() > 3) {
            waitForRunning(100);
            Pair<Long, Boolean> pair = inflightReviveRequestMap.firstEntry().getValue();
            if (!pair.getObject2() && System.currentTimeMillis() - pair.getObject1() > 1000 * 30) {
                PopCheckPoint oldCK = inflightReviveRequestMap.firstKey();
                rePutCK(oldCK, pair);
                inflightReviveRequestMap.remove(oldCK);
                POP_LOGGER.warn("stay too long, remove from reviveRequestMap, {}, {}, {}, {}", popCheckPoint.getTopic(),
                    popCheckPoint.getBrokerName(), popCheckPoint.getQueueId(), popCheckPoint.getStartOffset());
            }
        }

        reviveMsgFromCk(popCheckPoint);

        newOffset = popCheckPoint.getReviveOffset();
    }
    // Commit the advanced offset only on the master.
    if (newOffset > consumeReviveObj.oldOffset) {
        if (!shouldRunPopRevive) {
            POP_LOGGER.info("slave skip commit, revive topic={}, reviveQueueId={}", reviveTopic, queueId);
            return;
        }
        this.brokerController.getConsumerOffsetManager().commitOffset(PopAckConstants.LOCAL_HOST, PopAckConstants.REVIVE_GROUP, reviveTopic, queueId, newOffset);
    }
    reviveOffset = newOffset;
    consumeReviveObj.newOffset = newOffset;
}
@Test
public void testReviveMsgFromCk_messageFound_writeRetryFailed_rewriteCK() throws Throwable {
    // One checkpoint whose revive window has already elapsed.
    PopCheckPoint ck = buildPopCheckPoint(0, 0, 0);
    PopReviveService.ConsumeReviveObj reviveObj = new PopReviveService.ConsumeReviveObj();
    reviveObj.map.put("", ck);
    reviveObj.endTime = System.currentTimeMillis();

    StringBuilder actualRetryTopic = new StringBuilder();
    StringBuilder actualReviveTopic = new StringBuilder();
    AtomicLong actualInvisibleTime = new AtomicLong(0L);

    // The original message is found...
    when(escapeBridge.getMessageAsync(anyString(), anyLong(), anyInt(), anyString(), anyBoolean()))
        .thenReturn(CompletableFuture.completedFuture(Triple.of(new MessageExt(), "", false)));
    // ...but writing it to the retry topic fails (message too large).
    when(escapeBridge.putMessageToSpecificQueue(any(MessageExtBrokerInner.class))).thenAnswer(invocation -> {
        MessageExtBrokerInner msg = invocation.getArgument(0);
        actualRetryTopic.append(msg.getTopic());
        return new PutMessageResult(PutMessageStatus.MESSAGE_ILLEGAL, new AppendMessageResult(AppendMessageStatus.MESSAGE_SIZE_EXCEEDED));
    });
    // Capture the rewritten checkpoint that gets put back on the revive topic.
    when(messageStore.putMessage(any(MessageExtBrokerInner.class))).thenAnswer(invocation -> {
        MessageExtBrokerInner msg = invocation.getArgument(0);
        actualReviveTopic.append(msg.getTopic());
        PopCheckPoint rewriteCK = JSON.parseObject(msg.getBody(), PopCheckPoint.class);
        actualInvisibleTime.set(rewriteCK.getReviveTime());
        return new PutMessageResult(PutMessageStatus.PUT_OK, new AppendMessageResult(AppendMessageStatus.PUT_OK));
    });

    popReviveService.mergeAndRevive(reviveObj);

    Assert.assertEquals(KeyBuilder.buildPopRetryTopic(TOPIC, GROUP, false), actualRetryTopic.toString());
    Assert.assertEquals(REVIVE_TOPIC, actualReviveTopic.toString());
    Assert.assertEquals(INVISIBLE_TIME + 10 * 1000L, actualInvisibleTime.get()); // first interval is 10s
    verify(escapeBridge, times(1)).putMessageToSpecificQueue(any(MessageExtBrokerInner.class)); // write retry
    verify(messageStore, times(1)).putMessage(any(MessageExtBrokerInner.class)); // rewrite CK
}
/**
 * Parses a rootless URI reference (no leading slash) into the host: optional
 * user info, a hostname taken from the first path segment when the protocol
 * allows configurable hostnames and none is set yet, then the remaining path.
 *
 * @param reader    positioned at the start of the rootless part
 * @param host      host object mutated in place
 * @param decorator receives non-fatal parse warnings
 * @throws HostParserException on an unrecoverable parse error
 */
static void parseRootless(final StringReader reader, final Host host, final Consumer<HostParserException> decorator) throws HostParserException {
    // This is not RFC-compliant.
    // * Rootless-path must not include authentication information.
    final boolean userInfoResult = parseUserInfo(reader, host, decorator);
    if(host.getProtocol().isHostnameConfigurable() && StringUtils.isWhitespace(host.getHostname())) {
        // This is not RFC-compliant.
        // We assume for hostconfigurable-empty-hostnames a hostname on first path segment
        parseHostname(reader, host, decorator);
    }
    parsePath(reader, host, false, decorator);
}
@Test
public void testParseRootless() throws HostParserException {
    // Protocol with a fixed hostname: the whole rootless input must become the default path.
    final Host host = new Host(new TestProtocol() {
        @Override
        public boolean isHostnameConfigurable() {
            return false;
        }
    });
    final String input = "path/sub/directory";
    HostParser.parseRootless(new HostParser.StringReader(input), host, null);
    assertEquals(input, host.getDefaultPath());
}
/**
 * Builds a PostgreSQL query header from the result-set metadata. Fields the
 * PostgreSQL protocol does not use are filled with the placeholder constants.
 */
@Override
public QueryHeader build(final QueryResultMetaData queryResultMetaData, final ShardingSphereDatabase database,
                         final String columnName, final String columnLabel, final int columnIndex) throws SQLException {
    return new QueryHeader(UNUSED_STRING_FIELD, UNUSED_STRING_FIELD, columnLabel, UNUSED_STRING_FIELD,
            queryResultMetaData.getColumnType(columnIndex),
            queryResultMetaData.getColumnTypeName(columnIndex),
            queryResultMetaData.getColumnLength(columnIndex),
            UNUSED_INT_FIELD, UNUSED_BOOLEAN_FIELD, UNUSED_BOOLEAN_FIELD, UNUSED_BOOLEAN_FIELD, UNUSED_BOOLEAN_FIELD);
}
@Test
void assertBuildPostgreSQLQueryHeader() throws SQLException {
    final int columnIndex = 1;
    // Stub metadata for a single INTEGER column.
    QueryResultMetaData queryResultMetaData = mock(QueryResultMetaData.class);
    when(queryResultMetaData.getColumnLabel(columnIndex)).thenReturn("label");
    when(queryResultMetaData.getColumnType(columnIndex)).thenReturn(Types.INTEGER);
    when(queryResultMetaData.getColumnTypeName(columnIndex)).thenReturn("int");
    when(queryResultMetaData.getColumnLength(columnIndex)).thenReturn(11);
    // database and columnName are unused by the PostgreSQL builder, hence null.
    QueryHeader actual = new PostgreSQLQueryHeaderBuilder().build(queryResultMetaData, null, null, queryResultMetaData.getColumnLabel(columnIndex), columnIndex);
    assertThat(actual.getColumnLabel(), is("label"));
    assertThat(actual.getColumnType(), is(Types.INTEGER));
    assertThat(actual.getColumnTypeName(), is("int"));
    assertThat(actual.getColumnLength(), is(11));
}
/**
 * Logs the message with the context's identifying prefix prepended, keeping
 * the original level and attached throwable.
 */
@Override
public void log(Logger logger, Level level, String message, Throwable t) {
    logger.log(level, withLogPrefix(message), t);
}
@Test
void verify() {
    // Log through the context and check the record carries the cluster prefix
    // and has the format arguments applied.
    context.log(logger, Level.INFO, "A %s message", "log");
    assertEquals(1, logger.records.size());
    assertEquals(Level.INFO, logger.records.get(0).getLevel());
    assertEquals("Cluster 'clustername': A log message", logger.records.get(0).getMessage());
}
/**
 * Formats a millisecond epoch timestamp with the supplied formatter.
 * Non-positive timestamps render as the NULL placeholder string.
 *
 * @param timeStamp  epoch milliseconds; values {@code <= 0} are treated as null
 * @param dateFormat formatter applied to valid timestamps
 * @return the formatted time, or the NULL placeholder
 */
public static String longToTimeString(long timeStamp, SimpleDateFormat dateFormat) {
    return timeStamp <= 0L ? FeConstants.NULL_STRING : dateFormat.format(new Date(timeStamp));
}
@Test
public void testDateTrans() throws AnalysisException {
    // Negative timestamps render as the NULL placeholder.
    Assert.assertEquals(FeConstants.NULL_STRING, TimeUtils.longToTimeString(-2));

    long timestamp = 1426125600000L;
    Assert.assertEquals("2015-03-12 10:00:00", TimeUtils.longToTimeString(timestamp));

    // Date and datetime literals round-trip to their packed long form.
    DateLiteral date = new DateLiteral("2015-03-01", ScalarType.DATE);
    Assert.assertEquals(20150301000000L, date.getLongValue());

    DateLiteral datetime = new DateLiteral("2015-03-01 12:00:00", ScalarType.DATETIME);
    Assert.assertEquals(20150301120000L, datetime.getLongValue());
}
/**
 * Reads a date-typed column. YEAR columns are fetched via {@code getObject}
 * (or {@code null} when the previous read was SQL NULL); all other types are
 * returned as {@link java.sql.Date}.
 */
@Override
public Object getDateValue(final ResultSet resultSet, final int columnIndex) throws SQLException {
    final String columnTypeName = resultSet.getMetaData().getColumnTypeName(columnIndex);
    if (!isYearDataType(columnTypeName)) {
        return resultSet.getDate(columnIndex);
    }
    return resultSet.wasNull() ? null : resultSet.getObject(columnIndex);
}
@Test
void assertGetDateValueWithNotYearDataType() throws SQLException {
    // Non-YEAR columns must be read via getDate.
    when(resultSet.getMetaData().getColumnTypeName(1)).thenReturn("DATE");
    when(resultSet.getDate(1)).thenReturn(new Date(0L));
    assertThat(dialectResultSetMapper.getDateValue(resultSet, 1), is(new Date(0L)));
}
/**
 * Converts an epoch timestamp in microseconds to a Joda
 * {@link org.joda.time.Instant} with millisecond precision.
 *
 * <p>Fix: uses {@link Math#floorDiv(long, long)} instead of plain {@code /},
 * so negative (pre-epoch) microsecond values round toward negative infinity
 * rather than toward zero. Positive inputs are unaffected; sub-millisecond
 * precision is dropped in both cases.
 *
 * @param microsecond epoch time in microseconds
 * @return the corresponding instant, truncated to milliseconds
 */
public static org.joda.time.Instant microsecondToInstant(long microsecond) {
    return org.joda.time.Instant.ofEpochMilli(Math.floorDiv(microsecond, 1_000L));
}
@Test
public void testMicrosecondToJodaInstant() {
    // Microseconds are truncated to whole milliseconds: 1_234_567us -> 1_234ms.
    assertEquals(org.joda.time.Instant.ofEpochMilli(1_234L), microsecondToInstant(1_234_567L));
}
/**
 * Computes input splits for a JDBC-backed table. With a configured partition
 * column the value range [lowerBound, upperBound] is divided into intervals;
 * otherwise the total row count is divided into near-equal offset/limit chunks.
 * Splitting is skipped entirely when disabled or when numPartitions <= 1.
 */
@Override
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
    try {
        String partitionColumn = job.get(Constants.JDBC_PARTITION_COLUMN);
        int numPartitions = job.getInt(Constants.JDBC_NUM_PARTITIONS, -1);
        String lowerBound = job.get(Constants.JDBC_LOW_BOUND);
        String upperBound = job.get(Constants.JDBC_UPPER_BOUND);

        InputSplit[] splits;

        if (!job.getBoolean(Constants.JDBC_SPLIT_QUERY, true) || numPartitions <= 1) {
            // We will not split this query if:
            // 1. hive.sql.query.split is set to false (either manually or automatically by calcite
            // 2. numPartitions == 1
            splits = new InputSplit[1];
            splits[0] = new JdbcInputSplit(FileInputFormat.getInputPaths(job)[0]);
            LOGGER.info("Creating 1 input split " + splits[0]);
            return splits;
        }

        dbAccessor = DatabaseAccessorFactory.getAccessor(job);
        Path[] tablePaths = FileInputFormat.getInputPaths(job);

        // We will split this query into n splits
        LOGGER.debug("Creating {} input splits", numPartitions);

        if (partitionColumn != null) {
            // Interval-based splitting on the partition column.
            List<String> columnNames = dbAccessor.getColumnNames(job);
            if (!columnNames.contains(partitionColumn)) {
                throw new IOException("Cannot find partitionColumn:" + partitionColumn + " in " + columnNames);
            }
            List<TypeInfo> hiveColumnTypesList = dbAccessor.getColumnTypes(job);
            TypeInfo typeInfo = hiveColumnTypesList.get(columnNames.indexOf(partitionColumn));
            if (!(typeInfo instanceof PrimitiveTypeInfo)) {
                throw new IOException(partitionColumn + " is a complex type, only primitive type can be a partition column");
            }
            // Fetch missing bounds from the database.
            if (lowerBound == null || upperBound == null) {
                Pair<String, String> boundary = dbAccessor.getBounds(job, partitionColumn, lowerBound == null, upperBound == null);
                if (lowerBound == null) {
                    lowerBound = boundary.getLeft();
                }
                if (upperBound == null) {
                    upperBound = boundary.getRight();
                }
            }
            if (lowerBound == null) {
                throw new IOException("lowerBound of " + partitionColumn + " cannot be null");
            }
            if (upperBound == null) {
                throw new IOException("upperBound of " + partitionColumn + " cannot be null");
            }
            IntervalSplitter intervalSplitter = IntervalSplitterFactory.newIntervalSpitter(typeInfo);
            List<MutablePair<String, String>> intervals = intervalSplitter.getIntervals(lowerBound, upperBound, numPartitions, typeInfo);
            if (intervals.size()<=1) {
                LOGGER.debug("Creating 1 input splits");
                splits = new InputSplit[1];
                splits[0] = new JdbcInputSplit(FileInputFormat.getInputPaths(job)[0]);
                return splits;
            }
            // Open-ended first and last intervals so no rows outside the sampled
            // bounds are lost.
            intervals.get(0).setLeft(null);
            intervals.get(intervals.size()-1).setRight(null);
            splits = new InputSplit[intervals.size()];
            for (int i = 0; i < intervals.size(); i++) {
                splits[i] = new JdbcInputSplit(partitionColumn, intervals.get(i).getLeft(), intervals.get(i).getRight(), tablePaths[0]);
            }
        } else {
            // Offset/limit splitting by row count; the first (numRecords % numPartitions)
            // splits each take one extra record.
            int numRecords = dbAccessor.getTotalNumberOfRecords(job);

            if (numRecords < numPartitions) {
                numPartitions = numRecords;
            }

            int numRecordsPerSplit = numRecords / numPartitions;
            int numSplitsWithExtraRecords = numRecords % numPartitions;

            LOGGER.debug("Num records = {}", numRecords);
            splits = new InputSplit[numPartitions];

            int offset = 0;
            for (int i = 0; i < numPartitions; i++) {
                int numRecordsInThisSplit = numRecordsPerSplit;
                if (i < numSplitsWithExtraRecords) {
                    numRecordsInThisSplit++;
                }

                splits[i] = new JdbcInputSplit(numRecordsInThisSplit, offset, tablePaths[0]);
                offset += numRecordsInThisSplit;
            }
        }

        dbAccessor = null;
        LOGGER.info("Num input splits created {}", splits.length);
        for (InputSplit split : splits) {
            LOGGER.info("split:" + split.toString());
        }
        return splits;
    } catch (Exception e) {
        LOGGER.error("Error while splitting input data.", e);
        throw new IOException(e);
    }
}
@Test
public void testIntervalSplit_Date() throws HiveJdbcDatabaseAccessException, IOException {
    JdbcInputFormat f = new JdbcInputFormat();
    // A single date partition column with bounds sampled from the database.
    when(mockDatabaseAccessor.getColumnNames(any(Configuration.class))).thenReturn(Lists.newArrayList("a"));
    when(mockDatabaseAccessor.getBounds(any(Configuration.class), any(String.class), anyBoolean(), anyBoolean()))
            .thenReturn(new ImmutablePair<String, String>("2010-01-01", "2018-01-01"));
    List<TypeInfo> columnTypes = Collections.singletonList(TypeInfoFactory.dateTypeInfo);
    when(mockDatabaseAccessor.getColumnTypes(any(Configuration.class))).thenReturn(columnTypes);

    JobConf conf = new JobConf();
    conf.set("mapred.input.dir", "/temp");
    conf.set("hive.sql.partitionColumn", "a");
    conf.set("hive.sql.numPartitions", "3");
    InputSplit[] splits = f.getSplits(conf, -1);

    assertThat(splits, is(notNullValue()));
    assertThat(splits.length, is(3));

    // First and last splits are open-ended; interior boundaries divide the range.
    assertNull(((JdbcInputSplit)splits[0]).getLowerBound());
    assertEquals(((JdbcInputSplit)splits[0]).getUpperBound(), "2012-09-01");
    assertEquals(((JdbcInputSplit)splits[1]).getLowerBound(), "2012-09-01");
    assertEquals(((JdbcInputSplit)splits[1]).getUpperBound(), "2015-05-03");
    assertEquals(((JdbcInputSplit)splits[2]).getLowerBound(), "2015-05-03");
    assertNull(((JdbcInputSplit)splits[2]).getUpperBound());
}
/**
 * Restores this timeline object to its initial value.
 */
@Override
public void reset() {
    set(initialValue);
}
@Test
public void testReset() {
    SnapshotRegistry registry = new SnapshotRegistry(new LogContext());
    TimelineObject<String> value = new TimelineObject<>(registry, "<default>");
    // Create snapshots interleaved with value changes.
    registry.getOrCreateSnapshot(2);
    value.set("first value");
    registry.getOrCreateSnapshot(3);
    value.set("second value");
    // Resetting the registry drops all snapshots and reverts the value to its default.
    registry.reset();
    assertEquals(Collections.emptyList(), registry.epochsList());
    assertEquals("<default>", value.get());
}
/**
 * Joins a base path and a relative path, inserting a separator when needed.
 * Delegates to commons-io {@code FilenameUtils.concat}.
 */
protected String concat( String basePath, String relativePath ) {
    return FilenameUtils.concat( basePath, relativePath );
}
@Test
public void testConcat() {
    String basePath = "/tmp/some/path";
    String relativePath = "that/is/temporary";
    // Expected result normalized to the platform's file separator.
    String expectedPath = applyFileSeperator( "/tmp/some/path/that/is/temporary" );

    // CASE 1: Add separator
    assertEquals( expectedPath, servlet.concat( basePath, relativePath ) );

    // CASE 2: Don't add separator
    assertEquals( expectedPath, servlet.concat( basePath + "/", relativePath) );
}
/**
 * Computes the CRC32C checksum of the given byte range.
 *
 * @param bytes  source array
 * @param offset start offset within the array
 * @param size   number of bytes to include
 * @return the checksum value
 */
public static long compute(byte[] bytes, int offset, int size) {
    Checksum crc = create();
    crc.update(bytes, offset, size);
    return crc.getValue();
}
@Test
public void testValue() {
    // Known CRC32C value for this fixed input.
    final byte[] bytes = "Some String".getBytes();
    assertEquals(608512271, Crc32C.compute(bytes, 0, bytes.length));
}
/**
 * Validates the consumer group carried by the resource; delegates to the
 * name-based overload.
 */
public void validateConsumerGroup(Resource consumerGroup) {
    validateConsumerGroup(consumerGroup.getName());
}
@Test
public void testValidateConsumerGroup() {
    // Blank names and system-reserved group names are rejected.
    assertThrows(GrpcProxyException.class, () -> grpcValidator.validateConsumerGroup(""));
    assertThrows(GrpcProxyException.class, () -> grpcValidator.validateConsumerGroup("CID_RMQ_SYS_xxxx"));
    // Ordinary group names pass without throwing.
    grpcValidator.validateConsumerGroup("consumerGroupName");
}
/**
 * Sets whether this feature is enabled.
 */
public void setEnabled(boolean enabled) {
    this.enabled = enabled;
}
@Test
public void setEnabled() {
    // Disabling must be reflected by the getter.
    properties.setEnabled(false);
    assertThat(properties.isEnabled()).isFalse();
}
/**
 * Runs the configured shell fencing command for the target, streaming its
 * stdout/stderr to the log, and reports success iff the command exits 0.
 */
@Override
public boolean tryFence(HAServiceTarget target, String args) {
    ProcessBuilder builder;
    String cmd = parseArgs(target.getTransitionTargetHAStatus(), args);
    // Windows uses cmd.exe; elsewhere bash with -e so the first failure aborts.
    if (!Shell.WINDOWS) {
        builder = new ProcessBuilder("bash", "-e", "-c", cmd);
    } else {
        builder = new ProcessBuilder("cmd.exe", "/c", cmd);
    }
    // Expose configuration and target details to the command's environment.
    setConfAsEnvVars(builder.environment());
    addTargetInfoAsEnvVars(target, builder.environment());

    Process p;
    try {
        p = builder.start();
        // The fencing command gets no stdin.
        p.getOutputStream().close();
    } catch (IOException e) {
        LOG.warn("Unable to execute " + cmd, e);
        return false;
    }

    String pid = tryGetPid(p);
    LOG.info("Launched fencing command '" + cmd + "' with "
        + ((pid != null) ? ("pid " + pid) : "unknown pid"));

    // Abbreviated command (plus pid when known) prefixes each relayed log line.
    String logPrefix = abbreviate(cmd, ABBREV_LENGTH);
    if (pid != null) {
        logPrefix = "[PID " + pid + "] " + logPrefix;
    }

    // Pump logs to stderr
    StreamPumper errPumper = new StreamPumper(
        LOG, logPrefix, p.getErrorStream(),
        StreamPumper.StreamType.STDERR);
    errPumper.start();

    StreamPumper outPumper = new StreamPumper(
        LOG, logPrefix, p.getInputStream(),
        StreamPumper.StreamType.STDOUT);
    outPumper.start();

    int rc;
    try {
        rc = p.waitFor();
        // Join the pumpers so all output is logged before we report the result.
        errPumper.join();
        outPumper.join();
    } catch (InterruptedException ie) {
        LOG.warn("Interrupted while waiting for fencing command: " + cmd);
        return false;
    }

    return rc == 0;
}
@Test
public void testStderrLogging() {
    // Output written to stderr by the fencing command must be relayed at WARN level.
    assertTrue(fencer.tryFence(TEST_TARGET, "echo hello>&2"));
    Mockito.verify(ShellCommandFencer.LOG).warn(
        Mockito.endsWith("echo hello>&2: hello"));
}
/**
 * Injects schema information into CREATE statements. Statements other than
 * CREATE-source and CREATE-AS-SELECT pass through untouched; when injection
 * is not applicable the original statement is returned unchanged.
 *
 * @throws KsqlStatementException rethrown as-is, or wrapping any other
 *         {@link KsqlException} with the statement text for context
 */
@SuppressWarnings("unchecked")
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
    final ConfiguredStatement<T> statement
) {
    if (!(statement.getStatement() instanceof CreateSource)
        && !(statement.getStatement() instanceof CreateAsSelect)) {
        return statement;
    }

    try {
        if (statement.getStatement() instanceof CreateSource) {
            final ConfiguredStatement<CreateSource> createStatement =
                (ConfiguredStatement<CreateSource>) statement;
            return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement);
        } else {
            final ConfiguredStatement<CreateAsSelect> createStatement =
                (ConfiguredStatement<CreateAsSelect>) statement;
            return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse(
                createStatement);
        }
    } catch (final KsqlStatementException e) {
        // Already carries the statement context; do not re-wrap.
        throw e;
    } catch (final KsqlException e) {
        throw new KsqlStatementException(
            ErrorMessageUtil.buildErrorMessage(e),
            statement.getMaskedStatementText(),
            e.getCause());
    }
}
@Test
public void shouldReturnStatementUnchangedIfCsFormatsDoNotSupportInference() {
    // Given: neither the key nor the value format supports schema inference.
    givenNeitherKeyNorValueInferenceSupported();

    // When:
    final ConfiguredStatement<?> result = injector.inject(csStatement);

    // Then: the exact same statement instance is returned (no copy, no mutation).
    assertThat(result, is(sameInstance(csStatement)));
}
/**
 * Appends an element at the next write slot of the ring buffer, advancing the
 * read index when the writer laps the reader, and capping the logical length
 * at the buffer size.
 *
 * <p>NOTE(review): {@code (int) (++writeIndex) % size} casts to int before the
 * modulo, so if {@code writeIndex} ever wraps past {@code Integer.MAX_VALUE}
 * the computed index can go negative — confirm whether the expected write
 * volume makes that reachable.
 */
public void add(T element) {
    int newIndex = (int) (++writeIndex) % size;
    elements[newIndex] = element;
    // If the slot after the one just written is occupied, the writer has lapped
    // the reader: move the read index to the oldest surviving element.
    int nextIndex = (newIndex + 1) % size;
    if (elements[nextIndex] != null) {
        readIndex = nextIndex;
    }
    if (++length > size) {
        length = size;
    }
}
@Test
public void testAdd() {
    CircularBuffer<Integer> cb = new CircularBuffer<>(2);
    cb.add(1);
    assertEquals(1, cb.getLength());
    // Drain via forEach; consuming resets the logical length to zero.
    final List<Integer> elements = new ArrayList<>();
    cb.forEach(
        new Function<Integer, Boolean>() {
            @Override
            public Boolean apply(Integer value) {
                elements.add(value);
                return true;
            }
        });
    assertEquals(0, cb.getLength());
    assertEquals(Arrays.asList(1), elements);
}
/**
 * Probes for the file by fetching its attributes; a not-found error from the
 * server maps to {@code false}, any other failure propagates.
 */
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
    try {
        new BrickAttributesFinderFeature(session).find(file);
    }
    catch(NotfoundException e) {
        return false;
    }
    return true;
}
@Test
public void testFindDirectory() throws Exception {
    // Create a uniquely-named directory under the account home.
    final Path folder = new BrickDirectoryFeature(session).mkdir(
            new Path(new DefaultHomeFinderService(session).find(), new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus());
    // The directory is found; the same name probed as a file is not.
    assertTrue(new BrickFindFeature(session).find(folder));
    assertFalse(new BrickFindFeature(session).find(new Path(folder.getAbsolute(), EnumSet.of(Path.Type.file))));
    // Clean up the remote directory.
    new BrickDeleteFeature(session).delete(Collections.singletonList(folder), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Applies schema injection to CREATE-source and CREATE-AS-SELECT statements;
 * all other statement types are returned untouched. If no injection applies,
 * the original configured statement is returned.
 *
 * @throws KsqlStatementException propagated unchanged, or created to wrap any
 *         other {@link KsqlException} together with the masked statement text
 */
@SuppressWarnings("unchecked")
@Override
public <T extends Statement> ConfiguredStatement<T> inject(
    final ConfiguredStatement<T> statement
) {
    if (!(statement.getStatement() instanceof CreateSource)
        && !(statement.getStatement() instanceof CreateAsSelect)) {
        return statement;
    }

    try {
        if (statement.getStatement() instanceof CreateSource) {
            final ConfiguredStatement<CreateSource> createStatement =
                (ConfiguredStatement<CreateSource>) statement;
            return (ConfiguredStatement<T>) forCreateStatement(createStatement).orElse(createStatement);
        } else {
            final ConfiguredStatement<CreateAsSelect> createStatement =
                (ConfiguredStatement<CreateAsSelect>) statement;
            return (ConfiguredStatement<T>) forCreateAsStatement(createStatement).orElse(
                createStatement);
        }
    } catch (final KsqlStatementException e) {
        // Statement context already attached; rethrow as-is.
        throw e;
    } catch (final KsqlException e) {
        throw new KsqlStatementException(
            ErrorMessageUtil.buildErrorMessage(e),
            statement.getMaskedStatementText(),
            e.getCause());
    }
}
@Test
public void shouldThrowIfKeyFormatDoesNotSupportSchemaIdInference() {
    // Given: KEY_SCHEMA_ID supplied, but the key format cannot infer schemas.
    givenValueButNotKeyInferenceSupported(
        ImmutableMap.of("key_schema_id", new IntegerLiteral(123)));

    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> injector.inject(csStatement)
    );

    // Then: a clear error naming the offending format.
    assertThat(e.getMessage(),
        containsString("KEY_FORMAT should support schema inference when KEY_SCHEMA_ID is provided. "
            + "Current format is KAFKA."));
}
/**
 * Applies the selected source file attributes (ACLs, permissions, xattrs,
 * replication, owner/group, times) onto the target path, only issuing calls
 * for attributes that actually differ.
 *
 * @param targetFS          the target file system
 * @param path              target path to update
 * @param srcFileStatus     attributes captured from the source
 * @param attributes        which attributes to preserve; mutated (BLOCKSIZE and
 *                          CHECKSUMTYPE are stripped, as they are not applied here)
 * @param preserveRawXattrs whether raw-namespace xattrs are copied even when
 *                          XATTR preservation is off
 * @throws IOException if any file system operation fails
 */
public static void preserve(FileSystem targetFS, Path path,
                            CopyListingFileStatus srcFileStatus,
                            EnumSet<FileAttribute> attributes,
                            boolean preserveRawXattrs) throws IOException {
    // strip out those attributes we don't need any more
    attributes.remove(FileAttribute.BLOCKSIZE);
    attributes.remove(FileAttribute.CHECKSUMTYPE);
    // If not preserving anything from FileStatus, don't bother fetching it.
    FileStatus targetFileStatus = attributes.isEmpty() ? null :
        targetFS.getFileStatus(path);
    String group = targetFileStatus == null ? null :
        targetFileStatus.getGroup();
    String user = targetFileStatus == null ? null :
        targetFileStatus.getOwner();
    boolean chown = false;

    if (attributes.contains(FileAttribute.ACL)) {
        List<AclEntry> srcAcl = srcFileStatus.getAclEntries();
        List<AclEntry> targetAcl = getAcl(targetFS, targetFileStatus);
        if (!srcAcl.equals(targetAcl)) {
            targetFS.removeAcl(path);
            targetFS.setAcl(path, srcAcl);
        }
        // setAcl doesn't preserve sticky bit, so also call setPermission if needed.
        if (srcFileStatus.getPermission().getStickyBit() !=
            targetFileStatus.getPermission().getStickyBit()) {
            targetFS.setPermission(path, srcFileStatus.getPermission());
        }
    } else if (attributes.contains(FileAttribute.PERMISSION) &&
        !srcFileStatus.getPermission().equals(targetFileStatus.getPermission())) {
        targetFS.setPermission(path, srcFileStatus.getPermission());
    }

    final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR);
    if (preserveXAttrs || preserveRawXattrs) {
        final String rawNS =
            StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
        Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
        Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
        if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) {
            for (Entry<String, byte[]> entry : srcXAttrs.entrySet()) {
                String xattrName = entry.getKey();
                // Raw-namespace xattrs are copied even without XATTR preservation.
                if (xattrName.startsWith(rawNS) || preserveXAttrs) {
                    targetFS.setXAttr(path, xattrName, entry.getValue());
                }
            }
        }
    }

    // The replication factor can only be preserved for replicated files.
    // It is ignored when either the source or target file are erasure coded.
    if (attributes.contains(FileAttribute.REPLICATION) &&
        !targetFileStatus.isDirectory() &&
        !targetFileStatus.isErasureCoded() &&
        !srcFileStatus.isErasureCoded() &&
        srcFileStatus.getReplication() != targetFileStatus.getReplication()) {
        targetFS.setReplication(path, srcFileStatus.getReplication());
    }

    if (attributes.contains(FileAttribute.GROUP) &&
        !group.equals(srcFileStatus.getGroup())) {
        group = srcFileStatus.getGroup();
        chown = true;
    }

    if (attributes.contains(FileAttribute.USER) &&
        !user.equals(srcFileStatus.getOwner())) {
        user = srcFileStatus.getOwner();
        chown = true;
    }

    // Batch owner and group into a single setOwner call.
    if (chown) {
        targetFS.setOwner(path, user, group);
    }

    if (attributes.contains(FileAttribute.TIMES)) {
        targetFS.setTimes(path,
            srcFileStatus.getModificationTime(),
            srcFileStatus.getAccessTime());
    }
}
@Test
public void testPreserveGroupOnDirectory() throws IOException {
    FileSystem fs = FileSystem.get(config);
    // Only the GROUP attribute is requested.
    EnumSet<FileAttribute> attributes = EnumSet.of(FileAttribute.GROUP);

    Path dst = new Path("/tmp/abc");
    Path src = new Path("/tmp/src");

    createDirectory(fs, src);
    createDirectory(fs, dst);

    // Give source and target deliberately different perms/owner/group.
    fs.setPermission(src, fullPerm);
    fs.setOwner(src, "somebody", "somebody-group");

    fs.setPermission(dst, noPerm);
    fs.setOwner(dst, "nobody", "nobody-group");

    CopyListingFileStatus srcStatus = new CopyListingFileStatus(fs.getFileStatus(src));

    DistCpUtils.preserve(fs, dst, srcStatus, attributes, false);
    CopyListingFileStatus dstStatus = new CopyListingFileStatus(fs.getFileStatus(dst));

    // FileStatus.equals only compares path field, must explicitly compare all fields
    // Permission and owner stay different; only the group was copied over.
    Assert.assertFalse(srcStatus.getPermission().equals(dstStatus.getPermission()));
    Assert.assertFalse(srcStatus.getOwner().equals(dstStatus.getOwner()));
    Assert.assertTrue(srcStatus.getGroup().equals(dstStatus.getGroup()));
}
/**
 * Resolves the message payload type declared by the registered listener's
 * generic interface ({@code RocketMQListener<T>} or
 * {@code RocketMQReplyListener<T, R>}), walking up the class hierarchy past
 * any AOP proxies. Falls back to {@code Object.class} when no parameterized
 * listener interface is found.
 */
protected Type getMessageType() {
    Class<?> targetClass;
    // Unwrap possible Spring AOP proxies to reach the user's listener class.
    if (rocketMQListener != null) {
        targetClass = AopProxyUtils.ultimateTargetClass(rocketMQListener);
    } else {
        targetClass = AopProxyUtils.ultimateTargetClass(rocketMQReplyListener);
    }
    Type matchedGenericInterface = null;
    // Search this class and every superclass for the listener interface.
    while (Objects.nonNull(targetClass)) {
        Type[] interfaces = targetClass.getGenericInterfaces();
        if (Objects.nonNull(interfaces)) {
            for (Type type : interfaces) {
                if (type instanceof ParameterizedType &&
                    (Objects.equals(((ParameterizedType) type).getRawType(), RocketMQListener.class) || Objects.equals(((ParameterizedType) type).getRawType(), RocketMQReplyListener.class))) {
                    matchedGenericInterface = type;
                    break;
                }
            }
        }
        targetClass = targetClass.getSuperclass();
    }
    if (Objects.isNull(matchedGenericInterface)) {
        return Object.class;
    }

    // The first type argument is the message payload type.
    Type[] actualTypeArguments = ((ParameterizedType) matchedGenericInterface).getActualTypeArguments();
    if (Objects.nonNull(actualTypeArguments) && actualTypeArguments.length > 0) {
        return actualTypeArguments[0];
    }
    return Object.class;
}
@Test public void testGetMessageType() throws Exception { DefaultRocketMQListenerContainer listenerContainer = new DefaultRocketMQListenerContainer(); Method getMessageType = DefaultRocketMQListenerContainer.class.getDeclaredMethod("getMessageType"); getMessageType.setAccessible(true); listenerContainer.setRocketMQListener(new RocketMQListener<String>() { @Override public void onMessage(String message) { } }); Class result = (Class) getMessageType.invoke(listenerContainer); assertThat(result.getName().equals(String.class.getName())); //support message listenerContainer.setRocketMQListener(new RocketMQListener<Message>() { @Override public void onMessage(Message message) { } }); result = (Class) getMessageType.invoke(listenerContainer); assertThat(result.getName().equals(Message.class.getName())); listenerContainer.setRocketMQListener(new RocketMQListener<MessageExt>() { @Override public void onMessage(MessageExt message) { } }); result = (Class) getMessageType.invoke(listenerContainer); assertThat(result.getName().equals(MessageExt.class.getName())); listenerContainer.setRocketMQReplyListener(new RocketMQReplyListener<MessageExt, String>() { @Override public String onMessage(MessageExt message) { return "test"; } }); result = (Class) getMessageType.invoke(listenerContainer); assertThat(result.getName().equals(MessageExt.class.getName())); listenerContainer.setRocketMQReplyListener(new RocketMQReplyListener<String, String>() { @Override public String onMessage(String message) { return "test"; } }); result = (Class) getMessageType.invoke(listenerContainer); assertThat(result.getName().equals(String.class.getName())); }
/**
 * Updates a social client after validating existence and uniqueness.
 */
@Override
public void updateSocialClient(SocialClientSaveReqVO updateReqVO) {
    // Validate the client exists.
    validateSocialClientExists(updateReqVO.getId());
    // Validate no duplicate exists for the same user type + social type.
    validateSocialClientUnique(updateReqVO.getId(), updateReqVO.getUserType(), updateReqVO.getSocialType());
    // Perform the update.
    SocialClientDO updateObj = BeanUtils.toBean(updateReqVO, SocialClientDO.class);
    socialClientMapper.updateById(updateObj);
}
@Test
public void testUpdateSocialClient_success() {
    // mock data
    SocialClientDO dbSocialClient = randomPojo(SocialClientDO.class);
    socialClientMapper.insert(dbSocialClient);// @Sql: insert an existing row first
    // prepare parameters
    SocialClientSaveReqVO reqVO = randomPojo(SocialClientSaveReqVO.class, o -> {
        o.setId(dbSocialClient.getId()); // id of the row to update
        o.setSocialType(randomEle(SocialTypeEnum.values()).getType())
                .setUserType(randomEle(UserTypeEnum.values()).getValue())
                .setStatus(randomCommonStatus());
    });
    // call
    socialClientService.updateSocialClient(reqVO);
    // verify the row was updated correctly
    SocialClientDO socialClient = socialClientMapper.selectById(reqVO.getId()); // fetch the latest state
    assertPojoEquals(reqVO, socialClient);
}
/** Returns the configured metric reporter factories (serialized as "reporters"). */
@JsonProperty
public List<ReporterFactory> getReporters() {
    return reporters;
}
@Test
void hasReporters() {
    // A CSV reporter factory is constructed with a target file; the loaded
    // configuration is expected to contain three reporter factories in total.
    CsvReporterFactory csvReporter = new CsvReporterFactory();
    csvReporter.setFile(new File("metrics"));
    assertThat(config.getReporters()).hasSize(3);
}
/**
 * Reads the whole file at {@code path} into a byte array.
 *
 * @param path file to read
 * @return the complete file contents
 * @throws IOException on read failure
 * @throws OutOfMemoryError if the file is larger than MAX_BUFFER_SIZE
 */
public static byte[] readAllBytes(java.nio.file.Path path) throws IOException {
    try (SeekableByteChannel channel = Files.newByteChannel(path);
            InputStream in = Channels.newInputStream(channel)) {
        long size = channel.size();
        // Arrays are int-indexed; refuse files that cannot fit in one array.
        if (size > (long) MAX_BUFFER_SIZE) {
            throw new OutOfMemoryError("Required array size too large");
        }
        return read(in, (int) size);
    }
}
@Test
void testReadAllBytes() throws Exception {
    File tempFile = TempDirUtils.newFolder(Paths.get(this.getClass().getResource("/").getPath()));
    final int fileSize = 1024;
    final String testFilePath = tempFile.toPath()
            .resolve(this.getClass().getSimpleName() + "_" + fileSize + ".txt")
            .toString();
    // Each scope regenerates the file at a different size and checks that
    // readAllBytes returns content matching the generator's MD5.
    {
        String expectedMD5 = generateTestFile(testFilePath, 1024);
        final byte[] data = FileUtils.readAllBytes((new File(testFilePath)).toPath());
        assertThat(md5Hex(data)).isEqualTo(expectedMD5);
    }
    {
        String expectedMD5 = generateTestFile(testFilePath, 4096);
        final byte[] data = FileUtils.readAllBytes((new File(testFilePath)).toPath());
        assertThat(md5Hex(data)).isEqualTo(expectedMD5);
    }
    {
        String expectedMD5 = generateTestFile(testFilePath, 5120);
        final byte[] data = FileUtils.readAllBytes((new File(testFilePath)).toPath());
        assertThat(md5Hex(data)).isEqualTo(expectedMD5);
    }
}
/**
 * Sets the level of this logger, recomputing the effective level and
 * propagating the change to child loggers and level-change listeners.
 * Synchronized because the logger tree may be mutated concurrently.
 *
 * @param newLevel the new level, or null to inherit from the parent
 * @throws IllegalArgumentException if null is set on the root logger
 */
public synchronized void setLevel(Level newLevel) {
    if (level == newLevel) {
        // nothing to do;
        return;
    }
    if (newLevel == null && isRootLogger()) {
        // the root must always have a concrete level to inherit from
        throw new IllegalArgumentException(
                "The level of the root logger cannot be set to null");
    }
    level = newLevel;
    if (newLevel == null) {
        // null means "inherit": fall back to the parent's effective level
        effectiveLevelInt = parent.effectiveLevelInt;
        newLevel = parent.getEffectiveLevel();
    } else {
        effectiveLevelInt = newLevel.levelInt;
    }
    if (childrenList != null) {
        int len = childrenList.size();
        for (int i = 0; i < len; i++) {
            Logger child = (Logger) childrenList.get(i);
            // tell child to handle parent levelInt change
            child.handleParentLevelChange(effectiveLevelInt);
        }
    }
    // inform listeners
    loggerContext.fireOnLevelChange(this, newLevel);
}
@Test
public void testEnabled_Info() throws Exception {
    // With root set to INFO, the child logger's effective threshold is INFO.
    root.setLevel(Level.INFO);
    checkLevelThreshold(loggerTest, Level.INFO);
}
/**
 * Compares two version strings using their parsed ordering.
 *
 * @return a negative, zero, or positive value if {@code v1} is lower than,
 *         equal to, or higher than {@code v2}
 */
@Override
public int compareVersions(String v1, String v2) {
    return Version.parse(v1).compareTo(Version.parse(v2));
}
@Test
void compareVersions() {
    // A higher minor version must compare greater than a lower one.
    assertTrue(versionManager.compareVersions("1.1.0", "1.0.0") > 0);
}
/** Resolves the schema for a varargs parameter of the given Java type. */
public static ParamType getVarArgsSchemaFromType(final Type type) {
    return getSchemaFromType(type, VARARGS_JAVA_TO_ARG_TYPE);
}
@Test
public void shouldGetGenericTriFunctionVariadic() throws NoSuchMethodException {
    // Given: a generic TriFunction<T, U, V, W> return type
    final Type genericType = getClass().getMethod("genericTriFunctionType").getGenericReturnType();
    // When:
    final ParamType returnType = UdfUtil.getVarArgsSchemaFromType(genericType);
    // Then: it maps to a lambda type with three generic inputs and one generic output
    assertThat(returnType, is(LambdaType.of(ImmutableList.of(GenericType.of("T"), GenericType.of("U"), GenericType.of("V")), GenericType.of("W"))));
}
/**
 * Loads a YAML file into a map, hiding sensitive document data from parse
 * errors before they are logged or rethrown.
 *
 * @param file the YAML file to load
 * @return the parsed mapping, or an empty map for an empty document
 * @throws FileNotFoundException if the file does not exist
 * @throws YamlEngineException if the document cannot be parsed
 * @throws IOException on other read failures
 */
public static synchronized @Nonnull Map<String, Object> loadYamlFile(File file) throws Exception {
    try (FileInputStream inputStream = new FileInputStream(file)) {
        Map<String, Object> yamlResult = (Map<String, Object>) loader.loadFromInputStream(inputStream);
        // An empty document parses to null; normalize to an empty map.
        return yamlResult == null ? new HashMap<>() : yamlResult;
    } catch (FileNotFoundException e) {
        LOG.error("Failed to find YAML file", e);
        throw e;
    } catch (MarkedYamlEngineException e) {
        // A dedicated catch clause replaces the previous instanceof-dispatch
        // inside a multi-catch (whose else-branch was a no-op rethrow).
        // Marked exceptions embed document snippets; wrap them so sensitive
        // values never leak into logs or error messages.
        YamlEngineException exception = wrapExceptionToHiddenSensitiveData(e);
        LOG.error("Failed to parse YAML configuration", exception);
        throw exception;
    }
}
@Test
void testLoadYamlFile_DuplicateKeyException() {
    // A duplicate key must fail to parse, and the resulting error text must
    // not expose the (potentially sensitive) values of either occurrence.
    File confFile = new File(tmpDir, "invalid.yaml");
    try (final PrintWriter pw = new PrintWriter(confFile)) {
        pw.println("key: secret1");
        pw.println("key: secret2");
    } catch (FileNotFoundException e) {
        throw new RuntimeException(e);
    }
    assertThatThrownBy(() -> YamlParserUtils.loadYamlFile(confFile))
            .isInstanceOf(YamlEngineException.class)
            .satisfies(
                    e -> assertThat(ExceptionUtils.stringifyException(e))
                            .doesNotContain("secret1", "secret2"));
}
/**
 * Column privilege metadata is not supported by this implementation;
 * always returns {@code null}.
 */
@Override
public ResultSet getColumnPrivileges(final String catalog, final String schema, final String table, final String columnNamePattern) {
    return null;
}
@Test
void assertGetColumnPrivileges() {
    // Column privileges are unsupported, so the call must return null.
    assertNull(metaData.getColumnPrivileges("", "", "", ""));
}
/** Returns a new cursor over this hash slot array's assigned slots. */
@Override
public HashSlotCursor12byteKey cursor() {
    return new CursorIntKey2();
}
@Test
public void testCursor_advance_whenEmpty() {
    // A cursor over an empty hash slot array has no slot to advance to.
    HashSlotCursor12byteKey cursor = hsa.cursor();
    assertFalse(cursor.advance());
}
/**
 * Decodes a Bigtable cell's raw bytes into a Java value matching the given
 * Beam schema field type. Fixed-width numeric types require an exact byte
 * length; DATETIME and STRING are decoded from UTF-8 text.
 *
 * @throws IllegalArgumentException if the byte length or type is unsupported
 * @throws IllegalStateException for logical types, which are not supported
 */
Object getCellValue(Cell cell, Schema.FieldType type) {
    ByteString cellValue = cell.getValue();
    int valueSize = cellValue.size();
    switch (type.getTypeName()) {
        case BOOLEAN:
            checkArgument(valueSize == 1, message("Boolean", 1));
            // any non-zero byte is treated as true
            return cellValue.toByteArray()[0] != 0;
        case BYTE:
            checkArgument(valueSize == 1, message("Byte", 1));
            return cellValue.toByteArray()[0];
        case INT16:
            checkArgument(valueSize == 2, message("Int16", 2));
            return Shorts.fromByteArray(cellValue.toByteArray());
        case INT32:
            checkArgument(valueSize == 4, message("Int32", 4));
            return Ints.fromByteArray(cellValue.toByteArray());
        case INT64:
            checkArgument(valueSize == 8, message("Int64", 8));
            return Longs.fromByteArray(cellValue.toByteArray());
        case FLOAT:
            checkArgument(valueSize == 4, message("Float", 4));
            return Float.intBitsToFloat(Ints.fromByteArray(cellValue.toByteArray()));
        case DOUBLE:
            checkArgument(valueSize == 8, message("Double", 8));
            return Double.longBitsToDouble(Longs.fromByteArray(cellValue.toByteArray()));
        case DATETIME:
            // stored as text; parsed by Joda's DateTime
            return DateTime.parse(cellValue.toStringUtf8());
        case STRING:
            return cellValue.toStringUtf8();
        case BYTES:
            return cellValue.toByteArray();
        case LOGICAL_TYPE:
            String identifier = checkArgumentNotNull(type.getLogicalType()).getIdentifier();
            throw new IllegalStateException("Unsupported logical type: " + identifier);
        default:
            throw new IllegalArgumentException(
                    String.format("Unsupported cell value type '%s'.", type.getTypeName()));
    }
}
@Test
public void shouldParseDatetimeType() {
    // DATETIME cells are stored as text and parsed into a Joda DateTime.
    byte[] value = "2010-06-30T01:20".getBytes(UTF_8);
    assertEquals(new DateTime(2010, 6, 30, 1, 20), PARSER.getCellValue(cell(value), DATETIME));
}
/**
 * Returns the distinct elements common to both arrays, ordered by their
 * first appearance in {@code left}. Returns {@code null} if either input
 * is {@code null}.
 */
@Udf
public <T> List<T> intersect(
        @UdfParameter(description = "First array of values") final List<T> left,
        @UdfParameter(description = "Second array of values") final List<T> right) {
    if (left == null || right == null) {
        return null;
    }
    // LinkedHashSet de-duplicates while preserving left's encounter order.
    final Set<T> common = Sets.newLinkedHashSet(left);
    final Set<T> rightMembers = Sets.newHashSet(right);
    common.retainAll(rightMembers);
    return Lists.newArrayList(common);
}
@Test
public void shouldIntersectIntegerArrays() {
    // Duplicates are collapsed and the order follows the first input.
    final List<Integer> input1 = Arrays.asList(1, 2, 3, 2, 1);
    final List<Integer> input2 = Arrays.asList(1, 2, 2);
    final List<Integer> result = udf.intersect(input1, input2);
    assertThat(result, contains(1, 2));
}
/**
 * Creates a supplier for a persistent (RocksDB-backed) key-value store.
 *
 * @param name store name; must not be null
 * @throws NullPointerException if {@code name} is null
 */
public static KeyValueBytesStoreSupplier persistentKeyValueStore(final String name) {
    Objects.requireNonNull(name, "name cannot be null");
    // NOTE(review): the boolean presumably selects the non-timestamped store
    // variant — confirm against RocksDBKeyValueBytesStoreSupplier.
    return new RocksDBKeyValueBytesStoreSupplier(name, false);
}
@Test
public void shouldThrowIfPersistentKeyValueStoreStoreNameIsNull() {
    // A null store name must be rejected with a descriptive NPE.
    final Exception e = assertThrows(NullPointerException.class, () -> Stores.persistentKeyValueStore(null));
    assertEquals("name cannot be null", e.getMessage());
}
/** No-op scaling: returns the current value unchanged. */
@Override
public T scaleDown(T currentValue) {
    return currentValue;
}
@Test
void testScaleDownNoOp() {
    // The no-op strategy must return the current rate unchanged.
    NoOpScalingStrategy scalingStrategy = new NoOpScalingStrategy();
    Integer currentRate = 10;
    assertThat(scalingStrategy.scaleDown(currentRate)).isEqualTo(currentRate);
}
@Override public CRParseResult responseMessageForParseDirectory(String responseBody) { ErrorCollection errors = new ErrorCollection(); try { ResponseScratch responseMap = parseResponseForMigration(responseBody); ParseDirectoryResponseMessage parseDirectoryResponseMessage; if (responseMap.target_version == null) { errors.addError("Plugin response message", "missing 'target_version' field"); return new CRParseResult(errors); } else if (responseMap.target_version > CURRENT_CONTRACT_VERSION) { String message = String.format("'target_version' is %s but the GoCD Server supports %s", responseMap.target_version, CURRENT_CONTRACT_VERSION); errors.addError("Plugin response message", message); return new CRParseResult(errors); } else { int version = responseMap.target_version; while (version < CURRENT_CONTRACT_VERSION) { version++; responseBody = migrate(responseBody, version); } // after migration, json should match contract parseDirectoryResponseMessage = codec.getGson().fromJson(responseBody, ParseDirectoryResponseMessage.class); parseDirectoryResponseMessage.validateResponse(errors); errors.addErrors(parseDirectoryResponseMessage.getPluginErrors()); return new CRParseResult(parseDirectoryResponseMessage.getEnvironments(), parseDirectoryResponseMessage.getPipelines(), errors); } } catch (Exception ex) { StringBuilder builder = new StringBuilder(); builder.append("Unexpected error when handling plugin response").append('\n'); builder.append(ex); // "location" of error is runtime. This is what user will see in config repo errors list. errors.addError("runtime", builder.toString()); LOGGER.error(builder.toString(), ex); return new CRParseResult(errors); } }
@Test
public void shouldErrorWhenMissingTargetVersionInResponse() {
    // A response without 'target_version' must produce a parse error rather
    // than being migrated or deserialized.
    String json = """
            { "environments" : [], "pipelines" : [], "errors" : [] }""";
    CRParseResult result = handler.responseMessageForParseDirectory(json);
    assertThat(result.getErrors().getErrorsAsText()).contains("missing 'target_version' field");
}
/**
 * Looks up the implementation class registered for the given command name.
 *
 * @param commandName the command to resolve
 * @return the command's class, or {@code null} when no such command exists
 */
public Class<?> getCommandClass(String commandName) {
    // Guard clause: unknown commands resolve to null.
    if (!hasCommand(commandName)) {
        return null;
    }
    return frameworkModel
            .getExtensionLoader(BaseCommand.class)
            .getExtension(commandName)
            .getClass();
}
@Test
void testGetCommandClass() {
    // A known command resolves to its class; an unknown command yields null.
    assertThat(commandHelper.getCommandClass("greeting"), equalTo(GreetingCommand.class));
    assertNull(commandHelper.getCommandClass("not-exiting"));
}
/**
 * Loads all active rules of a quality profile, fetching pages of 500 from
 * the web service until the reported total has been covered.
 */
@Override
public List<LoadedActiveRule> load(String qualityProfileKey) {
    List<LoadedActiveRule> ruleList = new LinkedList<>();
    int page = 1;
    int pageSize = 500;
    long loaded = 0;
    while (true) {
        GetRequest getRequest = new GetRequest(getUrl(qualityProfileKey, page, pageSize));
        ListResponse response = loadFromStream(wsClient.call(getRequest).contentStream());
        List<LoadedActiveRule> pageRules = readPage(response);
        ruleList.addAll(pageRules);
        Paging paging = response.getPaging();
        // NOTE(review): progress advances by the paging's declared page size,
        // not by pageRules.size() — assumes the server reports it consistently.
        loaded += paging.getPageSize();
        if (paging.getTotal() <= loaded) {
            break;
        }
        page++;
    }
    return ruleList;
}
@Test
public void load_shouldRequestRulesAndParseResponse() {
    // Two pages are mocked; the loader must fetch both and then stop.
    int total = PAGE_SIZE_1 + PAGE_SIZE_2;
    WsTestUtil.mockStream(wsClient, urlOfPage(1), responseOfSize(1, PAGE_SIZE_1, total));
    WsTestUtil.mockStream(wsClient, urlOfPage(2), responseOfSize(2, PAGE_SIZE_2, total));
    Collection<LoadedActiveRule> activeRules = loader.load("c+-test_c+-values-17445");
    assertThat(activeRules).hasSize(total);
    // The example rule's parameter and severity must survive parsing.
    assertThat(activeRules)
            .filteredOn(r -> r.getRuleKey().equals(EXAMPLE_KEY))
            .extracting(LoadedActiveRule::getParams)
            .extracting(p -> p.get(FORMAT_KEY))
            .containsExactly(FORMAT_VALUE);
    assertThat(activeRules)
            .filteredOn(r -> r.getRuleKey().equals(EXAMPLE_KEY))
            .extracting(LoadedActiveRule::getSeverity)
            .containsExactly(SEVERITY_VALUE);
    // Exactly the two page requests were made, and nothing else.
    WsTestUtil.verifyCall(wsClient, urlOfPage(1));
    WsTestUtil.verifyCall(wsClient, urlOfPage(2));
    verifyNoMoreInteractions(wsClient);
}
/**
 * Returns the client-level sensor with the given name, creating and
 * recording it on first use. Creation is guarded by the clientLevelSensors
 * lock so each sensor name is registered exactly once.
 */
public final Sensor clientLevelSensor(final String sensorName, final RecordingLevel recordingLevel, final Sensor... parents) {
    // Building the full name is pure string work and needs no lock.
    final String fullSensorName = CLIENT_LEVEL_GROUP + SENSOR_NAME_DELIMITER + sensorName;
    synchronized (clientLevelSensors) {
        final Sensor existing = metrics.getSensor(fullSensorName);
        if (existing != null) {
            return existing;
        }
        // First use: remember the name for later cleanup, then create it.
        clientLevelSensors.push(fullSensorName);
        return metrics.sensor(fullSensorName, recordingLevel, parents);
    }
}
@Test
public void shouldGetExistingClientLevelSensor() {
    // When a sensor with the same full name already exists, it must be
    // returned instead of creating a new one.
    final Metrics metrics = mock(Metrics.class);
    final RecordingLevel recordingLevel = RecordingLevel.INFO;
    setupGetExistingSensorTest(metrics);
    final StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(metrics, CLIENT_ID, VERSION, time);
    final Sensor actualSensor = streamsMetrics.clientLevelSensor(SENSOR_NAME_1, recordingLevel);
    assertThat(actualSensor, is(equalToObject(sensor)));
}
/**
 * Metered variant of backwardFindSessions: delegates to the wrapped store
 * and wraps the result in an iterator that records fetch and duration
 * metrics and tracks open iterators.
 *
 * @throws NullPointerException if {@code key} is null
 */
@Override
public KeyValueIterator<Windowed<K>, V> backwardFindSessions(final K key, final long earliestSessionEndTime, final long latestSessionStartTime) {
    Objects.requireNonNull(key, "key cannot be null");
    final Bytes bytesKey = keyBytes(key);
    return new MeteredWindowedKeyValueIterator<>(
            wrapped().backwardFindSessions(
                    bytesKey,
                    earliestSessionEndTime,
                    latestSessionStartTime
            ),
            fetchSensor,
            iteratorDurationSensor,
            streamsMetrics,
            serdes::keyFrom,
            serdes::valueFrom,
            time,
            numOpenIterators,
            openIterators
    );
}
@Test
public void shouldThrowNullPointerOnBackwardFindSessionsIfKeyIsNull() {
    // A null key must be rejected before any store access.
    setUpWithoutContext();
    assertThrows(NullPointerException.class, () -> store.backwardFindSessions(null, 0, 0));
}
/**
 * Returns the canonical instance equal to {@code sample}, inserting the
 * sample itself when no canonical instance exists yet. Safe under races:
 * putIfAbsent decides the winner when two threads intern equal values.
 */
@Override
public E intern(E sample) {
    // Fast path: an equal value was already interned.
    E existing = map.get(sample);
    if (existing != null) {
        return existing;
    }
    // Slow path: attempt to install the sample; another thread may win.
    E raced = map.putIfAbsent(sample, sample);
    return (raced != null) ? raced : sample;
}
@Test(dataProvider = "interners")
public void intern_null(Interner<Int> interner) {
    // Interners are null-hostile: interning null must throw NPE.
    assertThrows(NullPointerException.class, () -> interner.intern(null));
}
/**
 * Asserts that {@code value >= minimumValue}, failing with a default
 * message that names both values.
 */
public static void isGreaterThanOrEqualTo(int value, int minimumValue) {
    final String defaultMessage =
            String.format("value [%s] is less than minimum value [%s]", value, minimumValue);
    isGreaterThanOrEqualTo(value, minimumValue, defaultMessage);
}
@Test
public void testIsGreaterThanOrEqualTo2() {
    // 2 >= 1 must pass without throwing.
    Precondition.isGreaterThanOrEqualTo(2, 1);
}
/**
 * Extracts (ledgerId, entryId, batchIdx) from a Pulsar MessageId via
 * reflection, or returns {@code null} when the id is not a batch message
 * id (so the caller falls back to the standard sequenceId).
 *
 * @throws RuntimeException on unexpected reflection failures
 */
@VisibleForTesting
static BatchMessageSequenceRef getMessageSequenceRefForBatchMessage(MessageId messageId) {
    long ledgerId;
    long entryId;
    int batchIdx;
    try {
        try {
            batchIdx = (int) getMethodOfMessageId(messageId, "getBatchIndex").invoke(messageId);
            if (batchIdx < 0) {
                // a negative index means the message is not part of a batch
                return null;
            }
        } catch (NoSuchMethodException noSuchMethodException) {
            // not a BatchMessageIdImpl, returning null to use the standard sequenceId
            return null;
        }
        ledgerId = (long) getMethodOfMessageId(messageId, "getLedgerId").invoke(messageId);
        entryId = (long) getMethodOfMessageId(messageId, "getEntryId").invoke(messageId);
    } catch (IllegalAccessException | NoSuchMethodException | InvocationTargetException ex) {
        log.error("Unexpected error while retrieving sequenceId, messageId class: {}, error: {}", messageId.getClass().getName(), ex.getMessage(), ex);
        throw new RuntimeException(ex);
    }
    return new BatchMessageSequenceRef(ledgerId, entryId, batchIdx);
}
@Test
public void testGetMessageSequenceRefForBatchMessage() throws Exception {
    long ledgerId = 123L;
    long entryId = Long.MAX_VALUE;
    int batchIdx = 16;
    // Non-batch message ids (plain and topic-wrapped) yield no sequence ref.
    KafkaConnectSink.BatchMessageSequenceRef ref = KafkaConnectSink
            .getMessageSequenceRefForBatchMessage(new MessageIdImpl(ledgerId, entryId, 0));
    assertNull(ref);
    ref = KafkaConnectSink.getMessageSequenceRefForBatchMessage(
            new TopicMessageIdImpl("topic-0", new MessageIdImpl(ledgerId, entryId, 0))
    );
    assertNull(ref);
    // Batch message ids (plain and topic-wrapped) expose ledger/entry/batch.
    ref = KafkaConnectSink.getMessageSequenceRefForBatchMessage(
            new BatchMessageIdImpl(ledgerId, entryId, 0, batchIdx));
    assertEquals(ref.getLedgerId(), ledgerId);
    assertEquals(ref.getEntryId(), entryId);
    assertEquals(ref.getBatchIdx(), batchIdx);
    ref = KafkaConnectSink.getMessageSequenceRefForBatchMessage(
            new TopicMessageIdImpl("topic-0", new BatchMessageIdImpl(ledgerId, entryId, 0, batchIdx))
    );
    assertEquals(ref.getLedgerId(), ledgerId);
    assertEquals(ref.getEntryId(), entryId);
    assertEquals(ref.getBatchIdx(), batchIdx);
}
/**
 * Generates encrypt SQL tokens for the encrypted columns that appear in an
 * UPDATE ... SET assignment clause.
 */
public Collection<SQLToken> generateSQLTokens(final TablesContext tablesContext, final SetAssignmentSegment setAssignmentSegment) {
    String tableName = tablesContext.getSimpleTables().iterator().next().getTableName().getIdentifier().getValue();
    EncryptTable encryptTable = encryptRule.getEncryptTable(tableName);
    Collection<SQLToken> result = new LinkedList<>();
    // Fall back to the database type's default schema when none is specified.
    String schemaName = tablesContext.getSchemaName().orElseGet(() -> new DatabaseTypeRegistry(databaseType).getDefaultSchemaName(databaseName));
    for (ColumnAssignmentSegment each : setAssignmentSegment.getAssignments()) {
        String columnName = each.getColumns().get(0).getIdentifier().getValue();
        // Only encrypted columns produce tokens.
        if (encryptTable.isEncryptColumn(columnName)) {
            generateSQLToken(schemaName, encryptTable.getTable(), encryptTable.getEncryptColumn(columnName), each).ifPresent(result::add);
        }
    }
    return result;
}
@Test
void assertGenerateSQLTokenWithUpdateLiteralExpressionSegment() {
    // A literal assignment on an encrypted column must yield exactly one token.
    when(assignmentSegment.getValue()).thenReturn(mock(LiteralExpressionSegment.class));
    assertThat(tokenGenerator.generateSQLTokens(tablesContext, setAssignmentSegment).size(), is(1));
}
/**
 * Resolves and stages the pipeline's classpath files: detects them from the
 * classpath when none are configured, packages them (directories become
 * jars under the temp location), and writes the result back into options.
 */
public static void prepareFilesForStaging(FileStagingOptions options) {
    List<String> filesToStage = options.getFilesToStage();
    if (filesToStage == null || filesToStage.isEmpty()) {
        filesToStage = detectClassPathResourcesToStage(ReflectHelpers.findClassLoader(), options);
        LOG.info(
                "PipelineOptions.filesToStage was not specified. " + "Defaulting to files from the classpath: will stage {} files. " + "Enable logging at DEBUG level to see which files will be staged.",
                filesToStage.size());
        LOG.debug("Classpath elements: {}", filesToStage);
    }
    // Jars built during staging go to tempLocation, or the JVM temp dir
    // when no tempLocation is configured.
    final String tmpJarLocation = MoreObjects.firstNonNull(options.getTempLocation(), System.getProperty("java.io.tmpdir"));
    final List<String> resourcesToStage = prepareFilesForStaging(filesToStage, tmpJarLocation);
    options.setFilesToStage(resourcesToStage);
}
@Test
public void testPackagingDirectoryResourceFromOptions() throws IOException {
    // A directory on the staging list must be packaged into a single jar
    // under the configured temp location.
    String directoryPath = tmpFolder.newFolder().getAbsolutePath();
    List<String> filesToStage = Arrays.asList(directoryPath);
    String temporaryLocation = tmpFolder.newFolder().getAbsolutePath();
    FileStagingOptions options = PipelineOptionsFactory.create().as(FileStagingOptions.class);
    options.setFilesToStage(filesToStage);
    options.setTempLocation(temporaryLocation);
    PipelineResources.prepareFilesForStaging(options);
    List<String> result = options.getFilesToStage();
    assertEquals(1, result.size());
    assertTrue(new File(result.get(0)).exists());
    assertTrue(result.get(0).matches(".*\\.jar"));
}
public static double similar(String strA, String strB) { String newStrA, newStrB; if (strA.length() < strB.length()) { newStrA = removeSign(strB); newStrB = removeSign(strA); } else { newStrA = removeSign(strA); newStrB = removeSign(strB); } // 用较大的字符串长度作为分母,相似子串作为分子计算出字串相似度 int temp = Math.max(newStrA.length(), newStrB.length()); if(0 == temp) { // 两个都是空串相似度为1,被认为是相同的串 return 1; } final int commonLength = longestCommonSubstringLength(newStrA, newStrB); return NumberUtil.div(commonLength, temp); }
@Test
public void similarDegreeTest2() {
    // Punctuation and symbols are stripped before comparison, so the trailing
    // symbol run in b does not count against the similarity score.
    String a = "我是一个文本,独一无二的文本";
    String b = "一个文本,独一无二的文本,#,>>?#$%^%$&^&^%";
    double degree = TextSimilarity.similar(a, b);
    assertEquals(0.8461538462D, degree, 0.01);
    String similarPercent = TextSimilarity.similar(a, b, 2);
    assertEquals("84.62%", similarPercent);
}
/**
 * Dispatches a named util-function call to its implementation, validating
 * the argument count for each name; any unsupported name/arity combination
 * falls through to an UnsupportedOperationException.
 */
@Override
public SelType call(String methodName, SelType[] args) {
    switch (methodName) {
        case "dateIntToTs":
            if (args.length == 1) {
                return dateIntToTs(args[0]);
            }
            break;
        case "tsToDateInt":
            if (args.length == 1) {
                return tsToDateInt(args[0]);
            }
            break;
        case "incrementDateInt":
            if (args.length == 2) {
                return incrementDateInt(args[0], args[1]);
            }
            break;
        case "timeoutForDateTimeDeadline":
            if (args.length == 2) {
                return timeoutForDateTimeDeadline(args[0], args[1]);
            }
            break;
        case "timeoutForDateIntDeadline":
            if (args.length == 2) {
                return timeoutForDateIntDeadline(args[0], args[1]);
            }
            break;
        case "dateIntsBetween":
            if (args.length == 3) {
                return dateIntsBetween(args[0], args[1], args[2]);
            }
            break;
        case "intsBetween":
            if (args.length == 3) {
                return intsBetween(args[0], args[1], args[2]);
            }
            break;
        case "dateIntHourToTs":
            if (args.length == 5) {
                return dateIntHourToTs(args);
            }
            break;
        default:
            break;
    }
    throw new UnsupportedOperationException(
            type() + " DO NOT support calling method: " + methodName + " with args: " + Arrays.toString(args));
}
@Test(expected = IllegalFieldValueException.class)
public void testInvalidCallDateIntsBetween() {
    // 20190229 is not a real date (2019 is not a leap year), so parsing the
    // dateInt argument must fail.
    SelUtilFunc.INSTANCE.call(
            "dateIntsBetween",
            new SelType[] {SelLong.of(20190229), SelLong.of(20190303), SelLong.of(1)});
}
/**
 * Registers the given metric under the given name. A MetricRegistry
 * argument is mirrored as a child registry (its metrics are re-registered
 * under a name prefix and kept in sync via a listener); a MetricSet is
 * expanded recursively; any other metric is stored directly.
 *
 * @return the metric that was passed in
 * @throws IllegalArgumentException if a plain metric already exists under
 *         the name
 * @throws NullPointerException if the metric is null
 */
@SuppressWarnings("unchecked")
public <T extends Metric> T register(String name, T metric) throws IllegalArgumentException {
    if (metric == null) {
        throw new NullPointerException("metric == null");
    }
    if (metric instanceof MetricRegistry) {
        final MetricRegistry childRegistry = (MetricRegistry) metric;
        final String childName = name;
        // Mirror the child registry's metrics under "childName.<metric name>",
        // adding and removing entries as the child changes.
        childRegistry.addListener(new MetricRegistryListener() {
            @Override
            public void onGaugeAdded(String name, Gauge<?> gauge) {
                register(name(childName, name), gauge);
            }

            @Override
            public void onGaugeRemoved(String name) {
                remove(name(childName, name));
            }

            @Override
            public void onCounterAdded(String name, Counter counter) {
                register(name(childName, name), counter);
            }

            @Override
            public void onCounterRemoved(String name) {
                remove(name(childName, name));
            }

            @Override
            public void onHistogramAdded(String name, Histogram histogram) {
                register(name(childName, name), histogram);
            }

            @Override
            public void onHistogramRemoved(String name) {
                remove(name(childName, name));
            }

            @Override
            public void onMeterAdded(String name, Meter meter) {
                register(name(childName, name), meter);
            }

            @Override
            public void onMeterRemoved(String name) {
                remove(name(childName, name));
            }

            @Override
            public void onTimerAdded(String name, Timer timer) {
                register(name(childName, name), timer);
            }

            @Override
            public void onTimerRemoved(String name) {
                remove(name(childName, name));
            }
        });
    } else if (metric instanceof MetricSet) {
        registerAll(name, (MetricSet) metric);
    } else {
        // putIfAbsent makes concurrent registration race-free: only the
        // winner fires the added notification.
        final Metric existing = metrics.putIfAbsent(name, metric);
        if (existing == null) {
            onMetricAdded(name, metric);
        } else {
            throw new IllegalArgumentException("A metric named " + name + " already exists");
        }
    }
    return metric;
}
@Test
public void registeringAGaugeTriggersANotification() {
    // register must return the metric and notify listeners of the addition.
    assertThat(registry.register("thing", gauge))
            .isEqualTo(gauge);
    verify(listener).onGaugeAdded("thing", gauge);
}