focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
// Delegates statement interpretation to the shared helper, binding it to this interpreter's session.
@Override public InterpreterResult interpret(String st, InterpreterContext context) { return helper.interpret(session, st, context); }
// Verifies a plain SELECT over the artists fixture table succeeds and returns the exact expected TSV payload.
@Test void should_interpret_simple_select() { // Given // When final InterpreterResult actual = interpreter.interpret("SELECT * FROM " + ARTISTS_TABLE + " LIMIT 10;", intrContext); // Then assertNotNull(actual); assertEquals(Code.SUCCESS, actual.code()); assertEquals("name\tborn\tcountry\tdied\tgender\t" + "styles\ttype\n" + "Bogdan Raczynski\t1977-01-01\tPoland\tnull\tMale\t" + "[Dance, Electro]\tPerson\n" + "Krishna Das\t1947-05-31\tUSA\tnull\tMale\t[Unknown]\tPerson\n" + "Sheryl Crow\t1962-02-11\tUSA\tnull\tFemale\t" + "[Classic, Rock, Country, Blues, Pop, Folk]\tPerson\n" + "Doof\t1968-08-31\tUnited Kingdom\tnull\tnull\t[Unknown]\tPerson\n" + "House of Large Sizes\t1986-01-01\tUSA\t2003\tnull\t[Unknown]\tGroup\n" + "Fanfarlo\t2006-01-01\tUnited Kingdom\tnull\tnull\t" + "[Rock, Indie, Pop, Classic]\tGroup\n" + "Jeff Beck\t1944-06-24\tUnited Kingdom\tnull\tMale\t" + "[Rock, Pop, Classic]\tPerson\n" + "Los Paranoias\tnull\tUnknown\tnull\tnull\t[Unknown]\tnull\n" + "…And You Will Know Us by the Trail of Dead\t1994-01-01\tUSA\tnull\tnull\t" + "[Rock, Pop, Classic]\tGroup\n", actual.message().get(0).getData()); }
// Blocking variant: increments the number at the given JSON path by delta and returns the new value.
@Override public <T extends Number> T incrementAndGet(String path, T delta) { return get(incrementAndGetAsync(path, delta)); }
// Stores a nested object in a Redis JSON bucket and verifies incrementAndGet works for both int and BigDecimal paths.
@Test public void testIncrementAndGet() { RJsonBucket<TestType> al = redisson.getJsonBucket("test", new JacksonCodec<>(TestType.class)); TestType t = new TestType(); NestedType nt = new NestedType(); nt.setValue2(new BigDecimal(1)); nt.setValue(1); nt.setValues(Arrays.asList("t1", "t2", "t4", "t5", "t6")); t.setType(nt); t.setName("name1"); al.set(t); Integer s = al.incrementAndGet("type.value", 1); assertThat(s).isEqualTo(2); BigDecimal s2 = al.incrementAndGet("type.value2", new BigDecimal(1)); assertThat(s2).isEqualTo(new BigDecimal(2)); }
/**
 * Resolves a parameter type name to its {@link Class}.
 * Primitive names (e.g. "int") are resolved through the lookup table; all other
 * names are loaded reflectively.
 *
 * @throws ClassNotFoundException if the name is neither a known primitive nor a loadable class
 */
public static Class<?> getParamClass(final String className) throws ClassNotFoundException {
    return PRIMITIVE_TYPE.containsKey(className)
            ? PRIMITIVE_TYPE.get(className).getClazz()
            : Class.forName(className);
}
// Checks that the primitive name "int" resolves to int.class (not the boxed Integer.class).
@Test public void testStatic() throws ClassNotFoundException { Assertions.assertEquals(PrxInfoUtil.getParamClass("int"), int.class); }
// Fetches the topic's config map; the boolean flag's meaning is defined by the private topicConfig helper.
@Override public Map<String, String> getTopicConfig(final String topicName) { return topicConfig(topicName, true); }
// Ensures a non-retryable describeConfigs failure surfaces as KafkaResponseGetFailedException.
@Test public void shouldThrowOnNoneRetryableGetTopicConfigError() { // Given: when(adminClient.describeConfigs(any())) .thenAnswer(describeConfigsResult(new RuntimeException())); // When: assertThrows( KafkaResponseGetFailedException.class, () -> kafkaTopicClient.getTopicConfig("fred") ); }
/**
 * Polls the dead-letter queue for the next entry, waiting up to {@code timeout}.
 *
 * @return the deserialized entry, or {@code null} if none arrived within the timeout
 */
public DLQEntry pollEntry(long timeout) throws IOException, InterruptedException {
    final byte[] serialized = pollEntryBytes(timeout);
    return serialized == null ? null : DLQEntry.deserialize(serialized);
}
// Writes 300 random-size DLQ entries on a background thread while concurrently reading them back in order;
// fails if the reader misses an entry, order breaks, or the writer thread does not terminate in time.
@Test public void testConcurrentWriteReadRandomEventSize() throws Exception { final ExecutorService exec = Executors.newSingleThreadExecutor(); try { final int maxEventSize = BLOCK_SIZE * 2; final int eventCount = 300; exec.submit(() -> { final Event event = new Event(); long startTime = System.currentTimeMillis(); try (DeadLetterQueueWriter writeManager = DeadLetterQueueWriter .newBuilder(dir, 10 * 1024 * 1024, defaultDlqSize, Duration.ofSeconds(10)) .build()) { for (int i = 0; i < eventCount; i++) { event.setField( "message", generateMessageContent((int) (Math.random() * (maxEventSize))) ); writeManager.writeEntry( new DLQEntry( event, "", "", String.valueOf(i), new Timestamp(startTime++) ) ); } } catch (final IOException ex) { throw new IllegalStateException(ex); } }); int i = 0; try (DeadLetterQueueReader readManager = new DeadLetterQueueReader(dir)) { while(i < eventCount) { DLQEntry entry = readManager.pollEntry(10_000L); if (entry != null){ assertThat(entry.getReason(), is(String.valueOf(i))); i++; } } } catch (Exception e){ throw new IllegalArgumentException("Failed to process entry number" + i, e); } } finally { exec.shutdown(); if (!exec.awaitTermination(2L, TimeUnit.MINUTES)) { Assert.fail("Failed to shut down record writer"); } } }
// Returns the custom topic settings configured under the config-storage prefix.
public Map<String, Object> configStorageTopicSettings() { return topicSettings(CONFIG_STORAGE_PREFIX); }
// Verifies that reserved settings (cleanup.policy, partitions) are stripped from the config-storage topic settings.
@Test public void shouldRemoveCompactionFromConfigTopicSettings() { Map<String, String> expectedTopicSettings = new HashMap<>(); expectedTopicSettings.put("foo", "foo value"); expectedTopicSettings.put("bar", "bar value"); expectedTopicSettings.put("baz.bim", "100"); Map<String, String> topicSettings = new HashMap<>(expectedTopicSettings); topicSettings.put("cleanup.policy", "something-else"); topicSettings.put("partitions", "3"); Map<String, String> settings = configs(); topicSettings.forEach((k, v) -> settings.put(DistributedConfig.CONFIG_STORAGE_PREFIX + k, v)); DistributedConfig config = new DistributedConfig(settings); Map<String, Object> actual = config.configStorageTopicSettings(); assertEquals(expectedTopicSettings, actual); assertNotEquals(topicSettings, actual); }
// Scans the pattern character-by-character into LITERAL / start / default-value tokens via a small state machine.
// NOTE(review): the DEFAULT_VAL_STATE case falls through into the empty default label (missing break) —
// harmless today, but fragile if a new case is ever appended after it.
// The trailing switch flushes leftover state at end-of-string: a trailing ':' (LOGBACK-1140) or '$' (LOGBACK-1149)
// is emitted as a literal rather than lost.
List<Token> tokenize() throws ScanException { List<Token> tokenList = new ArrayList<Token>(); StringBuilder buf = new StringBuilder(); while (pointer < patternLength) { char c = pattern.charAt(pointer); pointer++; switch (state) { case LITERAL_STATE: handleLiteralState(c, tokenList, buf); break; case START_STATE: handleStartState(c, tokenList, buf); break; case DEFAULT_VAL_STATE: handleDefaultValueState(c, tokenList, buf); default: } } // EOS switch (state) { case LITERAL_STATE: addLiteralToken(tokenList, buf); break; case DEFAULT_VAL_STATE: // trailing colon. see also LOGBACK-1140 buf.append(CoreConstants.COLON_CHAR); addLiteralToken(tokenList, buf); break; case START_STATE: // trailing $. see also LOGBACK-1149 buf.append(CoreConstants.DOLLAR); addLiteralToken(tokenList, buf); break; } return tokenList; }
// A plain string with no '$' or ':' should tokenize to a single LITERAL token.
@Test public void literalOnly() throws ScanException { String input = "abc"; Tokenizer tokenizer = new Tokenizer(input); List<Token> tokenList = tokenizer.tokenize(); witnessList.add(new Token(Token.Type.LITERAL, input)); assertEquals(witnessList, tokenList); }
// Infers a Beam Schema from a JavaBean class by delegating to the static schema-inference utility.
public static Schema schemaFromJavaBeanClass( TypeDescriptor<?> typeDescriptor, FieldValueTypeSupplier fieldValueTypeSupplier) { return StaticSchemaInference.schemaFromClass(typeDescriptor, fieldValueTypeSupplier); }
// Checks that SimpleBean's getters yield a schema equivalent to the expected SIMPLE_BEAN_SCHEMA.
@Test public void testSimpleBean() { Schema schema = JavaBeanUtils.schemaFromJavaBeanClass( new TypeDescriptor<SimpleBean>() {}, GetterTypeSupplier.INSTANCE); SchemaTestUtils.assertSchemaEquivalent(SIMPLE_BEAN_SCHEMA, schema); }
// Accepts an edge only if the wrapped filter accepts it AND (when a point hint is set) the edge has a
// non-empty name, its bounding box intersects the search circle, and the normalized name is
// Jaro-Winkler-similar to the hint. An empty hint accepts everything the inner filter accepts.
@Override public final boolean accept(EdgeIteratorState iter) { if (!edgeFilter.accept(iter)) { return false; } if (pointHint.isEmpty()) { return true; } String name = iter.getName(); if (name == null || name.isEmpty()) { return false; } BBox bbox = createBBox(iter); if (!pointCircle.intersects(bbox)) { return false; } name = removeRelation(name); String edgeName = prepareName(name); return isJaroWinklerSimilar(pointHint, edgeName); }
// "St" and "Street" should match each other in both directions of the name-similarity filter.
@Test public void testAcceptStForStreet() { EdgeIteratorState edge = createTestEdgeIterator("Augustine Street"); assertTrue(createNameSimilarityEdgeFilter("Augustine St").accept(edge)); assertTrue(createNameSimilarityEdgeFilter("Augustine Street").accept(edge)); edge = createTestEdgeIterator("Augustine St"); assertTrue(createNameSimilarityEdgeFilter("Augustine St").accept(edge)); assertTrue(createNameSimilarityEdgeFilter("Augustine Street").accept(edge)); }
// Validates the source-filename field: substitutes environment variables, rejects a blank name, and
// resolves and stores the field's index in the input row metadata (rejecting an unknown field).
@VisibleForTesting void checkSourceFileField( String sourceFilenameFieldName, SFTPPutData data ) throws KettleStepException { // Sourcefilename field sourceFilenameFieldName = environmentSubstitute( sourceFilenameFieldName ); if ( Utils.isEmpty( sourceFilenameFieldName ) ) { // source filename field is missing throw new KettleStepException( BaseMessages.getString( PKG, "SFTPPut.Error.SourceFileNameFieldMissing" ) ); } data.indexOfSourceFileFieldName = getInputRowMeta().indexOfValue( sourceFilenameFieldName ); if ( data.indexOfSourceFileFieldName == -1 ) { // source filename field is missing throw new KettleStepException( BaseMessages.getString( PKG, "SFTPPut.Error.CanNotFindField", sourceFilenameFieldName ) ); } }
// A blank source-filename field name must raise KettleStepException.
@Test( expected = KettleStepException.class ) public void checkSourceFileField_NameIsBlank() throws Exception { SFTPPutData data = new SFTPPutData(); step.checkSourceFileField( "", data ); }
// Number of configuration properties currently set.
public int size() { return getProps().size(); }
// Two explicit sets on an empty (no-defaults) Configuration yield size 2.
@Test public void testSize() { Configuration conf = new Configuration(false); conf.set("a", "A"); conf.set("b", "B"); assertEquals(2, conf.size()); }
/**
 * FEEL string() function: formats any value as its FEEL string representation.
 * A null input yields a (successful) null result rather than an error.
 */
public FEELFnResult<String> invoke(@ParameterName("from") Object val) {
    final String formatted = (val == null) ? null : TypeUtil.formatValue(val, false);
    return FEELFnResult.ofResult(formatted);
}
// Minute-based Durations must format as canonical ISO-8601 strings, including negative values.
@Test void invokeDurationMinutes() { FunctionTestUtil.assertResult(stringFunction.invoke(Duration.ofMinutes(9)), "PT9M"); FunctionTestUtil.assertResult(stringFunction.invoke(Duration.ofMinutes(200)), "PT3H20M"); FunctionTestUtil.assertResult(stringFunction.invoke(Duration.ofMinutes(5000)), "P3DT11H20M"); FunctionTestUtil.assertResult(stringFunction.invoke(Duration.ofMinutes(-5000)), "-P3DT11H20M"); }
// On channel setup, rejects connections from foreign IPs unless: foreign access is enabled, anonymous
// (PUBLIC) access is allowed, the peer is a loopback address, or the IP matches the whitelist.
// Rejected peers get a short message and the connection is closed after the flush completes.
@Override public void handlerAdded(ChannelHandlerContext ctx) throws Exception { if (acceptForeignIp) { return; } // the anonymous access is enabled by default, permission level is PUBLIC // if allow anonymous access, return if (qosConfiguration.isAllowAnonymousAccess()) { return; } final InetAddress inetAddress = ((InetSocketAddress) ctx.channel().remoteAddress()).getAddress(); // loopback address, return if (inetAddress.isLoopbackAddress()) { return; } // the ip is in the whitelist, return if (checkForeignIpInWhiteList(inetAddress)) { return; } ByteBuf cb = Unpooled.wrappedBuffer((QosConstants.BR_STR + "Foreign Ip Not Permitted, Consider Config It In Whitelist." + QosConstants.BR_STR) .getBytes()); ctx.writeAndFlush(cb).addListener(ChannelFutureListener.CLOSE); }
// An IP inside a whitelisted CIDR range (192.168.1.192/26) must not receive the rejection message.
@Test void shouldNotShowIpNotPermittedMsg_GivenAcceptForeignIpFalseAndMatchWhiteListRange() throws Exception { ChannelHandlerContext context = mock(ChannelHandlerContext.class); Channel channel = mock(Channel.class); when(context.channel()).thenReturn(channel); InetAddress addr = mock(InetAddress.class); when(addr.isLoopbackAddress()).thenReturn(false); when(addr.getHostAddress()).thenReturn("192.168.1.199"); InetSocketAddress address = new InetSocketAddress(addr, 12345); when(channel.remoteAddress()).thenReturn(address); ForeignHostPermitHandler handler = new ForeignHostPermitHandler(QosConfiguration.builder() .acceptForeignIp(false) .acceptForeignIpWhitelist("175.23.44.1, 192.168.1.192/26") .build()); handler.handlerAdded(context); verify(context, never()).writeAndFlush(any()); }
@Override public GenericRow transform(GenericRow record) { for (Map.Entry<String, FunctionEvaluator> entry : _expressionEvaluators.entrySet()) { String column = entry.getKey(); FunctionEvaluator transformFunctionEvaluator = entry.getValue(); Object existingValue = record.getValue(column); if (existingValue == null) { try { // Skip transformation if column value already exists // NOTE: column value might already exist for OFFLINE data, // For backward compatibility, The only exception here is that we will override nested field like array, // collection or map since they were not included in the record transformation before. record.putValue(column, transformFunctionEvaluator.evaluate(record)); } catch (Exception e) { if (!_continueOnError) { throw new RuntimeException("Caught exception while evaluation transform function for column: " + column, e); } else { LOGGER.debug("Caught exception while evaluation transform function for column: {}", column, e); record.putValue(GenericRow.INCOMPLETE_RECORD_KEY, true); } } } else if (existingValue.getClass().isArray() || existingValue instanceof Collections || existingValue instanceof Map) { try { Object transformedValue = transformFunctionEvaluator.evaluate(record); // For backward compatibility, The only exception here is that we will override nested field like array, // collection or map since they were not included in the record transformation before. if (!isTypeCompatible(existingValue, transformedValue)) { record.putValue(column, transformedValue); } } catch (Exception e) { LOGGER.debug("Caught exception while evaluation transform function for column: {}", column, e); } } } return record; }
// Transform configs are declared out of dependency order (d depends on x, b on d, a on b, c on a+d);
// verifies the transformer topologically orders evaluation so every derived column is computed correctly.
@Test public void testTransformFunctionSortOrder() { Schema schema = new Schema.SchemaBuilder().addSingleValueDimension("a", FieldSpec.DataType.STRING) .addSingleValueDimension("b", FieldSpec.DataType.STRING).addSingleValueDimension("c", FieldSpec.DataType.STRING) .addSingleValueDimension("d", FieldSpec.DataType.STRING).addSingleValueDimension("e", FieldSpec.DataType.STRING) .addSingleValueDimension("f", FieldSpec.DataType.STRING).build(); List<TransformConfig> transformConfigs = Arrays.asList( new TransformConfig("d", "plus(x, 10)"), new TransformConfig("b", "plus(d, 10)"), new TransformConfig("a", "plus(b, 10)"), new TransformConfig("c", "plus(a, d)"), new TransformConfig("f", "plus(e, 10)")); IngestionConfig ingestionConfig = new IngestionConfig(); ingestionConfig.setTransformConfigs(transformConfigs); TableConfig tableConfig = new TableConfigBuilder(TableType.OFFLINE).setTableName("testDerivedFunctions") .setIngestionConfig(ingestionConfig).build(); ExpressionTransformer expressionTransformer = new ExpressionTransformer(tableConfig, schema); GenericRow genericRow = new GenericRow(); genericRow.putValue("x", 100); genericRow.putValue("e", 200); GenericRow transform = expressionTransformer.transform(genericRow); Assert.assertEquals(transform.getValue("a"), 130.0); Assert.assertEquals(transform.getValue("b"), 120.0); Assert.assertEquals(transform.getValue("c"), 240.0); Assert.assertEquals(transform.getValue("d"), 110.0); Assert.assertEquals(transform.getValue("e"), 200); Assert.assertEquals(transform.getValue("f"), 210.0); }
// Extracts the key and value component coder ids from a KV coder proto; rejects non-KV coders up front.
public static KvCoderComponents getKvCoderComponents(Coder coder) { checkArgument( KV_CODER_URN.equals(coder.getSpec().getUrn()), "Provided coder %s is not of type %s", coder.getSpec().getUrn(), KV_CODER_URN); return new AutoValue_ModelCoders_KvCoderComponents( coder.getComponentCoderIds(0), coder.getComponentCoderIds(1)); }
// A coder with a non-KV URN (length-prefix) must be rejected with IllegalArgumentException.
@Test public void kvCoderComponentsWrongUrn() { thrown.expect(IllegalArgumentException.class); ModelCoders.getKvCoderComponents( Coder.newBuilder() .setSpec(FunctionSpec.newBuilder().setUrn(ModelCoders.LENGTH_PREFIX_CODER_URN)) .build()); }
// Sends this request synchronously through the underlying web3j service, decoding into responseType.
public T send() throws IOException { return web3jService.send(this, responseType); }
// Verifies adminDataDir() serializes to the exact admin_datadir JSON-RPC request.
@Test public void testAdminDataDir() throws Exception { web3j.adminDataDir().send(); verifyResult("{\"jsonrpc\":\"2.0\",\"method\":\"admin_datadir\",\"params\":[],\"id\":1}"); }
// Builds a FEEL 1.1 ANTLR parser over the given source: wires lexer/token stream, registers additional
// functions into the built-in scope, installs the FEEL error handler/listener (replacing the console
// listener), pre-defines the input variables/types, and optionally attaches a type registry.
public static FEEL_1_1Parser parse(FEELEventListenersManager eventsManager, String source, Map<String, Type> inputVariableTypes, Map<String, Object> inputVariables, Collection<FEELFunction> additionalFunctions, List<FEELProfile> profiles, FEELTypeRegistry typeRegistry) { CharStream input = CharStreams.fromString(source); FEEL_1_1Lexer lexer = new FEEL_1_1Lexer( input ); CommonTokenStream tokens = new CommonTokenStream( lexer ); FEEL_1_1Parser parser = new FEEL_1_1Parser( tokens ); ParserHelper parserHelper = new ParserHelper(eventsManager); additionalFunctions.forEach(f -> parserHelper.getSymbolTable().getBuiltInScope().define(f.getSymbol())); parser.setHelper(parserHelper); parser.setErrorHandler( new FEELErrorHandler() ); parser.removeErrorListeners(); // removes the error listener that prints to the console parser.addErrorListener( new FEELParserErrorListener( eventsManager ) ); // pre-loads the parser with symbols defineVariables( inputVariableTypes, inputVariables, parser ); if (typeRegistry != null) { parserHelper.setTypeRegistry(typeRegistry); } return parser; }
// Parses a qualified function invocation with two multi-word named parameters and verifies the AST
// shape: QualifiedNameNode name, ListNode params, and each NamedParameterNode's name/expression text.
@Test void functionInvocationNamedParams() { String inputExpression = "my.test.Function( named parameter 1 : x+10, named parameter 2 : \"foo\" )"; BaseNode functionBase = parse( inputExpression ); assertThat( functionBase).isInstanceOf(FunctionInvocationNode.class); assertThat( functionBase.getText()).isEqualTo(inputExpression); FunctionInvocationNode function = (FunctionInvocationNode) functionBase; assertThat( function.getName()).isInstanceOf(QualifiedNameNode.class); assertThat( function.getName().getText()).isEqualTo("my.test.Function"); assertThat( function.getParams()).isInstanceOf(ListNode.class); assertThat( function.getParams().getElements()).hasSize(2); assertThat( function.getParams().getElements().get( 0 )).isInstanceOf(NamedParameterNode.class); assertThat( function.getParams().getElements().get( 1 )).isInstanceOf(NamedParameterNode.class); NamedParameterNode named = (NamedParameterNode) function.getParams().getElements().get( 0 ); assertThat( named.getText()).isEqualTo( "named parameter 1 : x+10"); assertThat( named.getName().getText()).isEqualTo( "named parameter 1"); assertThat( named.getExpression()).isInstanceOf(InfixOpNode.class); assertThat( named.getExpression().getText()).isEqualTo( "x+10"); named = (NamedParameterNode) function.getParams().getElements().get( 1 ); assertThat( named.getText()).isEqualTo( "named parameter 2 : \"foo\""); assertThat( named.getName().getText()).isEqualTo( "named parameter 2"); assertThat( named.getExpression()).isInstanceOf(StringNode.class); assertThat( named.getExpression().getText()).isEqualTo( "\"foo\""); }
/**
 * Joins the string forms of the list elements with the given delimiter.
 * Each element is rendered via the local {@code toString(Object, String)} helper,
 * so nested/null elements follow that helper's rules. An empty list yields "".
 */
public static String join(List<?> list, String delim) {
    final StringBuilder joined = new StringBuilder();
    boolean first = true;
    for (Object element : list) {
        if (!first) {
            joined.append(delim);
        }
        joined.append(toString(element, delim));
        first = false;
    }
    return joined.toString();
}
// A single-element list containing null joins to the empty string.
@Test public void testOneNullElementJoin() throws IOException { assertEquals("", KeyNode.join(Arrays.asList(new Object[] { null }), ",")); }
// Downloads every URI into baseDir (creating missing parent dirs first). A single artifact is returned
// in the Result's first slot; multiple artifacts in the list slot. Requires at least one URI.
public Result fetchArtifacts(String[] uris) { checkArgument(uris != null && uris.length > 0, "At least one URI is required."); ArtifactUtils.createMissingParents(baseDir); List<File> artifacts = Arrays.stream(uris) .map(FunctionUtils.uncheckedFunction(this::fetchArtifact)) .collect(Collectors.toList()); if (artifacts.size() > 1) { return new Result(null, artifacts); } if (artifacts.size() == 1) { return new Result(artifacts.get(0), null); } // Should not happen. throw new IllegalStateException("Corrupt artifact fetching state."); }
// Fetching over raw HTTP when the HTTP endpoint is disabled must fail with a clear IllegalArgumentException.
@Test void testHttpDisabledError() { ArtifactFetchManager fetchMgr = new ArtifactFetchManager(configuration); assertThatThrownBy( () -> fetchMgr.fetchArtifacts( "http://127.0.0.1:1234/download/notexists.jar", null)) .isInstanceOf(IllegalArgumentException.class) .hasMessageContaining("raw HTTP endpoints are disabled"); }
// Issues CONFIG RESETSTAT against the given cluster node and blocks until it completes.
@Override public void resetConfigStats(RedisClusterNode node) { RedisClient entry = getEntry(node); RFuture<Void> f = executorService.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.CONFIG_RESETSTAT); syncFuture(f); }
// Smoke test: resetting config stats on the first master must complete without throwing.
@Test public void testResetConfigStats() { RedisClusterNode master = getFirstMaster(); connection.resetConfigStats(master); }
/**
 * Three-way comparison of two unscaled decimals.
 * If the signs differ, the strictly-negative operand is smaller; otherwise the
 * magnitudes are compared and the ordering is flipped when both are negative.
 */
public static int compare(Slice left, Slice right) {
    final boolean leftNegative = isStrictlyNegative(left);
    final boolean rightNegative = isStrictlyNegative(right);
    if (leftNegative != rightNegative) {
        return leftNegative ? -1 : 1;
    }
    final int magnitudeComparison = compareAbsolute(left, right);
    return leftNegative ? -magnitudeComparison : magnitudeComparison;
}
// Exercises compare() across zero vs negative-zero, sign boundaries, and adjacent magnitudes on both signs.
@Test public void testCompare() { assertCompare(unscaledDecimal(0), unscaledDecimal(0), 0); assertCompare(negate(unscaledDecimal(0)), unscaledDecimal(0), 0); assertCompare(unscaledDecimal(0), negate(unscaledDecimal(0)), 0); assertCompare(unscaledDecimal(0), unscaledDecimal(10), -1); assertCompare(unscaledDecimal(10), unscaledDecimal(0), 1); assertCompare(negate(unscaledDecimal(0)), unscaledDecimal(10), -1); assertCompare(unscaledDecimal(10), negate(unscaledDecimal(0)), 1); assertCompare(negate(unscaledDecimal(0)), MAX_DECIMAL, -1); assertCompare(MAX_DECIMAL, negate(unscaledDecimal(0)), 1); assertCompare(unscaledDecimal(-10), unscaledDecimal(-11), 1); assertCompare(unscaledDecimal(-11), unscaledDecimal(-11), 0); assertCompare(unscaledDecimal(-12), unscaledDecimal(-11), -1); assertCompare(unscaledDecimal(10), unscaledDecimal(11), -1); assertCompare(unscaledDecimal(11), unscaledDecimal(11), 0); assertCompare(unscaledDecimal(12), unscaledDecimal(11), 1); }
// Parses the --target option into target roles: rejects a blank value, splits a comma-separated list,
// and defaults to all roles (master/job_master/workers/job_workers) when the option is absent.
public static List<TargetInfo> parseOptTarget(CommandLine cmd, AlluxioConfiguration conf) throws IOException { String[] targets; if (cmd.hasOption(TARGET_OPTION_NAME)) { String argTarget = cmd.getOptionValue(TARGET_OPTION_NAME); if (StringUtils.isBlank(argTarget)) { throw new IOException("Option " + TARGET_OPTION_NAME + " can not be blank."); } else if (argTarget.contains(TARGET_SEPARATOR)) { targets = argTarget.split(TARGET_SEPARATOR); } else { targets = new String[]{argTarget}; } } else { // By default we set on all targets (master/workers/job_master/job_workers) targets = new String[]{ROLE_MASTER, ROLE_JOB_MASTER, ROLE_WORKERS, ROLE_JOB_WORKERS}; } return getTargetInfos(targets, conf); }
// With embedded-journal HA masters configured, --target master must resolve to the primary master's host.
@Test public void parseEmbeddedHAMasterTarget() throws Exception { String masterAddresses = "masters-1:19200,masters-2:19200"; mConf.set(PropertyKey.MASTER_EMBEDDED_JOURNAL_ADDRESSES, masterAddresses); CommandLine mockCommandLine = mock(CommandLine.class); String[] mockArgs = new String[]{"--target", "master"}; when(mockCommandLine.getArgs()).thenReturn(mockArgs); when(mockCommandLine.hasOption(LogLevel.TARGET_OPTION_NAME)).thenReturn(true); when(mockCommandLine.getOptionValue(LogLevel.TARGET_OPTION_NAME)).thenReturn(mockArgs[1]); try (MockedStatic<MasterInquireClient.Factory> mockFactory = mockStatic(MasterInquireClient.Factory.class)) { MasterInquireClient mockInquireClient = mock(MasterInquireClient.class); when(mockInquireClient.getPrimaryRpcAddress()).thenReturn(new InetSocketAddress("masters-1", mConf.getInt(PropertyKey.MASTER_RPC_PORT))); when(mockInquireClient.getConnectDetails()) .thenReturn(() -> new MultiMasterAuthority(masterAddresses)); mockFactory.when(() -> MasterInquireClient.Factory.create(any(), any())) .thenReturn(mockInquireClient); List<LogLevel.TargetInfo> targets = LogLevel.parseOptTarget(mockCommandLine, mConf); assertEquals(1, targets.size()); assertEquals(new LogLevel.TargetInfo("masters-1", MASTER_WEB_PORT, "master"), targets.get(0)); } }
// On exception: clears the thread-local request data (avoiding leakage across requests) and logs the failure point.
@Override public ExecuteContext onThrow(ExecuteContext context) { ThreadLocalUtils.removeRequestData(); LogUtils.printHttpRequestOnThrowPoint(context); return context; }
// onThrow must clear previously-set thread-local request data.
@Test public void testOnThrow() { ThreadLocalUtils.setRequestData(new RequestData(Collections.emptyMap(), "", "")); interceptor.onThrow(context); Assert.assertNull(ThreadLocalUtils.getRequestData()); }
// Builds either a single image manifest (exactly one built image: translate image + compute config
// digest) or a manifest list (multiple images), emitting timer/progress/log events along the way.
// Requires at least one built image.
@Override public ManifestTemplate call() throws IOException { Preconditions.checkState(!builtImages.isEmpty(), "no images given"); EventHandlers eventHandlers = buildContext.getEventHandlers(); try (TimerEventDispatcher ignored = new TimerEventDispatcher(eventHandlers, DESCRIPTION); ProgressEventDispatcher ignored2 = progressEventDispatcherFactory.create( "building a manifest list or a single manifest", 1)) { if (builtImages.size() == 1) { eventHandlers.dispatch(LogEvent.info("Building a single manifest")); ImageToJsonTranslator imageTranslator = new ImageToJsonTranslator(builtImages.get(0)); BlobDescriptor configDescriptor = Digests.computeDigest(imageTranslator.getContainerConfiguration()); return imageTranslator.getManifestTemplate( buildContext.getTargetFormat(), configDescriptor); } eventHandlers.dispatch(LogEvent.info("Building a manifest list")); return new ManifestListGenerator(builtImages) .getManifestListTemplate(buildContext.getTargetFormat()); } }
// With a single built image, call() must produce a V2.2 manifest with the expected media type, config digest/size, and layer size.
@Test public void testCall_singleManifest() throws IOException { // Expected manifest JSON // { // "schemaVersion":2, // "mediaType":"application/vnd.docker.distribution.manifest.v2+json", // "config":{ // "mediaType":"application/vnd.docker.container.image.v1+json", // "digest":"sha256:1b2ff280940537177565443144a81319ad48528fd35d1cdc38cbde07f24f6912", // "size":158 // }, // "layers":[ // { // "mediaType":"application/vnd.docker.image.rootfs.diff.tar.gzip", // "size":0 // } // ] // } ManifestTemplate manifestTemplate = new BuildManifestListOrSingleManifestStep( buildContext, progressDispatcherFactory, Arrays.asList(image1)) .call(); Assert.assertTrue(manifestTemplate instanceof V22ManifestTemplate); V22ManifestTemplate manifest = (V22ManifestTemplate) manifestTemplate; Assert.assertEquals(2, manifest.getSchemaVersion()); Assert.assertEquals( "application/vnd.docker.distribution.manifest.v2+json", manifest.getManifestMediaType()); Assert.assertEquals( "sha256:1b2ff280940537177565443144a81319ad48528fd35d1cdc38cbde07f24f6912", manifest.getContainerConfiguration().getDigest().toString()); Assert.assertEquals(0, manifest.getLayers().get(0).getSize()); Assert.assertEquals(158, manifest.getContainerConfiguration().getSize()); }
// Records whether this (de)serializer handles keys, then forwards configuration to the delegate.
@Override public void configure(final Map<String, ?> configs, final boolean isKey) { this.isKey = isKey; delegate.configure(configs, isKey); }
// configure() must pass both the config map and the isKey flag through to the delegate.
@Test public void shouldConfigureDelegate() { // Given: final Map<String, ?> configs = ImmutableMap.of("some", "thing"); // When: serializer.configure(configs, true); // Then: verify(delegate).configure(configs, true); }
// Counts content-pack installations whose entity metadata references the given entity id.
public long countInstallationOfEntityById(ModelId entityId) { final String field = String.format(Locale.ROOT, "%s.%s", ContentPackInstallation.FIELD_ENTITIES, NativeEntityDescriptor.FIELD_META_ID); return dbCollection.getCount(DBQuery.is(field, entityId)); }
// Against the Mongo fixture, verifies counts of 2, 0 (unknown id), and 1 for three entity ids.
@Test @MongoDBFixtures("ContentPackInstallationPersistenceServiceTest.json") public void countInstallationOfEntityById() { final long countedInstallations1 = persistenceService.countInstallationOfEntityById(ModelId.of("5b4c920b4b900a0024af2b5d")); assertThat(countedInstallations1).isEqualTo(2); final long countedInstallations2 = persistenceService.countInstallationOfEntityById(ModelId.of("non-exsistant")); assertThat(countedInstallations2).isEqualTo(0); final long countedInstallations3 = persistenceService.countInstallationOfEntityById(ModelId.of("5b4c920b4b900abeefaf2b5c")); assertThat(countedInstallations3).isEqualTo(1); }
// Varargs convenience overload: accumulates the expected elements and delegates to containsAtLeastElementsIn.
@CanIgnoreReturnValue public final Ordered containsAtLeast( @Nullable Object firstExpected, @Nullable Object secondExpected, @Nullable Object @Nullable ... restOfExpected) { return containsAtLeastElementsIn(accumulate(firstExpected, secondExpected, restOfExpected)); }
// Duplicate expected elements must each be matched by a distinct actual occurrence.
@Test public void iterableContainsAtLeastWithDuplicates() { assertThat(asList(1, 2, 2, 2, 3)).containsAtLeast(2, 2); }
// Runs the task under its extracted lock configuration; if the task has no lock configuration it runs unguarded.
@Override public void executeWithLock(Runnable task) { Optional<LockConfiguration> lockConfigOptional = lockConfigurationExtractor.getLockConfiguration(task); if (lockConfigOptional.isEmpty()) { logger.debug("No lock configuration for {}. Executing without lock.", task); task.run(); } else { lockingTaskExecutor.executeWithLock(task, lockConfigOptional.get()); } }
// When the lock provider refuses the lock (already held), the task must not run at all.
@Test void doNotExecuteIfAlreadyLocked() { when(lockConfigurationExtractor.getLockConfiguration(task)).thenReturn(Optional.of(LOCK_CONFIGURATION)); when(lockProvider.lock(LOCK_CONFIGURATION)).thenReturn(Optional.empty()); defaultLockManager.executeWithLock(task); verifyNoInteractions(task); }
// This coercer always targets VARBINARY.
@Override public Type getToType() { return VARBINARY; }
// getToType must report the VARBINARY target type.
@Test public void testGetToType() { assertEquals(coercer.getToType(), VarbinaryType.VARBINARY); }
// Assembles a CreateStreamCommand from the output node's sink metadata (schema, timestamp column,
// topic, formats, window info, or-replace flag); the trailing Optional.of(false) marks it non-source.
public CreateStreamCommand createStreamCommand(final KsqlStructuredDataOutputNode outputNode) { return new CreateStreamCommand( outputNode.getSinkName().get(), outputNode.getSchema(), outputNode.getTimestampColumn(), outputNode.getKsqlTopic().getKafkaTopicName(), Formats.from(outputNode.getKsqlTopic()), outputNode.getKsqlTopic().getKeyFormat().getWindowInfo(), Optional.of(outputNode.getOrReplace()), Optional.of(false) ); }
// CREATE OR REPLACE targeting an existing SOURCE stream must be rejected with a descriptive KsqlException.
@Test public void shouldThrowInCreateStreamOrReplaceOnSourceStreams() { // Given: final SourceName existingStreamName = SourceName.of("existingStreamName"); final KsqlStream existingStream = mock(KsqlStream.class); when(existingStream.getDataSourceType()).thenReturn(DataSourceType.KSTREAM); when(existingStream.isSource()).thenReturn(true); when(metaStore.getSource(existingStreamName)).thenReturn(existingStream); final CreateStream ddlStatement = new CreateStream(existingStreamName, STREAM_ELEMENTS, true, false, withProperties, false); // When: final Exception e = assertThrows( KsqlException.class, () -> createSourceFactory .createStreamCommand(ddlStatement, ksqlConfig)); // Then: assertThat(e.getMessage(), containsString( "Cannot add stream 'existingStreamName': CREATE OR REPLACE is not supported on " + "source streams.")); }
// Kahn-style topological sort over the job graph: seed the result with vertices that have no connected
// inputs, then repeatedly expand from already-sorted nodes. If no start candidate remains while
// vertices are still unsorted, the graph must contain a cycle and is rejected.
public List<JobVertex> getVerticesSortedTopologicallyFromSources() throws InvalidProgramException { // early out on empty lists if (this.taskVertices.isEmpty()) { return Collections.emptyList(); } List<JobVertex> sorted = new ArrayList<JobVertex>(this.taskVertices.size()); Set<JobVertex> remaining = new LinkedHashSet<JobVertex>(this.taskVertices.values()); // start by finding the vertices with no input edges // and the ones with disconnected inputs (that refer to some standalone data set) { Iterator<JobVertex> iter = remaining.iterator(); while (iter.hasNext()) { JobVertex vertex = iter.next(); if (vertex.hasNoConnectedInputs()) { sorted.add(vertex); iter.remove(); } } } int startNodePos = 0; // traverse from the nodes that were added until we found all elements while (!remaining.isEmpty()) { // first check if we have more candidates to start traversing from. if not, then the // graph is cyclic, which is not permitted if (startNodePos >= sorted.size()) { throw new InvalidProgramException("The job graph is cyclic."); } JobVertex current = sorted.get(startNodePos++); addNodesThatHaveNoNewPredecessors(current, sorted, remaining); } return sorted; }
// Builds a graph with an intermediate cycle (v1→v2→v3→v4→v1) reachable from a source; the topological
// sort must detect the cycle and throw InvalidProgramException.
@Test public void testTopoSortCyclicGraphIntermediateCycle() { try { JobVertex source = new JobVertex("source"); JobVertex v1 = new JobVertex("1"); JobVertex v2 = new JobVertex("2"); JobVertex v3 = new JobVertex("3"); JobVertex v4 = new JobVertex("4"); JobVertex target = new JobVertex("target"); v1.connectNewDataSetAsInput( source, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); v1.connectNewDataSetAsInput( v4, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); v2.connectNewDataSetAsInput( v1, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); v3.connectNewDataSetAsInput( v2, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); v4.connectNewDataSetAsInput( v3, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); target.connectNewDataSetAsInput( v3, DistributionPattern.POINTWISE, ResultPartitionType.PIPELINED); JobGraph jg = JobGraphTestUtils.streamingJobGraph(v1, v2, v3, v4, source, target); try { jg.getVerticesSortedTopologicallyFromSources(); fail("Failed to raise error on topologically sorting cyclic graph."); } catch (InvalidProgramException e) { // that what we wanted } } catch (Exception e) { e.printStackTrace(); fail(e.getMessage()); } }
// Maps this tiny-int column definition to the dialect-specific SQL type; unknown dialects are rejected.
@Override public String generateSqlType(Dialect dialect) { return switch (dialect.getId()) { case PostgreSql.ID -> "SMALLINT"; case Oracle.ID -> "NUMBER(3)"; case MsSql.ID, H2.ID -> "TINYINT"; default -> throw new UnsupportedOperationException(String.format("Unknown dialect '%s'", dialect.getId())); }; }
// Checks the SQL type mapping for each supported dialect (H2, PostgreSQL, MSSQL, Oracle).
@Test public void generate_sql_type() { TinyIntColumnDef def = new TinyIntColumnDef.Builder() .setColumnName("foo") .setIsNullable(true) .build(); assertThat(def.generateSqlType(new H2())).isEqualTo("TINYINT"); assertThat(def.generateSqlType(new PostgreSql())).isEqualTo("SMALLINT"); assertThat(def.generateSqlType(new MsSql())).isEqualTo("TINYINT"); assertThat(def.generateSqlType(new Oracle())).isEqualTo("NUMBER(3)"); }
/** Ignores all touch input while the view is disabled; otherwise defers to the superclass. */
@Override
public boolean onTouchEvent(MotionEvent ev) {
    return isEnabled() && super.onTouchEvent(ev);
}
// A disabled view must not consume touch events: onTouchEvent should return false.
@Test
public void testOnTouchEventDisabled() throws Exception {
    mUnderTest.setEnabled(false);
    // obtain(downTime, eventTime, action, x, y, metaState): a simple ACTION_DOWN at (1, 1).
    Assert.assertFalse(
        mUnderTest.onTouchEvent(MotionEvent.obtain(10, 10, MotionEvent.ACTION_DOWN, 1f, 1f, 0)));
}
/**
 * Applies metadata from the optional {@code @Job} annotation (display name,
 * retry count, labels) to the job at creation time.
 */
@Override
public void onCreating(AbstractJob job) {
    final Optional<Job> jobAnnotation = getJobAnnotation(job.getJobDetails());
    setJobName(job, jobAnnotation);
    setAmountOfRetries(job, jobAnnotation);
    setLabels(job, jobAnnotation);
}
// Placeholders in the @Job name annotation must be replaced with the actual
// invocation arguments (the numeric id and the user name here).
@Test
void testDisplayNameByAnnotationReplacesVariables() {
    Job job = anEnqueuedJob().withoutName()
        .withJobDetails(() -> testService.doWorkWithAnnotationAndJobContext(67656, "the almighty user", JobContext.Null))
        .build();

    defaultJobFilter.onCreating(job);

    assertThat(job).hasJobName("Doing some hard work for user the almighty user with id 67656");
}
/**
 * Routes an incoming websocket upgrade request to a healthy upstream.
 *
 * <p>Flow: look up the cached upstream list for the selector; if missing (or the Shenyu
 * context is absent) log and fall through to the rest of the chain. Otherwise pick an
 * upstream via the configured load balancer (keyed by client IP), build the real
 * websocket URI and delegate the handshake to the websocket service, forwarding the
 * filtered request headers and negotiated sub-protocols.
 *
 * <p>Fix: the info-level log message was grammatically broken ("you websocket urlPath").
 */
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain, final SelectorData selector, final RuleData rule) {
    final List<Upstream> upstreamList = UpstreamCacheManager.getInstance().findUpstreamListBySelectorId(selector.getId());
    final ShenyuContext shenyuContext = exchange.getAttribute(Constants.CONTEXT);
    if (CollectionUtils.isEmpty(upstreamList) || Objects.isNull(shenyuContext)) {
        // No upstreams configured for this selector: let the rest of the chain handle it.
        LOG.error("websocket upstream configuration error:{}", rule);
        return chain.execute(exchange);
    }
    final WebSocketRuleHandle ruleHandle = buildRuleHandle(rule);
    // Client IP is used as the load-balancing key (e.g. for ip-hash strategies).
    final String ip = Objects.requireNonNull(exchange.getRequest().getRemoteAddress()).getAddress().getHostAddress();
    Upstream upstream = LoadBalancerFactory.selector(upstreamList, ruleHandle.getLoadBalance(), ip);
    if (Objects.isNull(upstream)) {
        LOG.error("websocket has no upstream, error:{}", rule);
        Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.CANNOT_FIND_HEALTHY_UPSTREAM_URL);
        return WebFluxResultUtils.result(exchange, error);
    }
    URI wsRequestUrl = buildWsRealPath(exchange, upstream, shenyuContext);
    LOG.info("your websocket urlPath is :{}", wsRequestUrl.toASCIIString());
    HttpHeaders headers = exchange.getRequest().getHeaders();
    return this.webSocketService.handleRequest(exchange, new ShenyuWebSocketHandler(
        wsRequestUrl, this.webSocketClient, filterHeaders(headers), buildWsProtocols(headers)));
}
// Happy path: with cached upstreams the plugin completes via the WebSocketService.
// With an unknown selector id (no cached upstreams) it falls through to the chain.
@Test
public void doExecuteTest() {
    initMockInfo();
    when(webSocketService.handleRequest(any(), any())).thenReturn(Mono.empty());
    StepVerifier.create(Mono.defer(() -> webSocketPlugin.doExecute(exchange, chain, selectorData, ruleData))).expectSubscription().verifyComplete();
    // Selector "1" has no upstream list registered, so the plugin must delegate to the chain.
    SelectorData selectorData1 = new SelectorData();
    selectorData1.setId("1");
    assertEquals(webSocketPlugin.doExecute(exchange, chain, selectorData1, new RuleData()), chain.execute(exchange));
}
/**
 * Returns the User whose credentials authenticate the current REST call.
 * Open to all three authorities; the user is resolved from the security context
 * and re-read through the user service so the response reflects the stored state.
 */
@ApiOperation(value = "Get current User (getUser)",
    notes = "Get the information about the User which credentials are used to perform this REST API call.")
@PreAuthorize("hasAnyAuthority('SYS_ADMIN', 'TENANT_ADMIN', 'CUSTOMER_USER')")
@RequestMapping(value = "/auth/user", method = RequestMethod.GET)
public @ResponseBody User getUser() throws ThingsboardException {
    SecurityUser securityUser = getCurrentUser();
    return userService.findUserById(securityUser.getTenantId(), securityUser.getId());
}
// /api/auth/user must reject anonymous calls and, for each authority level,
// return the authority and email of the currently authenticated user.
@Test
public void testGetUser() throws Exception {
    // Not logged in: expect 401.
    doGet("/api/auth/user")
        .andExpect(status().isUnauthorized());

    loginSysAdmin();
    doGet("/api/auth/user")
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.authority",is(Authority.SYS_ADMIN.name())))
        .andExpect(jsonPath("$.email",is(SYS_ADMIN_EMAIL)));

    loginTenantAdmin();
    doGet("/api/auth/user")
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.authority",is(Authority.TENANT_ADMIN.name())))
        .andExpect(jsonPath("$.email",is(TENANT_ADMIN_EMAIL)));

    loginCustomerUser();
    doGet("/api/auth/user")
        .andExpect(status().isOk())
        .andExpect(jsonPath("$.authority",is(Authority.CUSTOMER_USER.name())))
        .andExpect(jsonPath("$.email",is(CUSTOMER_USER_EMAIL)));
}
/**
 * Resolves and applies the effective JVM options.
 *
 * <p>{@code args[0]} is the Logstash home directory; the optional {@code args[1]} is an
 * explicit jvm.options path. {@code lsJavaOpts} carries LS_JAVA_OPTS overrides.
 * Malformed option lines are reported one by one on stderr (the process continues);
 * an I/O failure while reading the options file terminates with exit code 1.
 */
static void handleJvmOptions(String[] args, String lsJavaOpts) {
    final JvmOptionsParser parser = new JvmOptionsParser(args[0]);
    final String jvmOpts = args.length == 2 ? args[1] : null;

    try {
        Optional<Path> jvmOptions = parser.lookupJvmOptionsFile(jvmOpts);
        parser.handleJvmOptions(jvmOptions, lsJavaOpts);
    } catch (JvmOptionsFileParserException pex) {
        // Report every invalid line with its line number; parsing errors are not fatal.
        System.err.printf(Locale.ROOT, "encountered [%d] error%s parsing [%s]",
            pex.invalidLines().size(),
            pex.invalidLines().size() == 1 ? "" : "s",
            pex.jvmOptionsFile());
        int errorCounter = 0;
        for (final Map.Entry<Integer, String> entry : pex.invalidLines().entrySet()) {
            errorCounter++;
            System.err.printf(Locale.ROOT, "[%d]: encountered improperly formatted JVM option in [%s] on line number [%d]: [%s]",
                errorCounter,
                pex.jvmOptionsFile(),
                entry.getKey(),
                entry.getValue());
        }
    } catch (IOException ex) {
        // Unlike parse errors, an unreadable options file is fatal.
        System.err.println("Error accessing jvm.options file");
        System.exit(1);
    }
}
// When the user's jvm.options does not set io.netty.allocator.maxOrder, the parser
// must inject the default (11) while preserving the user's other Netty properties.
@Test
public void testNettyMaxOrderRuleAppliesIfNotAlreadyDefinedExplicitlyByUser() throws IOException {
    File optionsFile = writeIntoTempOptionsFile(writer -> writer.println("-Dsome.other.netty.property=123"));

    JvmOptionsParser.handleJvmOptions(new String[] {"/path/to/ls_home", optionsFile.toString()}, "-Dcli.opts=something");

    // Verify
    final String output = outputStreamCaptor.toString();
    assertTrue("Existing properties other than Netty's maxOrder ar preserved",
        output.contains("-Dsome.other.netty.property=123"));
    assertTrue("Netty's maxOrder MUST be forcibly defined to the expected default",
        output.contains("-Dio.netty.allocator.maxOrder=11"));
}
/**
 * Decodes an SMPP message body to text according to its data-coding alphabet.
 *
 * <p>Returns {@code null} for a null body or for 8-bit (binary) payloads, which are
 * not text. Well-known alphabets map to fixed charsets; anything else is decoded
 * with the caller-supplied default encoding name.
 *
 * @throws UnsupportedEncodingException if {@code defaultEncoding} names an unknown charset
 */
public static String decodeBody(byte[] body, byte dataCoding, String defaultEncoding) throws UnsupportedEncodingException {
    final Alphabet alphabet = Alphabet.parseDataCoding(dataCoding);
    if (body == null || SmppUtils.is8Bit(alphabet)) {
        return null;
    }
    if (alphabet == Alphabet.ALPHA_IA5) {
        return new String(body, StandardCharsets.US_ASCII);
    }
    if (alphabet == Alphabet.ALPHA_LATIN1) {
        return new String(body, StandardCharsets.ISO_8859_1);
    }
    if (alphabet == Alphabet.ALPHA_UCS2) {
        return new String(body, StandardCharsets.UTF_16BE);
    }
    return new String(body, defaultEncoding);
}
// An 8-bit (binary) data coding carries no text, so decodeBody must return null
// regardless of the default encoding supplied.
@Test
void testDecodeBodyWhenBodyIs8bitShouldReturnNull() throws UnsupportedEncodingException {
    byte[] body = new byte[] { 0, 1, 2, 3, 4 };
    Assertions.assertNull(SmppUtils.decodeBody(body, Alphabet.ALPHA_8_BIT.value(), "X-Gsm7Bit"));
}
/**
 * Loads the detail view of a single product, projected directly into
 * {@link ProductSpecificResponse} via a QueryDSL constructor projection.
 *
 * <p>The projection includes seller info, whether the seller is the requesting member
 * ({@code member.id.eq(memberId)}), and whether the requester already liked the product.
 * Left joins keep the row even when the seller, like, or category rows are absent.
 * Returns {@link Optional#empty()} when no product matches {@code id}.
 */
public Optional<ProductSpecificResponse> findSpecificProductById(final Long id, final Long memberId) {
    return Optional.ofNullable(jpaQueryFactory.select(constructor(ProductSpecificResponse.class,
            product.id,
            product.description.location,
            product.description.title,
            product.description.content,
            product.price.price,
            product.productStatus,
            product.statisticCount.visitedCount,
            product.statisticCount.contactCount,
            product.categoryId,
            member.id,
            member.nickname,
            // true when the viewer is the product's owner
            member.id.eq(memberId),
            product.statisticCount.likedCount,
            // subquery/expression resolving whether this member already liked the product
            isLikedAlreadyByMe(memberId),
            product.createdAt
        )).from(product)
        .where(product.id.eq(id))
        .leftJoin(member).on(member.id.eq(product.memberId))
        .leftJoin(productLike).on(productLike.productId.eq(product.id).and(productLike.memberId.eq(memberId)))
        .leftJoin(category).on(category.id.eq(product.categoryId))
        .fetchOne());
}
// Fetches the product detail view for a saved product and verifies the
// projection is present and carries the expected product id.
@Test
void 상품_상세_정보를_조회한다() {
    // given
    Member member = memberRepository.save(일반_유저_생성());
    Product product = productRepository.save(상품_생성());

    // when
    Optional<ProductSpecificResponse> result = productQueryRepository.findSpecificProductById(product.getId(), member.getId());

    // then
    assertSoftly(softly -> {
        softly.assertThat(result).isPresent();
        softly.assertThat(result.get().id()).isEqualTo(product.getId());
    });
}
@Override public Map<String, Object> load(String configKey) { if (targetUri == null) { return null; } // Check for new file every so often int currentTimeSecs = Time.currentTimeSecs(); if (lastReturnedValue != null && ((currentTimeSecs - lastReturnedTime) < artifactoryPollTimeSecs)) { LOG.debug("currentTimeSecs: {}; lastReturnedTime {}; artifactoryPollTimeSecs: {}. Returning our last map.", currentTimeSecs, lastReturnedTime, artifactoryPollTimeSecs); return (Map<String, Object>) lastReturnedValue.get(configKey); } try { Map<String, Object> raw = loadFromUri(targetUri); if (raw != null) { return (Map<String, Object>) raw.get(configKey); } } catch (Exception e) { LOG.error("Failed to load from uri {}", targetUri); } return null; }
// When the configured URI points at an artifact directory, the loader must list the
// directory, pick the info file, and parse the scheduler user pools from it. A second
// loader instance with no stubbed fetches must still return a valid map (served from
// the local cache written by the first load).
@Test
public void testPointingAtDirectory() {
    // This is a test where we are configured to point right at an artifact dir
    Config conf = new Config();
    conf.put(DaemonConfig.SCHEDULER_CONFIG_LOADER_URI,
        ARTIFACTORY_HTTP_SCHEME_PREFIX + "bogushost.yahoo.com:9999/location/of/this/dir");
    conf.put(Config.STORM_LOCAL_DIR, tmpDirPath.toString());
    ArtifactoryConfigLoaderMock loaderMock = new ArtifactoryConfigLoaderMock(conf);
    // Directory listing returns one timestamped yaml entry.
    loaderMock.setData("Anything", "/location/of/this/dir",
        "{\"children\" : [ { \"uri\" : \"/20160621204337.yaml\", \"folder\" : false }]}");
    loaderMock
        .setData(null, null, "{ \"" + DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS + "\": {one: 1, two: 2, three: 3, four : 4}}");

    Map<String, Object> ret = loaderMock.load(DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS);

    assertNotNull(ret, "Unexpectedly returned null");
    assertEquals(1, ret.get("one"));
    assertEquals(2, ret.get("two"));
    assertEquals(3, ret.get("three"));
    assertEquals(4, ret.get("four"));

    // Now let's load w/o setting up gets and we should still get valid map back
    ArtifactoryConfigLoaderMock tc2 = new ArtifactoryConfigLoaderMock(conf);
    Map<String, Object> ret2 = tc2.load(DaemonConfig.MULTITENANT_SCHEDULER_USER_POOLS);
    assertNotNull(ret2, "Unexpectedly returned null");
    assertEquals(1, ret2.get("one"));
    assertEquals(2, ret2.get("two"));
    assertEquals(3, ret2.get("three"));
    assertEquals(4, ret2.get("four"));
}
/**
 * Creates the recovered completed-checkpoint store for a job, deriving the number
 * of retained checkpoints from the job manager configuration.
 */
@VisibleForTesting
static CompletedCheckpointStore createCompletedCheckpointStore(
        Configuration jobManagerConfig,
        CheckpointRecoveryFactory recoveryFactory,
        Executor ioExecutor,
        Logger log,
        JobID jobId,
        RestoreMode restoreMode)
        throws Exception {
    final int maxRetainedCheckpoints =
            DefaultCompletedCheckpointStoreUtils.getMaximumNumberOfRetainedCheckpoints(
                    jobManagerConfig, log);
    return recoveryFactory.createRecoveredCompletedCheckpointStore(
            jobId,
            maxRetainedCheckpoints,
            SharedStateRegistry.DEFAULT_FACTORY,
            ioExecutor,
            restoreMode);
}
// The store created by SchedulerUtils must honour the configured
// MAX_RETAINED_CHECKPOINTS value.
@Test
void testSettingMaxNumberOfCheckpointsToRetain() throws Exception {
    final int maxNumberOfCheckpointsToRetain = 10;
    final Configuration jobManagerConfig = new Configuration();
    jobManagerConfig.set(
        CheckpointingOptions.MAX_RETAINED_CHECKPOINTS, maxNumberOfCheckpointsToRetain);

    final CompletedCheckpointStore completedCheckpointStore =
        SchedulerUtils.createCompletedCheckpointStore(
            jobManagerConfig,
            new StandaloneCheckpointRecoveryFactory(),
            Executors.directExecutor(),
            log,
            new JobID(),
            RestoreMode.CLAIM);

    assertThat(completedCheckpointStore.getMaxNumberOfRetainedCheckpoints())
        .isEqualTo(maxNumberOfCheckpointsToRetain);
}
/**
 * Registers (or retrieves) the Rule for the given ref. A second registration with the
 * same ref must carry the same RuleKey; a conflicting key raises IllegalArgumentException.
 */
@Override
public Rule register(String ref, RuleKey ruleKey) {
    requireNonNull(ruleKey, "ruleKey can not be null");
    final Rule existing = rulesByUuid.get(ref);
    if (existing == null) {
        final Rule created = new Rule(ref, ruleKey.repository(), ruleKey.rule());
        rulesByUuid.put(ref, created);
        return created;
    }
    final boolean sameKey = ruleKey.repository().equals(existing.repository())
        && ruleKey.rule().equals(existing.key());
    if (!sameKey) {
        throw new IllegalArgumentException(format(
            "Specified RuleKey '%s' is not equal to the one already registered in repository for ref %s: '%s'",
            ruleKey, ref, RuleKey.of(existing.repository(), existing.key())));
    }
    return existing;
}
// NOTE(review): the test name claims registering with an equal RuleKey returns the
// same object, but the body registers under fresh random refs and asserts that the
// resulting refs DIFFER from the first rule's — confirm the name matches the intent.
@Test
public void register_returns_the_same_object_for_every_call_with_equals_RuleKey_objects() {
    Rule rule = underTest.register(SOME_UUID, RuleKey.of(SOME_REPOSITORY, SOME_RULE_KEY));

    for (int i = 0; i < someRandomInt(); i++) {
        // A different ref with the same RuleKey yields a distinct Rule carrying its own ref.
        assertThat(underTest.register(Uuids.createFast(), RuleKey.of(SOME_REPOSITORY, SOME_RULE_KEY)).ref()).isNotEqualTo(rule.ref());
    }
}
/**
 * Reads 4 bytes from the buffer as a big-endian unsigned 32-bit value,
 * returned widened into a non-negative long.
 *
 * @throws BufferUnderflowException if fewer than 4 bytes remain
 */
public static long readUint32BE(ByteBuffer buf) throws BufferUnderflowException {
    final int signed = buf.order(ByteOrder.BIG_ENDIAN).getInt();
    // Mask off the sign extension to get the unsigned interpretation.
    return signed & 0xFFFFFFFFL;
}
// Reading a uint32 at offset 2 of a 3-byte array overruns the array, which this
// byte[]-based overload must surface as an ArrayIndexOutOfBoundsException.
@Test(expected = ArrayIndexOutOfBoundsException.class)
public void testReadUint32BEThrowsException1() {
    ByteUtils.readUint32BE(new byte[]{1, 2, 3}, 2);
}
/**
 * Sends an ack response on the current gRPC connection on a best-effort basis:
 * failures are logged but never propagated to the caller.
 *
 * <p>Fix: the exception is now passed as the last logger argument so SLF4J records
 * the stack trace; the original call dropped it, making send failures opaque.
 */
private void sendResponse(Response response) {
    try {
        ((GrpcConnection) this.currentConnection).sendResponse(response);
    } catch (Exception e) {
        LOGGER.error("[{}]Error to send ack response, ackId->{}", this.currentConnection.getConnectionId(),
                response.getRequestId(), e);
    }
}
// When the bi-directional stream receives a SetupAckRequest, the client must not
// reply with a response on the connection (hence the never() verification).
@Test
void testBindRequestStreamOnNextSetupAckRequest()
        throws NoSuchFieldException, IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    BiRequestStreamGrpc.BiRequestStreamStub stub = mock(BiRequestStreamGrpc.BiRequestStreamStub.class);
    GrpcConnection grpcConnection = mock(GrpcConnection.class);
    // Immediately push a SetupAckRequest payload into the bound observer.
    when(stub.requestBiStream(any())).thenAnswer((Answer<StreamObserver<Payload>>) invocationOnMock -> {
        ((StreamObserver<Payload>) invocationOnMock.getArgument(0)).onNext(GrpcUtils.convert(new SetupAckRequest()));
        return null;
    });
    setCurrentConnection(grpcConnection, grpcClient);
    invokeBindRequestStream(grpcClient, stub, grpcConnection);
    verify(grpcConnection, never()).sendResponse(any(Response.class));
}
// Pure delegation: the wrapper reports the runtime mode of the wrapped manager.
@Override
public RuntimeMode getRuntimeMode() {
    return original.getRuntimeMode();
}
// The wrapped manager must report the same runtime mode as the delegate.
@Test
public void getRuntimeMode() {
    assertEquals(pluginManager.getRuntimeMode(), wrappedPluginManager.getRuntimeMode());
}
// Delegates to the expression's argument matcher, converting the step's text
// (and any doc string / data table) into typed arguments.
List<Argument> matchedArguments(Step step) {
    return argumentMatcher.argumentsFrom(step, types);
}
// When the step definition's target parameter type is Object, a doc string must
// be passed through untransformed as a DocString value.
@Test
void should_apply_identity_transform_to_doc_string_when_target_type_is_object() {
    Feature feature = TestFeatureParser.parse("" +
        "Feature: Test feature\n" +
        "  Scenario: Test scenario\n" +
        "     Given I have some step\n" +
        "       \"\"\"\n" +
        "       content\n" +
        "       \"\"\"\n");
    StepDefinition stepDefinition = new StubStepDefinition("I have some step", Object.class);
    StepExpression expression = stepExpressionFactory.createExpression(stepDefinition);
    CoreStepDefinition coreStepDefinition = new CoreStepDefinition(id, stepDefinition, expression);
    Step step = feature.getPickles().get(0).getSteps().get(0);
    List<Argument> arguments = coreStepDefinition.matchedArguments(step);
    assertThat(arguments.get(0).getValue(), is(equalTo(DocString.create("content"))));
}
// Always null: SQL Server has no MySQL-style ON DUPLICATE KEY UPDATE clause,
// so there is nothing for this recognizer to extract.
@Override
public List<String> getDuplicateKeyUpdate() {
    return null;
}
// A plain SQL Server INSERT has no duplicate-key-update clause; expect null.
@Test
public void testGetDuplicateKeyUpdate() {
    String sql = "insert into t(a) values (?)";
    SQLStatement ast = getSQLStatement(sql);
    SqlServerInsertRecognizer recognizer = new SqlServerInsertRecognizer(sql, ast);
    Assertions.assertNull(recognizer.getDuplicateKeyUpdate());
}
// Matches requests whose HTTP method is GET and whose path matches the pattern.
// Note the method predicate is evaluated first (short-circuits the path check).
public static RequestPredicate GET(String pattern) {
    return method(HttpMethod.GET).and(path(pattern));
}
// negate() must invert the predicate's result in both directions.
@Test
void negate() {
    RequestPredicate predicate = request -> false;
    RequestPredicate negated = predicate.negate();

    MockServerHttpRequest mockRequest = MockServerHttpRequest.get("https://example.com").build();
    ServerRequest request = new DefaultServerRequest(MockServerWebExchange.from(mockRequest),
        Collections.emptyList());
    assertThat(negated.test(request)).isTrue();

    predicate = r -> true;
    negated = predicate.negate();

    assertThat(negated.test(request)).isFalse();
}
/**
 * Misspelled legacy setter kept for backward compatibility; writes the
 * correctly named {@code dispatcher} field and returns this builder.
 *
 * @deprecated use the correctly spelled setter instead
 */
@Deprecated
public ProtocolBuilder dispather(String dispather) {
    this.dispatcher = dispather;
    return getThis();
}
// The deprecated misspelled setter must still populate the dispatcher value
// exposed by the built config.
@Test
void dispather() {
    ProtocolBuilder builder = new ProtocolBuilder();
    builder.dispather("mockdispatcher");
    Assertions.assertEquals("mockdispatcher", builder.build().getDispather());
}
/**
 * Evaluates the simple-set predicate: converts the raw value to this array's
 * element type and tests membership in the configured value set, honouring
 * the IN / NOT_IN operator.
 */
protected boolean evaluation(Object rawValue) {
    final String asString = (String) ConverterTypeUtil.convert(String.class, rawValue);
    final Object candidate = arrayType.getValue(asString);
    final boolean contained = values.contains(candidate);
    switch (inNotIn) {
        case IN:
            return contained;
        case NOT_IN:
            return !contained;
        default:
            throw new KiePMMLException("Unknown IN_NOTIN" + inNotIn);
    }
}
// With NOT_IN, a value contained in the set must evaluate to false and an
// outside value to true.
@Test
void evaluationStringNotIn() {
    ARRAY_TYPE arrayType = ARRAY_TYPE.STRING;
    List<Object> values = getObjects(arrayType, 1);
    KiePMMLSimpleSetPredicate kiePMMLSimpleSetPredicate = getKiePMMLSimpleSetPredicate(values,
        arrayType,
        IN_NOTIN.NOT_IN);
    assertThat(kiePMMLSimpleSetPredicate.evaluation(values.get(0))).isFalse();
    assertThat(kiePMMLSimpleSetPredicate.evaluation("NOT")).isTrue();
}
/**
 * Dispatches the record to the schemaless or schema-aware transformation path,
 * depending on whether the record carries an operating schema.
 */
@Override
public R apply(R record) {
    return operatingSchema(record) == null ? applySchemaless(record) : applyWithSchema(record);
}
// Masked list/map fields must be replaced with EMPTY but MUTABLE collections,
// so downstream consumers can add entries without an UnsupportedOperationException.
@Test
public void testNullListAndMapReplacementsAreMutable() {
    final List<String> maskFields = Arrays.asList("array", "map");
    final Struct updatedValue = (Struct) transform(maskFields, null).apply(record(SCHEMA, VALUES_WITH_SCHEMA)).value();

    @SuppressWarnings("unchecked")
    List<Integer> actualList = (List<Integer>) updatedValue.get("array");
    assertEquals(Collections.emptyList(), actualList);
    // Mutating the replacement must succeed.
    actualList.add(0);
    assertEquals(Collections.singletonList(0), actualList);

    @SuppressWarnings("unchecked")
    Map<String, String> actualMap = (Map<String, String>) updatedValue.get("map");
    assertEquals(Collections.emptyMap(), actualMap);
    actualMap.put("k", "v");
    assertEquals(Collections.singletonMap("k", "v"), actualMap);
}
/**
 * Builds a human-readable info row for a snapshot in the repository.
 *
 * <p>Two modes:
 * <ul>
 *   <li>No timestamp: lists all {@code __info_*} files for the snapshot on remote
 *       storage and returns [name, joined-timestamps, status].</li>
 *   <li>With timestamp: downloads the specific info file to a local temp path, parses
 *       it, and returns [name, timestamp, db, brief, status]; the temp file is always
 *       deleted afterwards.</li>
 * </ul>
 * Failures are reported inside the returned row rather than thrown.
 */
private List<String> getSnapshotInfo(String snapshotName, String timestamp) {
    List<String> info = Lists.newArrayList();
    if (Strings.isNullOrEmpty(timestamp)) {
        // get all timestamp
        // path eg: /location/__starrocks_repository_repo_name/__ss_my_snap/__info_*
        String infoFilePath = assembleJobInfoFilePath(snapshotName, -1);
        LOG.debug("assemble infoFilePath: {}, snapshot: {}", infoFilePath, snapshotName);
        List<RemoteFile> results = Lists.newArrayList();
        Status st = storage.list(infoFilePath + "*", results);
        if (!st.ok()) {
            info.add(snapshotName);
            info.add(FeConstants.NULL_STRING);
            info.add("ERROR: Failed to get info: " + st.getErrMsg());
        } else {
            info.add(snapshotName);
            List<String> tmp = Lists.newArrayList();
            for (RemoteFile file : results) {
                // __info_2018-04-18-20-11-00.Jdwnd9312sfdn1294343
                Pair<String, String> pureFileName = decodeFileNameWithChecksum(file.getName());
                if (pureFileName == null) {
                    // maybe: __info_2018-04-18-20-11-00.part
                    tmp.add("Invalid: " + file.getName());
                    continue;
                }
                // strip the __info_ prefix, leaving just the timestamp
                tmp.add(disjoinPrefix(PREFIX_JOB_INFO, pureFileName.first));
            }
            info.add(Joiner.on("\n").join(tmp));
            info.add(tmp.isEmpty() ? "ERROR: no snapshot" : "OK");
        }
    } else {
        // get specified timestamp
        // path eg: /path/to/backup/__info_2081-04-19-12-59-11
        String localFilePath = BackupHandler.BACKUP_ROOT_DIR + "/" + Repository.PREFIX_JOB_INFO + timestamp;
        try {
            String remoteInfoFilePath = assembleJobInfoFilePath(snapshotName, -1) + timestamp;
            Status st = download(remoteInfoFilePath, localFilePath);
            if (!st.ok()) {
                info.add(snapshotName);
                info.add(timestamp);
                info.add(FeConstants.NULL_STRING);
                info.add(FeConstants.NULL_STRING);
                info.add("Failed to get info: " + st.getErrMsg());
            } else {
                try {
                    BackupJobInfo jobInfo = BackupJobInfo.fromFile(localFilePath);
                    info.add(snapshotName);
                    info.add(timestamp);
                    info.add(jobInfo.dbName);
                    info.add(jobInfo.getBrief());
                    info.add("OK");
                } catch (IOException e) {
                    info.add(snapshotName);
                    info.add(timestamp);
                    info.add(FeConstants.NULL_STRING);
                    info.add(FeConstants.NULL_STRING);
                    info.add("Failed to read info from local file: " + e.getMessage());
                }
            }
        } finally {
            // delete tmp local file
            File localFile = new File(localFilePath);
            if (localFile.exists()) {
                if (!localFile.delete()) {
                    LOG.warn("Failed to delete file, filepath={}", localFile.getAbsolutePath());
                }
            }
        }
    }
    return info;
}
// With an empty snapshot name / timestamp the repository must enumerate all
// snapshots: the stubbed storage listing yields two snapshot directories.
@Test
public void testGetSnapshotInfo() {
    new Expectations() {
        {
            storage.list(anyString, (List<RemoteFile>) any);
            minTimes = 0;
            result = new Delegate() {
                public Status list(String remotePath, List<RemoteFile> result) {
                    // Info-file listings get one timestamped entry; snapshot-dir
                    // listings get two snapshot folders.
                    if (remotePath.contains(Repository.PREFIX_JOB_INFO)) {
                        result.add(new RemoteFile(" __info_2018-04-18-20-11-00.12345678123456781234567812345678",
                            true, 100));
                    } else {
                        result.add(new RemoteFile(Repository.PREFIX_SNAPSHOT_DIR + "s1", false, 100));
                        result.add(new RemoteFile(Repository.PREFIX_SNAPSHOT_DIR + "s2", false, 100));
                    }
                    return Status.OK;
                }
            };
        }
    };

    repo = new Repository(10000, "repo", false, location, storage);
    String snapshotName = "";
    String timestamp = "";
    try {
        List<List<String>> infos = repo.getSnapshotInfos(snapshotName, timestamp, null);
        Assert.assertEquals(2, infos.size());
    } catch (SemanticException e) {
        e.printStackTrace();
        Assert.fail();
    }
}
/**
 * Builds the joined stream for this join node. Primary-key joins first verify that
 * both sides have matching partition counts (foreign-key joins are exempt), then a
 * joiner appropriate to the left/right node output types performs the join.
 */
@Override
public SchemaKStream<?> buildStream(final PlanBuildContext buildContext) {
    if (!joinKey.isForeignKey()) {
        ensureMatchingPartitionCounts(buildContext.getServiceContext().getTopicClient());
    }

    final JoinerFactory joinerFactory = new JoinerFactory(
        buildContext,
        this,
        buildContext.buildNodeContext(getId().toString()));

    return joinerFactory.getJoiner(left.getNodeOutputType(), right.getNodeOutputType()).join();
}
// A table-table INNER join node must translate into an innerJoin call on the
// left table with the synthesized key and the node's context stacker.
@Test
public void shouldPerformTableToTableInnerJoin() {
    // Given:
    setupTable(left, leftSchemaKTable);
    setupTable(right, rightSchemaKTable);
    final JoinNode joinNode =
        new JoinNode(nodeId, INNER, joinKey, true, left, right, empty(), "KAFKA");

    // When:
    joinNode.buildStream(planBuildContext);

    // Then:
    verify(leftSchemaKTable).innerJoin(
        rightSchemaKTable,
        SYNTH_KEY,
        CONTEXT_STACKER
    );
}
// Exposes the future that completes with the terminal ApplicationStatus
// when the runner shuts down.
@Override
public CompletableFuture<ApplicationStatus> getShutDownFuture() {
    return shutDownFuture;
}
// Once leadership moves to a second leader process, a shut-down signal from the
// superseded first process must be ignored: the runner's shut-down future stays open.
@Test
public void getShutDownFuture_newLeader_ignoresOldDispatcherLeaderProcessShutDownRequest()
        throws Exception {
    final UUID firstLeaderSessionId = UUID.randomUUID();
    final UUID secondLeaderSessionId = UUID.randomUUID();
    final CompletableFuture<ApplicationStatus> shutDownFuture = new CompletableFuture<>();
    final TestingDispatcherLeaderProcess firstTestingDispatcherLeaderProcess =
        TestingDispatcherLeaderProcess.newBuilder(firstLeaderSessionId)
            .setShutDownFuture(shutDownFuture)
            .build();
    final TestingDispatcherLeaderProcess secondTestingDispatcherLeaderProcess =
        TestingDispatcherLeaderProcess.newBuilder(secondLeaderSessionId).build();
    testingDispatcherLeaderProcessFactory =
        TestingDispatcherLeaderProcessFactory.from(
            firstTestingDispatcherLeaderProcess, secondTestingDispatcherLeaderProcess);

    try (final DispatcherRunner dispatcherRunner = createDispatcherRunner()) {
        leaderElection.isLeader(firstLeaderSessionId);

        final CompletableFuture<ApplicationStatus> dispatcherShutDownFuture =
            dispatcherRunner.getShutDownFuture();

        assertFalse(dispatcherShutDownFuture.isDone());

        // Leadership changes hands; the first process is now stale.
        leaderElection.isLeader(secondLeaderSessionId);

        final ApplicationStatus finalApplicationStatus = ApplicationStatus.UNKNOWN;
        shutDownFuture.complete(finalApplicationStatus);

        // The stale process's shut-down must not complete the runner's future.
        assertFalse(dispatcherShutDownFuture.isDone());
    }
}
/**
 * Describes the requested share groups.
 *
 * <p>When the coordinator is inactive every group fails with
 * {@code COORDINATOR_NOT_AVAILABLE}. Otherwise empty group ids fail immediately with
 * {@code INVALID_GROUP_ID}, and the remaining ids are bucketed by owning coordinator
 * partition so each partition is queried with a single read operation. All partial
 * results are combined into one list.
 *
 * <p>Fix: the per-partition read operation previously passed the full request
 * ({@code groupIds}) to {@code coordinator.shareGroupDescribe}, which would return
 * unowned and duplicated groups for multi-partition requests; it now passes only the
 * partition's own {@code groupList}.
 */
@Override
public CompletableFuture<List<DescribedGroup>> shareGroupDescribe(
    RequestContext context,
    List<String> groupIds
) {
    if (!isActive.get()) {
        return CompletableFuture.completedFuture(ShareGroupDescribeRequest.getErrorDescribedGroupList(
            groupIds,
            Errors.COORDINATOR_NOT_AVAILABLE
        ));
    }

    final List<CompletableFuture<List<ShareGroupDescribeResponseData.DescribedGroup>>> futures =
        new ArrayList<>(groupIds.size());
    final Map<TopicPartition, List<String>> groupsByTopicPartition = new HashMap<>();
    groupIds.forEach(groupId -> {
        if (isGroupIdNotEmpty(groupId)) {
            groupsByTopicPartition
                .computeIfAbsent(topicPartitionFor(groupId), __ -> new ArrayList<>())
                .add(groupId);
        } else {
            // Empty ids never reach a coordinator partition; fail them up front.
            futures.add(CompletableFuture.completedFuture(Collections.singletonList(
                new ShareGroupDescribeResponseData.DescribedGroup()
                    .setGroupId(null)
                    .setErrorCode(Errors.INVALID_GROUP_ID.code())
            )));
        }
    });

    groupsByTopicPartition.forEach((topicPartition, groupList) -> {
        CompletableFuture<List<ShareGroupDescribeResponseData.DescribedGroup>> future =
            runtime.scheduleReadOperation(
                "share-group-describe",
                topicPartition,
                // Describe only the groups owned by this partition.
                (coordinator, lastCommittedOffset) -> coordinator.shareGroupDescribe(groupList, lastCommittedOffset)
            ).exceptionally(exception -> handleOperationException(
                "share-group-describe",
                groupList,
                exception,
                (error, __) -> ShareGroupDescribeRequest.getErrorDescribedGroupList(groupList, error)
            ));

        futures.add(future);
    });

    return FutureUtils.combineFutures(futures, ArrayList::new, List::addAll);
}
// When the coordinator runtime reports COORDINATOR_NOT_AVAILABLE, the service must
// map the failure to a DescribedGroup carrying that error code for the requested id.
@Test
public void testShareGroupDescribeCoordinatorNotActive() throws ExecutionException, InterruptedException {
    CoordinatorRuntime<GroupCoordinatorShard, CoordinatorRecord> runtime = mockRuntime();
    GroupCoordinatorService service = new GroupCoordinatorService(
        new LogContext(),
        createConfig(),
        runtime,
        new GroupCoordinatorMetrics(),
        createConfigManager()
    );
    when(runtime.scheduleReadOperation(
        ArgumentMatchers.eq("share-group-describe"),
        ArgumentMatchers.eq(new TopicPartition("__consumer_offsets", 0)),
        ArgumentMatchers.any()
    )).thenReturn(FutureUtils.failedFuture(
        Errors.COORDINATOR_NOT_AVAILABLE.exception()
    ));

    CompletableFuture<List<ShareGroupDescribeResponseData.DescribedGroup>> future =
        service.shareGroupDescribe(requestContext(ApiKeys.SHARE_GROUP_DESCRIBE), Collections.singletonList("share-group-id"));

    assertEquals(
        Collections.singletonList(new ShareGroupDescribeResponseData.DescribedGroup()
            .setGroupId("share-group-id")
            .setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code())
        ),
        future.get()
    );
}
// Shuts the holder down by stopping its failover reactor; begin/end markers
// are logged around the operation.
@Override
public void shutdown() throws NacosException {
    String className = this.getClass().getName();
    NAMING_LOGGER.info("{} do shutdown begin", className);
    failoverReactor.shutdown();
    NAMING_LOGGER.info("{} do shutdown stop", className);
}
// shutdown() must propagate to the internal FailoverReactor's scheduler:
// the executor is reached via reflection and checked before and after.
@Test
void testShutdown() throws NacosException, NoSuchFieldException, IllegalAccessException {
    Field field = ServiceInfoHolder.class.getDeclaredField("failoverReactor");
    field.setAccessible(true);
    FailoverReactor reactor = (FailoverReactor) field.get(holder);
    Field executorService = FailoverReactor.class.getDeclaredField("executorService");
    executorService.setAccessible(true);
    ScheduledExecutorService pool = (ScheduledExecutorService) executorService.get(reactor);
    assertFalse(pool.isShutdown());
    holder.shutdown();
    assertTrue(pool.isShutdown());
}
/**
 * Builds the replication state for either the local or the remote cluster.
 * The HiveConf is populated from the hive-site URI configured for that side,
 * and the state is tagged "REMOTESYNC" or "LOCALSYNC" accordingly.
 */
ReplicationStateSync getReplicatedState(boolean forRemote) {
    HiveConf hiveConf = new HiveConf(); // we probably just need to set the metastore URIs
    // TODO: figure out how to integrate this in production
    // how to load balance between piper HMS,HS2
    // if we have list of uris, we can do something similar to createHiveConf in reairsync
    hiveConf.addResource(new Path(params.loadedProps.getProperty(
        forRemote ? REMOTE_HIVE_SITE_URI : LOCAL_HIVE_SITE_URI)));
    // TODO: get clusterId as input parameters
    ReplicationStateSync state = new ReplicationStateSync(params.mkGlobalHiveSyncProps(forRemote), hiveConf,
        forRemote ? "REMOTESYNC" : "LOCALSYNC");
    return state;
}
// Each side's replicated state must pick up the metastore URI of its own
// cluster (local vs remote), not the other's.
@Test
public void testHiveConfigShouldMatchClusterConf() throws Exception {
    String commitTime = "100";
    localCluster.createCOWTable(commitTime, 5, DB_NAME, TBL_NAME);
    // simulate drs
    remoteCluster.createCOWTable(commitTime, 5, DB_NAME, TBL_NAME);
    HiveSyncGlobalCommitParams params = getGlobalCommitConfig(commitTime);
    HiveSyncGlobalCommitTool tool = new HiveSyncGlobalCommitTool(params);
    ReplicationStateSync localReplicationStateSync = tool.getReplicatedState(false);
    ReplicationStateSync remoteReplicationStateSync = tool.getReplicatedState(true);
    assertEquals(localReplicationStateSync.globalHiveSyncTool.config.getHiveConf().get("hive.metastore.uris"),
        localCluster.getHiveConf().get("hive.metastore.uris"));
    assertEquals(remoteReplicationStateSync.globalHiveSyncTool.config.getHiveConf().get("hive.metastore.uris"),
        remoteCluster.getHiveConf().get("hive.metastore.uris"));
}
// Factory for a view transform that materializes a single-element PCollection
// as a singleton side input.
public static <T> AsSingleton<T> asSingleton() {
    return new AsSingleton<>();
}
// A singleton side input windowed into the same fixed windows as the main input
// must deliver, per window, the sum of the side elements in that window:
// window [0,10) sums to 1 (element at t=1; A@4 and C@7 read it),
// window [10,20) sums to 5 (2@11 + 3@13; B@15 reads it).
@Test
@Category(ValidatesRunner.class)
public void testWindowedSideInputFixedToFixed() {
    final PCollectionView<Integer> view =
        pipeline
            .apply(
                "CreateSideInput",
                Create.timestamped(
                    TimestampedValue.of(1, new Instant(1)),
                    TimestampedValue.of(2, new Instant(11)),
                    TimestampedValue.of(3, new Instant(13))))
            .apply("WindowSideInput", Window.into(FixedWindows.of(Duration.millis(10))))
            .apply(Sum.integersGlobally().withoutDefaults())
            .apply(View.asSingleton());

    PCollection<String> output =
        pipeline
            .apply(
                "CreateMainInput",
                Create.timestamped(
                    TimestampedValue.of("A", new Instant(4)),
                    TimestampedValue.of("B", new Instant(15)),
                    TimestampedValue.of("C", new Instant(7))))
            .apply("WindowMainInput", Window.into(FixedWindows.of(Duration.millis(10))))
            .apply(
                "OutputMainAndSideInputs",
                ParDo.of(
                        new DoFn<String, String>() {
                            @ProcessElement
                            public void processElement(ProcessContext c) {
                                c.output(c.element() + c.sideInput(view));
                            }
                        })
                    .withSideInputs(view));

    PAssert.that(output).containsInAnyOrder("A1", "B5", "C1");

    pipeline.run();
}
// True when the expired flag bit(s) (MASK_EXPIRED) are set in the LACP state byte.
public boolean isExpired() {
    return (state & MASK_EXPIRED) != 0;
}
// A state byte with bit 0x80 set must report the expired flag.
@Test
public void isExpired() {
    LacpState state = new LacpState((byte) 0x80);
    assertTrue(state.isExpired());
}
/**
 * Composes two integer functions left-to-right: the result applies
 * {@code f1} first and then {@code f2} to f1's output.
 */
public static Function<Integer, Integer> composeFunctions(Function<Integer, Integer> f1,
        Function<Integer, Integer> f2) {
    return input -> f2.apply(f1.apply(input));
}
// Composing a function with its inverse (double then halve) must be the identity.
@Test
public void testComposeInverseFunctions() {
    Function<Integer, Integer> timesTwo = x -> x * 2;
    Function<Integer, Integer> half = x -> x / 2;
    Function<Integer, Integer> composed = FunctionComposer.composeFunctions(timesTwo, half);
    assertEquals("Expect the functions to cancel each other out", 5, (int) composed.apply(5));
}
/**
 * Legacy overload kept for backward compatibility: delegates to the full
 * overload using the current wall-clock time and a fresh AltBeacon instance.
 *
 * @deprecated callers should use the overload that accepts an explicit timestamp
 */
@Deprecated
@Override
public Beacon fromScanData(byte[] scanData, int rssi, BluetoothDevice device) {
    return fromScanData(scanData, rssi, device, System.currentTimeMillis(), new AltBeacon());
}
// An AltBeacon advertisement truncated before its data field must still parse,
// with the missing data field defaulting to zero.
@Test
public void testParsesBeaconMissingDataField() {
    BeaconManager.setDebug(true);
    org.robolectric.shadows.ShadowLog.stream = System.err;
    byte[] bytes = hexStringToByteArray("02011a1aff1801beac2f234454cf6d4a0fadf2f4911ba9ffa600010002c5000000");
    AltBeaconParser parser = new AltBeaconParser();
    Beacon beacon = parser.fromScanData(bytes, -55, null, 123456L);
    assertEquals("mRssi should be as passed in", -55, beacon.getRssi());
    assertEquals("uuid should be parsed", "2f234454-cf6d-4a0f-adf2-f4911ba9ffa6", beacon.getIdentifier(0).toString());
    assertEquals("id2 should be parsed", "1", beacon.getIdentifier(1).toString());
    assertEquals("id3 should be parsed", "2", beacon.getIdentifier(2).toString());
    assertEquals("txPower should be parsed", -59, beacon.getTxPower());
    assertEquals("manufacturer should be parsed", 0x118 ,beacon.getManufacturer());
    assertEquals("missing data field zero should be zero", new Long(0l), beacon.getDataFields().get(0));
}
// Factory for a WithTimestamps transform that assigns each element the timestamp
// produced by fn, with zero allowed timestamp skew.
public static <T> WithTimestamps<T> of(SerializableFunction<T, Instant> fn) {
    return new WithTimestamps<>(fn, Duration.ZERO);
}
// WithTimestamps must stamp each element with the instant produced by the
// user function (here: the element parsed as epoch millis), leaving values intact.
@Test
@Category(ValidatesRunner.class)
public void withTimestampsShouldApplyTimestamps() {
    SerializableFunction<String, Instant> timestampFn = input -> new Instant(Long.valueOf(input));

    String yearTwoThousand = "946684800000";
    PCollection<String> timestamped =
        p.apply(Create.of("1234", "0", Integer.toString(Integer.MAX_VALUE), yearTwoThousand))
            .apply(WithTimestamps.of(timestampFn));

    PCollection<KV<String, Instant>> timestampedVals =
        timestamped.apply(
            ParDo.of(
                new DoFn<String, KV<String, Instant>>() {
                    @ProcessElement
                    public void processElement(DoFn<String, KV<String, Instant>>.ProcessContext c)
                        throws Exception {
                        c.output(KV.of(c.element(), c.timestamp()));
                    }
                }));

    PAssert.that(timestamped)
        .containsInAnyOrder(yearTwoThousand, "0", "1234", Integer.toString(Integer.MAX_VALUE));
    PAssert.that(timestampedVals)
        .containsInAnyOrder(
            KV.of("0", new Instant(0)),
            KV.of("1234", new Instant(1234L)),
            KV.of(Integer.toString(Integer.MAX_VALUE), new Instant(Integer.MAX_VALUE)),
            KV.of(yearTwoThousand, new Instant(Long.valueOf(yearTwoThousand))));

    p.run();
}
// Factory for the CombineFn that counts how many input booleans are true.
public static Combine.CombineFn<Boolean, ?, Long> combineFn() {
    return new CountIfFn();
}
// Adding a TRUE input to a fresh accumulator must raise its count to 1.
@Test
public void testAddsInputToAccumulator() {
    Combine.CombineFn countIfFn = CountIf.combineFn();
    long[] accumulator = (long[]) countIfFn.addInput(countIfFn.createAccumulator(), Boolean.TRUE);
    assertEquals(1L, accumulator[0]);
}
// Convenience overload: builds the history done-file name with the default
// job-name length limit from JHAdminConfig.
public static String getDoneFileName(JobIndexInfo indexInfo) throws IOException {
    return getDoneFileName(indexInfo, JHAdminConfig.DEFAULT_MR_HS_JOBNAME_LIMIT);
}
// A job name containing the history-file delimiter must be percent-encoded
// when it is embedded into the done-file name.
@Test
public void testJobNamePercentEncoding() throws IOException {
    JobIndexInfo info = new JobIndexInfo();
    JobID oldJobId = JobID.forName(JOB_ID);
    JobId jobId = TypeConverter.toYarn(oldJobId);
    info.setJobId(jobId);
    info.setSubmitTime(Long.parseLong(SUBMIT_TIME));
    info.setUser(USER_NAME);
    info.setJobName(JOB_NAME_WITH_DELIMITER);
    info.setFinishTime(Long.parseLong(FINISH_TIME));
    info.setNumMaps(Integer.parseInt(NUM_MAPS));
    info.setNumReduces(Integer.parseInt(NUM_REDUCES));
    info.setJobStatus(JOB_STATUS);
    info.setQueueName(QUEUE_NAME);
    info.setJobStartTime(Long.parseLong(JOB_START_TIME));

    String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info);
    assertTrue(jobHistoryFile.contains(JOB_NAME_WITH_DELIMITER_ESCAPE),
        "Job name not encoded correctly into job history file");
}
/**
 * Prints one dated, banner-separated report of all metric families to {@code output}.
 * Empty families are skipped entirely. The five per-family loops were identical except
 * for the section title and the per-entry print call, so they are folded into a single
 * generic helper.
 */
@Override
public void report(SortedMap<MetricName, Gauge> gauges,
                   SortedMap<MetricName, Counter> counters,
                   SortedMap<MetricName, Histogram> histograms,
                   SortedMap<MetricName, Meter> meters,
                   SortedMap<MetricName, Timer> timers) {
    // Header banner carrying the report time (clock-driven, so testable).
    final String dateTime = dateFormat.format(new Date(clock.getTime()));
    printWithBanner(dateTime, '=');
    output.println();

    // Gauges and counters are printed from the entry; the rest only need the value.
    printSection("-- Gauges", gauges, this::printGauge);
    printSection("-- Counters", counters, this::printCounter);
    printSection("-- Histograms", histograms, entry -> printHistogram(entry.getValue()));
    printSection("-- Meters", meters, entry -> printMeter(entry.getValue()));
    printSection("-- Timers", timers, entry -> printTimer(entry.getValue()));

    output.println();
    output.flush();
}

/**
 * Prints one banner-titled section: each metric's name followed by its printed form,
 * then a blank line. Skipped entirely when {@code metrics} is empty, matching the
 * previous behavior of the inlined loops.
 */
private <T> void printSection(String title,
                              SortedMap<MetricName, T> metrics,
                              java.util.function.Consumer<Map.Entry<MetricName, T>> printer) {
    if (metrics.isEmpty()) {
        return;
    }
    printWithBanner(title, '-');
    for (Map.Entry<MetricName, T> entry : metrics.entrySet()) {
        output.println(entry.getKey());
        printer.accept(entry);
    }
    output.println();
}
// Pins the exact console layout for a meter: header banner, section banner, metric name,
// then count and the four rates, each formatted to two decimals. Expected text is
// byte-exact, including the trailing blank lines.
@Test
public void reportsMeterValues() throws Exception {
    final Meter meter = mock(Meter.class);
    when(meter.getCount()).thenReturn(1L);
    when(meter.getMeanRate()).thenReturn(2.0);
    when(meter.getOneMinuteRate()).thenReturn(3.0);
    when(meter.getFiveMinuteRate()).thenReturn(4.0);
    when(meter.getFifteenMinuteRate()).thenReturn(5.0);

    reporter.report(this.map(), this.map(), this.map(), map("test.meter", meter), this.map());

    assertThat(consoleOutput())
        .isEqualTo(lines(
            "3/17/13, 6:04:36 PM ============================================================",
            "",
            "-- Meters ----------------------------------------------------------------------",
            "test.meter",
            "             count = 1",
            "         mean rate = 2.00 events/second",
            "     1-minute rate = 3.00 events/second",
            "     5-minute rate = 4.00 events/second",
            "    15-minute rate = 5.00 events/second",
            "",
            ""
        ));
}
/** Serializes the request into a gRPC {@code Payload} and pushes it onto the bidi stream. */
public void sendRequest(Request request) {
    final Payload payload = GrpcUtils.convert(request);
    payloadStreamObserver.onNext(payload);
}
// Sending any request must forward exactly one converted Payload onto the stream observer.
@Test
void testSendRequest() {
    connection.sendRequest(new HealthCheckRequest());
    verify(payloadStreamObserver).onNext(any(Payload.class));
}
@Override public String getValue(EvaluationContext context) { // Use variable name if we just provide this. if (variableName != null && variable == null) { variable = context.lookupVariable(variableName); return (variable != null ? variable.toString() : ""); } String propertyName = pathExpression; String propertyPath = null; int delimiterIndex = -1; // Search for a delimiter to isolate property name. for (String delimiter : PROPERTY_NAME_DELIMITERS) { delimiterIndex = pathExpression.indexOf(delimiter); if (delimiterIndex != -1) { propertyName = pathExpression.substring(0, delimiterIndex); propertyPath = pathExpression.substring(delimiterIndex); break; } } Object variableValue = getProperty(variable, propertyName); if (log.isDebugEnabled()) { log.debug("propertyName: {}", propertyName); log.debug("propertyPath: {}", propertyPath); log.debug("variableValue: {}", variableValue); } if (propertyPath != null) { if (variableValue.getClass().equals(String.class)) { if (propertyPath.startsWith("/")) { // This is a JSON Pointer or XPath expression to apply. String variableString = String.valueOf(variableValue); if (variableString.trim().startsWith("{") || variableString.trim().startsWith("[")) { variableValue = getJsonPointerValue(variableString, propertyPath); } else if (variableString.trim().startsWith("<")) { variableValue = getXPathValue(variableString, propertyPath); } else { log.warn("Got a path query expression but content seems not to be JSON nor XML..."); variableValue = null; } } } else if (variableValue.getClass().isArray()) { if (propertyPath.matches(ARRAY_INDEX_REGEXP)) { Matcher m = ARRAY_INDEX_PATTERN.matcher(propertyPath); if (m.matches()) { String arrayIndex = m.group(1); Object[] variableValues = (Object[]) variableValue; try { variableValue = variableValues[Integer.parseInt(arrayIndex)]; } catch (ArrayIndexOutOfBoundsException ae) { log.warn("Expression asked for " + arrayIndex + " but array is smaller (" + variableValues.length + "). 
Returning null."); variableValue = null; } } } } else if (Map.class.isAssignableFrom(variableValue.getClass())) { if (propertyPath.matches(MAP_INDEX_REGEXP)) { Matcher m = MAP_INDEX_PATTERN.matcher(propertyPath); if (m.matches()) { String mapKey = m.group(1); Map variableValues = (Map) variableValue; variableValue = variableValues.get(mapKey); } } } } return String.valueOf(variableValue); }
// Referencing the "body" property of the request must return its raw string value.
@Test
void testStringValue() {
    EvaluableRequest request = new EvaluableRequest("hello world", null);
    // Create new expression evaluating simple string value.
    VariableReferenceExpression exp = new VariableReferenceExpression(request, "body");
    String result = exp.getValue(new EvaluationContext());
    assertEquals("hello world", result);
}
/** Returns the barcode parameters backing this data format. */
public final BarcodeParameters getParams() {
    return this.params;
}
// Constructing with only a BarcodeFormat must keep the default image type/size while
// adopting the given format.
@Test
final void testConstructorWithBarcodeFormat() throws IOException {
    try (BarcodeDataFormat barcodeDataFormat = new BarcodeDataFormat(BarcodeFormat.AZTEC)) {
        this.checkParams(BarcodeParameters.IMAGE_TYPE, BarcodeParameters.WIDTH,
            BarcodeParameters.HEIGHT, BarcodeFormat.AZTEC, barcodeDataFormat.getParams());
    }
}
/**
 * Renders the documented options of {@code optionsClass} into HTML tables, one per
 * {@link ConfigGroup} plus a trailing table (keyed by a {@code null} group) for options
 * not claimed by any group. Classes without a {@link ConfigGroups} annotation produce
 * a single table. Returns an empty list when no option is selected for documentation.
 */
@VisibleForTesting
static List<Tuple2<ConfigGroup, String>> generateTablesForClass(
        Class<?> optionsClass, Collection<OptionWithMetaInfo> optionWithMetaInfos) {
    ConfigGroups configGroups = optionsClass.getAnnotation(ConfigGroups.class);
    List<OptionWithMetaInfo> allOptions = selectOptionsToDocument(optionWithMetaInfos);

    if (allOptions.isEmpty()) {
        return Collections.emptyList();
    }

    List<Tuple2<ConfigGroup, String>> tables;
    if (configGroups != null) {
        // One table per declared group, plus one for the ungrouped remainder.
        tables = new ArrayList<>(configGroups.groups().length + 1);
        Tree tree = new Tree(configGroups.groups(), allOptions);

        for (ConfigGroup group : configGroups.groups()) {
            List<OptionWithMetaInfo> configOptions = tree.findConfigOptions(group);
            if (!configOptions.isEmpty()) {
                sortOptions(configOptions);
                tables.add(Tuple2.of(group, toHtmlTable(configOptions)));
            }
        }
        // Options matched by no group fall into the default (null-keyed) table.
        List<OptionWithMetaInfo> configOptions = tree.getDefaultOptions();
        if (!configOptions.isEmpty()) {
            sortOptions(configOptions);
            tables.add(Tuple2.of(null, toHtmlTable(configOptions)));
        }
    } else {
        sortOptions(allOptions);
        tables = Collections.singletonList(Tuple2.of(null, toHtmlTable(allOptions)));
    }
    return tables;
}
// Excluded options must not appear in the generated table: only first.option.a is
// expected in the byte-exact HTML output.
@Test
void testConfigOptionExclusion() {
    final String expectedTable =
        "<table class=\"configuration table table-bordered\">\n"
            + "    <thead>\n"
            + "        <tr>\n"
            + "            <th class=\"text-left\" style=\"width: 20%\">Key</th>\n"
            + "            <th class=\"text-left\" style=\"width: 15%\">Default</th>\n"
            + "            <th class=\"text-left\" style=\"width: 10%\">Type</th>\n"
            + "            <th class=\"text-left\" style=\"width: 55%\">Description</th>\n"
            + "        </tr>\n"
            + "    </thead>\n"
            + "    <tbody>\n"
            + "        <tr>\n"
            + "            <td><h5>first.option.a</h5></td>\n"
            + "            <td style=\"word-wrap: break-word;\">2</td>\n"
            + "            <td>Integer</td>\n"
            + "            <td>This is example description for the first option.</td>\n"
            + "        </tr>\n"
            + "    </tbody>\n"
            + "</table>\n";
    final String htmlTable =
        ConfigOptionsDocGenerator.generateTablesForClass(
                TestConfigGroupWithExclusion.class,
                ConfigurationOptionLocator.extractConfigOptions(
                    TestConfigGroupWithExclusion.class))
            .get(0)
            .f1;

    assertThat(htmlTable).isEqualTo(expectedTable);
}
/**
 * Accepts a key only when the base handler accepts it AND the key lies under the
 * router-rule namespace (router prefix followed by a dot).
 */
@Override
public boolean shouldHandle(String key) {
    if (!super.shouldHandle(key)) {
        return false;
    }
    return key.startsWith(RouterConstant.ROUTER_KEY_PREFIX + POINT);
}
// Keys under the routeRule namespace are handled; the bare prefix (no trailing dot
// segment) is not.
@Test
public void testShouldHandle() {
    Assert.assertTrue(handler.shouldHandle("servicecomb.routeRule.foo"));
    Assert.assertFalse(handler.shouldHandle("servicecomb.routeRule"));
}
/**
 * Applies {@code action} to every (key, unboxed int value) entry. Slots holding
 * {@code missingValue} are empty and skipped; iteration stops early once {@code size}
 * live entries have been visited.
 *
 * @param action consumer invoked for each entry; must not be null
 * @throws NullPointerException if {@code action} is null
 */
public void forEachInt(final ObjIntConsumer<? super K> action) {
    requireNonNull(action);

    // Copy fields to locals for tighter loop code.
    final int missingValue = this.missingValue;
    final K[] keys = this.keys;
    final int[] values = this.values;
    @DoNotSub final int length = values.length;

    for (@DoNotSub int index = 0, remaining = size; remaining > 0 && index < length; index++) {
        final int oldValue = values[index];
        if (missingValue != oldValue) {
            action.accept(keys[index], oldValue);
            --remaining;
        }
    }
}
// A null action must be rejected eagerly with exactly NullPointerException.
@Test
void forEachIntThrowsNullPointerExceptionIfActionIsNull() {
    final Object2IntHashMap<String> map = new Object2IntHashMap<>(MISSING_VALUE);

    assertThrowsExactly(NullPointerException.class, () -> map.forEachInt(null));
}
/**
 * Loads all discoverable dev consoles, delegating with the default boolean flag
 * ({@code false}) — presumably "skip optional/lazy consoles"; confirm against the
 * overload's contract.
 */
public Collection<DevConsole> loadDevConsoles() {
    return this.loadDevConsoles(false);
}
// The default loader must discover more than three consoles from the classpath.
@Test
public void testLoader() {
    DefaultDevConsolesLoader loader = new DefaultDevConsolesLoader(context);
    Collection<DevConsole> col = loader.loadDevConsoles();
    Assertions.assertTrue(col.size() > 3);
}
/**
 * REST endpoint that creates a new key from a JSON request body.
 *
 * <p>Required field: name. Optional: cipher, material (requires SET_KEY_MATERIAL
 * access), length, description, attributes. The actual creation runs as the
 * authenticated user; key material is stripped from the response unless the caller
 * also holds GET access. Returns 201 Created with a Location header for the new key.
 */
@POST
@Path(KMSRESTConstants.KEYS_RESOURCE)
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8)
@SuppressWarnings("unchecked")
public Response createKey(Map jsonKey) throws Exception {
    try {
        LOG.trace("Entering createKey Method.");
        KMSWebApp.getAdminCallsMeter().mark();
        UserGroupInformation user = HttpUserGroupInformation.get();
        final String name = (String) jsonKey.get(KMSRESTConstants.NAME_FIELD);
        checkNotEmpty(name, KMSRESTConstants.NAME_FIELD);
        assertAccess(KMSACLs.Type.CREATE, user, KMSOp.CREATE_KEY, name);
        String cipher = (String) jsonKey.get(KMSRESTConstants.CIPHER_FIELD);
        final String material;
        material = (String) jsonKey.get(KMSRESTConstants.MATERIAL_FIELD);
        // Length of 0 means "use the provider default bit length".
        int length = (jsonKey.containsKey(KMSRESTConstants.LENGTH_FIELD))
            ? (Integer) jsonKey.get(KMSRESTConstants.LENGTH_FIELD) : 0;
        String description = (String)
            jsonKey.get(KMSRESTConstants.DESCRIPTION_FIELD);
        LOG.debug("Creating key with name {}, cipher being used{}, " +
                "length of key {}, description of key {}", name, cipher,
            length, description);
        Map<String, String> attributes = (Map<String, String>)
            jsonKey.get(KMSRESTConstants.ATTRIBUTES_FIELD);
        // Supplying material requires a separate, stricter permission.
        if (material != null) {
            assertAccess(KMSACLs.Type.SET_KEY_MATERIAL, user,
                KMSOp.CREATE_KEY, name);
        }
        final KeyProvider.Options options = new KeyProvider.Options(
            KMSWebApp.getConfiguration());
        if (cipher != null) {
            options.setCipher(cipher);
        }
        if (length != 0) {
            options.setBitLength(length);
        }
        options.setDescription(description);
        options.setAttributes(attributes);

        // Create the key (with or without caller-supplied material) as the caller.
        KeyProvider.KeyVersion keyVersion = user.doAs(
            new PrivilegedExceptionAction<KeyVersion>() {
                @Override
                public KeyVersion run() throws Exception {
                    KeyProvider.KeyVersion keyVersion = (material != null)
                        ? provider.createKey(name, Base64.decodeBase64(material), options)
                        : provider.createKey(name, options);
                    provider.flush();
                    return keyVersion;
                }
            }
        );

        kmsAudit.ok(user, KMSOp.CREATE_KEY, name,
            "UserProvidedMaterial:" + (material != null) +
                " Description:" + description);

        // Do not leak key material to callers lacking GET access.
        if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user)) {
            keyVersion = removeKeyMaterial(keyVersion);
        }

        Map json = KMSUtil.toJSON(keyVersion);
        String requestURL = KMSMDCFilter.getURL();
        int idx = requestURL.lastIndexOf(KMSRESTConstants.KEYS_RESOURCE);
        requestURL = requestURL.substring(0, idx);
        LOG.trace("Exiting createKey Method.");
        return Response.created(getKeyURI(KMSRESTConstants.SERVICE_VERSION, name))
            .type(MediaType.APPLICATION_JSON)
            .header("Location", getKeyURI(requestURL, name)).entity(json).build();
    } catch (Exception e) {
        LOG.debug("Exception in createKey.", e);
        throw e;
    }
}
// Blacklisted users (hdfs, otheradmin) must be denied DECRYPT_EEK even though the
// allow-list would otherwise admit them; non-blacklisted "client" must succeed.
@Test
public void testKMSBlackList() throws Exception {
    Configuration conf = new Configuration();
    conf.set("hadoop.security.authentication", "kerberos");
    File testDir = getTestDir();
    conf = createBaseKMSConf(testDir, conf);
    conf.set("hadoop.kms.authentication.type", "kerberos");
    conf.set("hadoop.kms.authentication.kerberos.keytab", keytab.getAbsolutePath());
    conf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
    conf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
    // Start with every ACL effectively disabled, then open the three relevant ops.
    for (KMSACLs.Type type : KMSACLs.Type.values()) {
        conf.set(type.getAclConfigKey(), " ");
    }
    conf.set(KMSACLs.Type.CREATE.getAclConfigKey(), "client,hdfs,otheradmin");
    conf.set(KMSACLs.Type.GENERATE_EEK.getAclConfigKey(), "client,hdfs,otheradmin");
    conf.set(KMSACLs.Type.DECRYPT_EEK.getAclConfigKey(), "client,hdfs,otheradmin");
    // The blacklist overrides the allow-list for DECRYPT_EEK.
    conf.set(KMSACLs.Type.DECRYPT_EEK.getBlacklistConfigKey(), "hdfs,otheradmin");

    conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "ck0.ALL", "*");
    conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "ck1.ALL", "*");

    writeConf(testDir, conf);

    runServer(null, null, testDir, new KMSCallable<Void>() {
        @Override
        public Void call() throws Exception {
            final Configuration conf = new Configuration();
            conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
            final URI uri = createKMSUri(getKMSUrl());

            // Non-blacklisted user: full create/generate/decrypt round trip succeeds.
            doAs("client", new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    try {
                        KeyProvider kp = createProvider(uri, conf);
                        KeyProvider.KeyVersion kv = kp.createKey("ck0",
                            new KeyProvider.Options(conf));
                        EncryptedKeyVersion eek =
                            ((CryptoExtension) kp).generateEncryptedKey("ck0");
                        ((CryptoExtension) kp).decryptEncryptedKey(eek);
                        Assert.assertNull(kv.getMaterial());
                    } catch (Exception ex) {
                        Assert.fail(ex.getMessage());
                    }
                    return null;
                }
            });

            // Blacklisted user: decrypt must fail despite being on the allow-list.
            doAs("hdfs", new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    try {
                        KeyProvider kp = createProvider(uri, conf);
                        KeyProvider.KeyVersion kv = kp.createKey("ck1",
                            new KeyProvider.Options(conf));
                        EncryptedKeyVersion eek =
                            ((CryptoExtension) kp).generateEncryptedKey("ck1");
                        ((CryptoExtension) kp).decryptEncryptedKey(eek);
                        Assert.fail("admin user must not be allowed to decrypt !!");
                    } catch (Exception ex) {
                        // Expected: blacklisted user denied.
                    }
                    return null;
                }
            });

            // Second blacklisted user: same denial expected.
            doAs("otheradmin", new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    try {
                        KeyProvider kp = createProvider(uri, conf);
                        KeyProvider.KeyVersion kv = kp.createKey("ck2",
                            new KeyProvider.Options(conf));
                        EncryptedKeyVersion eek =
                            ((CryptoExtension) kp).generateEncryptedKey("ck2");
                        ((CryptoExtension) kp).decryptEncryptedKey(eek);
                        Assert.fail("admin user must not be allowed to decrypt !!");
                    } catch (Exception ex) {
                        // Expected: blacklisted user denied.
                    }
                    return null;
                }
            });
            return null;
        }
    });
}
/**
 * Selects the upload filter implementation for the given transfer action, wiring it
 * with caching Find/AttributesFinder features.
 *
 * <p>With multiple roots or any directory root, the default (listing-based) features
 * are used as fallback; otherwise the session's native features are used directly.
 *
 * <p>Idiom fix: replaced {@code filter(...).findAny().isPresent()} with the direct
 * {@code anyMatch(...)}, which expresses the same test without building an Optional.
 */
@Override
public AbstractUploadFilter filter(final Session<?> source, final Session<?> destination,
                                   final TransferAction action, final ProgressListener listener) {
    if(log.isDebugEnabled()) {
        log.debug(String.format("Filter transfer with action %s and options %s", action, options));
    }
    final Symlink symlink = source.getFeature(Symlink.class);
    final UploadSymlinkResolver resolver = new UploadSymlinkResolver(symlink, roots);
    final Find find;
    final AttributesFinder attributes;
    if(roots.size() > 1 || roots.stream().anyMatch(item -> item.remote.isDirectory())) {
        find = new CachingFindFeature(source, cache,
            source.getFeature(Find.class, new DefaultFindFeature(source)));
        attributes = new CachingAttributesFinderFeature(source, cache,
            source.getFeature(AttributesFinder.class, new DefaultAttributesFinderFeature(source)));
    }
    else {
        find = new CachingFindFeature(source, cache, source.getFeature(Find.class));
        attributes = new CachingAttributesFinderFeature(source, cache,
            source.getFeature(AttributesFinder.class));
    }
    if(log.isDebugEnabled()) {
        log.debug(String.format("Determined features %s and %s", find, attributes));
    }
    // Map the requested transfer action to its filter strategy.
    if(action.equals(TransferAction.resume)) {
        return new ResumeFilter(resolver, source, options).withFinder(find).withAttributes(attributes);
    }
    if(action.equals(TransferAction.rename)) {
        return new RenameFilter(resolver, source, options).withFinder(find).withAttributes(attributes);
    }
    if(action.equals(TransferAction.renameexisting)) {
        return new RenameExistingFilter(resolver, source, options).withFinder(find).withAttributes(attributes);
    }
    if(action.equals(TransferAction.skip)) {
        return new SkipFilter(resolver, source, options).withFinder(find).withAttributes(attributes);
    }
    if(action.equals(TransferAction.comparison)) {
        return new CompareFilter(resolver, source, options, listener).withFinder(find).withAttributes(attributes);
    }
    // Default: overwrite existing files.
    return new OverwriteFilter(resolver, source, options).withFinder(find).withAttributes(attributes);
}
// The overwrite filter for a plain session must not schedule any temporary rename
// (neither local nor remote) in the prepared transfer status.
@Test
public void testTemporaryDisabledLargeUpload() throws Exception {
    final Host h = new Host(new TestProtocol());
    final NullSession session = new NullSession(h);
    final AbstractUploadFilter f = new UploadTransfer(h, Collections.emptyList())
        .filter(session, null, TransferAction.overwrite, new DisabledProgressListener());
    final Path file = new Path("/t", EnumSet.of(Path.Type.file));
    final TransferStatus status =
        f.prepare(file, new NullLocal("t"), new TransferStatus(), new DisabledProgressListener());
    assertNull(status.getRename().local);
    assertNull(status.getRename().remote);
}
/**
 * Grows the backing array to hold at least {@code capacity} results. Growth doubles the
 * current capacity (capped at {@code _maxCapacity}); existing values are preserved and
 * the newly added tail is initialized to the default value when it is non-zero (new
 * double[] slots are already 0.0).
 *
 * @param capacity required capacity; must not exceed the configured maximum
 */
@Override
public void ensureCapacity(int capacity) {
    Preconditions.checkArgument(capacity <= _maxCapacity);

    if (capacity > _resultHolderCapacity) {
        int copyLength = _resultHolderCapacity;
        _resultHolderCapacity = Math.max(_resultHolderCapacity * 2, capacity);

        // Cap the growth to maximum possible number of group keys
        _resultHolderCapacity = Math.min(_resultHolderCapacity, _maxCapacity);

        double[] current = _resultArray;
        _resultArray = new double[_resultHolderCapacity];
        System.arraycopy(current, 0, _resultArray, 0, copyLength);

        // Only needed when the default differs from Java's 0.0 array default.
        if (_defaultValue != 0.0) {
            Arrays.fill(_resultArray, copyLength, _resultHolderCapacity, _defaultValue);
        }
    }
}
// After growing to MAX_CAPACITY, previously-unset slots must read the default value,
// and all previously-set values must survive the grow-and-copy.
@Test
void testEnsureCapacity() {
    GroupByResultHolder resultHolder =
        new DoubleGroupByResultHolder(INITIAL_CAPACITY, MAX_CAPACITY, DEFAULT_VALUE);
    for (int i = 0; i < INITIAL_CAPACITY; i++) {
        resultHolder.setValueForKey(i, _expected[i]);
    }
    resultHolder.ensureCapacity(MAX_CAPACITY);
    for (int i = INITIAL_CAPACITY; i < MAX_CAPACITY; i++) {
        double actual = resultHolder.getDoubleResult(i);
        Assert.assertEquals(actual, DEFAULT_VALUE,
            "Default Value mis-match: Actual: " + actual + " Expected: " + DEFAULT_VALUE
                + " Random seed: " + RANDOM_SEED);
        resultHolder.setValueForKey(i, _expected[i]);
    }
    testValues(resultHolder, _expected, 0, MAX_CAPACITY);
}
/**
 * Parses schema files from the given source paths (files, directories, or JARs) and
 * aggregates the results. Sources are sorted for deterministic ordering, grouped by
 * file extension, and each group is handed to that extension's registered parser.
 * JARs are added to every extension's list; the per-extension parser extracts only
 * matching entries. Files with unrecognized extensions are silently skipped.
 *
 * @param rawSources file, directory, or JAR paths to parse
 * @throws IOException on I/O failure while reading sources
 */
public DataSchemaParser.ParseResult parseSources(String[] rawSources) throws IOException {
    Set<String> fileExtensions = _parserByFileExtension.keySet();
    Map<String, List<String>> byExtension = new HashMap<>(fileExtensions.size());
    for (String fileExtension : fileExtensions) {
        byExtension.put(fileExtension, new ArrayList<>());
    }

    // Sort for deterministic parse order regardless of caller-supplied ordering.
    String[] sortedSources = Arrays.copyOf(rawSources, rawSources.length);
    Arrays.sort(sortedSources);

    // Extract all schema files from the given source paths and group by extension (JARs are handled specially)
    for (String source : sortedSources) {
        final File sourceFile = new File(source);
        if (sourceFile.exists()) {
            if (sourceFile.isDirectory()) {
                // Source path is a directory, so recursively find all schema files contained therein
                final FileExtensionFilter filter = new FileExtensionFilter(fileExtensions);
                final List<File> sourceFilesInDirectory = FileUtil.listFiles(sourceFile, filter);
                // Add each schema to the corresponding extension's source list
                for (File f : sourceFilesInDirectory) {
                    String ext = FilenameUtils.getExtension(f.getName());
                    List<String> filesForExtension = byExtension.get(ext);
                    if (filesForExtension != null) {
                        filesForExtension.add(f.getAbsolutePath());
                    }
                }
            } else if (sourceFile.getName().endsWith(".jar")) {
                // Source path is a JAR, so add it to each extension's source list.
                // The file-based parser for each extension will extract the JAR and
                // process only files matching the extension.
                byExtension.values().forEach(files -> files.add(sourceFile.getAbsolutePath()));
            } else {
                // Source path is a non-JAR file, so add it to the corresponding extension's source list
                String ext = FilenameUtils.getExtension(sourceFile.getName());
                List<String> filesForExtension = byExtension.get(ext);
                if (filesForExtension != null) {
                    filesForExtension.add(sourceFile.getAbsolutePath());
                }
            }
        }
    }

    // Parse all schema files and JARs using the appropriate file format parser
    final ParseResult result = new ParseResult();
    for (Map.Entry<String, List<String>> entry : byExtension.entrySet()) {
        String ext = entry.getKey();
        List<String> files = entry.getValue();
        _parserByFileExtension.get(ext).parseSources(files.toArray(new String[files.size()]), result);
    }
    return result;
}
// Parsing schema files rooted under the "extensions" schema directory must surface
// exactly the expected extension schemas (compared by name, order-insensitive).
@Test(dataProvider = "entityRelationshipInputFiles")
public void testSchemaFilesInExtensionPathInFolder(String[] files, String[] expectedExtensions)
    throws Exception {
    String pegasusWithFS = TEST_RESOURCES_DIR + FS;
    String resolverPath = pegasusWithFS + "extensionSchemas/extensions:"
        + pegasusWithFS + "extensionSchemas/others:"
        + pegasusWithFS + "extensionSchemas/pegasus";
    List<SchemaDirectory> resolverDirectories = Arrays.asList(
        SchemaDirectoryName.EXTENSIONS, SchemaDirectoryName.PEGASUS);
    List<SchemaDirectory> sourceDirectories =
        Collections.singletonList(SchemaDirectoryName.EXTENSIONS);
    DataSchemaParser parser = new DataSchemaParser.Builder(resolverPath)
        .setResolverDirectories(resolverDirectories)
        .setSourceDirectories(sourceDirectories)
        .build();
    String[] schemaFiles = Arrays.stream(files)
        .map(casename -> TEST_RESOURCES_DIR + FS + "extensionSchemas" + FS + casename)
        .toArray(String[]::new);
    DataSchemaParser.ParseResult parseResult = parser.parseSources(schemaFiles);
    Map<DataSchema, DataSchemaLocation> extensions =
        parseResult.getExtensionDataSchemaAndLocations();
    assertEquals(extensions.size(), expectedExtensions.length);
    Set<String> actualNames = extensions
        .keySet()
        .stream()
        .map(dataSchema -> (NamedDataSchema) dataSchema)
        .map(NamedDataSchema::getName)
        .collect(Collectors.toSet());
    assertEquals(actualNames, Arrays.stream(expectedExtensions).collect(Collectors.toSet()));
}
/**
 * Translates a {@link StreamGraph} into a {@link JobGraph} using the current thread's
 * context class loader, no explicit job ID, and inline (same-thread) serialization.
 */
@VisibleForTesting
public static JobGraph createJobGraph(StreamGraph streamGraph) {
    return new StreamingJobGraphGenerator(
            Thread.currentThread().getContextClassLoader(), streamGraph, null, Runnable::run)
        .createJobGraph();
}
// With default slot sharing enabled, all vertices share one group except map1, which
// was assigned an explicit slot sharing group and must stay in its own.
@Test
void testSlotSharingOnAllVerticesInSameSlotSharingGroupByDefaultEnabled() {
    final StreamGraph streamGraph = createStreamGraphForSlotSharingTest(new Configuration());
    // specify slot sharing group for map1
    streamGraph.getStreamNodes().stream()
        .filter(n -> "map1".equals(n.getOperatorName()))
        .findFirst()
        .get()
        .setSlotSharingGroup("testSlotSharingGroup");
    streamGraph.setAllVerticesInSameSlotSharingGroupByDefault(true);
    final JobGraph jobGraph = StreamingJobGraphGenerator.createJobGraph(streamGraph);

    final List<JobVertex> verticesSorted = jobGraph.getVerticesSortedTopologicallyFromSources();
    assertThat(verticesSorted).hasSize(4);

    final List<JobVertex> verticesMatched = getExpectedVerticesList(verticesSorted);
    final JobVertex source1Vertex = verticesMatched.get(0);
    final JobVertex source2Vertex = verticesMatched.get(1);
    final JobVertex map1Vertex = verticesMatched.get(2);
    final JobVertex map2Vertex = verticesMatched.get(3);

    // all vertices should be in the same default slot sharing group
    // except for map1 which has a specified slot sharing group
    assertSameSlotSharingGroup(source1Vertex, source2Vertex, map2Vertex);
    assertDistinctSharingGroups(source1Vertex, map1Vertex);
}
/** Returns the span kind, or {@code null} when unset. */
public Kind kind() {
    return this.kind;
}
// Setting the kind back to null must fully clear a previously assigned value.
@Test
void unsetKind() {
    MutableSpan span = new MutableSpan();
    span.kind(Span.Kind.CLIENT);
    span.kind(null);

    assertThat(span.kind()).isNull();
}
/**
 * Returns the primary server address — the first entry of the configured list.
 * NOTE(review): throws IndexOutOfBoundsException if the list is empty; presumably
 * construction guarantees at least one address — confirm at the call sites.
 */
public URI getServerAddress() {
    return this.serverAddresses.get(0);
}
// A single configured server address must round-trip to the equivalent URI.
@Test
public void shouldParseSingleServerAddress() throws Exception {
    // Given:
    final String singleServerAddress = SOME_SERVER_ADDRESS;
    final URI singleServerURI = new URI(singleServerAddress);

    // When:
    try (KsqlRestClient ksqlRestClient = clientWithServerAddresses(singleServerAddress)) {
        // Then:
        assertThat(ksqlRestClient.getServerAddress(), is(singleServerURI));
    }
}
/**
 * Returns true if the pipeline contains any external (cross-language) expandable
 * transform, detected by walking every composite and primitive node topologically.
 */
@VisibleForTesting
static boolean isMultiLanguagePipeline(Pipeline pipeline) {
    // Local visitor: flips the flag on the first ExpandableTransform it sees.
    class IsMultiLanguageVisitor extends PipelineVisitor.Defaults {
        private boolean isMultiLanguage = false;

        private void performMultiLanguageTest(Node node) {
            if (node.getTransform() instanceof External.ExpandableTransform) {
                isMultiLanguage = true;
            }
        }

        @Override
        public CompositeBehavior enterCompositeTransform(Node node) {
            performMultiLanguageTest(node);
            return super.enterCompositeTransform(node);
        }

        @Override
        public void visitPrimitiveTransform(Node node) {
            performMultiLanguageTest(node);
            super.visitPrimitiveTransform(node);
        }
    }

    IsMultiLanguageVisitor visitor = new IsMultiLanguageVisitor();
    pipeline.traverseTopologically(visitor);

    return visitor.isMultiLanguage;
}
// A pipeline containing an External (cross-language) transform must be detected as
// multi-language.
@Test
public void testIsMultiLanguage() throws IOException {
    PipelineOptions options = buildPipelineOptions();
    Pipeline pipeline = Pipeline.create(options);
    PCollection<String> col =
        pipeline
            .apply(Create.of("1", "2", "3"))
            .apply(
                External.of(
                    "dummy_urn", new byte[] {}, "", new TestExpansionServiceClientFactory()));
    assertTrue(DataflowRunner.isMultiLanguagePipeline(pipeline));
}
/** Convenience varargs factory; delegates to the list-based overload with an immutable copy. */
@VisibleForTesting
static UIntersectionType create(UExpression... bounds) {
    return create(ImmutableList.copyOf(bounds));
}
// An intersection type of two class idents must survive a serialize/deserialize
// round trip with equality preserved.
@Test
public void serialization() {
    SerializableTester.reserializeAndAssert(
        UIntersectionType.create(
            UClassIdent.create("java.lang.CharSequence"),
            UClassIdent.create("java.io.Serializable")));
}
/**
 * Enqueues the task and then schedules it for execution. Order matters: the task must
 * be visible in {@code runQueue} before {@code schedule} may pick it up.
 */
@Override
public void execute(Runnable r) {
    runQueue.add(r);
    schedule(r);
}
// Sanity check of the NON-serial executor: with 4 concurrent workers, tasks overlap
// (semaphore contention) and/or complete out of submission order, so the failure flag
// is expected to end up true. Serial executors would keep it false.
@Test
void testNonSerial() {
    int total = 10;
    Map<String, Integer> map = new HashMap<>();
    map.put("val", 0);
    Semaphore semaphore = new Semaphore(1);
    CountDownLatch startLatch = new CountDownLatch(1);
    AtomicBoolean failed = new AtomicBoolean(false);
    for (int i = 0; i < total; i++) {
        final int index = i;
        service.execute(() -> {
            // Overlapping tasks fail to acquire the single permit.
            if (!semaphore.tryAcquire()) {
                failed.set(true);
            }
            try {
                startLatch.await();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            // Out-of-order execution makes the counter disagree with the index.
            int num = map.get("val");
            map.put("val", num + 1);
            if (num != index) {
                failed.set(true);
            }
            semaphore.release();
        });
    }
    await().until(() -> ((ThreadPoolExecutor) service).getActiveCount() == 4);
    startLatch.countDown();
    await().until(() -> ((ThreadPoolExecutor) service).getCompletedTaskCount() == total);
    Assertions.assertTrue(failed.get());
}
/** Returns the artifact ID of this model's parent POM, or {@code null} when unset. */
public String getParentArtifactId() {
    return this.parentArtifactId;
}
// Setter/getter round trip: an empty parent artifact ID must be returned unchanged.
@Test
public void testGetParentArtifactId() {
    Model instance = new Model();
    instance.setParentArtifactId("");
    String expResult = "";
    String result = instance.getParentArtifactId();
    assertEquals(expResult, result);
}
/**
 * Leader-side callback after a group rebalance: runs the elected partition assignor
 * over all members' subscriptions and returns the serialized per-member assignments.
 *
 * <p>When {@code skipAssignment} is set (returning static-membership leader), the
 * existing assignment is kept and an empty map is returned. For the COOPERATIVE
 * protocol, assignments are validated against members' owned partitions — except for
 * the built-in cooperative-sticky assignor, which handles generation fencing itself.
 */
@Override
protected Map<String, ByteBuffer> onLeaderElected(String leaderId,
                                                  String assignmentStrategy,
                                                  List<JoinGroupResponseData.JoinGroupResponseMember> allSubscriptions,
                                                  boolean skipAssignment) {
    ConsumerPartitionAssignor assignor = lookupAssignor(assignmentStrategy);
    if (assignor == null)
        throw new IllegalStateException("Coordinator selected invalid assignment protocol: " + assignmentStrategy);
    String assignorName = assignor.name();

    Set<String> allSubscribedTopics = new HashSet<>();
    Map<String, Subscription> subscriptions = new HashMap<>();

    // collect all the owned partitions
    Map<String, List<TopicPartition>> ownedPartitions = new HashMap<>();

    for (JoinGroupResponseData.JoinGroupResponseMember memberSubscription : allSubscriptions) {
        Subscription subscription =
            ConsumerProtocol.deserializeSubscription(ByteBuffer.wrap(memberSubscription.metadata()));
        subscription.setGroupInstanceId(Optional.ofNullable(memberSubscription.groupInstanceId()));
        subscriptions.put(memberSubscription.memberId(), subscription);
        allSubscribedTopics.addAll(subscription.topics());
        ownedPartitions.put(memberSubscription.memberId(), subscription.ownedPartitions());
    }

    // the leader will begin watching for changes to any of the topics the group is interested in,
    // which ensures that all metadata changes will eventually be seen
    updateGroupSubscription(allSubscribedTopics);

    isLeader = true;

    if (skipAssignment) {
        log.info("Skipped assignment for returning static leader at generation {}. The static leader " +
            "will continue with its existing assignment.", generation().generationId);
        assignmentSnapshot = metadataSnapshot;
        return Collections.emptyMap();
    }

    log.debug("Performing assignment using strategy {} with subscriptions {}", assignorName, subscriptions);

    Map<String, Assignment> assignments =
        assignor.assign(metadata.fetch(), new GroupSubscription(subscriptions)).groupAssignment();

    // skip the validation for built-in cooperative sticky assignor since we've considered
    // the "generation" of ownedPartition inside the assignor
    if (protocol == RebalanceProtocol.COOPERATIVE && !assignorName.equals(COOPERATIVE_STICKY_ASSIGNOR_NAME)) {
        validateCooperativeAssignment(ownedPartitions, assignments);
    }

    maybeUpdateGroupSubscription(assignorName, assignments, allSubscribedTopics);

    // metadataSnapshot could be updated when the subscription is updated therefore
    // we must take the assignment snapshot after.
    assignmentSnapshot = metadataSnapshot;

    log.info("Finished assignment for group at generation {}: {}", generation().generationId, assignments);

    Map<String, ByteBuffer> groupAssignment = new HashMap<>();
    for (Map.Entry<String, Assignment> assignmentEntry : assignments.entrySet()) {
        ByteBuffer buffer = ConsumerProtocol.serializeAssignment(assignmentEntry.getValue());
        groupAssignment.put(assignmentEntry.getKey(), buffer);
    }

    return groupAssignment;
}
// An assignor named like the built-in cooperative-sticky assignor must bypass the
// cooperative-assignment validation, so even an assignment contradicting stale owned
// partitions completes without error.
@Test
public void testPerformAssignmentShouldSkipValidateCooperativeAssignmentForBuiltInCooperativeStickyAssignor() {
    SubscriptionState mockSubscriptionState = Mockito.mock(SubscriptionState.class);
    List<JoinGroupResponseData.JoinGroupResponseMember> metadata = validateCooperativeAssignmentTestSetup();

    List<ConsumerPartitionAssignor> assignorsWithCooperativeStickyAssignor = new ArrayList<>(assignors);
    // create a mockPartitionAssignor with the same name as cooperative sticky assignor
    MockPartitionAssignor mockCooperativeStickyAssignor =
        new MockPartitionAssignor(Collections.singletonList(protocol)) {
            @Override
            public String name() {
                return COOPERATIVE_STICKY_ASSIGNOR_NAME;
            }
        };
    assignorsWithCooperativeStickyAssignor.add(mockCooperativeStickyAssignor);

    // simulate the cooperative sticky assignor do the assignment with out-of-date ownedPartition
    Map<String, List<TopicPartition>> assignment = new HashMap<>();
    assignment.put(consumerId, singletonList(t1p));
    assignment.put(consumerId2, singletonList(t2p));
    mockCooperativeStickyAssignor.prepare(assignment);

    try (ConsumerCoordinator coordinator = buildCoordinator(rebalanceConfig, new Metrics(),
        assignorsWithCooperativeStickyAssignor, false, mockSubscriptionState)) {
        // should not validate assignment for built-in cooperative sticky assignor
        coordinator.onLeaderElected("1", mockCooperativeStickyAssignor.name(), metadata, false);
    }
}
/**
 * Determines whether a path exists in S3. The root always exists; buckets are probed
 * via an accessibility check; files/placeholders via an attribute lookup; directories
 * via a one-item common-prefix listing (a cancelled listing means the prefix exists).
 * AccessDenied is treated as "exists but not readable"; NotFound as "does not exist".
 */
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        return true;
    }
    try {
        if(containerService.isContainer(file)) {
            try {
                if(log.isDebugEnabled()) {
                    log.debug(String.format("Test if bucket %s is accessible", file));
                }
                return session.getClient().isBucketAccessible(containerService.getContainer(file).getName());
            }
            catch(ServiceException e) {
                throw new S3ExceptionMappingService().map("Failure to read attributes of {0}", e, file);
            }
        }
        if(file.isFile() || file.isPlaceholder()) {
            // Existence is proven by a successful attribute read.
            attributes.find(file, listener);
            return true;
        }
        else {
            if(log.isDebugEnabled()) {
                log.debug(String.format("Search for common prefix %s", file));
            }
            // Check for common prefix
            try {
                new S3ObjectListService(session, acl).list(file,
                    new CancellingListProgressListener(), String.valueOf(Path.DELIMITER), 1);
                return true;
            }
            catch(ListCanceledException l) {
                // Found common prefix
                return true;
            }
            catch(NotfoundException e) {
                throw e;
            }
        }
    }
    catch(NotfoundException e) {
        return false;
    }
    catch(RetriableAccessDeniedException e) {
        // Must fail with server error
        throw e;
    }
    catch(AccessDeniedException e) {
        // Object is inaccessible to current user, but does exist.
        return true;
    }
}
/**
 * Exercises common-prefix resolution in S3FindFeature: a prefix with one object under it
 * must be found (with and without the placeholder flag), must disappear once the object is
 * deleted, and a negative lookup through CachingFindFeature must not populate the cache.
 */
@Test
public void testFindCommonPrefix() throws Exception {
    final Path container = new Path("test-eu-central-1-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(container));
    final String prefix = new AlphanumericRandomStringService().random();
    // Create a single object under a random prefix; the prefix itself has no marker object.
    final Path test = new S3TouchFeature(session, new S3AccessControlListFeature(session)).touch(
        new Path(new Path(container, prefix, EnumSet.of(Path.Type.directory)), new AsciiRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus());
    assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(test));
    // The object key interpreted as a directory must not be found.
    assertFalse(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(new Path(test.getAbsolute(), EnumSet.of(Path.Type.directory))));
    // The common prefix is found both as plain directory and as placeholder.
    assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(new Path(container, prefix, EnumSet.of(Path.Type.directory))));
    assertTrue(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(new Path(container, prefix, EnumSet.of(Path.Type.directory, Path.Type.placeholder))));
    assertTrue(new S3ObjectListService(session, new S3AccessControlListFeature(session)).list(new Path(container, prefix, EnumSet.of(Path.Type.directory)), new DisabledListProgressListener()).contains(test));
    new S3DefaultDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
    // After deleting the only object both the object and its prefix must be gone.
    assertFalse(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(test));
    assertFalse(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(new Path(container, prefix, EnumSet.of(Path.Type.directory))));
    final PathCache cache = new PathCache(1);
    final Path directory = new Path(container, prefix, EnumSet.of(Path.Type.directory, Path.Type.placeholder));
    // A negative result through the caching wrapper must not be cached.
    assertFalse(new CachingFindFeature(session, cache, new S3FindFeature(session, new S3AccessControlListFeature(session))).find(directory));
    assertFalse(cache.isCached(directory));
    assertFalse(new S3FindFeature(session, new S3AccessControlListFeature(session)).find(new Path(container, prefix, EnumSet.of(Path.Type.directory, Path.Type.placeholder))));
}
/**
 * {@inheritDoc}
 *
 * @return always {@code 0}; by JDBC convention a zero means the limit is unknown
 *         or not applicable
 */
@Override
public int getMaxSchemaNameLength() {
    return 0;
}
@Test
void assertGetMaxSchemaNameLength() {
    // The metadata adapter advertises no schema-name length limit (0 = unknown/unlimited).
    final int maxSchemaNameLength = metaData.getMaxSchemaNameLength();
    assertThat(maxSchemaNameLength, is(0));
}
/**
 * Decodes a complete HPACK header block from {@code in} into {@code headers}.
 *
 * @param streamId        stream the header block belongs to (used for error reporting in the sink)
 * @param in              buffer containing the encoded header block
 * @param headers         destination for decoded header fields
 * @param validateHeaders whether the sink should validate decoded headers
 * @throws Http2Exception on malformed input or failed validation
 */
void decode(int streamId, ByteBuf in, Http2Headers headers, boolean validateHeaders) throws Http2Exception {
    Http2HeadersSink sink = new Http2HeadersSink(
        streamId, headers, maxHeaderListSize, validateHeaders);
    // Check for dynamic table size updates, which must occur at the beginning:
    // https://www.rfc-editor.org/rfc/rfc7541.html#section-4.2
    decodeDynamicTableSizeUpdates(in);
    decode(in, sink);
    // Now that we've read all of our headers we can perform the validation steps. We must
    // delay throwing until this point to prevent dynamic table corruption.
    sink.finish();
}
@Test
public void testLiteralHuffmanEncodedWithPaddingGreaterThan7Throws() throws Http2Exception {
    // A Huffman-coded literal whose trailing padding exceeds 7 bits is invalid
    // and must be rejected by the decoder.
    final byte[] encoded = {0, (byte) 0x81, -1};
    final ByteBuf buffer = Unpooled.wrappedBuffer(encoded);
    try {
        assertThrows(Http2Exception.class, () -> hpackDecoder.decode(0, buffer, mockHeaders, true));
    } finally {
        // Always release the buffer, pass or fail.
        buffer.release();
    }
}
@Override
public PageData<Asset> findAssetsByTenantIdAndType(UUID tenantId, String type, PageLink pageLink) {
    // Forward the text-search term and paging information to the repository query,
    // then adapt the Spring Data page into the project's PageData wrapper.
    final String textSearch = pageLink.getTextSearch();
    return DaoUtil.toPageData(
        assetRepository.findByTenantIdAndType(tenantId, type, textSearch, DaoUtil.toPageable(pageLink)));
}
/**
 * Looks up assets by tenant and type, then narrows the lookup with a text search
 * on the label and expects exactly the one labelled asset back.
 */
@Test
public void testFindAssetsByTenantIdAndType() {
    String type = "TYPE_2";
    String testLabel = "test_label";
    // Add one asset of the searched type carrying a distinctive label.
    assets.add(saveAsset(Uuids.timeBased(), tenantId2, customerId2, "TEST_ASSET", type, testLabel));
    List<Asset> foundedAssetsByType = assetDao
        .findAssetsByTenantIdAndType(tenantId2, type, new PageLink(3)).getData();
    compareFoundedAssetByType(foundedAssetsByType, type);
    // The text search on the label should reduce the result to the single labelled asset.
    List<Asset> foundedAssetsByTypeAndLabel = assetDao
        .findAssetsByTenantIdAndType(tenantId2, type, new PageLink(3, 0, testLabel)).getData();
    assertEquals(1, foundedAssetsByTypeAndLabel.size());
}
@Override
public FSDataOutputStream create(Path path, boolean overwrite, int bufferSize, short replication,
    long blockSize, Progressable progress) throws IOException {
    // Derive the default file permission from the configured umask, then delegate
    // to the permission-aware create overload.
    final String umask = mAlluxioConf.getString(PropertyKey.SECURITY_AUTHORIZATION_PERMISSION_UMASK);
    final Mode fileMode = ModeUtils.applyFileUMask(Mode.defaults(), umask);
    final FsPermission permission = new FsPermission(fileMode.toShort());
    return create(path, permission, overwrite, bufferSize, replication, blockSize, progress);
}
/**
 * When the Alluxio client configuration points at the URI's host/port (with ZooKeeper
 * discovery disabled), Hadoop's FileSystem.get should resolve the alluxio:// scheme to
 * this project's FileSystem implementation.
 */
@Test
public void hadoopShouldLoadFileSystemWhenConfigured() throws Exception {
    org.apache.hadoop.conf.Configuration conf = getConf();
    URI uri = URI.create(Constants.HEADER + "localhost:19998/tmp/path.txt");
    // Configure the client to talk to the master named in the URI directly.
    Map<PropertyKey, Object> properties = new HashMap<>();
    properties.put(PropertyKey.MASTER_HOSTNAME, uri.getHost());
    properties.put(PropertyKey.MASTER_RPC_PORT, uri.getPort());
    properties.put(PropertyKey.ZOOKEEPER_ENABLED, false);
    properties.put(PropertyKey.ZOOKEEPER_ADDRESS, null);
    try (Closeable c = new ConfigurationRule(properties, mConfiguration).toResource()) {
        final org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.get(uri, conf);
        assertTrue(fs instanceof FileSystem);
    }
}
/**
 * Creates a supplier for a persistent window store.
 * Delegates to the five-argument overload, passing {@code false} as the final flag
 * (NOTE(review): presumably disabling the timestamped variant — confirm against
 * the overload's signature).
 *
 * @param name             store name; must not be null
 * @param retentionPeriod  how long records are retained
 * @param windowSize       size of the windows
 * @param retainDuplicates whether duplicate keys within a window are kept
 * @return a supplier for the configured window store
 * @throws IllegalArgumentException per the delegated overload's validation
 */
public static WindowBytesStoreSupplier persistentWindowStore(final String name,
                                                             final Duration retentionPeriod,
                                                             final Duration windowSize,
                                                             final boolean retainDuplicates) throws IllegalArgumentException {
    return persistentWindowStore(name, retentionPeriod, windowSize, retainDuplicates, false);
}
// persistentWindowStore must reject a null store name up front with a clear message.
@Test
public void shouldThrowIfIPersistentWindowStoreStoreNameIsNull() {
    final Exception e = assertThrows(NullPointerException.class, () -> Stores.persistentWindowStore(null, ZERO, ZERO, false));
    assertEquals("name cannot be null", e.getMessage());
}
@Override
public InputChannel getChannel(int channelIndex) {
    // Translate the union-wide channel index into the owning gate's index and the
    // channel's local index within that gate, then delegate the lookup.
    final int gateIndex = inputChannelToInputGateIndex[channelIndex];
    final int localChannelIndex = channelIndex - inputGateChannelIndexOffsets[gateIndex];
    return inputGatesByGateIndex.get(gateIndex).getChannel(localChannelIndex);
}
/**
 * UnionInputGate must resolve channel indexes correctly even when the member gates
 * do not start at gate index 0 (here the fixture's gateIndex is shifted to 2).
 */
@Test
void testGetChannelWithShiftedGateIndexes() {
    // Shift the starting gate index so the union must translate offsets.
    gateIndex = 2;
    final SingleInputGate inputGate1 = createInputGate(1);
    TestInputChannel inputChannel1 = new TestInputChannel(inputGate1, 0);
    inputGate1.setInputChannels(inputChannel1);
    final SingleInputGate inputGate2 = createInputGate(1);
    TestInputChannel inputChannel2 = new TestInputChannel(inputGate2, 0);
    inputGate2.setInputChannels(inputChannel2);
    UnionInputGate unionInputGate = new UnionInputGate(inputGate1, inputGate2);
    assertThat(unionInputGate.getChannel(0)).isEqualTo(inputChannel1);
    // Check that updated input channel is visible via UnionInputGate
    assertThat(unionInputGate.getChannel(1)).isEqualTo(inputChannel2);
}
@SuppressWarnings("unchecked") public <IN, OUT> AvroDatumConverter<IN, OUT> create(Class<IN> inputClass) { boolean isMapOnly = ((JobConf) getConf()).getNumReduceTasks() == 0; if (AvroKey.class.isAssignableFrom(inputClass)) { Schema schema; if (isMapOnly) { schema = AvroJob.getMapOutputKeySchema(getConf()); if (null == schema) { schema = AvroJob.getOutputKeySchema(getConf()); } } else { schema = AvroJob.getOutputKeySchema(getConf()); } if (null == schema) { throw new IllegalStateException("Writer schema for output key was not set. Use AvroJob.setOutputKeySchema()."); } return (AvroDatumConverter<IN, OUT>) new AvroWrapperConverter(schema); } if (AvroValue.class.isAssignableFrom(inputClass)) { Schema schema; if (isMapOnly) { schema = AvroJob.getMapOutputValueSchema(getConf()); if (null == schema) { schema = AvroJob.getOutputValueSchema(getConf()); } } else { schema = AvroJob.getOutputValueSchema(getConf()); } if (null == schema) { throw new IllegalStateException( "Writer schema for output value was not set. 
Use AvroJob.setOutputValueSchema()."); } return (AvroDatumConverter<IN, OUT>) new AvroWrapperConverter(schema); } if (BooleanWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new BooleanWritableConverter(); } if (BytesWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new BytesWritableConverter(); } if (ByteWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new ByteWritableConverter(); } if (DoubleWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new DoubleWritableConverter(); } if (FloatWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new FloatWritableConverter(); } if (IntWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new IntWritableConverter(); } if (LongWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new LongWritableConverter(); } if (NullWritable.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new NullWritableConverter(); } if (Text.class.isAssignableFrom(inputClass)) { return (AvroDatumConverter<IN, OUT>) new TextConverter(); } throw new UnsupportedOperationException("Unsupported input type: " + inputClass.getName()); }
@Test
void convertDoubleWritable() {
    // A DoubleWritable input must convert to the boxed Double it wraps.
    final AvroDatumConverter<DoubleWritable, Double> converter = mFactory.create(DoubleWritable.class);
    final Double converted = converter.convert(new DoubleWritable(2.0));
    assertEquals(2.0, converted, 0.00001);
}
@Override
public boolean accept(RequestedField field) {
    // Only fields from the known id-field set are candidates.
    if (!ID_FIELDS.contains(field.name())) {
        return false;
    }
    // And the caller must have explicitly requested the "id" decorator.
    return field.hasDecorator("id");
}
/**
 * IdDecorator must accept only known id fields that explicitly request the "id"
 * decorator, and reject everything else.
 */
@Test
void accept() {
    final FieldDecorator decorator = new IdDecorator();
    // Known id fields with the explicit "id" decorator are accepted.
    Assertions.assertThat(decorator.accept(RequestedField.parse("streams.id"))).isTrue();
    Assertions.assertThat(decorator.accept(RequestedField.parse("gl2_source_input.id"))).isTrue();
    Assertions.assertThat(decorator.accept(RequestedField.parse("gl2_source_node.id"))).isTrue();
    // default is to decorate as title, we don't want IDs
    Assertions.assertThat(decorator.accept(RequestedField.parse("gl2_source_node"))).isFalse();
    // unsupported fields are also ignored
    Assertions.assertThat(decorator.accept(RequestedField.parse("http_response_code"))).isFalse();
}
/**
 * Returns the identifier assigned to this session.
 */
@Override
public String getSessionId() {
    return sessionID;
}
/**
 * Sends an edit-config request over session3 (per the test name, a session using
 * chunked framing) and expects it to succeed; a NetconfException fails the test.
 */
@Test
public void testEditConfigRequestWithOnlyNewConfigurationWithChunkedFraming() {
    log.info("Starting edit-config async");
    // The session must have been assigned an id before commands can be issued.
    assertNotNull("Incorrect sessionId", session3.getSessionId());
    try {
        assertTrue("NETCONF edit-config command failed", session3.editConfig(EDIT_CONFIG_REQUEST));
    } catch (NetconfException e) {
        e.printStackTrace();
        fail("NETCONF edit-config test failed: " + e.getMessage());
    }
    log.info("Finishing edit-config async");
}
public static <T> CompletableFuture<T> run(Callable<T> callable) { CompletableFuture<T> result = new CompletableFuture<>(); CompletableFuture.runAsync( () -> { // we need to explicitly catch any exceptions, // otherwise they will be silently discarded try { result.complete(callable.call()); } catch (Throwable e) { result.completeExceptionally(e); } }, executor); return result; }
@Test
public void testRun() throws Exception {
    // A trivial callable's value must round-trip through the async helper unchanged.
    final String value = Async.run(() -> "").get();
    assertEquals("", value);
}
@Override
public int launch(AgentLaunchDescriptor descriptor) {
    // Run the actual launch under the launcher-specific logback configuration.
    return new LogConfigurator("agent-launcher-logback.xml").runWithLogger(() -> doLaunch(descriptor));
}
/**
 * A stale local launcher jar must be replaced on launch: after invoking the launcher,
 * the jar on disk should differ in size from the random placeholder written beforehand.
 */
@Test
public void shouldDownloadLauncherJarIfLocalCopyIsStale() throws IOException {
    //because new invocation will take care of pulling latest agent down, and will then operate on it with the latest launcher -jj
    File staleJar = randomFile(AGENT_LAUNCHER_JAR);
    long original = staleJar.length();
    new AgentLauncherImpl().launch(launchDescriptor());
    // Size change is the evidence that the stale copy was overwritten.
    assertThat(staleJar.length(), not(original));
}
@Override
public void remove(NamedNode master) {
    // Issue SENTINEL REMOVE for the master, identified by its name.
    final String masterName = master.getName();
    connection.sync(RedisCommands.SENTINEL_REMOVE, masterName);
}
/**
 * Removing a currently monitored master should complete without error.
 */
@Test
public void testRemove() {
    Collection<RedisServer> masters = connection.masters();
    // Pick any monitored master and ask sentinel to stop monitoring it.
    connection.remove(masters.iterator().next());
}
/**
 * Extracts the multipart boundary token from a Content-Type header value.
 *
 * @param contentTypeHeader the raw Content-Type header, may be null
 * @return the boundary (first regex capture group), or null when the header is
 *         absent or carries no parseable boundary
 */
public static String parseBoundaryFromHeader(String contentTypeHeader) {
    if (contentTypeHeader == null) {
        // No header at all means no boundary.
        return null;
    }
    final Matcher matcher = BOUNDARY_FROM_HEADER_REGEXP.matcher(contentTypeHeader);
    if (!matcher.find()) {
        return null;
    }
    return matcher.group(1);
}
/**
 * Exhaustive cases for boundary extraction from Content-Type headers: quoted and
 * unquoted boundaries, surrounding whitespace, missing/empty/malformed parameters,
 * mismatched quotes, and the 70-character length cap behavior.
 */
@Test
public void testParseBoundaryFromHeader() {
    assertNull(MultipartUtils.parseBoundaryFromHeader(null));
    // Quoted boundaries, with and without embedded/trailing whitespace.
    assertEquals("0aA'()+_,-./:=?", MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=\"0aA'()+_,-./:=?\""));
    assertEquals("0aA'()+_, -./:=?", MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=\"0aA'()+_, -./:=?\""));
    assertEquals("0aA'()+_, -./:=?", MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=\"0aA'()+_, -./:=? \""));
    // Unquoted boundaries.
    assertEquals("0aA'()+_,-./:=?", MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=0aA'()+_,-./:=?"));
    assertEquals("0aA'()+_, -./:=?", MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=0aA'()+_, -./:=?"));
    assertEquals("0aA'()+_, -./:=?", MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=0aA'()+_, -./:=? "));
    assertEquals(" 0aA'()+_, -./:=?", MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary= 0aA'()+_, -./:=?"));
    // Missing or misnamed boundary parameter yields null.
    assertNull(MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundar=0aA'()+_, -./:=? "));
    assertNull(MultipartUtils.parseBoundaryFromHeader("multipart/subtype; "));
    assertNull(MultipartUtils.parseBoundaryFromHeader("multipart/subtype;"));
    assertNull(MultipartUtils.parseBoundaryFromHeader("multipart/subtype"));
    assertNull(MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary="));
    // A semicolon terminates an unquoted boundary.
    assertEquals("0aA'()+_,", MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=0aA'()+_,; -./:=? "));
    // Mismatched quotes are tolerated.
    assertEquals("0aA'()+_, -./:=?", MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=\"0aA'()+_, -./:=?"));
    assertEquals("0aA'()+_, -./:=?", MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=0aA'()+_, -./:=?\""));
    // Boundaries are capped at 70 characters.
    assertEquals("1234567890123456789012345678901234567890123456789012345678901234567890",
        MultipartUtils.parseBoundaryFromHeader("multipart/subtype; " +
            "boundary=1234567890123456789012345678901234567890123456789012345678901234567890"));
    assertEquals("1234567890123456789012345678901234567890123456789012345678901234567890",
        MultipartUtils.parseBoundaryFromHeader("multipart/subtype; " +
            "boundary=12345678901234567890123456789012345678901234567890123456789012345678901"));
    // Empty or immediately-terminated values yield null.
    assertNull(MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary="));
    assertNull(MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=\"\""));
    assertNull(MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=;123"));
    assertNull(MultipartUtils.parseBoundaryFromHeader("multipart/subtype; boundary=\"\"123"));
}
/**
 * Returns the {@link DoFnSignature} for the given DoFn class, parsing it at most once:
 * subsequent lookups for the same class are served from the signature cache.
 */
public static <FnT extends DoFn<?, ?>> DoFnSignature getSignature(Class<FnT> fn) {
    return signatureCache.computeIfAbsent(fn, fnClass -> parseSignature(fnClass));
}
/**
 * A @ProcessElement method declaring PipelineOptions as an extra parameter must be
 * reflected in the parsed signature as a PipelineOptionsParameter.
 */
@Test
public void testPipelineOptionsParameter() throws Exception {
    DoFnSignature sig =
        DoFnSignatures.getSignature(
            new DoFn<String, String>() {
                @ProcessElement
                public void process(ProcessContext c, PipelineOptions options) {}
            }.getClass());
    assertThat(
        sig.processElement().extraParameters(),
        Matchers.hasItem(instanceOf(Parameter.PipelineOptionsParameter.class)));
}
/**
 * Runs every registered migration step in order, notifying {@code listener} after each
 * completed step, and — whether all steps succeed or not — records the total migration
 * duration, completed-step count and success flag into the telemetry providers.
 *
 * @param steps    the migration steps to execute, in order
 * @param listener notified once per successfully completed step
 */
@Override
public void execute(List<RegisteredMigrationStep> steps, MigrationStatusListener listener) {
    Profiler globalProfiler = Profiler.create(LOGGER);
    globalProfiler.startInfo(GLOBAL_START_MESSAGE, databaseMigrationState.getTotalMigrations());
    boolean allStepsExecuted = false;
    try {
        for (RegisteredMigrationStep step : steps) {
            // A failing step aborts the loop; allStepsExecuted stays false and the
            // finally block logs the run as a failure.
            this.execute(step);
            listener.onMigrationStepCompleted();
        }
        allStepsExecuted = true;
    } finally {
        long dbMigrationDuration = 0L;
        if (allStepsExecuted) {
            dbMigrationDuration = globalProfiler.stopInfo(GLOBAL_END_MESSAGE, databaseMigrationState.getCompletedMigrations(),
                databaseMigrationState.getTotalMigrations(), "success");
        } else {
            dbMigrationDuration = globalProfiler.stopError(GLOBAL_END_MESSAGE, databaseMigrationState.getCompletedMigrations(),
                databaseMigrationState.getTotalMigrations(), "failure");
        }
        // Telemetry is updated even when a step threw, so partial runs are still reported.
        telemetryDbMigrationTotalTimeProvider.setDbMigrationTotalTime(dbMigrationDuration);
        telemetryDbMigrationStepsProvider.setDbMigrationCompletedSteps(databaseMigrationState.getCompletedMigrations());
        telemetryDbMigrationSuccessProvider.setDbMigrationSuccess(allStepsExecuted);
    }
}
/**
 * When a step throws, execute must wrap it in a MigrationStepExecutionException that names
 * the failing step, preserve the original exception as the cause, and stop — only the one
 * step that completed before the failure is reported to the listener.
 */
@Test
void execute_throws_MigrationStepExecutionException_on_first_failing_step_execution_throws_any_exception() {
    migrationContainer.add(MigrationStep2.class, RuntimeExceptionFailingMigrationStep.class, MigrationStep3.class);
    // Step #2 fails; step #3 must never run.
    List<RegisteredMigrationStep> steps = asList(
        registeredStepOf(1, MigrationStep2.class),
        registeredStepOf(2, RuntimeExceptionFailingMigrationStep.class),
        registeredStepOf(3, MigrationStep3.class));
    ((SpringComponentContainer) migrationContainer).startComponents();
    try {
        underTest.execute(steps, migrationStatusListener);
        fail("should throw MigrationStepExecutionException");
    } catch (MigrationStepExecutionException e) {
        assertThat(e).hasMessage("Execution of migration step #2 '2-RuntimeExceptionFailingMigrationStep' failed");
        // The original exception must be preserved as the cause, not copied.
        assertThat(e.getCause()).isSameAs(RuntimeExceptionFailingMigrationStep.THROWN_EXCEPTION);
        // Only the first (successful) step was reported as completed.
        verify(migrationStatusListener, times(1)).onMigrationStepCompleted();
    }
}