focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
@SuppressWarnings("unchecked") @Override public String doSharding(final Collection<String> availableTargetNames, final PreciseShardingValue<Comparable<?>> shardingValue) { return standardShardingAlgorithm.doSharding(availableTargetNames, shardingValue); }
@Test void assertComplexKeysDoSharding() { ClassBasedShardingAlgorithm algorithm = (ClassBasedShardingAlgorithm) TypedSPILoader.getService(ShardingAlgorithm.class, "CLASS_BASED", PropertiesBuilder.build(new Property("strategy", "complex"), new Property("algorithmClassName", ClassBasedComplexKeysShardingAlgorithmFixture.class.getName()))); Collection<String> availableTargetNames = Arrays.asList("t_order_0", "t_order_1", "t_order_2", "t_order_3"); Collection<String> actual = algorithm.doSharding(availableTargetNames, new ComplexKeysShardingValue<>("t_order", null, null)); assertThat(actual.size(), is(4)); }
@Udf public Long round(@UdfParameter final long val) { return val; }
@Test public void shouldHandleDoubleLiteralsEndingWith5ThatCannotBeRepresentedExactlyAsDoubles() { assertThat(udf.round(new BigDecimal("265.335"), 2), is(new BigDecimal("265.340"))); assertThat(udf.round(new BigDecimal("-265.335"), 2), is(new BigDecimal("-265.330"))); assertThat(udf.round(new BigDecimal("265.365"), 2), is(new BigDecimal("265.370"))); assertThat(udf.round(new BigDecimal("-265.365"), 2), is(new BigDecimal("-265.360"))); }
@Operation(summary = "onlineCreateResource", description = "ONLINE_CREATE_RESOURCE_NOTES") @Parameters({ @Parameter(name = "type", description = "RESOURCE_TYPE", required = true, schema = @Schema(implementation = ResourceType.class)), @Parameter(name = "fileName", description = "RESOURCE_NAME", required = true, schema = @Schema(implementation = String.class)), @Parameter(name = "suffix", description = "SUFFIX", required = true, schema = @Schema(implementation = String.class)), @Parameter(name = "description", description = "RESOURCE_DESC", schema = @Schema(implementation = String.class)), @Parameter(name = "content", description = "CONTENT", required = true, schema = @Schema(implementation = String.class)), @Parameter(name = "currentDir", description = "RESOURCE_CURRENTDIR", required = true, schema = @Schema(implementation = String.class)) }) @PostMapping(value = "/online-create") @ApiException(CREATE_RESOURCE_FILE_ON_LINE_ERROR) public Result onlineCreateResource(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser, @RequestParam(value = "type") ResourceType type, @RequestParam(value = "fileName") String fileName, @RequestParam(value = "suffix") String fileSuffix, @RequestParam(value = "content") String content, @RequestParam(value = "currentDir") String currentDir) { if (StringUtils.isEmpty(content)) { log.error("resource file contents are not allowed to be empty"); return error(RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg()); } return resourceService.onlineCreateResource(loginUser, type, fileName, fileSuffix, content, currentDir); }
@Test public void testOnlineCreateResource() throws Exception { Result mockResult = new Result<>(); mockResult.setCode(Status.TENANT_NOT_EXIST.getCode()); Mockito.when(resourcesService .onlineCreateResource(Mockito.any(), Mockito.any(), Mockito.anyString(), Mockito.anyString(), Mockito.anyString(), Mockito.anyString())) .thenReturn(mockResult); MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>(); paramsMap.add("type", String.valueOf(ResourceType.FILE)); paramsMap.add("fileName", "test_file_1"); paramsMap.add("suffix", "sh"); paramsMap.add("description", "test"); paramsMap.add("content", "echo 1111"); paramsMap.add("pid", "123"); paramsMap.add("currentDir", "/xx"); MvcResult mvcResult = mockMvc.perform(post("/resources/online-create") .header(SESSION_ID, sessionId) .params(paramsMap)) .andExpect(status().isOk()) .andExpect(content().contentType(MediaType.APPLICATION_JSON)) .andReturn(); Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class); Assertions.assertEquals(Status.TENANT_NOT_EXIST.getCode(), result.getCode().intValue()); logger.info(mvcResult.getResponse().getContentAsString()); }
static void parseMethodInfo(Map<String, Object> methodParameters, String method, String valueStr) { int idxSplit = valueStr.indexOf('#'); // "#"被认为是MethodSpecial配置的标志 if (idxSplit < 0) { return; } int idxLeft = valueStr.indexOf('['); int idxRight = valueStr.indexOf(']'); String parameters = valueStr.substring(idxLeft + 1, idxRight); String[] kvs = parameters.split("@"); if (kvs.length > 0) { Map<String, String> tmp = new HashMap<String, String>(); for (String kvp : kvs) { String[] kv = kvp.split("#"); if (kv.length == 2) { tmp.put(kv[0], kv[1]); } } // timeout特殊处理 String timeout = getValue(tmp, ATTR_TIMEOUT, KEY_TIMEOUT, TIMEOUT); if (timeout != null) { removeOldKeys(tmp, ATTR_TIMEOUT, KEY_TIMEOUT, TIMEOUT); try { methodParameters.put("." + method + "." + ATTR_TIMEOUT, Integer.parseInt(timeout)); } catch (Exception e) { LOGGER.error("method timeout is invalid : {}", timeout); } } // 其它就存起来 for (Map.Entry<String, String> entry : tmp.entrySet()) { methodParameters.put("." + method + "." + entry.getKey(), entry.getValue()); } } }
@Test public void parseMethodInfo() throws Exception { // 不用测试null等情况, Map<String, Object> map = new HashMap<String, Object>(); SofaRegistryHelper.parseMethodInfo(map, "xx", "[]"); Assert.assertTrue(map.size() == 0); map.clear(); SofaRegistryHelper.parseMethodInfo(map, "xx", "[xxxx]"); Assert.assertTrue(map.size() == 0); map.clear(); SofaRegistryHelper.parseMethodInfo(map, "xx", "[clientTimeout#5555]"); Assert.assertTrue(map.size() == 1); Assert.assertTrue(5555 == (Integer) map.get(".xx.timeout")); map.clear(); SofaRegistryHelper.parseMethodInfo(map, "xx", "[_AUTORECONNECT#false@_TIMEOUT#2000]"); Assert.assertTrue(map.size() == 2); Assert.assertTrue(2000 == (Integer) map.get(".xx.timeout")); Assert.assertTrue("false".equals(map.get(".xx._AUTORECONNECT"))); map.clear(); SofaRegistryHelper.parseMethodInfo(map, "xx", "[clientTimeout#4444@retries#3]"); Assert.assertTrue(map.size() == 2); Assert.assertTrue(4444 == (Integer) map.get(".xx.timeout")); Assert.assertTrue("3".equals(map.get(".xx.retries"))); }
public BigtableConfig withProjectId(ValueProvider<String> projectId) { checkArgument(projectId != null, "Project Id of BigTable can not be null"); return toBuilder().setProjectId(projectId).build(); }
@Test public void testWithProjectId() { assertEquals(PROJECT_ID.get(), config.withProjectId(PROJECT_ID).getProjectId().get()); thrown.expect(IllegalArgumentException.class); config.withProjectId(null); }
public Class<?> getTargetClass() { return targetClass; }
@Test void testConstructorWithTargetTypeAndCause() { Type type = SimpleType.constructUnsafe(NacosDeserializationExceptionTest.class); NacosDeserializationException exception = new NacosDeserializationException(type, new RuntimeException("test")); assertEquals(Constants.Exception.DESERIALIZE_ERROR_CODE, exception.getErrCode()); assertEquals(String.format("errCode: 101, errMsg: Nacos deserialize for class [%s] failed, cause error[%s]. ", type.getTypeName(), "test"), exception.getMessage()); assertNull(exception.getTargetClass()); }
public void setMethod(Method method) { this.method = method; }
@Test public void testSetMethod() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException { MethodDesc methodDesc = getMethodDesc(); assertThat(methodDesc.getMethod()).isNotNull(); methodDesc.setMethod(null); assertThat(methodDesc.getMethod()).isNull(); }
@Override protected List<Object[]> rows() { List<Object[]> rows = new ArrayList<>(mappings.size()); for (Mapping mapping : mappings) { Map<String, String> options; if (!securityEnabled) { options = mapping.options(); } else { options = new TreeMap<>(); final SqlConnector sqlConnector = sqlConnectorCache.forType(mapping.connectorType()); final Set<String> secureConnectorOptions = sqlConnector.nonSensitiveConnectorOptions(); for (Entry<String, String> e : mapping.options().entrySet()) { if (secureConnectorOptions.contains(e.getKey())) { options.put(e.getKey(), e.getValue()); } } } Object[] row = new Object[]{ catalog(), mappingsSchema, mapping.name(), quoteCompoundIdentifier(mapping.externalName()), Optional.ofNullable(mapping.dataConnection()) .map(dataConnectionTypeResolver) .orElse(mapping.connectorType()), uncheckCall(() -> JsonUtil.toJson(options)) }; rows.add(row); } return rows; }
@Test public void test_rows_security_enabled() { // given Mapping mapping = new Mapping( "table-name", new String[]{"external-schema", "table-external-name"}, null, "IMap", null, emptyList(), singletonMap("key", "value") ); MappingsTable mappingTable = new MappingsTable( "catalog", null, "table-schema", singletonList(mapping), mock(SqlConnectorCache.class, RETURNS_MOCKS), (s) -> fail("Should not be invoked"), true); // when List<Object[]> rows = mappingTable.rows(); // then assertThat(rows).containsExactly(new Object[]{ "catalog" , "table-schema" , "table-name" , "\"external-schema\".\"table-external-name\"" , "IMap" , "{}" }); }
public static NacosNamingServiceWrapper createNamingService(URL connectionURL) { boolean check = connectionURL.getParameter(NACOS_CHECK_KEY, true); int retryTimes = connectionURL.getPositiveParameter(NACOS_RETRY_KEY, 10); int sleepMsBetweenRetries = connectionURL.getPositiveParameter(NACOS_RETRY_WAIT_KEY, 10); NacosConnectionManager nacosConnectionManager = new NacosConnectionManager(connectionURL, check, retryTimes, sleepMsBetweenRetries); return new NacosNamingServiceWrapper(nacosConnectionManager, retryTimes, sleepMsBetweenRetries); }
@Test void testRequest() { try (MockedStatic<NacosFactory> nacosFactoryMockedStatic = Mockito.mockStatic(NacosFactory.class)) { AtomicInteger atomicInteger = new AtomicInteger(0); NamingService mock = new MockNamingService() { @Override public List<Instance> getAllInstances(String serviceName, boolean subscribe) throws NacosException { if (atomicInteger.incrementAndGet() > 10) { return null; } else { throw new NacosException(); } } @Override public String getServerStatus() { return UP; } }; nacosFactoryMockedStatic .when(() -> NacosFactory.createNamingService((Properties) any())) .thenReturn(mock); URL url = URL.valueOf("nacos://127.0.0.1:8848") .addParameter("nacos.retry", 5) .addParameter("nacos.retry-wait", 10); Assertions.assertThrows( IllegalStateException.class, () -> NacosNamingServiceUtils.createNamingService(url)); try { NacosNamingServiceUtils.createNamingService(url); } catch (Throwable t) { Assertions.fail(t); } } }
public static HttpAsyncContext getContext() { return LOCAL.get(); }
@Test public void getContext() { HttpAsyncUtils.remove(); Assert.assertNull(HttpAsyncUtils.getContext()); }
public CompletableFuture<CompletedCheckpoint> triggerCheckpoint(boolean isPeriodic) { return triggerCheckpointFromCheckpointThread(checkpointProperties, null, isPeriodic); }
@Test void testIOExceptionForPeriodicSchedulingWithInactiveTasks() throws Exception { CheckpointCoordinator checkpointCoordinator = setupCheckpointCoordinatorWithInactiveTasks(new IOExceptionCheckpointStorage()); final CompletableFuture<CompletedCheckpoint> onCompletionPromise = checkpointCoordinator.triggerCheckpoint( CheckpointProperties.forCheckpoint( CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION), null, true); manuallyTriggeredScheduledExecutor.triggerAll(); try { onCompletionPromise.get(); fail("should not trigger periodic checkpoint after IOException occurred."); } catch (Exception e) { final Optional<CheckpointException> checkpointExceptionOptional = ExceptionUtils.findThrowable(e, CheckpointException.class); if (!checkpointExceptionOptional.isPresent() || checkpointExceptionOptional.get().getCheckpointFailureReason() != IO_EXCEPTION) { throw e; } } }
@Transactional public Map<String, String> collectSamlMetadata(String id) throws CollectSamlMetadataException { Map<String, String> map = new HashMap<>(); List<Connection> list; LOGGER.info("Start collecting metadata!!"); digidXClient.remoteLog("1446", null); try { list = id.equals("all") ? connectionService.listWithAllConnections() : connectionService.listWithOneConnection(Long.valueOf(id)); for (Connection con : list) startCollectMetadata(con, map); map.put("count", String.valueOf(list.size())); } catch (Exception e) { LOGGER.error("An error has occurred collecting metadata connections: {}", e.getMessage()); throw new CollectSamlMetadataException(e.getMessage()); } digidXClient.remoteLog("1447", map); return map; }
@Test public void startCollectMetadataSignatureTest() throws CollectSamlMetadataException { metadataProcessorServiceMock.collectSamlMetadata("all"); verify(digidXClientMock, times(1)).remoteLog(anyString(), anyMap()); }
public static boolean isSentToMultisig(Script script) { List<ScriptChunk> chunks = script.chunks(); if (chunks.size() < 4) return false; ScriptChunk chunk = chunks.get(chunks.size() - 1); // Must end in OP_CHECKMULTISIG[VERIFY]. if (!(chunk.equalsOpCode(OP_CHECKMULTISIG) || chunk.equalsOpCode(OP_CHECKMULTISIGVERIFY))) return false; // Second to last chunk must be an OP_N opcode and there should be that many data chunks (keys). int nOpCode = chunks.get(chunks.size() - 2).opcode; if (nOpCode < OP_1 || nOpCode > OP_16) return false; int numKeys = decodeFromOpN(nOpCode); if (numKeys < 1 || chunks.size() != 3 + numKeys) return false; for (int i = 1; i < chunks.size() - 2; i++) { if (chunks.get(i).isOpCode()) return false; } // First chunk must be an OP_N opcode too. int mOpCode = chunks.get(0).opcode; return mOpCode >= OP_1 && mOpCode <= OP_16; }
@Test public void testCreateMultiSigOutputScript() { assertTrue(ScriptPattern.isSentToMultisig( ScriptBuilder.createMultiSigOutputScript(2, keys) )); }
public static String convertChecksumToString(int checksum) { ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES); buffer.order(ByteOrder.BIG_ENDIAN); buffer.putInt(checksum); return BASE64_ENCODER.encodeToString(buffer.array()); }
@Test public void shouldConvertToStringChecksum() { assertEquals("AAAwOQ==", ChecksumUtils.convertChecksumToString(12345)); assertEquals("AADUMQ==", ChecksumUtils.convertChecksumToString(54321)); }
public Matrix.LU lu() { return lu(false); }
@Test public void testLU() { System.out.println("LU"); float[][] A = { {0.9000f, 0.4000f, 0.7000f}, {0.4000f, 0.5000f, 0.3000f}, {0.7000f, 0.3000f, 0.8000f} }; float[] b = {0.5f, 0.5f, 0.5f}; float[] x = {-0.2027027f, 0.8783784f, 0.4729730f}; Matrix a = Matrix.of(A); Matrix.LU lu = a.lu(); float[] x2 = lu.solve(b); assertEquals(x.length, x2.length); for (int i = 0; i < x.length; i++) { assertEquals(x[i], x2[i], 1E-6f); } float[][] B = { {0.5f, 0.2f}, {0.5f, 0.8f}, {0.5f, 0.3f} }; float[][] X = { {-0.2027027f, -1.2837838f}, { 0.8783784f, 2.2297297f}, { 0.4729730f, 0.6621622f} }; Matrix X2 = Matrix.of(B); lu.solve(X2); assertEquals(X.length, X2.nrow()); assertEquals(X[0].length, X2.ncol()); for (int i = 0; i < X.length; i++) { for (int j = 0; j < X[i].length; j++) { assertEquals(X[i][j], X2.get(i, j), 1E-6f); } } }
public AggregateAnalysisResult analyze( final ImmutableAnalysis analysis, final List<SelectExpression> finalProjection ) { if (!analysis.getGroupBy().isPresent()) { throw new IllegalArgumentException("Not an aggregate query"); } final AggAnalyzer aggAnalyzer = new AggAnalyzer(analysis, functionRegistry); aggAnalyzer.process(finalProjection); return aggAnalyzer.result(); }
@Test public void shouldThrowOnNestedHavingAggFunctions() { // Given: final FunctionCall nestedCall = new FunctionCall(FunctionName.of("MIN"), ImmutableList.of(AGG_FUNCTION_CALL, COL2)); givenHavingExpression(nestedCall); // When: final KsqlException e = assertThrows(KsqlException.class, () -> analyzer.analyze(analysis, selects)); // Then: assertThat(e.getMessage(), containsString("Aggregate functions can not be nested: MIN(MAX())")); }
public Collection<String> getUsedDataSourceNames() { Collection<String> result = new ArrayList<>(cachedConnections.size()); String databaseName = connectionSession.getUsedDatabaseName().toLowerCase(); for (String each : cachedConnections.keySet()) { String[] split = each.split("\\.", 2); String cachedDatabaseName = split[0]; String cachedDataSourceName = split[1]; if (databaseName.equals(cachedDatabaseName)) { result.add(cachedDataSourceName); } } return result; }
@Test void assertGetDataSourceNamesOfCachedConnections() { databaseConnectionManager.getCachedConnections().put(connectionSession.getUsedDatabaseName() + ".ds_0", null); databaseConnectionManager.getCachedConnections().put(connectionSession.getUsedDatabaseName() + ".ds_1", null); databaseConnectionManager.getCachedConnections().put(connectionSession.getUsedDatabaseName() + ".ds_2", null); List<String> actual = new ArrayList<>(databaseConnectionManager.getUsedDataSourceNames()); Collections.sort(actual); assertThat(actual, is(Arrays.asList("ds_0", "ds_1", "ds_2"))); }
ImmutableList<PayloadDefinition> validatePayloads(List<PayloadDefinition> payloads) { for (PayloadDefinition p : payloads) { checkArgument(p.hasName(), "Parsed payload does not have a name."); checkArgument( p.getInterpretationEnvironment() != PayloadGeneratorConfig.InterpretationEnvironment .INTERPRETATION_ENVIRONMENT_UNSPECIFIED, "Parsed payload does not have an interpretation_environment."); checkArgument( p.getExecutionEnvironment() != PayloadGeneratorConfig.ExecutionEnvironment.EXECUTION_ENVIRONMENT_UNSPECIFIED, "Parsed payload does not have an exeuction_environment."); checkArgument( !p.getVulnerabilityTypeList().isEmpty(), "Parsed payload has no entries for vulnerability_type."); checkArgument(p.hasPayloadString(), "Parsed payload does not have a payload_string."); if (p.getUsesCallbackServer().getValue()) { checkArgument( p.getPayloadString().getValue().contains("$TSUNAMI_PAYLOAD_TOKEN_URL"), "Parsed payload uses callback server but $TSUNAMI_PAYLOAD_TOKEN_URL not found in" + " payload_string."); } else { checkArgument( p.getValidationType() != PayloadValidationType.VALIDATION_TYPE_UNSPECIFIED, "Parsed payload has no validation_type and does not use the callback server."); if (p.getValidationType() == PayloadValidationType.VALIDATION_REGEX) { checkArgument( p.hasValidationRegex(), "Parsed payload has no validation_regex but uses PayloadValidationType.REGEX"); } } } return ImmutableList.copyOf(payloads); }
@Test public void validatePayloads_withoutVulnerabilityType_throwsException() throws IOException { PayloadDefinition p = goodCallbackDefinition.clearVulnerabilityType().build(); Throwable thrown = assertThrows( IllegalArgumentException.class, () -> module.validatePayloads(ImmutableList.of(p))); assertThat(thrown).hasMessageThat().contains("vulnerability_type"); }
public void put(Schema schema) { if (!(NAMED_SCHEMA_TYPES.contains(schema.getType()))) { throw new AvroTypeException("You can only put a named schema into the context"); } String fullName = requireValidFullName(schema.getFullName()); Schema alreadyKnownSchema = oldSchemas.get(fullName); if (alreadyKnownSchema != null) { if (!schema.equals(alreadyKnownSchema)) { throw new SchemaParseException("Can't redefine: " + fullName); } } else { resolvingVisitor = null; Schema previouslyAddedSchema = newSchemas.putIfAbsent(fullName, schema); if (previouslyAddedSchema != null && !previouslyAddedSchema.equals(schema)) { throw new SchemaParseException("Can't redefine: " + fullName); } } }
@Test public void verifyAddDoesNotAllowChangingSchemas() { Schema fooEnum = SchemaBuilder.enumeration("ns.Foo").symbols(); ParseContext context = new ParseContext(); context.put(fooRecord); assertThrows(AvroRuntimeException.class, () -> context.put(fooEnum)); }
@Override public KvMetadata resolveMetadata( boolean isKey, List<MappingField> resolvedFields, Map<String, String> options, InternalSerializationService serializationService ) { Map<QueryPath, MappingField> fieldsByPath = extractFields(resolvedFields, isKey); List<TableField> fields = new ArrayList<>(); for (Entry<QueryPath, MappingField> entry : fieldsByPath.entrySet()) { QueryPath path = entry.getKey(); QueryDataType type = entry.getValue().type(); String name = entry.getValue().name(); fields.add(new MapTableField(name, type, false, path)); } maybeAddDefaultField(isKey, resolvedFields, fields, QueryDataType.OBJECT); return new KvMetadata( fields, JsonQueryTargetDescriptor.INSTANCE, JsonUpsertTargetDescriptor.INSTANCE ); }
@Test @Parameters({ "true, __key", "false, this" }) public void test_resolveMetadata(boolean key, String prefix) { KvMetadata metadata = INSTANCE.resolveMetadata( key, singletonList(field("field", QueryDataType.INT, prefix + ".field")), emptyMap(), null ); assertThat(metadata.getFields()).containsExactly( new MapTableField("field", QueryDataType.INT, false, QueryPath.create(prefix + ".field")), new MapTableField(prefix, QueryDataType.OBJECT, true, QueryPath.create(prefix)) ); assertThat(metadata.getQueryTargetDescriptor()).isEqualTo(JsonQueryTargetDescriptor.INSTANCE); assertThat(metadata.getUpsertTargetDescriptor()).isEqualTo(JsonUpsertTargetDescriptor.INSTANCE); }
static Set<Set<Integer>> computeStronglyConnectedComponents( final int numVertex, final List<List<Integer>> outEdges) { final Set<Set<Integer>> stronglyConnectedComponents = new HashSet<>(); // a vertex will be added into this stack when it is visited for the first time final Deque<Integer> visitingStack = new ArrayDeque<>(numVertex); final boolean[] onVisitingStack = new boolean[numVertex]; // stores the order that a vertex is visited for the first time, -1 indicates it is not // visited yet final int[] vertexIndices = new int[numVertex]; Arrays.fill(vertexIndices, -1); final AtomicInteger indexCounter = new AtomicInteger(0); final int[] vertexLowLinks = new int[numVertex]; for (int vertex = 0; vertex < numVertex; vertex++) { if (!isVisited(vertex, vertexIndices)) { dfs( vertex, outEdges, vertexIndices, vertexLowLinks, visitingStack, onVisitingStack, indexCounter, stronglyConnectedComponents); } } return stronglyConnectedComponents; }
@Test void testWithNoCycle() { final List<List<Integer>> edges = Arrays.asList( Arrays.asList(1), Arrays.asList(2), Arrays.asList(3), Arrays.asList(4), Collections.emptyList()); final Set<Set<Integer>> result = computeStronglyConnectedComponents(5, edges); final Set<Set<Integer>> expected = new HashSet<>(); expected.add(Collections.singleton(0)); expected.add(Collections.singleton(1)); expected.add(Collections.singleton(2)); expected.add(Collections.singleton(3)); expected.add(Collections.singleton(4)); assertThat(result).isEqualTo(expected); }
public BigMatrix mm(Transpose transA, BigMatrix A, Transpose transB, BigMatrix B) { return mm(transA, A, transB, B, 1.0, 0.0); }
@Test public void testMm() { System.out.println("mm"); double[][] A = { { 0.7220180, 0.07121225, 0.6881997f}, {-0.2648886, -0.89044952, 0.3700456f}, {-0.6391588, 0.44947578, 0.6240573f} }; double[][] B = { {0.6881997, -0.07121225, 0.7220180f}, {0.3700456, 0.89044952, -0.2648886f}, {0.6240573, -0.44947578, -0.6391588f} }; double[][] C = { { 0.9527204, -0.2973347, 0.06257778f}, {-0.2808735, -0.9403636, -0.19190231f}, { 0.1159052, 0.1652528, -0.97941688f} }; double[][] D = { { 0.9887140, 0.1482942, -0.0212965f}, { 0.1482942, -0.9889421, -0.0015881f}, {-0.0212965, -0.0015881, -0.9997719f} }; double[][] E = { {0.0000, 0.0000, 1.0000f}, {0.0000, -1.0000, 0.0000f}, {1.0000, 0.0000, 0.0000f} }; BigMatrix a = BigMatrix.of(A); BigMatrix b = BigMatrix.of(B); double[][] F = b.mm(a).transpose().toArray(); assertTrue(MathEx.equals(a.mm(b).toArray(), C, 1E-7)); assertTrue(MathEx.equals(a.mt(b).toArray(), D, 1E-7)); assertTrue(MathEx.equals(a.tm(b).toArray(), E, 1E-7)); assertTrue(MathEx.equals(a.tt(b).toArray(), F, 1E-7)); }
public boolean offer(Serializable event) { if (queue == null) { throw new IllegalStateException("client has no event queue"); } return queue.offer(event); }
@Test public void testOfferEventAndRun() throws Exception { client.offer(TEST_EVENT); Thread thread = new Thread(client); thread.start(); // MockEventQueue will interrupt the thread when the queue is drained thread.join(1000); Assertions.assertFalse(thread.isAlive()); ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(outputStream.toByteArray())); Assertions.assertEquals(TEST_EVENT, ois.readObject()); }
public WrappedFileRange createFileRange(final long offset, final int length, final Object reference) { checkAvailable(); return new WrappedFileRange(createFileRange.invoke(null, offset, length, reference)); }
@Test public void testCreateFileRange() { Object reference = "backref"; FileRangeBridge.WrappedFileRange range = FileRangeBridge.instance().createFileRange(512L, 16384, reference); LOG.info("created range {}", range); assertNotNull("null range", range); assertNotNull("null range instance", range.getFileRange()); assertEquals("offset of " + range, 512L, range.getOffset()); assertEquals("length of " + range, 16384, range.getLength()); assertSame("backref of " + range, reference, range.getReference()); // this isn't set until readVectored() is called assertNull("non-null range future", range.getData()); }
@SuppressWarnings("unused") // Part of required API. public void execute( final ConfiguredStatement<InsertValues> statement, final SessionProperties sessionProperties, final KsqlExecutionContext executionContext, final ServiceContext serviceContext ) { final InsertValues insertValues = statement.getStatement(); final MetaStore metaStore = executionContext.getMetaStore(); final KsqlConfig config = statement.getSessionConfig().getConfig(true); final DataSource dataSource = getDataSource(config, metaStore, insertValues); validateInsert(insertValues.getColumns(), dataSource); final ProducerRecord<byte[], byte[]> record = buildRecord(statement, metaStore, dataSource, serviceContext); try { producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps()); } catch (final TopicAuthorizationException e) { // TopicAuthorizationException does not give much detailed information about why it failed, // except which topics are denied. Here we just add the ACL to make the error message // consistent with other authorization error messages. final Exception rootCause = new KsqlTopicAuthorizationException( AclOperation.WRITE, e.unauthorizedTopics() ); throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause); } catch (final ClusterAuthorizationException e) { // ClusterAuthorizationException is thrown when using idempotent producers // and either a topic write permission or a cluster-level idempotent write // permission (only applicable for broker versions no later than 2.8) is // missing. In this case, we include additional context to help the user // distinguish this type of failure from other permissions exceptions // such as the ones thrown above when TopicAuthorizationException is caught. 
throw new KsqlException( createInsertFailedExceptionMessage(insertValues), createClusterAuthorizationExceptionRootCause(dataSource) ); } catch (final KafkaException e) { if (e.getCause() != null && e.getCause() instanceof ClusterAuthorizationException) { // The error message thrown when an idempotent producer is missing permissions // is (nondeterministically) inconsistent: it is either a raw ClusterAuthorizationException, // as checked for above, or a ClusterAuthorizationException wrapped inside a KafkaException. // ksqlDB handles these two the same way, accordingly. // See https://issues.apache.org/jira/browse/KAFKA-14138 for more. throw new KsqlException( createInsertFailedExceptionMessage(insertValues), createClusterAuthorizationExceptionRootCause(dataSource) ); } else { throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e); } } catch (final Exception e) { throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e); } }
@Test public void shouldThrowWhenInsertValuesOnProcessingLogTopic() { // Given givenDataSourceWithSchema("default_ksql_processing_log", SCHEMA, SerdeFeatures.of(), SerdeFeatures.of(), false, false); final KsqlConfig ksqlConfig = new KsqlConfig(ImmutableMap.of()); final ConfiguredStatement<InsertValues> statement = ConfiguredStatement.of( PreparedStatement.of( "", new InsertValues(SourceName.of("TOPIC"), allAndPseudoColumnNames(SCHEMA), ImmutableList.of( new LongLiteral(1L), new StringLiteral("str"), new StringLiteral("str"), new LongLiteral(2L) ))), SessionConfig.of(ksqlConfig, ImmutableMap.of()) ); // When: final Exception e = assertThrows( KsqlException.class, () -> executor.execute(statement, mock(SessionProperties.class), engine, serviceContext) ); // Then: assertThat(e.getMessage(), containsString( "Cannot insert values into read-only topic: default_ksql_processing_log")); }
public static <W extends BoundedWindow> StateContext<W> windowOnlyContext(W window) { return new WindowOnlyContext<>(window); }
@Test public void windowOnlyContextWindowReturnsWindow() { BoundedWindow window = new IntervalWindow(new Instant(-137), Duration.millis(21L)); StateContext<BoundedWindow> context = StateContexts.windowOnlyContext(window); assertThat(context.window(), equalTo(window)); }
@Override public Proxy find(final String target) { for(java.net.Proxy proxy : selector.select(URI.create(target))) { switch(proxy.type()) { case DIRECT: { return Proxy.DIRECT; } case HTTP: { if(proxy.address() instanceof InetSocketAddress) { final InetSocketAddress address = (InetSocketAddress) proxy.address(); return new Proxy(Proxy.Type.HTTP, address.getHostName(), address.getPort()); } } case SOCKS: { if(proxy.address() instanceof InetSocketAddress) { final InetSocketAddress address = (InetSocketAddress) proxy.address(); return new Proxy(Proxy.Type.SOCKS, address.getHostName(), address.getPort()); } } } } return Proxy.DIRECT; }
@Test public void testExcludedLocalHost() { final DefaultProxyFinder proxy = new DefaultProxyFinder(); assertEquals(Proxy.Type.DIRECT, proxy.find("http://cyberduck.local").getType()); assertEquals(Proxy.Type.DIRECT, proxy.find("sftp://cyberduck.local").getType()); }
@Override public double cdf(double x) { if (x <= 0) { return 0.0; } else { return 1 - Math.exp(-Math.pow(x / lambda, k)); } }
@Test public void testCdf() { System.out.println("cdf"); WeibullDistribution instance = new WeibullDistribution(1.5, 1.0); instance.rand(); assertEquals(0.0, instance.cdf(0.0), 1E-7); assertEquals(0.03112801, instance.cdf(0.1), 1E-7); assertEquals(0.08555936, instance.cdf(0.2), 1E-7); assertEquals(0.2978115, instance.cdf(0.5), 1E-7); assertEquals(0.8407241, instance.cdf(1.5), 1E-7); assertEquals(0.9808, instance.cdf(2.5), 1E-7); assertEquals(0.999986, instance.cdf(5.0), 1E-6); }
@Override public InterpreterResult interpret(final String st, final InterpreterContext context) throws InterpreterException { if (LOGGER.isDebugEnabled()) { LOGGER.debug("st:\n{}", st); } final FormType form = getFormType(); RemoteInterpreterProcess interpreterProcess = null; try { interpreterProcess = getOrCreateInterpreterProcess(); } catch (IOException e) { throw new InterpreterException(e); } if (!interpreterProcess.isRunning()) { return new InterpreterResult(InterpreterResult.Code.ERROR, "Interpreter process is not running\n" + interpreterProcess.getErrorMessage()); } return interpreterProcess.callRemoteFunction(client -> { RemoteInterpreterResult remoteResult = client.interpret( sessionId, className, st, convert(context)); Map<String, Object> remoteConfig = (Map<String, Object>) GSON.fromJson( remoteResult.getConfig(), new TypeToken<Map<String, Object>>() { }.getType()); context.getConfig().clear(); if (remoteConfig != null) { context.getConfig().putAll(remoteConfig); } GUI currentGUI = context.getGui(); GUI currentNoteGUI = context.getNoteGui(); if (form == FormType.NATIVE) { GUI remoteGui = GUI.fromJson(remoteResult.getGui()); GUI remoteNoteGui = GUI.fromJson(remoteResult.getNoteGui()); currentGUI.clear(); currentGUI.setParams(remoteGui.getParams()); currentGUI.setForms(remoteGui.getForms()); currentNoteGUI.setParams(remoteNoteGui.getParams()); currentNoteGUI.setForms(remoteNoteGui.getForms()); } else if (form == FormType.SIMPLE) { final Map<String, Input> currentForms = currentGUI.getForms(); final Map<String, Object> currentParams = currentGUI.getParams(); final GUI remoteGUI = GUI.fromJson(remoteResult.getGui()); final Map<String, Input> remoteForms = remoteGUI.getForms(); final Map<String, Object> remoteParams = remoteGUI.getParams(); currentForms.putAll(remoteForms); currentParams.putAll(remoteParams); } return convert(remoteResult); } ); }
// A valid precode ("1") must not prevent the subsequent interpret call from succeeding.
@Test
void testExecuteCorrectPrecode() throws TTransportException, IOException, InterpreterException {
    interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED);
    interpreterSetting.setProperty("zeppelin.SleepInterpreter.precode", "1");
    Interpreter interpreter1 = interpreterSetting.getInterpreter("user1", note1Id, "sleep");
    InterpreterContext context1 = createDummyInterpreterContext();
    assertEquals(Code.SUCCESS, interpreter1.interpret("10", context1).code());
}
/**
 * Executes a CREATE CONNECTOR statement against the Connect cluster.
 *
 * <p>Handles IF NOT EXISTS first (short-circuiting if a response entity is produced),
 * then creates the connector and returns its info; a Connect-side error is surfaced
 * as a {@link KsqlRestException} carrying the original HTTP status code.
 */
public static StatementExecutorResponse execute(
    final ConfiguredStatement<CreateConnector> statement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext
) {
  final CreateConnector createConnector = statement.getStatement();
  final ConnectClient client = serviceContext.getConnectClient();
  // IF NOT EXISTS path: when a KsqlEntity is returned, skip creation entirely.
  final Optional<KsqlEntity> connectorsResponse = handleIfNotExists(
      statement, createConnector, client);
  if (connectorsResponse.isPresent()) {
    return StatementExecutorResponse.handled(connectorsResponse);
  }
  final ConnectResponse<ConnectorInfo> response = client.create(
      createConnector.getName(), buildConnectorConfig(createConnector));
  if (response.datum().isPresent()) {
    return StatementExecutorResponse.handled(Optional.of(
        new CreateConnectorEntity(
            statement.getMaskedStatementText(),
            response.datum().get()
        )
    ));
  }
  if (response.error().isPresent()) {
    // Propagate the Connect failure with its original HTTP status.
    final String errorMsg = "Failed to create connector: " + response.error().get();
    throw new KsqlRestException(EndpointResponse.create()
        .status(response.httpCode())
        .entity(new KsqlErrorMessage(Errors.toErrorCode(response.httpCode()), errorMsg))
        .build()
    );
  }
  // The Connect client contract guarantees exactly one of datum/error.
  throw new IllegalStateException("Either response.datum() or response.error() must be present");
}
// A Connect-side creation error must surface as a KsqlRestException.
@Test
public void shouldThrowOnCreationError() {
    // Given:
    givenCreationError();
    // When / Then:
    assertThrows(
        KsqlRestException.class,
        () -> ConnectExecutor.execute(
            CREATE_CONNECTOR_CONFIGURED, mock(SessionProperties.class), null, serviceContext));
}
/**
 * Returns whether this choice field is a combo box, i.e. whether the
 * FLAG_COMBO bit is set in the field's /Ff flags entry.
 */
public boolean isCombo() {
    return getCOSObject().getFlag(COSName.FF, FLAG_COMBO);
}
// A freshly constructed PDComboBox must report field type "Ch" and the combo flag set.
@Test
void createComboBox() {
    PDChoice choiceField = new PDComboBox(acroForm);
    // getFieldType() must agree with the /FT entry in the underlying COS dictionary.
    assertEquals(choiceField.getFieldType(), choiceField.getCOSObject().getNameAsString(COSName.FT));
    assertEquals("Ch", choiceField.getFieldType());
    assertTrue(choiceField.isCombo());
}
/**
 * Executes a command line in the given working directory and returns its output.
 *
 * @param workingDir directory in which to run the command
 * @param command    the full command line; it is tokenized before execution
 * @return the command output as a single string
 */
public static String execLine(File workingDir, String command) {
    final String[] args = tokenize(command);
    return exec(false, workingDir, args);
}
// Listing the "target" build directory should mention "karate" regardless of OS.
@Test
void testCommandReturn() {
    // Windows has no standalone "ls"; go through cmd's built-in "dir" instead.
    String cmd = FileUtils.isOsWindows() ? "cmd /c dir" : "ls";
    String result = Command.execLine(new File("target"), cmd);
    assertTrue(result.contains("karate"));
}
/**
 * Returns a sub-list of {@code list} over the range [start, end).
 * Delegates to {@link ListUtil#sub}, which defines the exact slicing semantics
 * (including negative-index handling).
 *
 * @param list  the source list
 * @param start start index (inclusive)
 * @param end   end index (exclusive)
 * @return the sliced list
 */
public static <T> List<T> sub(List<T> list, int start, int end) {
    return ListUtil.sub(list, start, end);
}
// Slicing [-1, 0) with step 2 from a one-element list must yield an empty list.
@Test
public void subInput1NegativeZeroPositiveOutput0() {
    // Arrange
    final List<Integer> list = new ArrayList<>();
    list.add(0);
    final int start = -1;
    final int end = 0;
    final int step = 2;
    // Act
    final List<Integer> retval = CollUtil.sub(list, start, end, step);
    // Assert result
    final List<Integer> arrayList = new ArrayList<>();
    assertEquals(arrayList, retval);
}
/**
 * Extracts all substrings of {@code content} that match {@code LINKS_PATTERN}.
 *
 * @param content text to scan; may be null or empty
 * @return the matched link strings in order of appearance; empty list for null/empty input
 */
public static List<String> extractLinks(String content) {
    if (content == null || content.isEmpty()) {
        return Collections.emptyList();
    }
    final List<String> extractions = new ArrayList<>();
    final Matcher matcher = LINKS_PATTERN.matcher(content);
    // Collect every non-overlapping match in document order.
    while (matcher.find()) {
        extractions.add(matcher.group());
    }
    return extractions;
}
// Null, empty, and link-free content must all yield an empty (never null) list.
@Test
public void testExtractLinksNone() {
    List<String> links = null;
    links = RegexUtils.extractLinks(null);
    assertNotNull(links);
    assertEquals(0, links.size());
    links = RegexUtils.extractLinks("");
    assertNotNull(links);
    assertEquals(0, links.size());
    // Bare host names without a scheme are not treated as links.
    links = RegexUtils.extractLinks("Test with no links " + "What about www.google.com");
    assertNotNull(links);
    assertEquals(0, links.size());
}
/**
 * Returns true when the collection is null or has no elements.
 * Defined as the logical complement of {@code isNotEmpty} so the two can never diverge.
 */
public static boolean isEmpty(Collection<?> col) {
    return !isNotEmpty(col);
}
// isEmpty(Map): empty map -> true, populated map -> false, null -> true.
@Test
public void testIsEmpty() {
    Map<String, Object> map = new HashMap<>();
    Assertions.assertTrue(CollectionUtils.isEmpty(map));
    map.put("k", "v");
    Assertions.assertFalse(CollectionUtils.isEmpty(map));
    map = null;
    Assertions.assertTrue(CollectionUtils.isEmpty(map));
}
/**
 * Resolves a Beam schema for the given type descriptor by delegating to the
 * raw-Class overload; generic type parameters are therefore not consulted.
 */
@Override
public <T> @NonNull Schema schemaFor(TypeDescriptor<T> typeDescriptor) {
    return schemaFor(typeDescriptor.getRawType());
}
// Without explicit typedef registration, schema inference for the struct must fail.
@Test(expected = IllegalStateException.class)
public void testMainStructSchemaWithoutTypedefRegistration() {
    // container typedefs like set<string> cannot be inferred based on available metadata
    defaultSchemaProvider.schemaFor(TypeDescriptor.of(TestThriftStruct.class));
}
/**
 * Converts a Flink table expression into an Iceberg filter expression.
 *
 * <p>Only {@link CallExpression}s whose function is registered in {@code FILTERS}
 * are convertible; anything else yields {@link Optional#empty()} so the caller
 * can fall back to un-pushed filtering.
 *
 * @param flinkExpression the Flink expression to convert
 * @return the equivalent Iceberg expression, or empty if it cannot be translated
 */
public static Optional<Expression> convert(
    org.apache.flink.table.expressions.Expression flinkExpression) {
  if (!(flinkExpression instanceof CallExpression)) {
    return Optional.empty();
  }
  CallExpression call = (CallExpression) flinkExpression;
  Operation op = FILTERS.get(call.getFunctionDefinition());
  if (op != null) {
    switch (op) {
      case IS_NULL:
        return onlyChildAs(call, FieldReferenceExpression.class)
            .map(FieldReferenceExpression::getName)
            .map(Expressions::isNull);
      case NOT_NULL:
        return onlyChildAs(call, FieldReferenceExpression.class)
            .map(FieldReferenceExpression::getName)
            .map(Expressions::notNull);
      // For the ordering comparisons, the second function argument is the mirrored
      // operator used when the literal appears on the LEFT of the field reference
      // (e.g. "1 < field" becomes greaterThan(field, 1)).
      case LT:
        return convertFieldAndLiteral(Expressions::lessThan, Expressions::greaterThan, call);
      case LT_EQ:
        return convertFieldAndLiteral(
            Expressions::lessThanOrEqual, Expressions::greaterThanOrEqual, call);
      case GT:
        return convertFieldAndLiteral(Expressions::greaterThan, Expressions::lessThan, call);
      case GT_EQ:
        return convertFieldAndLiteral(
            Expressions::greaterThanOrEqual, Expressions::lessThanOrEqual, call);
      case EQ:
        // NaN never compares equal, so equality against NaN is mapped to isNaN/notNaN.
        return convertFieldAndLiteral(
            (ref, lit) -> {
              if (NaNUtil.isNaN(lit)) {
                return Expressions.isNaN(ref);
              } else {
                return Expressions.equal(ref, lit);
              }
            },
            call);
      case NOT_EQ:
        return convertFieldAndLiteral(
            (ref, lit) -> {
              if (NaNUtil.isNaN(lit)) {
                return Expressions.notNaN(ref);
              } else {
                return Expressions.notEqual(ref, lit);
              }
            },
            call);
      case NOT:
        return onlyChildAs(call, CallExpression.class)
            .flatMap(FlinkFilters::convert)
            .map(Expressions::not);
      case AND:
        return convertLogicExpression(Expressions::and, call);
      case OR:
        return convertLogicExpression(Expressions::or, call);
      case STARTS_WITH:
        return convertLike(call);
    }
  }
  return Optional.empty();
}
// ">=" must convert in both operand orders: field >= lit, and lit <= field (mirrored).
@Test
public void testGreaterThanEquals() {
    UnboundPredicate<Integer> expected =
        org.apache.iceberg.expressions.Expressions.greaterThanOrEqual("field1", 1);
    Optional<org.apache.iceberg.expressions.Expression> actual =
        FlinkFilters.convert(resolve(Expressions.$("field1").isGreaterOrEqual(Expressions.lit(1))));
    assertThat(actual).isPresent();
    assertPredicatesMatch(expected, actual.get());
    // Literal on the left: "1 <= field1" must flip to greaterThanOrEqual("field1", 1).
    Optional<org.apache.iceberg.expressions.Expression> actual1 =
        FlinkFilters.convert(resolve(Expressions.lit(1).isLessOrEqual(Expressions.$("field1"))));
    assertThat(actual1).isPresent();
    assertPredicatesMatch(expected, actual1.get());
}
/**
 * Sanitizes the statement against the metastore with lambdas enabled.
 * Convenience overload for {@code sanitize(node, metaStore, true)}.
 */
public static Statement sanitize(
    final Statement node,
    final MetaStore metaStore) {
  return sanitize(node, metaStore, true);
}
// Sanitizing a lambda-containing query with lambdas disabled must fail with a clear message.
@Test
public void shouldThrowIfLambdasAreDisabled() {
    // Given:
    final Statement stmt = givenQuery("SELECT transform(arr, x => x+1) FROM TEST1;");
    // When:
    final Exception e = assertThrows(
        UnsupportedOperationException.class,
        () -> AstSanitizer.sanitize(stmt, META_STORE, false)
    );
    // Then:
    assertThat(e.getMessage(), containsString(
        "Lambdas are not enabled at this time."));
}
/**
 * Static factory for a {@link SortValues} transform configured with the given
 * external-sorter options.
 *
 * @param sorterOptions options controlling the buffered external sorter
 * @return a new SortValues transform
 */
public static <PrimaryKeyT, SecondaryKeyT, ValueT>
    SortValues<PrimaryKeyT, SecondaryKeyT, ValueT> create(
        BufferedExternalSorter.Options sorterOptions) {
  return new SortValues<>(sorterOptions);
}
// End-to-end check: after GroupByKey + SortValues, each key's values are ordered by secondary key.
@Test
public void testSecondaryKeySorting() {
    // Create a PCollection of <Key, <SecondaryKey, Value>> pairs.
    PCollection<KV<String, KV<String, Integer>>> input =
        p.apply(
            Create.of(
                Arrays.asList(
                    KV.of("key1", KV.of("secondaryKey2", 20)),
                    KV.of("key2", KV.of("secondaryKey2", 200)),
                    KV.of("key1", KV.of("secondaryKey3", 30)),
                    KV.of("key1", KV.of("secondaryKey1", 10)),
                    KV.of("key2", KV.of("secondaryKey1", 100)))));
    // Group by Key, bringing <SecondaryKey, Value> pairs for the same Key together.
    PCollection<KV<String, Iterable<KV<String, Integer>>>> grouped =
        input.apply(GroupByKey.create());
    // For every Key, sort the iterable of <SecondaryKey, Value> pairs by SecondaryKey.
    PCollection<KV<String, Iterable<KV<String, Integer>>>> groupedAndSorted =
        grouped.apply(SortValues.create(BufferedExternalSorter.options()));
    PAssert.that(groupedAndSorted)
        .satisfies(
            new AssertThatHasExpectedContentsForTestSecondaryKeySorting<>(
                Arrays.asList(
                    KV.of(
                        "key1",
                        Arrays.asList(
                            KV.of("secondaryKey1", 10),
                            KV.of("secondaryKey2", 20),
                            KV.of("secondaryKey3", 30))),
                    KV.of(
                        "key2",
                        Arrays.asList(KV.of("secondaryKey1", 100), KV.of("secondaryKey2", 200))))));
    p.run();
}
/**
 * REST endpoint: lists worker servers visible to the logged-in user.
 *
 * @param loginUser the session user (injected from the request attribute)
 * @return a success Result wrapping the worker server list
 */
@Operation(summary = "listWorker", description = "WORKER_LIST_NOTES")
@GetMapping(value = "/workers")
@ResponseStatus(HttpStatus.OK)
@ApiException(LIST_WORKERS_ERROR)
public Result<List<WorkerServerModel>> listWorker(
        @Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
    List<WorkerServerModel> workerServerModels = monitorService.queryWorker(loginUser);
    return Result.success(workerServerModels);
}
/**
 * GET /monitor/workers must return HTTP 200 with a JSON body whose code is SUCCESS.
 *
 * <p>Fixes: removed the dead statement {@code result.getCode().equals(...)} whose
 * boolean result was silently discarded (the real assertion follows), and deleted
 * a commented-out request parameter left over from a copy/paste.
 */
@Test
public void testListWorker() throws Exception {
    MvcResult mvcResult = mockMvc.perform(get("/monitor/workers")
            .header(SESSION_ID, sessionId))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON))
        .andReturn();
    Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
    Assertions.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
    logger.info(mvcResult.getResponse().getContentAsString());
}
/**
 * Initializes (or refreshes) the cached Dubbo application/registry/consumer configs.
 *
 * <p>The application and consumer configs are created only once; the registry config
 * is rebuilt whenever {@code needUpdateRegistryConfig} reports that the incoming
 * register settings differ from the cached ones.
 *
 * @param dubboRegisterConfig the proxy's Dubbo registry settings
 */
public void init(final DubboRegisterConfig dubboRegisterConfig) {
    if (Objects.isNull(applicationConfig)) {
        applicationConfig = new ApplicationConfig("shenyu_proxy");
    }
    if (needUpdateRegistryConfig(dubboRegisterConfig)) {
        // Build a fresh RegistryConfig and swap it in atomically via a local temp.
        RegistryConfig registryConfigTemp = new RegistryConfig();
        registryConfigTemp.setProtocol(dubboRegisterConfig.getProtocol());
        registryConfigTemp.setId("shenyu_proxy");
        // The proxy only consumes services; it never registers itself.
        registryConfigTemp.setRegister(false);
        registryConfigTemp.setAddress(dubboRegisterConfig.getRegister());
        Optional.ofNullable(dubboRegisterConfig.getGroup()).ifPresent(registryConfigTemp::setGroup);
        registryConfig = registryConfigTemp;
    }
    if (Objects.isNull(consumerConfig)) {
        consumerConfig = new ConsumerConfig();
        consumerConfig.refresh();
        // Optional thread-pool tuning knobs, applied only when configured.
        Optional.ofNullable(dubboRegisterConfig.getThreadpool()).ifPresent(consumerConfig::setThreadpool);
        Optional.ofNullable(dubboRegisterConfig.getCorethreads()).ifPresent(consumerConfig::setCorethreads);
        Optional.ofNullable(dubboRegisterConfig.getThreads()).ifPresent(consumerConfig::setThreads);
        Optional.ofNullable(dubboRegisterConfig.getQueues()).ifPresent(consumerConfig::setQueues);
    }
}
// Re-initializing with a different register address must produce a NEW RegistryConfig instance.
@Test
public void testInit() {
    DubboRegisterConfig dubboRegisterConfig = new DubboRegisterConfig();
    dubboRegisterConfig.setRegister("zookeeper://127.0.0.1:2181");
    dubboRegisterConfig.setProtocol("dubbo");
    this.apacheDubboConfigCache.init(dubboRegisterConfig);
    RegistryConfig registryConfig = null;
    try {
        // The cached registry config is private; read it via reflection.
        Field registryConfigField = ApacheDubboConfigCache.class.getDeclaredField("registryConfig");
        registryConfigField.setAccessible(true);
        Object config = registryConfigField.get(this.apacheDubboConfigCache);
        assertNotNull(config);
        registryConfig = (RegistryConfig) config;
    } catch (NoSuchFieldException | IllegalAccessException e) {
        fail();
    }
    DubboRegisterConfig dubboRegisterConfig1 = new DubboRegisterConfig();
    dubboRegisterConfig1.setRegister("zookeeper://127.0.0.2:2181");
    dubboRegisterConfig1.setProtocol("dubbo");
    this.apacheDubboConfigCache.init(dubboRegisterConfig1);
    RegistryConfig registryConfig1 = null;
    try {
        Field registryConfigField = ApacheDubboConfigCache.class.getDeclaredField("registryConfig");
        registryConfigField.setAccessible(true);
        Object config = registryConfigField.get(this.apacheDubboConfigCache);
        assertNotNull(config);
        registryConfig1 = (RegistryConfig) config;
    } catch (NoSuchFieldException | IllegalAccessException e) {
        fail();
    }
    // A changed address must have triggered a rebuild, not a mutation of the old instance.
    assertNotSame(registryConfig, registryConfig1);
}
/**
 * Finishes a push-notification experiment: streams all recorded samples, records a
 * final state for any sample that is still missing one, and hands the completed
 * stream to the experiment for analysis.
 *
 * <p>Per-sample flow for unfinished samples: fetch the account (with retry),
 * derive the final state from the current account/device, then persist it.
 * A ConditionalCheckFailedException (someone else recorded first) and JSON
 * serialization failures are swallowed; any other persistence failure is logged
 * and the sample is dropped rather than failing the whole run.
 */
@Override
protected void run(final Environment environment, final Namespace namespace,
    final WhisperServerConfiguration configuration, final CommandDependencies commandDependencies)
    throws Exception {
  final PushNotificationExperiment<T> experiment =
      experimentFactory.buildExperiment(commandDependencies, configuration);
  final int maxConcurrency = namespace.getInt(MAX_CONCURRENCY_ARGUMENT);
  log.info("Finishing \"{}\" with max concurrency: {}", experiment.getExperimentName(), maxConcurrency);
  final AccountsManager accountsManager = commandDependencies.accountsManager();
  final PushNotificationExperimentSamples pushNotificationExperimentSamples =
      commandDependencies.pushNotificationExperimentSamples();
  final Flux<PushNotificationExperimentSample<T>> finishedSamples =
      pushNotificationExperimentSamples.getSamples(experiment.getExperimentName(), experiment.getStateClass())
          // Count how many samples already have a final state vs. still need one.
          .doOnNext(sample -> Metrics.counter(SAMPLES_READ_COUNTER_NAME,
              "final", String.valueOf(sample.finalState() != null)).increment())
          .flatMap(sample -> {
            if (sample.finalState() == null) {
              // We still need to record a final state for this sample
              return Mono.fromFuture(() -> accountsManager.getByAccountIdentifierAsync(sample.accountIdentifier()))
                  .retryWhen(Retry.backoff(3, Duration.ofSeconds(1)))
                  .doOnNext(ignored -> ACCOUNT_READ_COUNTER.increment())
                  .flatMap(maybeAccount -> {
                    // Account and/or device may have disappeared; getState must accept nulls.
                    final T finalState = experiment.getState(maybeAccount.orElse(null),
                        maybeAccount.flatMap(account -> account.getDevice(sample.deviceId())).orElse(null));
                    return Mono.fromFuture(
                            () -> pushNotificationExperimentSamples.recordFinalState(sample.accountIdentifier(),
                                sample.deviceId(), experiment.getExperimentName(), finalState))
                        // Lost the write race: another writer already recorded a final state.
                        .onErrorResume(ConditionalCheckFailedException.class, throwable -> Mono.empty())
                        .onErrorResume(JsonProcessingException.class, throwable -> {
                          log.error("Failed to parse sample state JSON", throwable);
                          return Mono.empty();
                        })
                        .retryWhen(Retry.backoff(3, Duration.ofSeconds(1)))
                        .onErrorResume(throwable -> {
                          log.warn("Failed to record final state for {}:{} in experiment {}",
                              sample.accountIdentifier(), sample.deviceId(), experiment.getExperimentName(),
                              throwable);
                          return Mono.empty();
                        })
                        .doOnSuccess(ignored -> FINAL_SAMPLE_STORED_COUNTER.increment());
                  });
            } else {
              return Mono.just(sample);
            }
          }, maxConcurrency);
  experiment.analyzeResults(finishedSamples);
}
// A sample whose final state is already recorded must be passed through untouched:
// no account lookup, no state derivation, no re-recording.
@Test
void runFinalSampleAlreadyRecorded() {
    when(commandDependencies.pushNotificationExperimentSamples().getSamples(eq(EXPERIMENT_NAME), eq(String.class)))
        .thenReturn(Flux.just(new PushNotificationExperimentSample<>(UUID.randomUUID(), Device.PRIMARY_ID, true, "test", "test")));
    assertDoesNotThrow(() -> finishPushNotificationExperimentCommand.run(null, NAMESPACE, null, commandDependencies));
    verify(commandDependencies.accountsManager(), never()).getByAccountIdentifier(any());
    verify(experiment, never()).getState(any(), any());
    verify(commandDependencies.pushNotificationExperimentSamples(), never())
        .recordFinalState(any(), anyByte(), any(), any());
}
/**
 * Renders a parsed config value back to its string form for the given type.
 *
 * <p>Null values render as null; an unknown (null) type falls back to
 * {@code toString()}. LIST joins elements with commas; CLASS renders the
 * fully-qualified class name; all scalar types use {@code toString()}.
 *
 * @throws IllegalStateException if the type enum has a value this method does not handle
 */
public static String convertToString(Object parsedValue, Type type) {
    if (parsedValue == null) {
        return null;
    }
    if (type == null) {
        return parsedValue.toString();
    }
    switch (type) {
        case LIST:
            // Comma-join the element string forms (NPE on a null element, as before).
            final StringBuilder joined = new StringBuilder();
            for (final Object element : (List<?>) parsedValue) {
                if (joined.length() > 0) {
                    joined.append(',');
                }
                joined.append(element.toString());
            }
            return joined.toString();
        case CLASS:
            return ((Class<?>) parsedValue).getName();
        case BOOLEAN:
        case SHORT:
        case INT:
        case LONG:
        case DOUBLE:
        case STRING:
        case PASSWORD:
            return parsedValue.toString();
        default:
            throw new IllegalStateException("Unknown type.");
    }
}
// DOUBLE values render via toString(); null input renders as null.
@Test
public void testConvertValueToStringDouble() {
    assertEquals("3.125", ConfigDef.convertToString(3.125, Type.DOUBLE));
    assertNull(ConfigDef.convertToString(null, Type.DOUBLE));
}
/**
 * Decides whether an upload should be rejected based on file suffix and media type.
 *
 * <p>Evaluation order (first match wins): deny-suffix list, allow-suffix list,
 * deny-media-type list, allow-media-type list. When a configured allow list does
 * NOT match, the running default flips to "deny"; when a configured deny list does
 * NOT match, it flips back to "allow". If no list matches, the final default is returned.
 *
 * @param name      the uploaded file name; its lowercased extension is extracted here
 * @param mediaType the declared media type of the upload
 * @return true if the upload must be rejected
 */
public boolean denied(String name, MediaType mediaType) {
    // Extract the extension (empty string when there is no dot), lowercased with a fixed locale.
    String suffix = (name.contains(".") ? name.substring(name.lastIndexOf(".") + 1) : "").toLowerCase(Locale.ROOT);
    boolean defaultDeny = false;
    if (CollectionUtils.isNotEmpty(denyFiles)) {
        if (denyFiles.contains(suffix)) {
            return true;
        }
        defaultDeny = false;
    }
    if (CollectionUtils.isNotEmpty(allowFiles)) {
        if (allowFiles.contains(suffix)) {
            return false;
        }
        // Suffix not on the allow list: lean towards deny unless a later check overrides.
        defaultDeny = true;
    }
    if (CollectionUtils.isNotEmpty(denyMediaType)) {
        if (denyMediaType.contains(mediaType.toString())) {
            return true;
        }
        // NOTE(review): this resets a deny verdict from the suffix allow-list above — confirm intended.
        defaultDeny = false;
    }
    if (CollectionUtils.isNotEmpty(allowMediaType)) {
        if (allowMediaType.contains(mediaType.toString())) {
            return false;
        }
        defaultDeny = true;
    }
    return defaultDeny;
}
// With only an allow-media-type list configured: listed type passes, unlisted type is denied.
@Test
public void testDenyWithAllowMediaType(){
    FileUploadProperties uploadProperties=new FileUploadProperties();
    uploadProperties.setAllowMediaType(new HashSet<>(Arrays.asList("application/xls","application/json")));
    assertFalse(uploadProperties.denied("test.json", MediaType.APPLICATION_JSON));
    assertTrue(uploadProperties.denied("test.exe", MediaType.ALL));
}
/**
 * Builds the canonical versioned key name of the form {@code name@version}.
 *
 * @param name    the base key name
 * @param version the numeric key version
 * @return the name and version joined by '@'
 */
protected static String buildVersionName(String name, int version) {
    return new StringBuilder(name).append('@').append(version).toString();
}
// Versioned key names are "<name>@<version>".
@Test
public void testBuildVersionName() throws Exception {
    assertEquals("/a/b@3", KeyProvider.buildVersionName("/a/b", 3));
    assertEquals("/aaa@12", KeyProvider.buildVersionName("/aaa", 12));
}
/**
 * Hashes the given value and returns the hash rendered as a string.
 * Delegates to {@code getHashString}, which defines the hash algorithm and encoding.
 */
public static String hashToString(final String value) {
    return getHashString(value).toString();
}
// Pins the hash output for a known input so the algorithm/encoding cannot silently change.
@Test
void shouldReturnConsistentHashString() {
    assertEquals(Hashing.hashToString("random"), "52a21b70c71a4e7819b310ddc9f83874");
}
/**
 * Builds a Hibernate SessionFactory using the default bundle name.
 * Convenience overload delegating to the name-taking variant with {@code DEFAULT_NAME}.
 */
public SessionFactory build(HibernateBundle<?> bundle,
                            Environment environment,
                            PooledDataSourceFactory dbConfig,
                            List<Class<?>> entities) {
    return build(bundle, environment, dbConfig, entities, DEFAULT_NAME);
}
// The managed data source created by build() must carry the default pool name "hibernate".
@Test
void setsPoolName() {
    build();
    // Capture the SessionFactoryManager handed to the lifecycle so we can inspect its pool.
    ArgumentCaptor<SessionFactoryManager> sessionFactoryManager = ArgumentCaptor.forClass(SessionFactoryManager.class);
    verify(lifecycleEnvironment).manage(sessionFactoryManager.capture());
    assertThat(sessionFactoryManager.getValue().getDataSource())
        .isInstanceOfSatisfying(ManagedPooledDataSource.class,
            dataSource -> assertThat(dataSource.getPool().getName()).isEqualTo("hibernate"));
}
/**
 * Creates a new, empty {@link JSONObject}.
 * Factory entry point for fluent construction (e.g. {@code createObj().set(...)}).
 */
public static JSONObject createObj() {
    return new JSONObject();
}
// Regression test: an SQLException value must serialize as a plain string, not be iterated.
@Test
public void sqlExceptionTest() {
    //https://github.com/dromara/hutool/issues/1399
    // SQLException implements Iterable; iterating it (the old default) caused a stack
    // overflow. After the fix it is rendered as its string form only.
    final JSONObject set = JSONUtil.createObj().set("test", new SQLException("test"));
    assertEquals("{\"test\":\"java.sql.SQLException: test\"}", set.toString());
}
/**
 * Entry point for a SAML AuthnRequest posted by a service provider.
 *
 * <p>When a {@code SAMLRequest} parameter is present, the authentication process is
 * started and the user is redirected via the routing-service or entrance flow
 * depending on the request's protocol type. Without the parameter, a 400 redirect
 * to the artifact endpoint is returned.
 */
@Operation(summary = "Receive SAML AuthnRequest")
@PostMapping(value = {"/frontchannel/saml/v4/entrance/request_authentication",
    "/frontchannel/saml/v4/idp/request_authentication"})
public RedirectView requestAuthenticationService(HttpServletRequest request)
        throws SamlValidationException, SharedServiceClientException, DienstencatalogusException,
        UnsupportedEncodingException, ComponentInitializationException, MessageDecodingException,
        SamlSessionException, SamlParseException {
    logger.info("Receive SAML AuthnRequest");
    if (request.getParameter("SAMLRequest") != null) {
        AuthenticationRequest authenticationRequest = authenticationService.startAuthenticationProcess(request);
        // Routing-service requests go through the IdP service; everything else uses the entrance service.
        return new RedirectView(authenticationRequest.getProtocolType().equals(ProtocolType.SAML_ROUTERINGSDIENST)
            ? authenticationIdpService.redirectWithCorrectAttributesForAd(request, authenticationRequest)
            : authenticationEntranceService.redirectWithCorrectAttributesForAd(request, authenticationRequest)
        );
    } else {
        // No SAMLRequest parameter: respond with 400 and point at the artifact redirect endpoint.
        RedirectView redirectView = new RedirectView("/saml/v4/idp/redirect_with_artifact");
        redirectView.setStatusCode(HttpStatus.BAD_REQUEST);
        return redirectView;
    }
}
// A routing-service (SAML_ROUTERINGSDIENST) request must be redirected via the IdP service.
@Test
public void requestAuthenticationIdpServiceTest() throws SamlSessionException, SharedServiceClientException,
        DienstencatalogusException, ComponentInitializationException, SamlValidationException,
        MessageDecodingException, SamlParseException, UnsupportedEncodingException {
    AuthenticationRequest authenticationRequest = new AuthenticationRequest();
    authenticationRequest.setProtocolType(ProtocolType.SAML_ROUTERINGSDIENST);
    when(request.getParameter("SAMLRequest")).thenReturn("test");
    when(authenticationServiceMock.startAuthenticationProcess(any(HttpServletRequest.class))).thenReturn(authenticationRequest);
    RedirectView result = authenticationControllerMock.requestAuthenticationService(request);
    assertNotNull(result);
    verify(authenticationServiceMock, times(1)).startAuthenticationProcess(any(HttpServletRequest.class));
    verify(authenticationIdpServiceMock, times(1)).redirectWithCorrectAttributesForAd(any(HttpServletRequest.class), any(AuthenticationRequest.class));
}
/**
 * Adds {@code newExchange} under {@code key} using optimistic-locking semantics.
 *
 * <p>When {@code oldExchange} is null the key must not yet exist (checked via
 * {@code Cache#getAndPut}); otherwise the stored value must still equal the
 * marshalled {@code oldExchange} (checked via {@code Cache#replace}). Either
 * violation raises {@link OptimisticLockingException}.
 *
 * <p>Fix: the error log messages previously referenced "IMap#putIfAbsend"
 * (typo, and the wrong API — this repository uses a javax.cache Cache, not a
 * Hazelcast IMap); they now name the operations actually performed.
 *
 * @return the previous exchange passed in by the caller ({@code oldExchange})
 * @throws UnsupportedOperationException if the repository is not in optimistic mode
 * @throws OptimisticLockingException    if another writer changed the entry concurrently
 */
@Override
public Exchange add(CamelContext camelContext, String key, Exchange oldExchange, Exchange newExchange)
        throws OptimisticLockingException {
    if (!optimistic) {
        // This overload is only meaningful for repositories created in optimistic mode.
        throw new UnsupportedOperationException();
    }
    LOG.trace("Adding an Exchange with ID {} for key {} in an optimistic manner.", newExchange.getExchangeId(), key);
    if (oldExchange == null) {
        DefaultExchangeHolder newHolder = DefaultExchangeHolder.marshal(newExchange, true, allowSerializedHeaders);
        DefaultExchangeHolder oldHolder = cache.getAndPut(key, newHolder);
        if (oldHolder != null) {
            // Someone stored an exchange for this key concurrently; our put must be rejected.
            Exchange exchange = unmarshallExchange(camelContext, oldHolder);
            LOG.error(
                    "Optimistic locking failed for exchange with key {}: Cache#getAndPut returned Exchange with ID {}, while it's expected no exchanges to be returned",
                    key, exchange != null ? exchange.getExchangeId() : "<null>");
            throw new OptimisticLockingException();
        }
    } else {
        DefaultExchangeHolder oldHolder = DefaultExchangeHolder.marshal(oldExchange, true, allowSerializedHeaders);
        DefaultExchangeHolder newHolder = DefaultExchangeHolder.marshal(newExchange, true, allowSerializedHeaders);
        if (!cache.replace(key, oldHolder, newHolder)) {
            // The stored value no longer matches oldExchange; a concurrent update won.
            LOG.error(
                    "Optimistic locking failed for exchange with key {}: Cache#replace returned false, while it's expected to swap the old Exchange for the new one",
                    key);
            throw new OptimisticLockingException();
        }
    }
    LOG.trace("Added an Exchange with ID {} for key {} in optimistic manner.", newExchange.getExchangeId(), key);
    return oldExchange;
}
// The optimistic add(...) overload must be rejected on a repository created in pessimistic mode.
@Test
public void nonOptimisticRepoFailsOnOptimisticAdd() throws Exception {
    JCacheAggregationRepository repo = createRepository(false);
    repo.start();
    try {
        final CamelContext context = context();
        Exchange oldOne = new DefaultExchange(context);
        Exchange newOne = new DefaultExchange(context);
        assertThrows(UnsupportedOperationException.class, () -> repo.add(context, "myKey", oldOne, newOne));
    } finally {
        // Always stop the repository, even when the assertion fails.
        repo.stop();
    }
}
/**
 * Validates the daemon's default OCI image against the configured allow-list.
 *
 * <p>No allow-list configured means validation is skipped entirely; otherwise the
 * default image must pass {@code validateImage} (which throws on a violation).
 *
 * @param conf the daemon configuration map
 */
public static void validateImageInDaemonConf(Map<String, Object> conf) {
    final List<String> allowedImages = getAllowedImages(conf, true);
    if (allowedImages.isEmpty()) {
        // Nothing to validate against when no allow-list is configured.
        LOG.debug("{} is not configured; skip image validation", DaemonConfig.STORM_OCI_ALLOWED_IMAGES);
        return;
    }
    final String defaultImage = (String) conf.get(DaemonConfig.STORM_OCI_IMAGE);
    validateImage(allowedImages, defaultImage, DaemonConfig.STORM_OCI_IMAGE);
}
// With an allow-list configured but no default image set, validation must fail.
@Test
public void validateImageInDaemonConfWithNullDefault() {
    assertThrows(IllegalArgumentException.class, () -> {
        Map<String, Object> conf = new HashMap<>();
        List<String> allowedImages = new ArrayList<>();
        allowedImages.add("storm/rhel7:dev_test");
        conf.put(DaemonConfig.STORM_OCI_ALLOWED_IMAGES, allowedImages);
        conf.put(DaemonConfig.STORM_OCI_IMAGE, null); //or not set
        OciUtils.validateImageInDaemonConf(conf);
    });
}
/**
 * Reports whether this Values instance differs from {@code otherValues}.
 *
 * <p>Two empty instances never differ; an empty/non-empty pair always differs;
 * otherwise a size mismatch or unequal values counts as a change.
 *
 * @param otherValues the instance to compare against
 * @return true if the two instances differ
 */
public boolean isThereChanges(final Values otherValues) {
    final boolean thisEmpty = this.isEmpty();
    final boolean otherEmpty = otherValues.isEmpty();
    if (thisEmpty && otherEmpty) {
        return false;
    }
    if (thisEmpty != otherEmpty) {
        // Exactly one side is empty.
        return true;
    }
    if (this.size() != otherValues.size()) {
        return true;
    }
    return !areValuesEqual(otherValues);
}
// Two freshly constructed (empty) Values instances must report no changes.
@Test
void testChanges() throws Exception {
    final Values a = new Values();
    final Values b = new Values();
    assertThat(a.isThereChanges(b)).isFalse();
}
public static String[] splitString( String string, String separator ) { /* * 0123456 Example a;b;c;d --> new String[] { a, b, c, d } */ // System.out.println("splitString ["+path+"] using ["+separator+"]"); List<String> list = new ArrayList<>(); if ( string == null || string.length() == 0 ) { return new String[] {}; } int sepLen = separator.length(); int from = 0; int end = string.length() - sepLen + 1; for ( int i = from; i < end; i += sepLen ) { if ( string.substring( i, i + sepLen ).equalsIgnoreCase( separator ) ) { // OK, we found a separator, the string to add to the list // is [from, i[ list.add( nullToEmpty( string.substring( from, i ) ) ); from = i + sepLen; } } // Wait, if the string didn't end with a separator, we still have information at the end of the string... // In our example that would be "d"... if ( from + sepLen <= string.length() ) { list.add( nullToEmpty( string.substring( from, string.length() ) ) ); } return list.toArray( new String[list.size()] ); }
// An enclosed value containing the delimiter must stay in one chunk, with the enclosure stripped.
@Test
public void testSplitStringWithDelimiterAndQuoteEnclosureRemoveEnclosure1() {
    //"Hello, world"
    String mask = "%sHello%s world%s";
    String[] chunks1 = { "Hello" + DELIMITER1 + " world" };
    String stringToSplit = String.format( mask, ENCLOSURE1, DELIMITER1, ENCLOSURE1 );
    // Last argument 'true' requests removal of the enclosure characters from the result.
    String[] result = Const.splitString( stringToSplit, DELIMITER1, ENCLOSURE1, true );
    assertSplit( result, chunks1 );
}
/**
 * Equality is deliberately based on Subject IDENTITY (reference equality), not on
 * subject contents: two UGIs are equal only when they wrap the very same Subject
 * instance. This makes distinct logins for the same user unequal by design.
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    } else if (o == null || getClass() != o.getClass()) {
        return false;
    } else {
        // Intentional '==' on the Subject reference — see class-level contract.
        return subject == ((UserGroupInformation) o).subject;
    }
}
// UGI equality is Subject-identity based: same user, different Subject -> not equal;
// same Subject -> equal with matching hash codes.
@Test (timeout = 30000)
public void testEquals() throws Exception {
    UserGroupInformation uugi = UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
    assertEquals(uugi, uugi);
    // The subjects should be different, so this should fail
    UserGroupInformation ugi2 = UserGroupInformation.createUserForTesting(USER_NAME, GROUP_NAMES);
    assertFalse(uugi.equals(ugi2));
    assertFalse(uugi.hashCode() == ugi2.hashCode());
    // two ugi that have the same subject need to be equal
    UserGroupInformation ugi3 = new UserGroupInformation(uugi.getSubject());
    assertEquals(uugi, ugi3);
    assertEquals(uugi.hashCode(), ugi3.hashCode());
}
/**
 * Opens a download stream for the given file, honoring resume (Range) requests.
 *
 * <p>On 200/206 the response body is returned as a stream that releases the HTTP
 * connection on close. On 404 the cached file id is invalidated before the failure
 * is mapped; all other statuses are mapped to backend exceptions.
 *
 * @param file     the remote file to read
 * @param status   transfer status; when appending, its range is sent as a Range header
 * @param callback connection callback (unused here)
 * @return the response body stream
 * @throws BackgroundException on HTTP or I/O failure
 */
@Override
public InputStream read(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    try {
        final StoregateApiClient client = session.getClient();
        final HttpUriRequest request = new HttpGet(String.format("%s/v4.2/download/files/%s?stream=true",
            client.getBasePath(), fileid.getFileId(file)));
        if(status.isAppend()) {
            // Resuming: request only the remaining byte range.
            final HttpRange range = HttpRange.withStatus(status);
            final String header;
            if(TransferStatus.UNKNOWN_LENGTH == range.getEnd()) {
                // Open-ended range when the total length is unknown.
                header = String.format("bytes=%d-", range.getStart());
            }
            else {
                header = String.format("bytes=%d-%d", range.getStart(), range.getEnd());
            }
            if(log.isDebugEnabled()) {
                log.debug(String.format("Add range header %s for file %s", header, file));
            }
            request.addHeader(new BasicHeader(HttpHeaders.RANGE, header));
            // Disable compression
            request.addHeader(new BasicHeader(HttpHeaders.ACCEPT_ENCODING, "identity"));
        }
        final HttpResponse response = client.getClient().execute(request);
        switch(response.getStatusLine().getStatusCode()) {
            case HttpStatus.SC_OK:
            case HttpStatus.SC_PARTIAL_CONTENT:
                // Stream that releases the underlying connection when closed.
                return new HttpMethodReleaseInputStream(response);
            case HttpStatus.SC_NOT_FOUND:
                // Stale file id: drop it from the cache, then fall through to error mapping.
                fileid.cache(file, null);
                // Break through
            default:
                throw new DefaultHttpResponseExceptionMappingService().map("Download {0} failed",
                    new HttpResponseException(
                        response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()), file);
        }
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map("Download {0} failed", e, file);
    }
}
// Closing the read stream without consuming it must release the HTTP entity
// (no bytes read) and not leak the connection.
@Test
public void testReadCloseReleaseEntity() throws Exception {
    final TransferStatus status = new TransferStatus();
    final byte[] content = RandomUtils.nextBytes(32769);
    final TransferStatus writeStatus = new TransferStatus();
    writeStatus.setLength(content.length);
    final StoregateIdProvider nodeid = new StoregateIdProvider(session);
    final Path room = new StoregateDirectoryFeature(session, nodeid).mkdir(
        new Path(String.format("/My files/%s", new AlphanumericRandomStringService().random()),
            EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
    final Path test = new Path(room, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
    final StoregateWriteFeature writer = new StoregateWriteFeature(session, nodeid);
    final HttpResponseOutputStream<File> out = writer.write(test, writeStatus, new DisabledConnectionCallback());
    assertNotNull(out);
    new StreamCopier(writeStatus, writeStatus).transfer(new ByteArrayInputStream(content), out);
    final CountingInputStream in = new CountingInputStream(new StoregateReadFeature(session, nodeid).read(test, status, new DisabledConnectionCallback()));
    in.close();
    assertEquals(0L, in.getByteCount(), 0L);
    new StoregateDeleteFeature(session, nodeid).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
public FEELFnResult<List<Object>> invoke(@ParameterName("list") Object[] lists) { if ( lists == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "lists", "cannot be null")); } final Set<Object> resultSet = new LinkedHashSet<>(); for ( final Object list : lists ) { if ( list instanceof Collection ) { resultSet.addAll((Collection) list); } else { resultSet.add(list); } } // spec requires us to return a new list return FEELFnResult.ofResult( new ArrayList<>(resultSet) ); }
// A single null ELEMENT (not a null array) unions to a one-element list containing null.
@Test
void invokeListIsNull() {
    FunctionTestUtil.assertResult(unionFunction.invoke(new Object[]{null}), Collections.singletonList(null));
}
/**
 * Shuts the scheduler down immediately, discarding queued tasks.
 */
public void shutdown() {
    // we want to immediately terminate; we don't want to wait till pending tasks have completed.
    scheduler.shutdownNow();
}
// Smoke test: shutting the registry down must not throw.
@Test
public void shutdown() {
    metricsRegistry.shutdown();
}
/**
 * Handles a TERMINATE CLUSTER request: validates topic-deletion patterns and
 * streams properties, then dispatches the terminate statement.
 *
 * <p>Fails fast (throws) if the resource is not yet configured or the delete-topic
 * patterns are invalid; any execution failure is converted into a server-error
 * response rather than propagated.
 */
public EndpointResponse terminateCluster(
    final KsqlSecurityContext securityContext,
    final ClusterTerminateRequest request
) {
  LOG.info("Received: " + request);
  // Throws (e.g. "Server initializing") until the server is fully configured.
  throwIfNotConfigured();
  ensureValidPatterns(request.getDeleteTopicList());
  try {
    // Reject any streams properties on the server's deny list before executing.
    final Map<String, Object> streamsProperties = request.getStreamsProperties();
    denyListPropertyValidator.validateAll(streamsProperties);

    final KsqlEntityList entities = handler.execute(
        securityContext,
        TERMINATE_CLUSTER,
        new SessionProperties(
            streamsProperties,
            localHost,
            localUrl,
            false
        )
    );
    return EndpointResponse.ok(entities);
  } catch (final Exception e) {
    // Execution failures map to a server-error response for the fixed statement text.
    return Errors.serverErrorForStatement(
        e, TerminateCluster.TERMINATE_CLUSTER_STATEMENT_TEXT, new KsqlEntityList());
  }
}
// terminateCluster on a not-yet-configured resource must fail with 503 "Server initializing".
@Test
public void shouldThrowOnHandleTerminateIfNotConfigured() {
    // Given: a freshly built (unconfigured) resource
    ksqlResource = new KsqlResource(
        ksqlEngine,
        commandRunner,
        DISTRIBUTED_COMMAND_RESPONSE_TIMEOUT,
        activenessRegistrar,
        (ec, sc) -> InjectorChain.of(
            schemaInjectorFactory.apply(sc),
            topicInjectorFactory.apply(ec),
            new TopicDeleteInjector(ec, sc)),
        Optional.of(authorizationValidator),
        errorsHandler,
        denyListPropertyValidator,
        commandRunnerWarning
    );
    // When:
    final KsqlRestException e = assertThrows(
        KsqlRestException.class,
        () -> ksqlResource.terminateCluster(
            securityContext,
            new ClusterTerminateRequest(ImmutableList.of(""))
        )
    );
    // Then:
    assertThat(e, exceptionStatusCode(CoreMatchers.is(SERVICE_UNAVAILABLE.code())));
    assertThat(e, exceptionErrorMessage(errorMessage(Matchers.is("Server initializing"))));
}
/**
 * JSON-escapes the given text for safe embedding in a JSON string value.
 *
 * @param text the text to escape; null and empty inputs are returned unchanged
 * @return the escaped text, or the input itself when null/empty
 */
public String json(String text) {
    if (text == null || text.isEmpty()) {
        return text;
    }
    return StringEscapeUtils.escapeJson(text);
}
// json(): null passes through, empty passes through, quotes get backslash-escaped.
@Test
public void testJson() {
    String text = null;
    EscapeTool instance = new EscapeTool();
    String expResult = null;
    String result = instance.json(text);
    assertEquals(expResult, result);
    text = "";
    expResult = "";
    result = instance.json(text);
    assertEquals(expResult, result);
    text = "test \"quote\"\"";
    expResult = "test \\\"quote\\\"\\\"";
    result = instance.json(text);
    assertEquals(expResult, result);
}
/**
 * Convenience overload that builds the stream aggregate with a freshly created
 * default {@code AggregateParamsFactory}.
 */
public static KTableHolder<GenericKey> build(
    final KGroupedStreamHolder groupedStream,
    final StreamAggregate aggregate,
    final RuntimeBuildContext buildContext,
    final MaterializedFactory materializedFactory) {
  final AggregateParamsFactory defaultParamsFactory = new AggregateParamsFactory();
  return build(groupedStream, aggregate, buildContext, materializedFactory, defaultParamsFactory);
}
// Test: building an unwindowed aggregate must create the key serde with the expected
// key format, physical schema and materialization context.
@Test public void shouldBuildKeySerdeCorrectlyForUnwindowedAggregate() {
// Given:
givenUnwindowedAggregate();
// When:
aggregate.build(planBuilder, planInfo);
// Then:
verify(buildContext).buildKeySerde(KEY_FORMAT, PHYSICAL_AGGREGATE_SCHEMA, MATERIALIZE_CTX); }
// Factory method: wraps the given dynamic-table factory and its context in a TableFactoryHelper.
public static TableFactoryHelper createTableFactoryHelper( DynamicTableFactory factory, DynamicTableFactory.Context context) { return new TableFactoryHelper(factory, context); }
// Test: validation of a factory with a map option must reject the unknown key and list
// both the unsupported and the supported options in the error message.
@Test void testInvalidFactoryHelperWithMapOption() { final Map<String, String> options = new HashMap<>(); options.put("properties.prop-1", "value-1"); options.put("properties.prop-2", "value-2"); options.put("unknown", "value-3"); final FactoryUtil.TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper( new TestFactoryWithMap(), FactoryMocks.createTableContext(SCHEMA, options)); assertThatThrownBy(helper::validate) .satisfies( anyCauseMatches( ValidationException.class, "Unsupported options found for 'test-factory-with-map'.\n\n" + "Unsupported options:\n\n" + "unknown\n\n" + "Supported options:\n\n" + "connector\n" + "properties\n" + "properties.prop-1\n" + "properties.prop-2\n" + "property-version")); }
// Atomically attempts to transition the lifecycle state to 'to'. The update function
// advances the state only when TRANSITIONS permits moving from the current state to
// 'to'; otherwise the state is left unchanged. Returns true only when the state
// actually changed (a "transition" to the current state reports false).
public boolean tryToMoveTo(State to) { AtomicReference<State> lastFrom = new AtomicReference<>(); State newState = this.state.updateAndGet(from -> { lastFrom.set(from); if (TRANSITIONS.get(from).contains(to)) { return to; } return from; }); boolean updated = newState == to && lastFrom.get() != to; LOG.trace("tryToMoveTo from {} to {} => {}", lastFrom.get(), to, updated); return updated; }
// Test: for every state, a self-transition via tryToMoveTo must report false.
@Test public void no_state_can_not_move_to_itself() { for (State state : values()) { assertThat(newLifeCycle(state).tryToMoveTo(state)).isFalse(); } }
static UWildcard create(Kind kind, @Nullable UTree<?> bound) { checkArgument(BOUND_KINDS.containsKey(kind)); // verify bound is null iff kind is UNBOUNDED_WILDCARD checkArgument((bound == null) == (kind == Kind.UNBOUNDED_WILDCARD)); return new AutoValue_UWildcard(kind, bound); }
// Test: a bounded wildcard must survive Java serialization round-tripping.
@Test public void serialization() { SerializableTester.reserializeAndAssert( UWildcard.create(Kind.EXTENDS_WILDCARD, UClassIdent.create("java.lang.Number"))); }
// Synchronously sends the given request through the underlying API client and
// returns its typed response.
public <T extends BaseRequest<T, R>, R extends BaseResponse> R execute(BaseRequest<T, R> request) { return api.send(request); }
// Test: setting a sticker-set thumbnail via both the deprecated and current request types
// succeeds, the resulting thumb has the expected dimensions/size, and omitting the file
// clears the thumbnail.
@Test public void setStickerSetThumb() { String thumbFile = "CAACAgIAAxkBAAJ0ll6DO4bNCynpfZmS6g-YcGY2zrP5AAIBAAPANk8TGC5zMKs_LVEYBA"; BaseResponse response = bot.execute(new SetStickerSetThumb(stickerSetAnim, chatId, thumbFile)); assertTrue(response.isOk()); response = bot.execute(new SetStickerSetThumbnail(stickerSetAnim, chatId, thumbFile)); assertTrue(response.isOk()); StickerSet set = bot.execute(new GetStickerSet(stickerSetAnim)).stickerSet(); assertFalse(set.isAnimated()); PhotoSize thumb = set.thumb(); PhotoSizeTest.checkPhotos(thumb); assertEquals(Integer.valueOf(100), thumb.width()); assertEquals(Integer.valueOf(100), thumb.height()); assertEquals(Long.valueOf(8244), thumb.fileSize());
// clear thumb by not sending it
response = bot.execute(new SetStickerSetThumb(stickerSetAnim, chatId)); assertTrue(response.isOk()); response = bot.execute(new SetStickerSetThumbnail(stickerSetAnim, chatId)); assertTrue(response.isOk()); }
// Serializes the key values into a single byte array. Single-value keys are delegated
// to asBytesSingleVal. Otherwise a first pass computes the total size (fixed widths
// for Integer/Long/Float/Double; length-prefixed bytes for String/ByteArray/BigDecimal,
// cached to avoid re-encoding) and a second pass writes the values in order.
// Unsupported value types cause an IllegalStateException.
public byte[] asBytes() { if (_values.length == 1) { return asBytesSingleVal(_values[0]); } int sizeInBytes = 0; byte[][] cache = new byte[_values.length][]; for (int i = 0; i < _values.length; i++) { Object value = _values[i]; if (value instanceof Integer) { sizeInBytes += Integer.BYTES; } else if (value instanceof Long) { sizeInBytes += Long.BYTES; } else if (value instanceof String) { cache[i] = ((String) value).getBytes(StandardCharsets.UTF_8); sizeInBytes += cache[i].length + Integer.BYTES; } else if (value instanceof ByteArray) { cache[i] = ((ByteArray) value).getBytes(); sizeInBytes += cache[i].length + Integer.BYTES; } else if (value instanceof Float) { sizeInBytes += Float.BYTES; } else if (value instanceof Double) { sizeInBytes += Double.BYTES; } else if (value instanceof BigDecimal) { cache[i] = BigDecimalUtils.serialize((BigDecimal) value); sizeInBytes += cache[i].length + Integer.BYTES; } else { throw new IllegalStateException( String.format("Unsupported value: %s of type: %s", value, value != null ? value.getClass() : null)); } } ByteBuffer byteBuffer = ByteBuffer.allocate(sizeInBytes); for (int i = 0; i < _values.length; i++) { Object value = _values[i]; if (value instanceof Integer) { byteBuffer.putInt((Integer) value); } else if (value instanceof Long) { byteBuffer.putLong((Long) value); } else if (value instanceof Float) { byteBuffer.putFloat((Float) value); } else if (value instanceof Double) { byteBuffer.putDouble((Double) value); } else { byteBuffer.putInt(cache[i].length); byteBuffer.put(cache[i]); } } return byteBuffer.array(); }
// Test: asBytes() encodes a mixed-type primary key so that each value can be read back
// in order from the resulting buffer (length-prefixed string/bytes/BigDecimal, fixed
// widths for int/double/float/long).
@Test public void testSerialization() { byte[] rawbytes = {0xa, 0x2, (byte) 0xff}; Object[] values = new Object[]{ "foo_bar", 2, 2.0d, 3.14f, System.currentTimeMillis(), new ByteArray(rawbytes), new BigDecimal(100) }; PrimaryKey pk = new PrimaryKey(values); byte[] bytes = pk.asBytes(); ByteBuffer byteBuffer = ByteBuffer.wrap(bytes); int length = byteBuffer.getInt(); assertEquals(length, ((String) values[0]).length()); byte[] arr = new byte[length]; byteBuffer.get(arr); String out = new String(arr, StandardCharsets.UTF_8); assertEquals(out, values[0]); assertEquals(byteBuffer.getInt(), values[1]); assertEquals(byteBuffer.getDouble(), values[2]); assertEquals(byteBuffer.getFloat(), values[3]); assertEquals(byteBuffer.getLong(), values[4]); assertEquals(byteBuffer.getInt(), rawbytes.length); arr = new byte[rawbytes.length]; byteBuffer.get(arr); assertEquals(arr, rawbytes); length = byteBuffer.getInt(); arr = new byte[length]; byteBuffer.get(arr); assertEquals(BigDecimalUtils.deserialize(arr), values[6]); }
// Simple accessor for the schema name; may return null when never set.
public String getSchemaName() { return schemaName; }
// Test: a freshly constructed DdlResult has a null schema name.
@Test public void getSchemaNameOutputNull() {
// Arrange
final DdlResult objectUnderTest = new DdlResult();
// Act
final String actual = objectUnderTest.getSchemaName();
// Assert result
Assert.assertNull(actual); }
// Delegates the mandatory-field check to the overload that inspects this object's
// backing JSON node.
protected boolean hasFields(String... mandatoryFields) { return hasFields(object, mandatoryFields); }
// Test: hasFields() accepts all present mandatory fields and flags a missing one.
@Test public void hasFields() { assertTrue("does not have mandatory field", cfg.hasFields(TEXT, LONG, DOUBLE, MAC)); assertTrue("did not detect missing field", expectInvalidField(() -> cfg.hasFields("none"))); }
// Static factory for a new, empty Builder instance.
public static Builder builder() { return new Builder(); }
// Test: each request must get its own clone of the configured Retryer, so retry state
// is not shared between calls; two calls against a 503/200 sequence yield 4 requests.
@Test void ensureRetryerClonesItself() throws Exception { server.enqueue(new MockResponse().setResponseCode(503).setBody("foo 1")); server.enqueue(new MockResponse().setResponseCode(200).setBody("foo 2")); server.enqueue(new MockResponse().setResponseCode(503).setBody("foo 3")); server.enqueue(new MockResponse().setResponseCode(200).setBody("foo 4")); MockRetryer retryer = new MockRetryer(); TestInterface api = Feign.builder().retryer(retryer) .errorDecoder( (methodKey, response) -> new RetryableException(response.status(), "play it again sam!", HttpMethod.POST, NON_RETRYABLE, response.request())) .target(TestInterface.class, "http://localhost:" + server.getPort()); api.post(); api.post();
// if retryer instance was reused, this statement will throw an exception
assertThat(server.getRequestCount()).isEqualTo(4); }
// No-payload overload: delegates to inputSchemaFactory(FieldType) with a null payload type.
InputSchemaFactory inputSchemaFactory() { return inputSchemaFactory(null); }
// Test: inputSchemaFactory builds the expected schema — attributes + event timestamp by
// default, plus an optional payload field (bytes or row), honoring a custom key prefix
// and appended user fields.
@Test public void testInputSchemaFactory() { assertEquals( Schema.of( Field.of(DEFAULT_ATTRIBUTES_KEY_NAME, ATTRIBUTES_FIELD_TYPE), Field.of(DEFAULT_EVENT_TIMESTAMP_KEY_NAME, EVENT_TIMESTAMP_FIELD_TYPE)), PubsubRowToMessage.builder().build().inputSchemaFactory().buildSchema()); assertEquals( Schema.of( Field.of(DEFAULT_ATTRIBUTES_KEY_NAME, ATTRIBUTES_FIELD_TYPE), Field.of(DEFAULT_EVENT_TIMESTAMP_KEY_NAME, EVENT_TIMESTAMP_FIELD_TYPE), Field.of(DEFAULT_PAYLOAD_KEY_NAME, FieldType.BYTES)), PubsubRowToMessage.builder().build().inputSchemaFactory(FieldType.BYTES).buildSchema()); assertEquals( Schema.of( Field.of(DEFAULT_ATTRIBUTES_KEY_NAME, ATTRIBUTES_FIELD_TYPE), Field.of(DEFAULT_EVENT_TIMESTAMP_KEY_NAME, EVENT_TIMESTAMP_FIELD_TYPE), Field.of(DEFAULT_PAYLOAD_KEY_NAME, FieldType.row(ALL_DATA_TYPES_SCHEMA))), PubsubRowToMessage.builder() .build() .inputSchemaFactory(FieldType.row(ALL_DATA_TYPES_SCHEMA)) .buildSchema()); String prefix = "_"; assertEquals( Schema.of( Field.of(prefix + ATTRIBUTES_KEY_NAME, ATTRIBUTES_FIELD_TYPE), Field.of(prefix + EVENT_TIMESTAMP_KEY_NAME, EVENT_TIMESTAMP_FIELD_TYPE)), PubsubRowToMessage.builder() .setKeyPrefix(prefix) .build() .inputSchemaFactory() .buildSchema()); Field[] userFields = ALL_DATA_TYPES_SCHEMA.getFields().toArray(new Field[0]); assertEquals( merge( Schema.of( Field.of(DEFAULT_ATTRIBUTES_KEY_NAME, ATTRIBUTES_FIELD_TYPE), Field.of(DEFAULT_EVENT_TIMESTAMP_KEY_NAME, EVENT_TIMESTAMP_FIELD_TYPE)), ALL_DATA_TYPES_SCHEMA), PubsubRowToMessage.builder().build().inputSchemaFactory().buildSchema(userFields)); }
/**
 * Converts a domain {@code AgentMetadata} into its transport DTO by copying
 * each field across one-to-one.
 */
@Override
public AgentMetadataDTO toDTO(AgentMetadata agentMetadata) {
    final String elasticAgentId = agentMetadata.elasticAgentId();
    final String agentState = agentMetadata.agentState();
    final String buildState = agentMetadata.buildState();
    final String configState = agentMetadata.configState();
    return new AgentMetadataDTO(elasticAgentId, agentState, buildState, configState);
}
// Test: toDTO copies agent id and the three state fields verbatim into the DTO.
@Test public void fromDTO_shouldConvertToAgentMetadataDTOFromAgentMetadata() { final AgentMetadata agentMetadata = new AgentMetadata("agent-id", "Idle", "Building", "Enabled"); final com.thoughtworks.go.plugin.access.elastic.v5.AgentMetadataDTO agentMetadataDTO = new AgentMetadataConverterV5().toDTO(agentMetadata); assertThat(agentMetadataDTO.elasticAgentId(), is("agent-id")); assertThat(agentMetadataDTO.agentState(), is("Idle")); assertThat(agentMetadataDTO.buildState(), is("Building")); assertThat(agentMetadataDTO.configState(), is("Enabled")); }
// Converts raw topic route data into publish info. Three cases:
//  1. An order-topic configuration ("broker:queueNums;...") yields one queue per index
//     per broker and marks the topic as ordered.
//  2. A static-topic queue mapping yields endpoint-derived queues, sorted by queue id.
//  3. Otherwise, queues come from writable QueueData entries whose broker has a master
//     address, one queue per write-queue index.
public static TopicPublishInfo topicRouteData2TopicPublishInfo(final String topic, final TopicRouteData route) { TopicPublishInfo info = new TopicPublishInfo();
// TO DO should check the usage of raw route, it is better to remove such field
info.setTopicRouteData(route); if (route.getOrderTopicConf() != null && route.getOrderTopicConf().length() > 0) { String[] brokers = route.getOrderTopicConf().split(";"); for (String broker : brokers) { String[] item = broker.split(":"); int nums = Integer.parseInt(item[1]); for (int i = 0; i < nums; i++) { MessageQueue mq = new MessageQueue(topic, item[0], i); info.getMessageQueueList().add(mq); } } info.setOrderTopic(true); } else if (route.getOrderTopicConf() == null && route.getTopicQueueMappingByBroker() != null && !route.getTopicQueueMappingByBroker().isEmpty()) { info.setOrderTopic(false); ConcurrentMap<MessageQueue, String> mqEndPoints = topicRouteData2EndpointsForStaticTopic(topic, route); info.getMessageQueueList().addAll(mqEndPoints.keySet()); info.getMessageQueueList().sort((mq1, mq2) -> MixAll.compareInteger(mq1.getQueueId(), mq2.getQueueId())); } else { List<QueueData> qds = route.getQueueDatas(); Collections.sort(qds); for (QueueData qd : qds) { if (PermName.isWriteable(qd.getPerm())) { BrokerData brokerData = null; for (BrokerData bd : route.getBrokerDatas()) { if (bd.getBrokerName().equals(qd.getBrokerName())) { brokerData = bd; break; } } if (null == brokerData) { continue; } if (!brokerData.getBrokerAddrs().containsKey(MixAll.MASTER_ID)) { continue; } for (int i = 0; i < qd.getWriteQueueNums(); i++) { MessageQueue mq = new MessageQueue(topic, qd.getBrokerName(), i); info.getMessageQueueList().add(mq); } } } info.setOrderTopic(false); } return info; }
// Test: an order-topic configuration "broker:4" produces 4 queues and no router info flag.
@Test public void testTopicRouteData2TopicPublishInfoWithOrderTopicConf() { TopicRouteData topicRouteData = createTopicRouteData(); when(topicRouteData.getOrderTopicConf()).thenReturn("127.0.0.1:4"); TopicPublishInfo actual = MQClientInstance.topicRouteData2TopicPublishInfo(topic, topicRouteData); assertFalse(actual.isHaveTopicRouterInfo()); assertEquals(4, actual.getMessageQueueList().size()); }
/**
 * Returns the number of elements in this path, where the bare root path "/"
 * has depth zero.
 */
public int depth() {
    final String path = uri.getPath();
    // The root path "/" contains no elements.
    if (path.length() == 1 && path.charAt(0) == '/') {
        return 0;
    }
    int elementCount = 0;
    int separatorPos = 0;
    while (separatorPos != -1) {
        elementCount++;
        separatorPos = path.indexOf(SEPARATOR, separatorPos + 1);
    }
    return elementCount;
}
// Test: depth() counts path elements — 2 for "/my/path", 0 for "/", 4 for "C:/my/windows/path".
@Test void testDepth() { Path p = new Path("/my/path"); assertThat(p.depth()).isEqualTo(2); p = new Path("/my/fancy/path/"); assertThat(p.depth()).isEqualTo(3); p = new Path("/my/fancy/fancy/fancy/fancy/fancy/fancy/fancy/fancy/fancy/fancy/path"); assertThat(p.depth()).isEqualTo(12); p = new Path("/"); assertThat(p.depth()).isZero(); p = new Path("C:/my/windows/path"); assertThat(p.depth()).isEqualTo(4); }
// Convenience overload that builds the run command without a Resource limit
// (delegates with a null resource argument).
protected String[] getRunCommand(String command, String groupId, String userName, Path pidFile, Configuration config) { return getRunCommand(command, groupId, userName, pidFile, config, null); }
// Test (Windows only): with a Resource supplied, getRunCommand emits winutils task-create
// arguments with unlimited (-1) memory and cpu.
@Test (timeout = 5000) public void testRunCommandWithNoResources() { assumeWindows(); Configuration conf = new Configuration(); String[] command = containerExecutor.getRunCommand("echo", "group1", null, null, conf, Resource.newInstance(1024, 1));
// Assert the cpu and memory limits are set correctly in the command
String[] expected = { Shell.WINUTILS, "task", "create", "-m", "-1", "-c", "-1", "group1", "cmd /c " + "echo" }; Assert.assertTrue(Arrays.equals(expected, command)); }
// Advances the iteration identified by the given cursor id by up to maxCount elements.
// Looks up the iterator id for the cursor and then the iterator itself, failing with
// IllegalStateException (mentioning this member's address) when either is unknown.
// After iterating, the superseded cursor id is dropped from the mapping and the new
// cursor id is registered against the same iterator.
public IterationResult<T> iterate(UUID cursorId, int maxCount) { UUID iteratorId = cursorToIteratorId.get(cursorId); if (iteratorId == null) { throw new IllegalStateException("There is no iteration with cursor id " + cursorId + " on member " + this.nodeEngine.getThisAddress() + "."); } IteratorWithCursor<T> paginator = iterators.get(iteratorId); if (paginator == null) { throw new IllegalStateException("There is no iteration with cursor id " + cursorId + " on member " + this.nodeEngine.getThisAddress() + "."); } IterationResult<T> result = paginator.iterate(cursorId, maxCount);  if (result.getCursorIdToForget() != null) {
// Remove the previous cursor id.
cursorToIteratorId.remove(result.getCursorIdToForget()); }
// Put the new cursor id.
cursorToIteratorId.put(result.getCursorId(), iteratorId); return result; }
// Test: runs the iterate helper concurrently from THREAD_COUNT threads and waits for
// all of them to finish, exercising thread-safety of the iteration bookkeeping.
@Test public void testIteration() throws InterruptedException { ExecutorService executor = newFixedThreadPool(THREAD_COUNT); CountDownLatch latch = new CountDownLatch(THREAD_COUNT); for (int i = 0; i < THREAD_COUNT; i++) { final int j = i; executor.execute(() -> { iterate(j); latch.countDown(); }); } latch.await(); executor.shutdown(); }
// Registers an upstream instance (built from the key/value pair) with Nacos under the
// configured group. Any NacosException is logged and rethrown wrapped in a ShenyuException.
@Override public void register(final String key, final String value) { try { Instance instance = buildInstanceFromUpstream(key, value); namingService.registerInstance(key, groupName, instance); LOGGER.info("Registering service with key: {} and value: {}", key, value); } catch (NacosException nacosException) { LOGGER.error("Error registering Nacos service instance: {}", nacosException.getMessage(), nacosException); throw new ShenyuException(nacosException); } }
// Test: register() calls Nacos registerInstance on success, and wraps both malformed
// JSON values and NacosException failures in ShenyuException.
@Test void testRegister() throws NacosException { final String key = "test"; final String value = "{\"weight\":20,\"url\":\"127.0.0.1:8080\"}"; doNothing().when(namingService).registerInstance(anyString(), anyString(), any(Instance.class)); nacosDiscoveryServiceUnderTest.register(key, value);
// Verify whether the method is called correctly
verify(namingService).registerInstance(anyString(), anyString(), any(Instance.class));
// Mock the wrong json format
assertThrows(ShenyuException.class, () -> nacosDiscoveryServiceUnderTest.register(key, "test"));
// Mock the throwing of registerInstance exception
doThrow(new NacosException()).when(namingService).registerInstance(anyString(), anyString(), any(Instance.class)); assertThrows(ShenyuException.class, () -> nacosDiscoveryServiceUnderTest.register(key, value)); }
// Builds a predicate from filter expressions: each expression is parsed against the
// entity attributes, filters on the same field are OR-ed together, and the per-field
// groups are AND-ed. Null or empty input yields an always-true predicate.
public Predicate<InMemoryFilterable> parse(final List<String> filterExpressions, final List<EntityAttribute> attributes) { if (filterExpressions == null || filterExpressions.isEmpty()) { return Predicates.alwaysTrue(); } final Map<String, List<Filter>> groupedByField = filterExpressions.stream() .map(expr -> singleFilterParser.parseSingleExpression(expr, attributes)) .collect(groupingBy(Filter::field)); return groupedByField.values().stream() .map(grouped -> grouped.stream() .map(Filter::toPredicate) .collect(Collectors.toList())) .map(groupedPredicates -> groupedPredicates.stream().reduce(Predicate::or).orElse(Predicates.alwaysTrue())) .reduce(Predicate::and).orElse(Predicates.alwaysTrue()); }
// Test: parsing a filter on an attribute marked non-filterable must throw IllegalArgumentException.
@Test void throwsExceptionOnFieldThatIsNotFilterable() { assertThrows(IllegalArgumentException.class, () -> toTest.parse(List.of("owner:juan"), List.of(EntityAttribute.builder() .id("owner") .title("Owner") .filterable(false) .build()) )); }
/**
 * Returns the version of the jar containing the given class: the package's
 * Implementation-Version when present, otherwise the manifest "Bundle-Version"
 * attribute.
 */
public static String getVersion(Class<?> clazz) {
    final String implementationVersion = clazz.getPackage().getImplementationVersion();
    return implementationVersion != null
        ? implementationVersion
        : getManifestAttributeValue(clazz, "Bundle-Version");
}
// Test: getVersion resolves the bundled Gson jar's version string.
@Test void gsonVersion() { assertThat(JarUtils.getVersion(Gson.class)).isEqualTo("2.11.0"); }
// Convenience overload: trims the record template's underlying data map using its own
// schema, delegating to the DataMap-based implementation.
public static void trimRecordTemplate(RecordTemplate recordTemplate, MaskTree override, final boolean failOnMismatch) { trimRecordTemplate(recordTemplate.data(), recordTemplate.schema(), override, failOnMismatch); }
// Test: trimRecordTemplate removes extraneous entries (unknown keys, lists, maps) from a
// record containing primitives, nested records and unions, restoring the original value.
@Test public void testTrimmerWithPrimitivesRecordsUnionsMix() throws CloneNotSupportedException { TyperefTest recordTemplate = new TyperefTest(); recordTemplate.setBoolean(true); RecordBar foo = new RecordBar(); foo.setLocation("foo"); recordTemplate.setBar1(foo); TyperefTest.Union5 union = new TyperefTest.Union5(); union.setIntRef(5); recordTemplate.setUnion5(union); RecordTemplate expected = recordTemplate.copy();
// Introduce bad elements
recordTemplate.getBar1().data().put("troublemaker", "foo"); ((DataMap)recordTemplate.getUnion5().data()).put("troublemaker", "foo"); recordTemplate.data().put("foo", "bar"); DataList list = new DataList(); list.add(1); DataMap map = new DataMap(); map.put("foo", 666); recordTemplate.data().put("keyFoo", list); recordTemplate.data().put("keyBar", map);
// Pre filtering
Assert.assertEquals(recordTemplate.data().size(), 6); Assert.assertEquals(recordTemplate.getBar1().data().size(), 2); RestUtils.trimRecordTemplate(recordTemplate, false);
// Post filtering
Assert.assertEquals(recordTemplate, expected); }
// Resolves a URL template: requires all mandatory parameters to be resolved, substitutes
// each resolved path parameter token (":key") in the path, and appends resolved query
// parameters as "?k=v&k2=v2".
public static String resolveUrl(String genericUrl, MessageParameters parameters) { Preconditions.checkState( parameters.isResolved(), "Not all mandatory message parameters were resolved."); StringBuilder path = new StringBuilder(genericUrl); StringBuilder queryParameters = new StringBuilder(); for (MessageParameter<?> pathParameter : parameters.getPathParameters()) { if (pathParameter.isResolved()) { int start = path.indexOf(':' + pathParameter.getKey()); final String pathValue = Preconditions.checkNotNull(pathParameter.getValueAsString());
// only replace path parameters if they are present
if (start != -1) { path.replace(start, start + pathParameter.getKey().length() + 1, pathValue); } } } boolean isFirstQueryParameter = true; for (MessageQueryParameter<?> queryParameter : parameters.getQueryParameters()) { if (queryParameter.isResolved()) { if (isFirstQueryParameter) { queryParameters.append('?'); isFirstQueryParameter = false; } else { queryParameters.append('&'); } queryParameters.append(queryParameter.getKey()); queryParameters.append('='); queryParameters.append(queryParameter.getValueAsString()); } } path.append(queryParameters); return path.toString(); }
// Test: resolveUrl rejects unresolved mandatory parameters with IllegalStateException,
// and substitutes the path parameter once it is resolved.
@Test void testUnresolvedParameters() { String genericUrl = "/jobs/:jobid/state"; TestMessageParameters parameters = new TestMessageParameters(); assertThatThrownBy(() -> MessageParameters.resolveUrl(genericUrl, parameters)) .isInstanceOf(IllegalStateException.class); JobID jobID = new JobID(); parameters.pathParameter.resolve(jobID); String resolvedUrl = MessageParameters.resolveUrl(genericUrl, parameters); assertThat(resolvedUrl).isEqualTo("/jobs/" + jobID + "/state"); }
// Asserts that the subject is null; implemented as an equality check against null.
public void isNull() { standardIsEqualTo(null); }
// Test: a null subject passes the isNull() assertion.
@Test public void isNull() { Object o = null; assertThat(o).isNull(); }
/**
 * Returns the minimum recorded value, or 0 when the snapshot is empty.
 * NOTE(review): this returns the first element, which presumes the values
 * array is kept sorted ascending — confirm against the snapshot's constructor.
 */
@Override
public long getMin() {
    return values.length == 0 ? 0 : values[0];
}
// Test: getMin() reports 1 for the prepared snapshot fixture.
@Test public void calculatesTheMinimumValue() throws Exception { assertThat(snapshot.getMin()) .isEqualTo(1); }
// Delegates destruction to the wrapped map, releasing its resources.
@Override public void destroy() { map.destroy(); }
// Test: destroying via the adapter empties the underlying map.
@Test public void testDestroy() { map.put(23, "foobar"); adapter.destroy(); assertTrue(map.isEmpty()); }
/**
 * Builds the global-lock key for the given rows in the form
 * "tableName:pk1_pk2,pk1_pk2,...": rows are comma-separated and the columns of a
 * composite primary key are joined with '_'.
 *
 * @param rowsIncludingPK rows that contain the primary-key values
 * @return the lock key, or null when there are no rows
 */
protected String buildLockKey(TableRecords rowsIncludingPK) {
    if (rowsIncludingPK.size() == 0) {
        return null;
    }
    List<Map<String, Field>> pkRows = rowsIncludingPK.pkRows();
    List<String> pkColumnNames = rowsIncludingPK.getTableMeta().getPrimaryKeyOnlyName();
    StringBuilder key = new StringBuilder(rowsIncludingPK.getTableMeta().getTableName()).append(":");
    boolean firstRow = true;
    for (Map<String, Field> row : pkRows) {
        if (!firstRow) {
            key.append(",");
        }
        firstRow = false;
        boolean firstColumn = true;
        for (String pkName : pkColumnNames) {
            if (!firstColumn) {
                key.append("_");
            }
            firstColumn = false;
            Object pkVal = row.get(pkName).getValue();
            // Reject illegal primary-key values before they become part of the key.
            validPk(String.valueOf(pkVal));
            key.append(pkVal);
        }
    }
    return key.toString();
}
// Test: buildLockKey concatenates "table:pk1,pk2" from mocked table metadata and pk rows.
@Test public void testBuildLockKey() {
//build expect data
String tableName = "test_name"; String fieldOne = "1"; String fieldTwo = "2"; String split1 = ":"; String split2 = ","; String pkColumnName="id";
//test_name:1,2
String buildLockKeyExpect = tableName + split1 + fieldOne + split2 + fieldTwo;
// mock field
Field field1 = mock(Field.class); when(field1.getValue()).thenReturn(fieldOne); Field field2 = mock(Field.class); when(field2.getValue()).thenReturn(fieldTwo); List<Map<String,Field>> pkRows =new ArrayList<>(); pkRows.add(Collections.singletonMap(pkColumnName, field1)); pkRows.add(Collections.singletonMap(pkColumnName, field2));
// mock tableMeta
TableMeta tableMeta = mock(TableMeta.class); when(tableMeta.getTableName()).thenReturn(tableName); when(tableMeta.getPrimaryKeyOnlyName()).thenReturn(Arrays.asList(new String[]{pkColumnName}));
// mock tableRecords
TableRecords tableRecords = mock(TableRecords.class); when(tableRecords.getTableMeta()).thenReturn(tableMeta); when(tableRecords.size()).thenReturn(pkRows.size()); when(tableRecords.pkRows()).thenReturn(pkRows);
// mock executor
BaseTransactionalExecutor executor = mock(BaseTransactionalExecutor.class); when(executor.buildLockKey(tableRecords)).thenCallRealMethod(); when(executor.getTableMeta()).thenReturn(tableMeta); assertThat(executor.buildLockKey(tableRecords)).isEqualTo(buildLockKeyExpect); }
static byte[] adaptArray(byte[] ftdiData) { int length = ftdiData.length; if(length > 64) { int n = 1; int p = 64; // Precalculate length without FTDI headers while(p < length) { n++; p = n*64; } int realLength = length - n*2; byte[] data = new byte[realLength]; copyData(ftdiData, data); return data; } else if (length == 2) // special case optimization that returns the same instance. { return EMPTY_BYTE_ARRAY; } else { return Arrays.copyOfRange(ftdiData, 2, length); } }
// Test: a 2-byte (header-only) input adapts to an empty array, and repeated calls
// return the very same shared instance.
@Test public void adaptEmptyByteArray() { byte[] onlyHeaders = {1, 2}; byte[] adapted = FTDISerialDevice.adaptArray(onlyHeaders); Assert.assertEquals("Should be empty", 0, adapted.length); byte[] adaptAgain = FTDISerialDevice.adaptArray(onlyHeaders); Assert.assertSame("Should be the same instance of empty array", adapted, adaptAgain); }
// Binds the given bean under the given name in the underlying registry.
public void bind(String name, Object bean) { registry.bind(name, bean); }
// Test: with hangup support disabled on the shutdown strategy, Main still starts,
// exposes the bound bean via the registry, routes a message to the mock endpoint,
// and stops cleanly.
@Test public void testDisableHangupSupport() throws Exception {
// lets make a simple route
Main main = new Main(); DefaultMainShutdownStrategy shutdownStrategy = new DefaultMainShutdownStrategy(main); shutdownStrategy.disableHangupSupport(); main.setShutdownStrategy(shutdownStrategy); main.configure().addRoutesBuilder(new MyRouteBuilder()); main.enableTrace(); main.bind("foo", 31); main.start(); CamelContext camelContext = main.getCamelContext(); assertEquals(31, camelContext.getRegistry().lookupByName("foo"), "Could not find the registry bound object"); MockEndpoint endpoint = camelContext.getEndpoint("mock:results", MockEndpoint.class); endpoint.expectedMinimumMessageCount(1); main.getCamelTemplate().sendBody("direct:start", "<message>1</message>"); endpoint.assertIsSatisfied(); main.stop(); }
// Builds a small fixed food-web fixture: two plants, two herbivores eating grass,
// and a lion preying on both, assembled into an "Amazon" forest.
private static Forest createForest() { Plant grass = new Plant("Grass", "Herb"); Plant oak = new Plant("Oak", "Tree"); Animal zebra = new Animal("Zebra", Set.of(grass), Collections.emptySet()); Animal buffalo = new Animal("Buffalo", Set.of(grass), Collections.emptySet()); Animal lion = new Animal("Lion", Collections.emptySet(), Set.of(zebra, buffalo)); return new Forest("Amazon", Set.of(lion, buffalo, zebra), Set.of(grass, oak)); }
// Test: a Forest serialized via ClobSerializer, persisted to the DB, reloaded and
// deserialized must hash equal to the original.
@Test void clobSerializerTest() { Forest forest = createForest(); try (LobSerializer serializer = new ClobSerializer()) { Object serialized = serializer.serialize(forest); int id = serializer.persistToDb(1, forest.getName(), serialized); Object fromDb = serializer.loadFromDb(id, Forest.class.getSimpleName()); Forest forestFromDb = serializer.deSerialize(fromDb); Assertions.assertEquals(forest.hashCode(), forestFromDb.hashCode(), "Hashes of objects after Serializing and Deserializing are the same"); } catch (SQLException | IOException | TransformerException | ParserConfigurationException | SAXException | ClassNotFoundException e) { throw new RuntimeException(e); } }
// Computes the websocket connection timeout. When the configured max timeout is
// positive and both a token and a token provider are present, the remaining token
// lifetime (relative to this.clock) is used, capped at the max; failures to read the
// token lifetime are logged and fall back to the max. A non-positive max yields empty.
public Optional<Long> getTokenTimeout( final Optional<String> token, final KsqlConfig ksqlConfig, final Optional<KsqlAuthTokenProvider> authTokenProvider ) { final long maxTimeout = ksqlConfig.getLong(KsqlConfig.KSQL_WEBSOCKET_CONNECTION_MAX_TIMEOUT_MS); if (maxTimeout > 0) { if (authTokenProvider.isPresent() && token.isPresent()) { try { final long tokenTimeout = authTokenProvider.get() .getLifetimeMs(StringUtils.removeStart(token.get(), BEARER)) - clock.millis(); return Optional.of(Math.min(tokenTimeout, maxTimeout)); } catch (final Exception e) { log.error(e.getMessage()); } } return Optional.of(maxTimeout); } else { return Optional.empty(); } }
// Test: getTokenTimeout returns the remaining token lifetime (50000 ms) for the fixture.
@Test public void shouldReturnTokenExpiryTime() { assertThat(authenticationUtil.getTokenTimeout(Optional.of(TOKEN), ksqlConfig, Optional.of(authTokenProvider)), equalTo(Optional.of(50000L))); }
/**
 * Convenience overload that builds the table aggregate with a freshly created
 * default {@code AggregateParamsFactory}.
 */
public static KTableHolder<GenericKey> build(
    final KGroupedTableHolder groupedTable,
    final TableAggregate aggregate,
    final RuntimeBuildContext buildContext,
    final MaterializedFactory materializedFactory) {
  final AggregateParamsFactory defaultParamsFactory = new AggregateParamsFactory();
  return build(groupedTable, aggregate, buildContext, materializedFactory, defaultParamsFactory);
}
// Test: building the aggregate yields a holder exposing the expected aggregate schema.
@Test public void shouldReturnCorrectSchema() {
// When:
final KTableHolder<GenericKey> result = aggregate.build(planBuilder, planInfo);
// Then:
assertThat(result.getSchema(), is(AGGREGATE_SCHEMA)); }
// REST endpoint: GET /management/deadletter-jobs/{jobId} — looks up the dead-letter job
// by id (404 when absent, per the declared @ApiResponses) and maps it to a JobResponse.
@ApiOperation(value = "Get a single deadletter job", tags = { "Jobs" }) @ApiResponses(value = { @ApiResponse(code = 200, message = "Indicates the suspended job exists and is returned."), @ApiResponse(code = 404, message = "Indicates the requested job does not exist.") }) @GetMapping(value = "/management/deadletter-jobs/{jobId}", produces = "application/json") public JobResponse getDeadletterJob(@ApiParam(name = "jobId") @PathVariable String jobId) { Job job = getDeadLetterJobById(jobId); return restResponseFactory.createDeadLetterJobResponse(job); }
// Test: moves a timer job to the dead-letter queue, fetches it over REST, verifies the
// JSON payload (ids, handler type, retries, due date, empty tenant), then changes the
// deployment tenant and verifies the endpoint reflects it.
@Test @Deployment(resources = { "org/flowable/rest/service/api/management/JobResourceTest.testTimerProcess.bpmn20.xml" }) public void testGetDeadLetterJob() throws Exception { ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("timerProcess"); Job timerJob = managementService.createTimerJobQuery().processInstanceId(processInstance.getId()).singleResult(); assertThat(timerJob).isNotNull(); Job deadLetterJob = managementService.createDeadLetterJobQuery().processInstanceId(processInstance.getId()).singleResult(); assertThat(deadLetterJob).isNull(); managementService.moveJobToDeadLetterJob(timerJob.getId()); timerJob = managementService.createTimerJobQuery().processInstanceId(processInstance.getId()).singleResult(); assertThat(timerJob).isNull(); deadLetterJob = managementService.createDeadLetterJobQuery().processInstanceId(processInstance.getId()).singleResult(); assertThat(deadLetterJob).isNotNull(); CloseableHttpResponse response = executeRequest( new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB, deadLetterJob.getId())), HttpStatus.SC_OK); JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent()); closeResponse(response); assertThat(responseNode).isNotNull(); assertThatJson(responseNode) .when(Option.IGNORING_EXTRA_FIELDS) .isEqualTo("{" + "id: '" + deadLetterJob.getId() + "'," + "correlationId: '" + deadLetterJob.getCorrelationId() + "'," + "exceptionMessage: " + deadLetterJob.getExceptionMessage() + "," + "executionId: '" + deadLetterJob.getExecutionId() + "'," + "processDefinitionId: '" + deadLetterJob.getProcessDefinitionId() + "'," + "processInstanceId: '" + deadLetterJob.getProcessInstanceId() + "'," + "elementId: 'escalationTimer'," + "elementName: 'Escalation'," + "handlerType: 'trigger-timer'," + "retries: " + deadLetterJob.getRetries() + "," + "dueDate: " + new TextNode(getISODateStringWithTZ(deadLetterJob.getDuedate())) + "," + "tenantId: ''" + "}");
assertThat(responseNode.path("url").asText(null)) .endsWith(RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB, deadLetterJob.getId()));
// Set tenant on deployment
managementService.executeCommand(new ChangeDeploymentTenantIdCmd(deploymentId, "myTenant")); response = executeRequest(new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_DEADLETTER_JOB, deadLetterJob.getId())), HttpStatus.SC_OK); responseNode = objectMapper.readTree(response.getEntity().getContent()); closeResponse(response); assertThat(responseNode).isNotNull(); assertThatJson(responseNode) .when(Option.IGNORING_EXTRA_FIELDS) .isEqualTo("{" + "tenantId: 'myTenant'" + "}"); }
// Convenience overload: writes the string with the default maximum length of
// Short.MAX_VALUE, delegating to the bounded variant.
public static void writeString(final @NotNull ByteBuf buf, final @NotNull CharSequence string) { writeString(buf, string, Short.MAX_VALUE); }
// Test: writing a string longer than the given max length throws EncoderException.
@Test void testWriteStringTooLong() { assertThrows(EncoderException.class, () -> BufUtil.writeString(this.buf, "aa", 1)); }
// Mutator: configures this tile area for the given zoom and bounds. The tile-index
// upper bound is 2^zoom; width/height are computed from the raw bounds before the
// left/top corners are normalized via cleanValue. Returns this for chaining.
public MapTileArea set(final int pZoom, final int pLeft, final int pTop, final int pRight, final int pBottom) { mZoom = pZoom; mMapTileUpperBound = 1 << mZoom; mWidth = computeSize(pLeft, pRight); mHeight = computeSize(pTop, pBottom); mLeft = cleanValue(pLeft); mTop = cleanValue(pTop); return this; }
// Test: set() with the same bounds at zoom levels 0-2 passes the checkAll invariants.
@Test public void testSetAll() { final MapTileArea area = new MapTileArea(); int zoom; zoom = 0; area.set(zoom, -10, -100, 50, 90); checkAll(zoom, area); zoom = 1; area.set(zoom, -10, -100, 50, 90); checkAll(zoom, area); zoom = 2; area.set(zoom, -10, -100, 50, 90); checkAll(zoom, area); }
protected Locale detectLocale(HttpHeaders headers) { final List<Locale> languages; try { languages = headers.getAcceptableLanguages(); } catch (HeaderValueException e) { throw new WebApplicationException(e.getMessage(), Response.Status.BAD_REQUEST); } for (Locale locale : languages) { if (!locale.toString().contains("*")) { // Freemarker doesn't do wildcards well return locale; } } return Locale.getDefault(); }
// Test: with no Accept-Language header, the real HttpHeaders implementation yields a
// wildcard locale, which detectLocale must skip in favor of the JVM default locale.
@Test void detectLocaleShouldReturnDefaultLocaleWhenHeaderNotSpecified() {
// We call the real methods to make sure that 'getAcceptableLanguages' returns a locale with a wildcard
// (which is their default value). This also validates that 'detectLocale' skips wildcard languages.
when(headers.getAcceptableLanguages()).thenCallRealMethod(); when(headers.getQualifiedAcceptableLanguages()).thenCallRealMethod(); when(headers.getHeaderString(HttpHeaders.ACCEPT_LANGUAGE)).thenReturn(null); final ViewMessageBodyWriter writer = new ViewMessageBodyWriter(metricRegistry, Collections.emptyList()); final Locale result = writer.detectLocale(headers); assertThat(result).isSameAs(Locale.getDefault()); }
/**
 * Returns the event value, lazily deserializing {@code valueData} on first
 * access when a serialization service is available. The deserialized result
 * is cached in {@code value} for subsequent calls.
 */
public V getValue() {
    // Already materialized, or no service to deserialize with: return as-is.
    if (value != null || serializationService == null) {
        return value;
    }
    value = serializationService.toObject(valueData);
    return value;
}
/** A value supplied as serialized data must round-trip back to the original object. */
@Test
public void testGetValue_withDataValue() {
    final Object deserialized = dataEvent.getValue();
    assertEquals("value", deserialized);
}
/**
 * Parses a Total Commander {@code wcx_ftp.ini}-style bookmark file. Each
 * {@code [name]} line starts a new FTP bookmark whose nickname is the section
 * name; subsequent {@code key=value} lines fill in the hostname, default
 * directory and username. Unknown keys and malformed lines are logged and
 * skipped.
 *
 * @param protocols registry used to resolve the FTP protocol
 * @param file      the INI file to read (UTF-8)
 * @throws AccessDeniedException if the file cannot be read
 */
@Override
protected void parse(final ProtocolFactory protocols, final Local file) throws AccessDeniedException {
    // Matches a section header such as "[bookmark name]" and captures the name.
    // Compiled once here instead of once per section line (it is loop-invariant).
    final Pattern sectionPattern = Pattern.compile("\\[(.*)\\]");
    try (final BufferedReader in = new BufferedReader(new InputStreamReader(file.getInputStream(), StandardCharsets.UTF_8))) {
        Host current = null;
        String line;
        while((line = in.readLine()) != null) {
            if(line.startsWith("[")) {
                // New section: flush the previous bookmark, if any.
                if(current != null) {
                    this.add(current);
                }
                current = new Host(protocols.forScheme(Scheme.ftp));
                current.getCredentials().setUsername(
                        PreferencesFactory.get().getProperty("connection.login.anon.name"));
                final Matcher matcher = sectionPattern.matcher(line);
                if(matcher.matches()) {
                    current.setNickname(matcher.group(1));
                }
            }
            else {
                if(null == current) {
                    // key=value line before any "[...]" header; nothing to attach it to.
                    log.warn("Failed to detect start of bookmark");
                    continue;
                }
                final Scanner scanner = new Scanner(line);
                scanner.useDelimiter("=");
                if(!scanner.hasNext()) {
                    log.warn("Missing key in line:" + line);
                    continue;
                }
                final String name = scanner.next().toLowerCase(Locale.ROOT);
                if(!scanner.hasNext()) {
                    log.warn("Missing value in line:" + line);
                    continue;
                }
                final String value = scanner.next();
                switch(name) {
                    case "host":
                        current.setHostname(value);
                        break;
                    case "directory":
                        current.setDefaultPath(value);
                        break;
                    case "username":
                        current.getCredentials().setUsername(value);
                        break;
                    default:
                        log.warn(String.format("Ignore property %s", name));
                        break;
                }
            }
        }
        // Flush the last bookmark in the file.
        if(current != null) {
            this.add(current);
        }
    }
    catch(IOException e) {
        throw new AccessDeniedException(e.getMessage(), e);
    }
}
/** Parsing the sample wcx_ftp.ini must yield two FTP bookmarks with the expected fields. */
@Test
public void testParse() throws Exception {
    final TotalCommanderBookmarkCollection bookmarks = new TotalCommanderBookmarkCollection();
    assertEquals(0, bookmarks.size());

    final ProtocolFactory protocols = new ProtocolFactory(new HashSet<>(Arrays.asList(
            new TestProtocol(Scheme.ftp), new TestProtocol(Scheme.ftps), new TestProtocol(Scheme.sftp))));
    bookmarks.parse(protocols, new Local("src/test/resources/wcx_ftp.ini"));

    assertEquals(2, bookmarks.size());
    assertEquals("sudo.ch", bookmarks.get(0).getHostname());
    assertEquals("fo|cyberduck.io session bookmark", bookmarks.get(1).getNickname());
    assertEquals("cyberduck.io", bookmarks.get(1).getHostname());
    assertEquals("/remote", bookmarks.get(1).getDefaultPath());
}
/**
 * Renders the entire byte array as a print-friendly string builder by
 * delegating to the range overload. A {@code null} array is signalled to
 * the overload with a length of -1.
 */
public StringBuilder bytesToPrintFriendlyStringBuilder(byte[] phiBytes) {
    final int length = (phiBytes == null) ? -1 : phiBytes.length;
    return bytesToPrintFriendlyStringBuilder(phiBytes, 0, length);
}
/** Null, empty and populated inputs must each render their expected representation. */
@Test
public void testBytesToPrintFriendlyStringBuilder() {
    final String nullResult = hl7util.bytesToPrintFriendlyStringBuilder((byte[]) null).toString();
    assertEquals(hl7util.NULL_REPLACEMENT_VALUE, nullResult);

    final String emptyResult = hl7util.bytesToPrintFriendlyStringBuilder(new byte[0]).toString();
    assertEquals(hl7util.EMPTY_REPLACEMENT_VALUE, emptyResult);

    final String messageResult = hl7util.bytesToPrintFriendlyStringBuilder(TEST_MESSAGE_BYTES).toString();
    assertEquals(EXPECTED_MESSAGE, messageResult);
}
/**
 * Incrementally iterates the sorted set stored at {@code key} using ZSCAN.
 * <p>
 * Not supported in pipeline/transaction mode. The Redis node that served the
 * previous page is remembered (see {@code client}) and reused for follow-up
 * pages - presumably because a scan cursor is only valid on the node that
 * issued it (TODO confirm against Redisson's executor semantics).
 */
@Override
public Cursor<Tuple> zScan(byte[] key, ScanOptions options) {
    return new KeyBoundCursor<Tuple>(key, 0, options) {

        // Node that served the previous iteration; null on the first page.
        private RedisClient client;

        @Override
        protected ScanIteration<Tuple> doScan(byte[] key, long cursorId, ScanOptions options) {
            if (isQueueing() || isPipelined()) {
                throw new UnsupportedOperationException("'ZSCAN' cannot be called in pipeline / transaction mode.");
            }

            // Redis cursors are unsigned 64-bit values, hence the unsigned
            // string conversions in both directions.
            List<Object> args = new ArrayList<Object>();
            args.add(key);
            args.add(Long.toUnsignedString(cursorId));
            // MATCH and COUNT are optional ZSCAN modifiers.
            if (options.getPattern() != null) {
                args.add("MATCH");
                args.add(options.getPattern());
            }
            if (options.getCount() != null) {
                args.add("COUNT");
                args.add(options.getCount());
            }

            RFuture<ListScanResult<Tuple>> f = executorService.readAsync(client, key, ByteArrayCodec.INSTANCE, ZSCAN, args.toArray());
            ListScanResult<Tuple> res = syncFuture(f);
            // Pin subsequent pages to the node that produced this cursor.
            client = res.getRedisClient();
            return new ScanIteration<Tuple>(Long.parseUnsignedLong(res.getPos()), res.getValues());
        }
    }.open();
}
/** zScan must iterate over both members previously added to the sorted set. */
@Test
public void testZScan() {
    final byte[] key = "key".getBytes();
    connection.zAdd(key, 1, "value1".getBytes());
    connection.zAdd(key, 2, "value2".getBytes());

    final Cursor<Tuple> cursor = connection.zScan(key, ScanOptions.scanOptions().build());
    assertThat(cursor.hasNext()).isTrue();
    assertThat(cursor.next().getValue()).isEqualTo("value1".getBytes());
    assertThat(cursor.hasNext()).isTrue();
    assertThat(cursor.next().getValue()).isEqualTo("value2".getBytes());
}
/**
 * Delegates to the supertype's equality check unchanged; the override exists
 * presumably to surface or re-document the method on this subject type -
 * TODO confirm intent against the class Javadoc.
 */
@Override
public void isEqualTo(@Nullable Object expected) {
    super.isEqualTo(expected);
}
/** Comparing against a shorter array (no tolerance parameter) must fail. */
@Test
public void isEqualTo_WithoutToleranceParameter_Fail_Shorter() {
    expectFailureWhenTestingThat(array(2.2d, 3.3d))
        .isEqualTo(array(2.2d)); // expected array has fewer elements
}
@SuppressWarnings({ "nullness" // TODO(https://github.com/apache/beam/issues/20497) }) public static TableReference parseTableSpec(String tableSpec) { Matcher match = BigQueryIO.TABLE_SPEC.matcher(tableSpec); if (!match.matches()) { throw new IllegalArgumentException( String.format( "Table specification [%s] is not in one of the expected formats (" + " [project_id]:[dataset_id].[table_id]," + " [project_id].[dataset_id].[table_id]," + " [dataset_id].[table_id])", tableSpec)); } TableReference ref = new TableReference(); ref.setProjectId(match.group("PROJECT")); return ref.setDatasetId(match.group("DATASET")).setTableId(match.group("TABLE")); }
/** A spec with an empty dataset segment ("myproject:.bar") must be rejected. */
@Test
public void testTableParsingError_2() {
    thrown.expect(IllegalArgumentException.class);
    BigQueryHelpers.parseTableSpec("myproject:.bar");
}
/**
 * Returns the mean of this distribution. For a Poisson distribution the
 * mean equals the rate parameter lambda.
 */
@Override
public double mean() {
    return lambda;
}
/** The mean of Poisson(3.5) must equal its rate parameter, 3.5. */
@Test
public void testMean() {
    System.out.println("mean");
    PoissonDistribution instance = new PoissonDistribution(3.5);
    // One sample is drawn and its value discarded - presumably a smoke check
    // that sampling does not throw and does not perturb the reported mean.
    // TODO confirm; removing this call would change the RNG state.
    instance.rand();
    assertEquals(3.5, instance.mean(), 1E-7);
}
/**
 * Runs every known {@code Rule} pattern against the given crash log and
 * collects a {@code Result} for each rule whose pattern occurs in the log.
 * (Method name retains the original spelling for source compatibility.)
 *
 * @param log the raw crash-report text
 * @return the set of matched results; empty when no rule matches
 */
public static Set<Result> anaylze(String log) {
    final Set<Result> matched = new HashSet<>();
    for (final Rule rule : Rule.values()) {
        final Matcher matcher = rule.pattern.matcher(log);
        if (!matcher.find()) {
            continue;
        }
        matched.add(new Result(rule, log, matcher));
    }
    return matched;
}
/**
 * The splash-screen crash log must be classified as a graphics-driver issue.
 * The lookup itself is the effective assertion - findResultByRule presumably
 * fails the test when no result matches the rule (TODO confirm the helper).
 */
@Test
public void splashScreen() throws IOException {
    // The original stored the result in an unused local; dropped here.
    findResultByRule(
            CrashReportAnalyzer.anaylze(loadLog("/crash-report/splashscreen.txt")),
            CrashReportAnalyzer.Rule.GRAPHICS_DRIVER);
}