focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Forwards the given visitor to every child unit in order.
 *
 * @param visitor the visitor each child will accept; assumed non-null — TODO confirm caller contract
 */
public void accept(UnitVisitor visitor) {
    for (int i = 0; i < children.length; i++) {
        children[i].accept(visitor);
    }
}
// Verifies accept() forwards the visitor to all five mocked children exactly once,
// and that neither the children nor the visitor receive any other interactions.
// verifyVisit/factory are fixture helpers defined elsewhere in the test class.
@Test void testAccept() { final var children = new Unit[5]; Arrays.setAll(children, (i) -> mock(Unit.class)); final var unit = this.factory.apply(children); final var visitor = mock(UnitVisitor.class); unit.accept(visitor); verifyVisit(unit, visitor); Arrays.stream(children).forEach(child -> verify(child).accept(eq(visitor))); verifyNoMoreInteractions(children); verifyNoMoreInteractions(visitor); }
/**
 * Searches a sorted {@code list} for {@code key} by scanning linearly outward from
 * {@code initialGuess}, using the same return convention as
 * {@link java.util.Collections#binarySearch(java.util.List, Object)}: the index of the
 * key if found, otherwise {@code -(insertionPoint + 1)}.
 *
 * <p>Fix: an out-of-range {@code initialGuess} is now clamped on BOTH sides (the
 * original only clamped guesses past the end; a negative guess, or any guess on an
 * empty list, threw {@link IndexOutOfBoundsException}).
 *
 * @param list         list sorted consistently with {@code comparator}
 * @param comparator   ordering used to compare elements against the key
 * @param key          the value to locate
 * @param initialGuess starting index for the scan; clamped into the valid range
 * @return index of the key, or {@code -(insertionPoint + 1)} when absent
 */
public static <T> int linearSearch(List<? extends T> list, Comparator<T> comparator, T key, int initialGuess) {
    if (list.isEmpty()) {
        return -1; // insertion point 0 => -(0 + 1)
    }
    int guess = initialGuess;
    if (guess >= list.size()) {
        guess = list.size() - 1;
    }
    if (guess < 0) {
        guess = 0; // previously threw IndexOutOfBoundsException for negative guesses
    }
    int comparison = comparator.compare(list.get(guess), key);
    if (comparison == 0) {
        return guess;
    }
    if (comparison < 0) {
        // Key lies after the guess: walk forward until found or overshot.
        guess++;
        while (guess < list.size()) {
            comparison = comparator.compare(list.get(guess), key);
            if (comparison == 0) {
                return guess;
            }
            if (comparison > 0) {
                return -(guess + 1);
            }
            guess++;
        }
        return -(list.size() + 1);
    } else {
        // Key lies before the guess: walk backward until found or overshot.
        guess--;
        while (guess >= 0) {
            comparison = comparator.compare(list.get(guess), key);
            if (comparison == 0) {
                return guess;
            }
            if (comparison < 0) {
                return -(guess + 2);
            }
            guess--;
        }
        return -1;
    }
}
// Exhaustively exercises linearSearch over list [0,1,3,4] for every key in [-1..5]
// crossed with guesses {0,1,2,3,4,10}. doTest is a fixture helper defined elsewhere
// (presumably comparing against Collections.binarySearch — verify in the test class).
@Test public void testLinearSearch() { List<Integer> list = Lists.newArrayList(0, 1, 3, 4); doTest(list, 5, 10); doTest(list, 5, 4); doTest(list, 5, 3); doTest(list, 5, 2); doTest(list, 5, 1); doTest(list, 5, 0); doTest(list, 4, 10); doTest(list, 4, 4); doTest(list, 4, 3); doTest(list, 4, 2); doTest(list, 4, 1); doTest(list, 4, 0); doTest(list, 3, 10); doTest(list, 3, 4); doTest(list, 3, 3); doTest(list, 3, 2); doTest(list, 3, 1); doTest(list, 3, 0); doTest(list, 2, 10); doTest(list, 2, 4); doTest(list, 2, 3); doTest(list, 2, 2); doTest(list, 2, 1); doTest(list, 2, 0); doTest(list, 1, 10); doTest(list, 1, 4); doTest(list, 1, 3); doTest(list, 1, 2); doTest(list, 1, 1); doTest(list, 1, 0); doTest(list, 0, 10); doTest(list, 0, 4); doTest(list, 0, 3); doTest(list, 0, 2); doTest(list, 0, 1); doTest(list, 0, 0); doTest(list, -1, 10); doTest(list, -1, 4); doTest(list, -1, 3); doTest(list, -1, 2); doTest(list, -1, 1); doTest(list, -1, 0); }
/**
 * Returns the currently registered task-aware plugins, reading under the main lock's
 * read side so registration happening concurrently cannot interleave with the read.
 */
@Override
public Collection<TaskAwarePlugin> getTaskAwarePluginList() {
    return mainLock.applyWithReadLock(() -> taskAwarePluginList.getPlugins());
}
// Registers a single task-aware plugin and verifies the manager then reports exactly one.
@Test public void testGetTaskAwarePluginList() { manager.register(new TestTaskAwarePlugin()); Assert.assertEquals(1, manager.getTaskAwarePluginList().size()); }
/**
 * Builds a DdlCommand for the given parsed DDL statement by dispatching on the
 * statement's concrete class via the FACTORIES map. Unknown statement types fall
 * through to a default handler that throws KsqlException listing the supported
 * statement classes.
 */
@Override public DdlCommand create( final String sqlExpression, final DdlStatement ddlStatement, final SessionConfig config ) { return FACTORIES .getOrDefault(ddlStatement.getClass(), (statement, cf, ci) -> { throw new KsqlException( "Unable to find ddl command factory for statement:" + statement.getClass() + " valid statements:" + FACTORIES.keySet() ); }) .handle( this, new CallInfo(sqlExpression, config), ddlStatement); }
// Verifies create() routes a CreateTable statement to the create-source factory
// and returns that factory's command unchanged.
@Test public void shouldCreateCommandForCreateTable() { // Given: final CreateTable statement = new CreateTable(SOME_NAME, TableElements.of( tableElement("COL1", new Type(SqlTypes.BIGINT)), tableElement("COL2", new Type(SqlTypes.STRING))), false, true, withProperties, false); // When: final DdlCommand result = commandFactories .create(sqlExpression, statement, SessionConfig.of(ksqlConfig, emptyMap())); // Then: assertThat(result, is(createTableCommand)); verify(createSourceFactory).createTableCommand(statement, ksqlConfig); }
/**
 * Creates the target POJO through its no-argument constructor.
 * Any reflective failure (missing constructor, inaccessible class, constructor
 * throwing) is rethrown as a QueryException that names the offending class.
 */
@Override
public void init() {
    try {
        pojo = clazz.getDeclaredConstructor().newInstance();
    } catch (Exception e) {
        String reason = "Unable to instantiate class \"" + clazz.getName() + "\" : " + e.getMessage();
        throw QueryException.error(reason, e);
    }
}
// Verifies that constructing a PojoUpsertTarget and calling init() passes the
// target class name through the supplied ReflectionClassNameFilter.
@Test public void when_filterPassed_then_init_succcess() { var filter = mock(ReflectionClassNameFilter.class); UpsertTarget target = new PojoUpsertTarget( Object.class.getName(), ImmutableMap.of("field", int.class.getName()), filter ); target.init(); verify(filter).filter(eq(Object.class.getName())); }
/**
 * Creates an HttpClient that opens a fresh connection per request
 * (no connection pooling).
 */
public static HttpClient newConnection() {
    ConnectionProvider provider = ConnectionProvider.newConnection();
    return new HttpClientConnect(new HttpConnectionProvider(provider));
}
// Exercises the shared-name-resolver path with a pool-less client; the actual
// assertions live in the doTestSharedNameResolver helper defined elsewhere.
@Test public void testSharedNameResolver_SharedClientNoConnectionPool() throws InterruptedException { doTestSharedNameResolver(HttpClient.newConnection(), true); }
/**
 * Adds an environment variable with the given name and value,
 * flagged as non-secure (third constructor argument is {@code false}).
 */
public void add(String name, String value) {
    final EnvironmentVariable variable = new EnvironmentVariable(name, value, false);
    add(variable);
}
// Verifies add(name, value) grows the collection by one and that the stored
// element equals a two-arg EnvironmentVariable (i.e. the non-secure default).
@Test void add_shouldAddEnvironmentVariable() { final EnvironmentVariables environmentVariables = new EnvironmentVariables(); assertThat(environmentVariables).hasSize(0); environmentVariables.add("foo", "bar"); assertThat(environmentVariables).hasSize(1); assertThat(environmentVariables).containsExactly(new EnvironmentVariable("foo", "bar")); }
/**
 * Assembles a service Topology from client-side and server-side call details.
 *
 * Client-side pass: resolves target network-address aliases to real services,
 * creates conjectural nodes (typing them from the client call's component when
 * the node is not real), and creates/updates one Call per relation id, tagging
 * it with DetectPoint.CLIENT and accumulating source components.
 *
 * Server-side pass: creates any missing client/server nodes, upgrades node types
 * (server-side component info takes priority, compared via the component
 * library's priority ordering), renames the special user node, and creates or
 * updates Calls with DetectPoint.SERVER and target components.
 *
 * NOTE(review): node/call de-duplication relies on callMap/nodes keyed by ids;
 * statement order is significant — do not reorder the two passes.
 */
Topology build(List<Call.CallDetail> serviceRelationClientCalls, List<Call.CallDetail> serviceRelationServerCalls) { Map<String, Node> nodes = new HashMap<>(); List<Call> calls = new LinkedList<>(); HashMap<String, Call> callMap = new HashMap<>(); for (Call.CallDetail clientCall : serviceRelationClientCalls) { final IDManager.ServiceID.ServiceIDDefinition sourceService = IDManager.ServiceID.analysisId( clientCall.getSource()); String sourceServiceId = clientCall.getSource(); IDManager.ServiceID.ServiceIDDefinition destService = IDManager.ServiceID.analysisId( clientCall.getTarget()); String targetServiceId = clientCall.getTarget(); /* * Use the alias name to make topology relationship accurate. */ if (networkAddressAliasCache.get(destService.getName()) != null) { /* * If alias exists, mean this network address is representing a real service. */ final NetworkAddressAlias networkAddressAlias = networkAddressAliasCache.get(destService.getName()); destService = IDManager.ServiceID.analysisId( networkAddressAlias.getRepresentServiceId()); targetServiceId = IDManager.ServiceID.buildId(destService.getName(), true); } /* * Set the conjectural node type. 
*/ if (!nodes.containsKey(targetServiceId)) { final Node conjecturalNode = buildNode(targetServiceId, destService); nodes.put(targetServiceId, conjecturalNode); if (!conjecturalNode.isReal() && StringUtil.isEmpty(conjecturalNode.getType())) { conjecturalNode.setType( componentLibraryCatalogService.getServerNameBasedOnComponent(clientCall.getComponentId())); } } if (!nodes.containsKey(sourceServiceId)) { nodes.put(sourceServiceId, buildNode(sourceServiceId, sourceService)); } final String relationId = IDManager.ServiceID.buildRelationId( new IDManager.ServiceID.ServiceRelationDefine(sourceServiceId, targetServiceId)); if (!callMap.containsKey(relationId)) { Call call = new Call(); callMap.put(relationId, call); call.setSource(sourceServiceId); call.setTarget(targetServiceId); call.setId(relationId); call.addDetectPoint(DetectPoint.CLIENT); call.addSourceComponent(componentLibraryCatalogService.getComponentName(clientCall.getComponentId())); calls.add(call); } else { Call call = callMap.get(relationId); call.addSourceComponent(componentLibraryCatalogService.getComponentName(clientCall.getComponentId())); } } for (Call.CallDetail serverCall : serviceRelationServerCalls) { final IDManager.ServiceID.ServiceIDDefinition sourceService = IDManager.ServiceID.analysisId( serverCall.getSource()); IDManager.ServiceID.ServiceIDDefinition destService = IDManager.ServiceID.analysisId( serverCall.getTarget()); /* * Create the client node if it hasn't been created in client side call. */ Node clientSideNode = nodes.get(serverCall.getSource()); if (clientSideNode == null) { clientSideNode = buildNode(serverCall.getSource(), sourceService); nodes.put(serverCall.getSource(), clientSideNode); } /* * conjectural node type. */ if (!clientSideNode.isReal()) { clientSideNode.setType( componentLibraryCatalogService.getServerNameBasedOnComponent(serverCall.getComponentId())); } /* * Format the User name type. 
*/ if (userID.equals(serverCall.getSource())) { nodes.get(userID).setType(Const.USER_SERVICE_NAME.toUpperCase()); } /* * Create the server node if it hasn't been created. */ if (!nodes.containsKey(serverCall.getTarget())) { final Node node = buildNode(serverCall.getTarget(), destService); nodes.put(serverCall.getTarget(), node); } /* * Set the node type due to service side component id has higher priority */ final Node serverSideNode = nodes.get(serverCall.getTarget()); final String nodeType = serverSideNode.getType(); if (nodeType == null || !serverSideNode.hasSetOnceAtServerSide()) { serverSideNode.setTypeFromServerSide( componentLibraryCatalogService.getComponentName(serverCall.getComponentId())); } else { final Integer componentId = componentLibraryCatalogService.getComponentId(nodeType); if (componentId != null) { if (componentLibraryCatalogService.compare(componentId, serverCall.getComponentId())) { serverSideNode.setTypeFromServerSide( componentLibraryCatalogService.getComponentName(serverCall.getComponentId())); } else { //Do nothing, as the current value has higher priority } } else { serverSideNode.setTypeFromServerSide( componentLibraryCatalogService.getComponentName(serverCall.getComponentId())); } } if (!callMap.containsKey(serverCall.getId())) { Call call = new Call(); callMap.put(serverCall.getId(), call); call.setSource(serverCall.getSource()); call.setTarget(serverCall.getTarget()); call.setId(serverCall.getId()); call.addDetectPoint(DetectPoint.SERVER); call.addTargetComponent(componentLibraryCatalogService.getComponentName(serverCall.getComponentId())); calls.add(call); } else { Call call = callMap.get(serverCall.getId()); call.addDetectPoint(DetectPoint.SERVER); call.addTargetComponent(componentLibraryCatalogService.getComponentName(serverCall.getComponentId())); } } Topology topology = new Topology(); topology.getCalls().addAll(calls); topology.getNodes().addAll(nodes.values()); return topology; }
// Builds a topology from two client-side and two server-side calls between SvrA and
// SvrB (components 142=mtls and 49=http per the inline comments), then asserts:
// exactly two nodes; SvrB typed "http" with mesh layers, SvrA untyped with the
// general layer; and every call carrying both components on each side.
@SneakyThrows @Test public void testServiceTopologyBuild() { Service svrA = getSvrA(); Service svrB = getSvrB(); final ServiceTopologyBuilder serviceTopologyBuilder = new ServiceTopologyBuilder(moduleManager); Whitebox.setInternalState(serviceTopologyBuilder, "metadataQueryService", metadataQueryService); when(metadataQueryService.getService(svrA.getId())).thenReturn(svrA); when(metadataQueryService.getService(svrB.getId())).thenReturn(svrB); List<Call.CallDetail> serviceRelationClientCalls = new ArrayList<>(); Call.CallDetail call1 = new Call.CallDetail(); call1.buildFromServiceRelation( IDManager.ServiceID.buildRelationId( new IDManager.ServiceID.ServiceRelationDefine( IDManager.ServiceID.buildId(svrA.getName(), true), IDManager.ServiceID.buildId(svrB.getName(), true) ) ), // mtls 142, DetectPoint.CLIENT ); serviceRelationClientCalls.add(call1); Call.CallDetail call2 = new Call.CallDetail(); call2.buildFromServiceRelation( IDManager.ServiceID.buildRelationId( new IDManager.ServiceID.ServiceRelationDefine( IDManager.ServiceID.buildId(svrA.getName(), true), IDManager.ServiceID.buildId(svrB.getName(), true) ) ), // http 49, DetectPoint.CLIENT ); serviceRelationClientCalls.add(call2); List<Call.CallDetail> serviceRelationServerCalls = new ArrayList<>(); Call.CallDetail call3 = new Call.CallDetail(); call3.buildFromServiceRelation( IDManager.ServiceID.buildRelationId( new IDManager.ServiceID.ServiceRelationDefine( IDManager.ServiceID.buildId(svrA.getName(), true), IDManager.ServiceID.buildId(svrB.getName(), true) ) ), // mtls 142, DetectPoint.SERVER ); serviceRelationServerCalls.add(call3); Call.CallDetail call4 = new Call.CallDetail(); call4.buildFromServiceRelation( IDManager.ServiceID.buildRelationId( new IDManager.ServiceID.ServiceRelationDefine( IDManager.ServiceID.buildId(svrA.getName(), true), IDManager.ServiceID.buildId(svrB.getName(), true) ) ), // http 49, DetectPoint.SERVER ); serviceRelationServerCalls.add(call4); final Topology topology = 
serviceTopologyBuilder.build(serviceRelationClientCalls, serviceRelationServerCalls); Assertions.assertEquals(2, topology.getNodes().size()); for (final Node node : topology.getNodes()) { if (node.getName().equals("SvrB")) { Assertions.assertEquals("http", node.getType()); Assertions.assertEquals(Set.of(Layer.MESH.name(), Layer.MESH_DP.name()), node.getLayers()); } else if (node.getName().equals("SvrA")) { Assertions.assertEquals(null, node.getType()); Assertions.assertEquals(Set.of(Layer.GENERAL.name()), node.getLayers()); } } for (final Call call : topology.getCalls()) { Assertions.assertEquals(2, call.getSourceComponents().size()); Assertions.assertEquals(List.of("mtls", "http"), call.getTargetComponents()); } }
/**
 * Renders the given AST node as SQL text, stripping any trailing newline
 * characters the formatter leaves behind.
 */
public static String formatSql(final AstNode root) {
    final StringBuilder sql = new StringBuilder();
    new Formatter(sql).process(root, 0);
    final String rendered = sql.toString();
    return StringUtils.stripEnd(rendered, "\n");
}
// Verifies the formatter backtick-quotes reserved words used as column (`SELECT`)
// and source (`TABLE`) identifiers in the round-tripped statement text.
@Test public void shouldEscapeReservedNameAndAlias() { // Given: final Statement statement = parseSingle("CREATE STREAM a AS SELECT `SELECT` FROM `TABLE`;"); // When: final String result = SqlFormatter.formatSql(statement); // Then: assertThat(result, is("CREATE STREAM A AS SELECT `SELECT`\nFROM `TABLE` `TABLE`\nEMIT CHANGES")); }
/**
 * Encodes the given criterion into JSON, delegating all per-criterion-type
 * handling to the encode helper.
 */
@Override
public ObjectNode encode(Criterion criterion, CodecContext context) {
    return new EncodeCriterionCodecHelper(criterion, context).encode();
}
// Verifies a masked-Ethernet-source criterion survives JSON encoding (checked by
// the matchesCriterion hamcrest matcher defined elsewhere).
@Test public void matchEthSrcMaskTest() { Criterion criterion = Criteria.matchEthSrcMasked(mcastMac, mcastMacMask); ObjectNode result = criterionCodec.encode(criterion, context); assertThat(result, matchesCriterion(criterion)); }
/**
 * Returns the column value, applying data masking when a mask algorithm is
 * configured for this column. Falls back to the raw merged value when the
 * column projection, mask table, or algorithm is absent; a null raw value is
 * returned as null without invoking the algorithm.
 */
@SuppressWarnings({"rawtypes", "unchecked"}) @Override public Object getValue(final int columnIndex, final Class<?> type) throws SQLException { Optional<ColumnProjection> columnProjection = selectStatementContext.getProjectionsContext().findColumnProjection(columnIndex); if (!columnProjection.isPresent()) { return mergedResult.getValue(columnIndex, type); } Optional<MaskTable> maskTable = maskRule.findMaskTable(columnProjection.get().getOriginalTable().getValue()); if (!maskTable.isPresent()) { return mergedResult.getValue(columnIndex, type); } Optional<MaskAlgorithm> maskAlgorithm = maskTable.get().findAlgorithm(columnProjection.get().getName().getValue()); if (!maskAlgorithm.isPresent()) { return mergedResult.getValue(columnIndex, type); } Object originalValue = mergedResult.getValue(columnIndex, Object.class); return null == originalValue ? null : maskAlgorithm.get().mask(originalValue); }
// Verifies getValue() routes the raw value through the mocked mask algorithm
// (fixture mocks return "MASK_VALUE" for input "VALUE").
@Test void assertGetValue() throws SQLException { when(mergedResult.getValue(1, Object.class)).thenReturn("VALUE"); assertThat(new MaskMergedResult(mockMaskRule(), mockSelectStatementContext(), mergedResult).getValue(1, String.class), is("MASK_VALUE")); }
/**
 * Handles the DB_PUT_HEX operation: stores the message body as a hex value under
 * the database/key taken from headers (falling back to the endpoint
 * configuration). On success the message body is replaced with the node's
 * valueStored flag; on error checkForError is expected to populate the message.
 */
@InvokeOnHeader(Web3jConstants.DB_PUT_HEX) void dbPutHex(Message message) throws IOException { String databaseName = message.getHeader(Web3jConstants.DATABASE_NAME, configuration::getDatabaseName, String.class); String keyName = message.getHeader(Web3jConstants.KEY_NAME, configuration::getKeyName, String.class); Request<?, DbPutHex> request = web3j.dbPutHex(databaseName, keyName, message.getBody(String.class)); setRequestId(message, request); DbPutHex response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.valueStored()); } }
// Verifies the DB_PUT_HEX route copies the mocked response's valueStored flag
// (TRUE) into the exchange body on the success path.
@Test public void dbPutHexTest() throws Exception { DbPutHex response = Mockito.mock(DbPutHex.class); Mockito.when(mockWeb3j.dbPutHex(any(), any(), any())).thenReturn(request); Mockito.when(request.send()).thenReturn(response); Mockito.when(response.valueStored()).thenReturn(Boolean.TRUE); Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.DB_PUT_HEX); template.send(exchange); Boolean body = exchange.getIn().getBody(Boolean.class); assertTrue(body); }
/**
 * Parses a schema string into table elements using a parser bound to the
 * supplied type registry.
 */
public static TableElements parse(final String schema, final TypeRegistry typeRegistry) {
    final SchemaParser schemaParser = new SchemaParser(typeRegistry);
    return schemaParser.parse(schema);
}
// Verifies a two-column schema string parses into the expected INTEGER and
// MAP<VARCHAR, VARCHAR> table elements.
@Test public void shouldParseValidSchema() { // Given: final String schema = "foo INTEGER, bar MAP<VARCHAR, VARCHAR>"; // When: final TableElements elements = parser.parse(schema); // Then: assertThat(elements, contains( new TableElement(FOO, new Type(SqlTypes.INTEGER)), new TableElement(BAR, new Type(SqlTypes.map(SqlTypes.STRING, SqlTypes.STRING ))) )); }
public void begin(InterpretationContext ec, String localName, Attributes attributes) { if ("substitutionProperty".equals(localName)) { addWarn("[substitutionProperty] element has been deprecated. Please use the [property] element instead."); } String name = attributes.getValue(NAME_ATTRIBUTE); String value = attributes.getValue(VALUE_ATTRIBUTE); String scopeStr = attributes.getValue(SCOPE_ATTRIBUTE); Scope scope = ActionUtil.stringToScope(scopeStr); if (checkFileAttributeSanity(attributes)) { String file = attributes.getValue(FILE_ATTRIBUTE); file = ec.subst(file); try { FileInputStream istream = new FileInputStream(file); loadAndSetProperties(ec, istream, scope); } catch (FileNotFoundException e) { addError("Could not find properties file [" + file + "].", e); } catch (IOException e1) { addError("Could not read properties file [" + file + "].", e1); } } else if (checkResourceAttributeSanity(attributes)) { String resource = attributes.getValue(RESOURCE_ATTRIBUTE); resource = ec.subst(resource); URL resourceURL = Loader.getResourceBySelfClassLoader(resource); if (resourceURL == null) { addError("Could not find resource [" + resource + "]."); } else { try { InputStream istream = resourceURL.openStream(); loadAndSetProperties(ec, istream, scope); } catch (IOException e) { addError("Could not read resource file [" + resource + "].", e); } } } else if (checkValueNameAttributesSanity(attributes)) { value = RegularEscapeUtil.basicEscape(value); // now remove both leading and trailing spaces value = value.trim(); value = ec.subst(value); ActionUtil.setProperty(ec, name, value, scope); } else { addError(INVALID_ATTRIBUTES); } }
// Verifies begin() with no attributes records exactly one status entry and
// that it is an error (the INVALID_ATTRIBUTES path).
@Test public void noAttributes() { propertyAction.begin(ec, null, atts); assertEquals(1, context.getStatusManager().getCount()); assertTrue(checkError()); StatusPrinter.print(context); }
/**
 * Resolves a SELECT * by delegating to the parent source, then substituting
 * any configured alias; columns without an alias keep their original name.
 */
@Override
public Stream<ColumnName> resolveSelectStar(
    final Optional<SourceName> sourceName
) {
    final Stream<ColumnName> parentColumns = getSource().resolveSelectStar(sourceName);
    return parentColumns.map(name -> aliases.getOrDefault(name, name));
}
// Verifies resolveSelectStar delegates to the parent source node with the same
// source name (delegation only; the alias mapping is covered elsewhere).
@Test public void shouldResolveStarSelectByCallingParent() { // Given: final Optional<SourceName> sourceName = Optional.of(SourceName.of("Bob")); // When: projectNode.resolveSelectStar(sourceName); // Then: verify(source).resolveSelectStar(sourceName); }
/**
 * Sets the issue message after validating it, replacing NUL characters,
 * trimming whitespace, and truncating to the maximum allowed message size.
 *
 * @return this location, for chaining
 */
@Override
public DefaultIssueLocation message(String message) {
    validateMessage(message);
    this.message = abbreviate(trim(sanitizeNulls(message)), Issue.MESSAGE_MAX_SIZE);
    return this;
}
// Verifies a NUL character ('\u0000') in the message is replaced with the
// literal "[NULL]" marker by the formatted-message overload.
@Test public void message_whenSettingMessageWithFormattings_shouldReplaceNullChar() { assertThat(new DefaultIssueLocation().message("test " + '\u0000' + "123", Collections.emptyList()).message()).isEqualTo("test [NULL]123"); }
/**
 * GET /status — resolves the caller's MijnDigiD session from the session header
 * and returns the account data for the account bound to that session.
 */
@GetMapping("/status")
public AccountDataResult getAccountStatus(@RequestHeader(MijnDigidSession.MIJN_DIGID_SESSION_HEADER) String mijnDigiDsessionId) {
    final MijnDigidSession session = retrieveMijnDigiDSession(mijnDigiDsessionId);
    return accountService.getAccountData(session.getAccountId());
}
// Verifies the controller returns the account-data result from the service
// unchanged, field by field, for a valid session id.
@Test public void testValidRequest() { AccountDataResult result = new AccountDataResult(); result.setStatus(Status.OK); result.setError("error"); result.setEmailStatus(EmailStatus.NOT_VERIFIED); result.setClassifiedDeceased(true); result.setSetting2Factor(true); result.setUnreadNotifications(0); result.setCurrentEmailAddress("email"); when(accountService.getAccountData(eq(mijnDigiDSession.getAccountId().longValue()))).thenReturn(result); AccountDataResult accountData = accountDataController.getAccountStatus(mijnDigiDSession.getId()); assertEquals(Status.OK, accountData.getStatus()); assertEquals("error", accountData.getError()); assertEquals(EmailStatus.NOT_VERIFIED, accountData.getEmailStatus()); assertEquals(true, accountData.getClassifiedDeceased()); assertEquals(0, accountData.getUnreadNotifications()); assertEquals(true, accountData.getSetting2Factor()); assertEquals("email", accountData.getCurrentEmailAddress()); }
/**
 * Commits the given offsets within the current producer transaction (beginning
 * one if none is in flight). Requires exactly-once to be enabled. Under EOS-v1
 * the consumer group metadata is downgraded to group-id-only since pre-2.5
 * brokers cannot handle the full metadata. Fencing-class errors are rethrown as
 * TaskMigratedException, timeouts are rethrown unchanged so the task timeout
 * machinery can handle them, and any other KafkaException becomes a fatal
 * StreamsException. Note: transactionInFlight is cleared only on success.
 */
protected void commitTransaction(final Map<TopicPartition, OffsetAndMetadata> offsets, final ConsumerGroupMetadata consumerGroupMetadata) { if (!eosEnabled()) { throw new IllegalStateException(formatException("Exactly-once is not enabled")); } maybeBeginTransaction(); try { // EOS-v2 assumes brokers are on version 2.5+ and thus can understand the full set of consumer group metadata // Thus if we are using EOS-v1 and can't make this assumption, we must downgrade the request to include only the group id metadata final ConsumerGroupMetadata maybeDowngradedGroupMetadata = processingMode == EXACTLY_ONCE_V2 ? consumerGroupMetadata : new ConsumerGroupMetadata(consumerGroupMetadata.groupId()); producer.sendOffsetsToTransaction(offsets, maybeDowngradedGroupMetadata); producer.commitTransaction(); transactionInFlight = false; } catch (final ProducerFencedException | InvalidProducerEpochException | CommitFailedException | InvalidPidMappingException error) { throw new TaskMigratedException( formatException("Producer got fenced trying to commit a transaction"), error ); } catch (final TimeoutException timeoutException) { // re-throw to trigger `task.timeout.ms` throw timeoutException; } catch (final KafkaException error) { throw new StreamsException( formatException("Error encountered trying to commit a transaction"), error ); } }
// Verifies a fatal RuntimeException from commitTransaction propagates to the
// caller, and that offsets were already sent before the failure.
@Test public void shouldFailOnEosCommitTxFatal() { eosAlphaMockProducer.commitTransactionException = new RuntimeException("KABOOM!"); final RuntimeException thrown = assertThrows( RuntimeException.class, () -> eosAlphaStreamsProducer.commitTransaction(offsetsAndMetadata, new ConsumerGroupMetadata("appId")) ); assertThat(eosAlphaMockProducer.sentOffsets(), is(true)); assertThat(thrown.getMessage(), is("KABOOM!")); }
/**
 * Registers the listener, routing set-add and set-remove listeners to the
 * corresponding keyspace-notification channels, tracking listeners to the
 * tracking mechanism, and everything else to the superclass.
 *
 * @return the registration id of the listener
 */
@Override
public int addListener(ObjectListener listener) {
    if (listener instanceof SetAddListener) {
        SetAddListener addListener = (SetAddListener) listener;
        return addListener("__keyevent@*:zadd", addListener, SetAddListener::onAdd);
    }
    if (listener instanceof SetRemoveListener) {
        SetRemoveListener removeListener = (SetRemoveListener) listener;
        return addListener("__keyevent@*:zrem", removeListener, SetRemoveListener::onRemove);
    }
    if (listener instanceof TrackingListener) {
        return addTrackingListener((TrackingListener) listener);
    }
    return super.addListener(listener);
}
// Verifies a SetAddListener fires once for an add, and no longer fires after the
// cache is destroyed (requires keyspace notifications "Ez" enabled).
@Test public void testAddListener() { testWithParams(redisson -> { RSetCache<Integer> ss = redisson.getSetCache("test"); AtomicInteger latch = new AtomicInteger(); int id = ss.addListener(new SetAddListener() { @Override public void onAdd(String name) { latch.incrementAndGet(); } }); ss.add(1, 10, TimeUnit.SECONDS); Awaitility.await().atMost(Duration.ofSeconds(1)).untilAsserted(() -> { assertThat(latch.get()).isEqualTo(1); }); ss.destroy(); ss.add(1, 10, TimeUnit.SECONDS); Awaitility.await().pollDelay(Duration.ofSeconds(1)).atMost(Duration.ofSeconds(2)) .untilAsserted(() -> assertThat(latch.get()).isEqualTo(1)); }, NOTIFY_KEYSPACE_EVENTS, "Ez"); }
/**
 * Static factory creating a token bound to the given domain with the given
 * secret string.
 */
public static Token of(TokenDomain domain, String secretTokenString) { return new Token(domain, secretTokenString); }
// Verifies the check hash is domain-separated: the same secret under three
// different domains yields three distinct (pinned) 16-byte hex digests.
@Test void different_check_hash_domains_give_different_outputs() { var d1 = TokenDomain.of("domain: 1"); var d2 = TokenDomain.of("domain: 2"); var d3 = TokenDomain.of("domain: 3"); assertEquals("cc0c504b52bfd9b0a9cdb1651c0f3515", TokenCheckHash.of(Token.of(d1, "foo"), 16).toHexString()); assertEquals("a27c7fc350699c71bc456a86bd571479", TokenCheckHash.of(Token.of(d2, "foo"), 16).toHexString()); assertEquals("119cc7046689e6de796fd4005aaab6dc", TokenCheckHash.of(Token.of(d3, "foo"), 16).toHexString()); }
/**
 * Builds a fully configured Containerizer from the CLI options: creates the
 * base instance, attaches event handlers, then applies configuration
 * (caches, tags, etc.).
 *
 * @throws InvalidImageReferenceException if the target image reference is invalid
 * @throws FileNotFoundException if a referenced file is missing
 */
public static Containerizer from(
    CommonCliOptions commonCliOptions,
    ConsoleLogger logger,
    CacheDirectories cacheDirectories)
    throws InvalidImageReferenceException, FileNotFoundException {
    final Containerizer result = create(commonCliOptions, logger);
    applyHandlers(result, logger);
    applyConfiguration(result, commonCliOptions, cacheDirectories);
    return result;
}
// Verifies the default configuration applied by Containerizers.from: no insecure
// registries, no credential-over-HTTP/serialize system properties, tool
// name/version set, expected cache directories, and no additional tags.
@Test public void testApplyConfiguration_defaults() throws InvalidImageReferenceException, FileNotFoundException, CacheDirectoryCreationException { CommonCliOptions commonCliOptions = CommandLine.populateCommand(new CommonCliOptions(), "-t", "test-image-ref"); ContainerizerTestProxy containerizer = new ContainerizerTestProxy( Containerizers.from(commonCliOptions, consoleLogger, cacheDirectories)); assertThat(Boolean.getBoolean(JibSystemProperties.SEND_CREDENTIALS_OVER_HTTP)).isFalse(); assertThat(Boolean.getBoolean(JibSystemProperties.SERIALIZE)).isFalse(); assertThat(containerizer.getToolName()).isEqualTo(VersionInfo.TOOL_NAME); assertThat(containerizer.getToolVersion()).isEqualTo(VersionInfo.getVersionSimple()); assertThat(Boolean.getBoolean("sendCredentialsOverHttp")).isFalse(); assertThat(containerizer.getAllowInsecureRegistries()).isFalse(); assertThat(containerizer.getBaseImageLayersCacheDirectory()).isEqualTo(baseImageCache); assertThat(containerizer.getApplicationsLayersCacheDirectory()).isEqualTo(applicationCache); assertThat(containerizer.getAdditionalTags()).isEqualTo(ImmutableSet.of()); }
public void densify(FeatureMap fMap) { // Densify! - guitar solo List<String> featureNames = new ArrayList<>(fMap.keySet()); Collections.sort(featureNames); densify(featureNames); }
// Covers densify on ArrayExample for four sparsity patterns — single feature,
// already dense, interior features only, edge features only — checking the dense
// result and protobuf round-tripping each time.
@Test public void testArrayExampleDensify() { MockOutput output = new MockOutput("UNK"); Example<MockOutput> example, expected; // Single feature example = new ArrayExample<>(output, new String[]{"F0"}, new double[]{1.0}); example.densify(Arrays.asList(featureNames)); expected = new ArrayExample<>(new MockOutput("UNK"), featureNames, new double[]{1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0}); checkDenseExample(expected,example); testProtoSerialization(example); // Already dense example = new ArrayExample<>(output, featureNames, new double[]{1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0}); example.densify(Arrays.asList(featureNames)); expected = new ArrayExample<>(new MockOutput("UNK"), featureNames, new double[]{1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0}); checkDenseExample(expected,example); testProtoSerialization(example); // No edges example = new ArrayExample<>(output, new String[]{"F1","F3","F5","F6","F8"}, new double[]{1.0,1.0,1.0,1.0,1.0}); example.densify(Arrays.asList(featureNames)); expected = new ArrayExample<>(new MockOutput("UNK"), featureNames, new double[]{0.0,1.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,0.0}); checkDenseExample(expected,example); testProtoSerialization(example); // Only edges example = new ArrayExample<>(output, new String[]{"F0","F1","F8","F9"}, new double[]{1.0,1.0,1.0,1.0}); example.densify(Arrays.asList(featureNames)); expected = new ArrayExample<>(new MockOutput("UNK"), featureNames, new double[]{1.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0}); checkDenseExample(expected,example); testProtoSerialization(example); }
/**
 * Splits a comma-separated string into tokens, trimming whitespace around each
 * comma and around the whole input. A null input yields an empty list; note an
 * empty input yields a one-element list containing "" (String.split semantics).
 */
static List<String> splitByComma(String string) {
    if (string == null) {
        return Collections.emptyList();
    }
    final String trimmed = string.trim();
    return asList(trimmed.split("\\s*,\\s*"));
}
// Verifies splitByComma trims tokens, handles a single token, and maps null to
// an empty list.
@Test public void splitByCommaTest() { assertEquals(asList("project1", "project2"), splitByComma("project1,project2")); assertEquals(asList("project1", "project2"), splitByComma(" project1 , project2 ")); assertEquals(asList("project1"), splitByComma("project1")); assertEquals(emptyList(), splitByComma(null)); }
static void remove(Object[] input, int i) { int j = i + 2; for (; j < input.length; i += 2, j += 2) { if (input[j] == null) break; // found null key input[i] = input[j]; input[i + 1] = input[j + 1]; } input[i] = input[i + 1] = null; }
// Verifies pair removal at the start, middle, and end of a six-slot pair array:
// later pairs shift left and the freed slots become null.
@Test void remove() { // internally, remove is never called on odd number, or at or after array length { Object[] array = new Object[] {1, 2, 3, 4, 5, 6}; MutableSpan.remove(array, 0); assertThat(array).containsExactly(3, 4, 5, 6, null, null); } { Object[] array = new Object[] {1, 2, 3, 4, 5, 6}; MutableSpan.remove(array, 2); assertThat(array).containsExactly(1, 2, 5, 6, null, null); } { Object[] array = new Object[] {1, 2, 3, 4, 5, 6}; MutableSpan.remove(array, 4); assertThat(array).containsExactly(1, 2, 3, 4, null, null); } }
/**
 * Returns the effective server list: explicitly configured servers take
 * precedence; when none are configured, the endpoint-discovered list is used.
 */
@Override
public List<String> getServerList() {
    if (serverList.isEmpty()) {
        return serversFromEndpoint;
    }
    return serverList;
}
// Verifies endpoint-based construction yields the single default server, and
// that a simulated refresh (mockThreadInvoke) picks up the two servers returned
// by the endpoint.
@Test void testConstructWithEndpointAndRefresh() throws Exception { Properties properties = new Properties(); properties.put(PropertyKeyConst.ENDPOINT, "127.0.0.1"); serverListManager = new ServerListManager(properties); List<String> serverList = serverListManager.getServerList(); assertEquals(1, serverList.size()); assertEquals("127.0.0.1:8848", serverList.get(0)); httpRestResult.setData("127.0.0.1:8848\n127.0.0.1:8948"); mockThreadInvoke(serverListManager, true); serverList = serverListManager.getServerList(); assertEquals(2, serverList.size()); assertEquals("127.0.0.1:8848", serverList.get(0)); assertEquals("127.0.0.1:8948", serverList.get(1)); }
public static String Rpad( String valueToPad, String filler, int size ) { if ( ( size == 0 ) || ( valueToPad == null ) || ( filler == null ) ) { return valueToPad; } int vSize = valueToPad.length(); int fSize = filler.length(); // This next if ensures previous behavior, but prevents infinite loop // if "" is passed in as a filler. if ( ( vSize >= size ) || ( fSize == 0 ) ) { return valueToPad; } int tgt = ( size - vSize ); StringBuilder sb1 = new StringBuilder( size ); sb1.append( filler ); while ( sb1.length() < tgt ) { // instead of adding one character at a time, this // is exponential - much fewer times in loop sb1.append( sb1 ); } StringBuilder sb = new StringBuilder( valueToPad ); sb.append( sb1 ); return sb.substring( 0, size ); }
// Pins Rpad's behavior: no-op for size 0 / too-small size / null or empty filler,
// null passthrough, multi-char filler truncation, and padding an empty string.
@Test public void testRpad() { final String s = "pad me"; assertEquals( s, Const.Rpad( s, "-", 0 ) ); assertEquals( s, Const.Rpad( s, "-", 3 ) ); assertEquals( s + "--", Const.Rpad( s, "-", 8 ) ); // add in some edge cases assertEquals( s, Const.Rpad( s, null, 15 ) ); // No NPE assertEquals( s, Const.Rpad( s, "", 15 ) ); assertEquals( s, Const.Rpad( s, "*", 5 ) ); assertEquals( null, Const.Rpad( null, "*", 15 ) ); assertEquals( "Test****", Const.Rpad( "Test", "**********", 8 ) ); assertEquals( "Test*", Const.Rpad( "Test", "**", 5 ) ); assertEquals( "****", Const.Rpad( "", "*", 4 ) ); }
/**
 * Authenticates a user by name or email. Lookup tries the name first, then the
 * email. Returns empty for null arguments, unknown users, disabled accounts, or
 * a failed password check; on success runs post-login activities and returns
 * the user.
 */
public Optional<User> login(String nameOrEmail, String password) {
    if (nameOrEmail == null || password == null) {
        return Optional.empty();
    }
    User user = userDAO.findByName(nameOrEmail);
    if (user == null) {
        user = userDAO.findByEmail(nameOrEmail);
    }
    if (user == null || user.isDisabled()) {
        return Optional.empty();
    }
    if (!encryptionService.authenticate(password, user.getPassword(), user.getSalt())) {
        return Optional.empty();
    }
    performPostLoginActivities(user);
    return Optional.of(user);
}
// Verifies login returns an empty Optional when the password is null.
@Test void callingLoginShouldNotReturnUserObjectWhenGivenNullPassword() { Optional<User> user = userService.login("testusername", null); Assertions.assertFalse(user.isPresent()); }
/**
 * Performs a source split request with the default limits for bundle count and
 * Dataflow API response size; thin wrapper over performSplitWithApiLimit.
 */
public static SourceOperationResponse performSplit( SourceSplitRequest request, PipelineOptions options) throws Exception { return performSplitWithApiLimit( request, options, DEFAULT_NUM_BUNDLES_LIMIT, DATAFLOW_SPLIT_RESPONSE_API_SIZE_LIMIT); }
// Verifies the API-size limit is applied before the bundle-count limit when
// splitting an oversplitting source: with both limits active the bundle count
// matches the size-limit-only run (<100), not the count-limit-only run (100).
@Test public void testOversplittingDesiredBundleSizeScaledFirst() throws Exception { // Create a source that greatly oversplits but with coalescing/compression it would still fit // under the API limit. Test that the API limit gets applied first, so oversplitting is // reduced. com.google.api.services.dataflow.model.Source source = translateIOToCloudSource(CountingSource.upTo(8000), options); // Without either limit, produces 1000 bundles, total size ~500kb. // With only numBundles limit 100, produces 100 bundles, total size ~72kb. // With only apiSize limit = 10kb, 72 bundles, total size ~40kb (over the limit but oh well). // With numBundles limit 100 and apiSize limit 10kb, should produce 72 bundles. // On the other hand, if the numBundles limit of 100 was applied first, we'd get 100 bundles. SourceSplitResponse bundledWithOnlyNumBundlesLimit = performSplit( source, options, 8L, 100 /* numBundles limit */, 10000 * 1024L /* API size limit */); assertEquals(100, bundledWithOnlyNumBundlesLimit.getBundles().size()); assertThat( DataflowApiUtils.computeSerializedSizeBytes(bundledWithOnlyNumBundlesLimit), greaterThan(10 * 1024L)); SourceSplitResponse bundledWithOnlySizeLimit = performSplit( source, options, 8L, 1000000 /* numBundles limit */, 10 * 1024L /* API size limit */); int numBundlesWithOnlySizeLimit = bundledWithOnlySizeLimit.getBundles().size(); assertThat(numBundlesWithOnlySizeLimit, lessThan(100)); SourceSplitResponse bundledWithSizeLimit = performSplit(source, options, 8L, 100, 10 * 1024L); assertEquals(numBundlesWithOnlySizeLimit, bundledWithSizeLimit.getBundles().size()); }
@Override
public void setUnixPermission(final Path file, final TransferStatus status) throws BackgroundException {
    // Translate the octal permission string into SFTP file attributes.
    final int mode = Integer.parseInt(status.getPermission().getMode(), 8);
    final FileAttributes attributes = new FileAttributes.Builder()
        .withPermissions(mode)
        .build();
    try {
        session.sftp().setAttributes(file.getAbsolute(), attributes);
    }
    catch(IOException e) {
        throw new SFTPExceptionMappingService().map("Failure to write attributes of {0}", e, file);
    }
}
// Verifies that the setuid, setgid and sticky special bits each survive a round trip
// through setUnixPermission and a subsequent directory listing.
// Ignored: requires a live SFTP session.
@Test
@Ignore
public void testRetainStickyBits() throws Exception {
    final Path test = new Path(new SFTPHomeDirectoryService(session).find(),
        UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
    new SFTPTouchFeature(session).touch(test, new TransferStatus());
    final SFTPUnixPermissionFeature feature = new SFTPUnixPermissionFeature(session);
    // setuid bit
    feature.setUnixPermission(test,
        new Permission(Permission.Action.all, Permission.Action.read, Permission.Action.read,
            true, false, false));
    assertEquals(new Permission(Permission.Action.all, Permission.Action.read, Permission.Action.read,
            true, false, false),
        new SFTPListService(session).list(test.getParent(),
            new DisabledListProgressListener()).get(test).attributes().getPermission());
    // setgid bit
    feature.setUnixPermission(test,
        new Permission(Permission.Action.all, Permission.Action.read, Permission.Action.read,
            false, true, false));
    assertEquals(new Permission(Permission.Action.all, Permission.Action.read, Permission.Action.read,
            false, true, false),
        new SFTPListService(session).list(test.getParent(),
            new DisabledListProgressListener()).get(test).attributes().getPermission());
    // sticky bit
    feature.setUnixPermission(test,
        new Permission(Permission.Action.all, Permission.Action.read, Permission.Action.read,
            false, false, true));
    assertEquals(new Permission(Permission.Action.all, Permission.Action.read, Permission.Action.read,
            false, false, true),
        new SFTPListService(session).list(test.getParent(),
            new DisabledListProgressListener()).get(test).attributes().getPermission());
    new SFTPDeleteFeature(session).delete(Collections.<Path>singletonList(test),
        new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/** @return the backing data source identifier; always Derby for this mapper. */
@Override
public String getDataSource() {
    return DataSourceConstant.DERBY;
}
@Test
void testGetDataSource() {
    // The Derby-backed mapper must report the Derby data source constant.
    assertEquals(DataSourceConstant.DERBY, tenantInfoMapperByDerby.getDataSource());
}
/**
 * Creates a new {@link CookieBuilder} bound to the given HTTP request.
 *
 * @param request the request the builder derives cookie settings from
 */
public static CookieBuilder newCookieBuilder(HttpRequest request) {
  return new CookieBuilder(request);
}
@Test
public void fail_with_NPE_when_cookie_name_is_null() {
    // Setting a null cookie name is a programming error and must fail fast.
    final CookieBuilder builder = newCookieBuilder(request);
    assertThatThrownBy(() -> builder.setName(null))
        .isInstanceOf(NullPointerException.class);
}
public void populateModel(HashMap<String, Object> model) {
    // Expose the user's notification settings to the view model.
    model.put("email", email);
    model.put("emailMe", emailMe);
    model.put("matchers", matcher());
    model.put("notificationFilters", notificationFilters);
}
@Test
void shouldPopulateMatchers() {
    user = new User("UserName", new String[]{"Jez,Pavan"}, "user@mail.com", true);
    final HashMap<String, Object> model = new HashMap<>();
    user.populateModel(model);
    // The comma-separated matcher string should be wrapped in a Matcher instance.
    assertThat(model.get("matchers")).isEqualTo(new Matcher("Jez,Pavan"));
}
/**
 * Serializes the given update requirement to its compact (non-pretty-printed) JSON form.
 *
 * @param updateRequirement the requirement to serialize
 */
public static String toJson(UpdateRequirement updateRequirement) {
  return toJson(updateRequirement, false);
}
@Test
public void testAssertDefaultSpecIdToJson() {
    final int specId = 5;
    final String expected =
        String.format(
            "{\"type\":\"%s\",\"default-spec-id\":%d}",
            UpdateRequirementParser.ASSERT_DEFAULT_SPEC_ID, specId);
    // Round-trip the requirement object through the parser's serializer.
    final UpdateRequirement requirement = new UpdateRequirement.AssertDefaultSpecID(specId);
    assertThat(UpdateRequirementParser.toJson(requirement))
        .as("AssertDefaultSpecId should convert to the correct JSON value")
        .isEqualTo(expected);
}
public Set<String> indexNamesForStreamsInTimeRange(final Set<String> streamIds, final TimeRange timeRange) {
    // Data streams are addressed directly by name (minus the prefix) instead of via index ranges.
    final Set<String> dataStreamIndices = streamIds.stream()
            .filter(id -> id.startsWith(Stream.DATASTREAM_PREFIX))
            .map(id -> id.substring(Stream.DATASTREAM_PREFIX.length()))
            .collect(Collectors.toSet());
    // Regular streams resolve through the index ranges overlapping the requested time range.
    final Set<String> rangeIndices = indexRangesForStreamsInTimeRange(streamIds, timeRange).stream()
            .map(IndexRange::indexName)
            .collect(Collectors.toSet());
    return Sets.union(dataStreamIndices, rangeIndices);
}
@Test
void returnsEmptySetForEmptyStreamIds() {
    // With no streams requested, no index names should be resolved at all.
    final IndexLookup sut = new IndexLookup(
            mock(IndexRangeService.class),
            mockStreamService(Collections.emptySet()),
            mock(IndexRangeContainsOneOfStreams.class));
    assertThat(sut.indexNamesForStreamsInTimeRange(emptySet(), timeRangeWithNoIndexRanges)).isEmpty();
}
/**
 * Converts the given string to camel case, treating {@code '_'} as the word separator.
 *
 * @param src the underscore-separated source string
 * @param isUpper whether the result's first word is capitalized (UpperCamelCase vs lowerCamelCase)
 * @return the camel-cased string
 */
public static String toCamel(String src, boolean isUpper) {
    // Fix: this overload previously delegated to toUnderline(src, '_', isUpper), which is
    // the inverse conversion; camel-casing must delegate to the three-argument toCamel
    // overload with the default '_' separator.
    return toCamel(src, '_', isUpper);
}
// Exercises the three-argument toCamel overload with a '-' separator; the boolean flag
// controls whether the first word is capitalized.
@Test
public void testToCamel2() {
    String result = FieldUtils.toCamel("To-Camel", '-', true);
    Assert.assertEquals("ToCamel", result);
    String result1 = FieldUtils.toCamel("To-Camel", '-', false);
    Assert.assertEquals("toCamel", result1);
}
public final <KIn, VIn, KOut, VOut> void addProcessor(final String name,
                                                      final ProcessorSupplier<KIn, VIn, KOut, VOut> supplier,
                                                      final String... predecessorNames) {
    // Validate arguments before mutating any topology state.
    Objects.requireNonNull(name, "name must not be null");
    Objects.requireNonNull(supplier, "supplier must not be null");
    Objects.requireNonNull(predecessorNames, "predecessor names must not be null");
    ApiUtils.checkSupplier(supplier);
    if (nodeFactories.containsKey(name)) {
        throw new TopologyException("Processor " + name + " is already added.");
    }
    if (predecessorNames.length == 0) {
        throw new TopologyException("Processor " + name + " must have at least one parent");
    }
    // Every predecessor must already be registered and must not be the node itself.
    for (final String parent : predecessorNames) {
        Objects.requireNonNull(parent, "predecessor name must not be null");
        if (name.equals(parent)) {
            throw new TopologyException("Processor " + name + " cannot be a predecessor of itself.");
        }
        if (!nodeFactories.containsKey(parent)) {
            throw new TopologyException("Predecessor processor " + parent + " is not added yet for " + name);
        }
    }
    // Register the node and record its connectivity for later sub-topology grouping.
    nodeFactories.put(name, new ProcessorNodeFactory<>(name, predecessorNames, supplier));
    nodeGrouper.add(name);
    nodeGrouper.unite(name, predecessorNames);
    nodeGroups = null;
}
@Test
public void testAddProcessorWithSelfParent() {
    // A processor must not list itself among its own predecessors.
    assertThrows(
        TopologyException.class,
        () -> builder.addProcessor("processor", new MockApiProcessorSupplier<>(), "processor"));
}
/**
 * Computes the cut-off date for closed issues relative to the current system time;
 * closed issues older than this date are eligible for deletion.
 *
 * @return the cut-off date, or {@code null} when closed issues are kept forever
 */
@CheckForNull
public Date maxLiveDateOfClosedIssues() {
  return maxLiveDateOfClosedIssues(new Date(system2.now()));
}
// With a 30-day retention and "now" = 2013-05-18, the cut-off must be 2013-04-18.
// (Date#getYear is offset by 1900 and Date#getMonth is zero-based, hence 113 and 3.)
@Test
void should_delete_only_old_closed_issues() {
    Date now = DateUtils.parseDate("2013-05-18");
    PurgeConfiguration conf = new PurgeConfiguration("root", "project", 30, Optional.empty(), System2.INSTANCE, emptySet(), 0);
    Date toDate = conf.maxLiveDateOfClosedIssues(now);
    assertThat(toDate.getYear()).isEqualTo(113);// =2013
    assertThat(toDate.getMonth()).isEqualTo(3); // means April
    assertThat(toDate.getDate()).isEqualTo(18);
}
@ConstantFunction(name = "bitShiftLeft", argTypes = {LARGEINT, BIGINT}, returnType = LARGEINT) public static ConstantOperator bitShiftLeftLargeInt(ConstantOperator first, ConstantOperator second) { return ConstantOperator.createLargeInt(first.getLargeInt().shiftLeft((int) second.getBigint())); }
@Test
public void bitShiftLeftLargeInt() {
    // 100 << 3 == 800 for the 128-bit integer type.
    final String shifted =
            ScalarOperatorFunctions.bitShiftLeftLargeInt(O_LI_100, O_BI_3).getLargeInt().toString();
    assertEquals("800", shifted);
}
@Override
public Capabilities getCapabilities(String pluginId) {
    final String version =
        pluginManager.resolveExtensionVersion(pluginId, CONFIG_REPO_EXTENSION, goSupportedVersions);
    // Version 1.0 of the extension predates the capabilities request; report nothing supported.
    if (version.equals("1.0")) {
        return new Capabilities(false, false, false, false);
    }
    return pluginRequestHelper.submitRequest(pluginId, REQUEST_CAPABILITIES,
        new DefaultPluginInteractionCallback<>() {
            @Override
            public Capabilities onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) {
                // Delegate response parsing to the handler matching the negotiated version.
                return messageHandlerMap.get(resolvedExtensionVersion).getCapabilitiesFromResponse(responseBody);
            }
        });
}
// For extension version 2.0, the capabilities request must be sent to the plugin and
// the message handler's parsed result returned unchanged (same instance).
@Test
public void shouldRequestCapabilities() {
    Capabilities capabilities = new Capabilities(true, true, false, false);
    when(jsonMessageHandler2.getCapabilitiesFromResponse(responseBody)).thenReturn(capabilities);
    when(pluginManager.resolveExtensionVersion(PLUGIN_ID, CONFIG_REPO_EXTENSION,
        new ArrayList<>(List.of("1.0", "2.0", "3.0")))).thenReturn("2.0");
    Capabilities res = extension.getCapabilities(PLUGIN_ID);
    assertRequest(requestArgumentCaptor.getValue(), CONFIG_REPO_EXTENSION, "2.0",
        ConfigRepoExtension.REQUEST_CAPABILITIES, null);
    assertSame(capabilities, res);
}
public static String extractAttributeNameNameWithoutArguments(String attributeNameWithArguments) {
    // Strips a trailing "[...]" argument suffix, e.g. "car.wheel[2]" -> "car.wheel".
    final int start = StringUtil.lastIndexOf(attributeNameWithArguments, '[');
    final int end = StringUtil.lastIndexOf(attributeNameWithArguments, ']');
    final boolean hasArgumentSuffix = start > 0 && end > start;
    if (hasArgumentSuffix) {
        return attributeNameWithArguments.substring(0, start);
    }
    final boolean hasNoBrackets = start < 0 && end < 0;
    if (hasNoBrackets) {
        return attributeNameWithArguments;
    }
    // Unbalanced or misplaced brackets (e.g. only one of '[' / ']') are rejected.
    throw new IllegalArgumentException("Wrong argument input passed " + attributeNameWithArguments);
}
@Test
public void extractAttributeName_wrongArguments_tooManySquareBrackets_lastExtracted() {
    // Only the last bracket pair is treated as the argument and removed.
    final String extracted = extractAttributeNameNameWithoutArguments("car.wheel[2].pressure[BAR]");
    assertEquals("car.wheel[2].pressure", extracted);
}
List<MetricsReporter> metricsReporters() {
    // Instantiate the configured reporters and bind each to the MirrorMaker metrics context.
    final List<MetricsReporter> reporters = CommonClientConfigs.metricsReporters(this);
    final MetricsContext context = new KafkaMetricsContext("kafka.connect.mirror");
    reporters.forEach(reporter -> reporter.contextChange(context));
    return reporters;
}
@Test
public void testExplicitlyEnableJmxReporter() {
    // Listing JmxReporter explicitly alongside another reporter yields both instances.
    final String reporterList = MockMetricsReporter.class.getName() + "," + JmxReporter.class.getName();
    final Map<String, String> connectorProps = makeProps("metric.reporters", reporterList);
    final MirrorConnectorConfig config = new TestMirrorConnectorConfig(connectorProps);
    assertEquals(2, config.metricsReporters().size());
}
@Override
public OAuth2AccessToken extract(Response response) throws IOException {
    final String body = response.getBody();
    // Reject blank payloads early; an empty body can never contain a token.
    Preconditions.checkEmptyString(body,
        "Response body is incorrect. Can't extract a token from an empty string");
    final int code = response.getCode();
    if (code != 200) {
        // Non-OK responses are converted into a typed error (this call throws).
        generateError(response);
    }
    return createToken(body);
}
@Test
public void shouldThrowExceptionIfResponseIsError() throws IOException {
    final String responseBody = "{"
            + "\"error_description\":\"unknown, invalid, or expired refresh token\","
            + "\"error\":\"invalid_grant\""
            + "}";
    try (Response response = error(responseBody)) {
        // An OAuth error payload must surface as a typed exception carrying both fields.
        final OAuth2AccessTokenErrorResponse oaer =
                assertThrows(OAuth2AccessTokenErrorResponse.class, () -> extractor.extract(response));
        assertEquals(OAuth2Error.INVALID_GRANT, oaer.getError());
        assertEquals("unknown, invalid, or expired refresh token", oaer.getErrorDescription());
    }
}
/**
 * Formats the given number as Chinese numerals without money (yuan/jiao/fen) units.
 *
 * @param amount the number to format
 * @param isUseTraditional whether to use traditional (banker's) characters
 */
public static String format(double amount, boolean isUseTraditional) {
    return format(amount, isUseTraditional, false);
}
@Test
public void singleMoneyTest() {
    // Sub-yuan amounts: fen (0.01) and jiao (0.1) units.
    String text = NumberChineseFormatter.format(0.01, false, true);
    assertEquals("一分", text);
    text = NumberChineseFormatter.format(0.10, false, true);
    assertEquals("一角", text);
    text = NumberChineseFormatter.format(0.12, false, true);
    assertEquals("一角二分", text);
    // Whole and mixed yuan amounts, including the zero-digit placeholder case.
    text = NumberChineseFormatter.format(1.00, false, true);
    assertEquals("一元整", text);
    text = NumberChineseFormatter.format(1.10, false, true);
    assertEquals("一元一角", text);
    text = NumberChineseFormatter.format(1.02, false, true);
    assertEquals("一元零二分", text);
}
/**
 * Scans the execution graph and returns, per execution vertex, the execution attempts
 * currently exceeding that vertex's baseline (see {@code getBaseline}).
 *
 * @param executionGraph the graph to inspect
 * @return map from execution vertex ID to its slow attempts; vertices with no slow
 *         attempts are omitted
 */
@VisibleForTesting
Map<ExecutionVertexID, Collection<ExecutionAttemptID>> findSlowTasks(
        final ExecutionGraph executionGraph) {
    // Single timestamp for the whole scan so all comparisons share the same "now".
    final long currentTimeMillis = System.currentTimeMillis();
    final Map<ExecutionVertexID, Collection<ExecutionAttemptID>> slowTasks = new HashMap<>();
    // Only job vertices that qualify for slow-task detection are inspected.
    final List<ExecutionJobVertex> jobVerticesToCheck = getJobVerticesToCheck(executionGraph);
    for (ExecutionJobVertex ejv : jobVerticesToCheck) {
        // Baseline combines execution time and input bytes for this job vertex.
        final ExecutionTimeWithInputBytes baseline = getBaseline(ejv, currentTimeMillis);
        for (ExecutionVertex ev : ejv.getTaskVertices()) {
            // Terminated vertices can no longer be "slow".
            if (ev.getExecutionState().isTerminal()) {
                continue;
            }
            final List<ExecutionAttemptID> slowExecutions =
                    findExecutionsExceedingBaseline(
                            ev.getCurrentExecutions(), baseline, currentTimeMillis);
            if (!slowExecutions.isEmpty()) {
                slowTasks.put(ev.getID(), slowExecutions);
            }
        }
    }
    return slowTasks;
}
// With balanced input bytes (1024 each) across the downstream vertices — one of them
// already finished — and a detector configured with a very large baseline lower bound,
// no running task should be flagged as slow.
@Test
void testBalancedInputWithLargeLowerBound() throws Exception {
    final int parallelism = 3;
    final JobVertex jobVertex1 = createNoOpVertex(parallelism);
    final JobVertex jobVertex2 = createNoOpVertex(parallelism);
    jobVertex2.connectNewDataSetAsInput(
        jobVertex1, DistributionPattern.ALL_TO_ALL, ResultPartitionType.PIPELINED);
    final ExecutionGraph executionGraph = createExecutionGraph(jobVertex1, jobVertex2);
    // NOTE(review): assumes (0.3, 1, Integer.MAX_VALUE) maps to (ratio, multiplier,
    // baseline lower bound) in createSlowTaskDetector — confirm against the helper.
    final ExecutionTimeBasedSlowTaskDetector slowTaskDetector =
        createSlowTaskDetector(0.3, 1, Integer.MAX_VALUE);
    final ExecutionVertex ev21 =
        executionGraph.getJobVertex(jobVertex2.getID()).getTaskVertices()[0];
    ev21.setInputBytes(1024);
    final ExecutionVertex ev22 =
        executionGraph.getJobVertex(jobVertex2.getID()).getTaskVertices()[1];
    ev22.setInputBytes(1024);
    final ExecutionVertex ev23 =
        executionGraph.getJobVertex(jobVertex2.getID()).getTaskVertices()[2];
    ev23.setInputBytes(1024);
    // Finished vertices are skipped by findSlowTasks but contribute to the baseline.
    ev23.getCurrentExecutionAttempt().markFinished();
    final Map<ExecutionVertexID, Collection<ExecutionAttemptID>> slowTasks =
        slowTaskDetector.findSlowTasks(executionGraph);
    assertThat(slowTasks).isEmpty();
}
/**
 * Rounds the given non-negative value up to the nearest power of two.
 * Both 0 and 1 round to 1; a value that is already a power of two is returned unchanged.
 *
 * @throws IllegalArgumentException if the value is negative or exceeds 2^30
 */
public static int roundToPowerOfTwo(final int value) {
    if (value > MAX_POW2) {
        throw new IllegalArgumentException("There is no larger power of 2 int for value:"+value+" since it exceeds 2^31.");
    }
    if (value < 0) {
        throw new IllegalArgumentException("Given value:"+value+". Expecting value >= 0.");
    }
    if (value <= 1) {
        return 1;
    }
    // Doubling the highest set bit of (value - 1) yields the next power of two,
    // and yields value itself when value is already a power of two.
    return Integer.highestOneBit(value - 1) << 1;
}
// Negative input must be rejected with IllegalArgumentException.
@Test(expected = IllegalArgumentException.class)
public void testNegativeRoundException() {
    // No fail() is needed after this call: @Test(expected=...) already fails the test
    // when no exception is thrown, so the removed trailing fail() was unreachable.
    Pow2.roundToPowerOfTwo(-1);
}
/**
 * Creates a command for the given configured statement, using the execution context's
 * own service context.
 */
public Command create(
    final ConfiguredStatement<? extends Statement> statement,
    final KsqlExecutionContext context) {
  return create(statement, context.getServiceContext(), context);
}
// An editable system property must be accepted without raising an exception.
// NOTE(review): the statement text names 'ksql.streams.upgrade.from' while the mocked
// property is 'ksql.streams.commit.interval.ms' — presumably only the mocked values are
// consulted by the factory; confirm the raw statement text is irrelevant here.
@Test
public void shouldNotRaiseExceptionIfKeyInEditablePropertiesList() {
    configuredStatement = configuredStatement("ALTER SYSTEM 'ksql.streams.upgrade.from'='TEST';",
        alterSystemProperty);
    when(alterSystemProperty.getPropertyName()).thenReturn("ksql.streams.commit.interval.ms");
    when(alterSystemProperty.getPropertyValue()).thenReturn("100");
    when(config.getBoolean(KsqlConfig.KSQL_SHARED_RUNTIME_ENABLED)).thenReturn(true);
    commandFactory.create(configuredStatement, executionContext);
}
/**
 * Parses incoming chat messages and persists kill counts, personal bests, duel-arena
 * streaks, Hallowed Sepulchre times, pets and Guardians of the Rift counts to the
 * player's profile configuration.
 */
@Subscribe
public void onChatMessage(ChatMessage chatMessage)
{
	// Only message types that can carry kill/duel/pb notifications are processed.
	if (chatMessage.getType() != ChatMessageType.TRADE
		&& chatMessage.getType() != ChatMessageType.GAMEMESSAGE
		&& chatMessage.getType() != ChatMessageType.SPAM
		&& chatMessage.getType() != ChatMessageType.FRIENDSCHATNOTIFICATION)
	{
		return;
	}

	String message = chatMessage.getMessage();
	Matcher matcher = KILLCOUNT_PATTERN.matcher(message);
	if (matcher.find())
	{
		final String boss = matcher.group("boss");
		final int kc = Integer.parseInt(matcher.group("kc"));
		final String pre = matcher.group("pre");
		final String post = matcher.group("post");

		// Neither prefix nor suffix matched: treat as a reset and clear the stored kc.
		if (Strings.isNullOrEmpty(pre) && Strings.isNullOrEmpty(post))
		{
			unsetKc(boss);
			return;
		}

		String renamedBoss = KILLCOUNT_RENAMES
			.getOrDefault(boss, boss)
			// The config service doesn't support keys with colons in them
			.replace(":", "");
		// NOTE(review): reference (!=) comparison relies on getOrDefault/replace returning
		// the same String instance when nothing changed — fragile; confirm intent.
		if (boss != renamedBoss)
		{
			// Unset old TOB kc
			unsetKc(boss);
			unsetPb(boss);
			unsetKc(boss.replace(":", "."));
			unsetPb(boss.replace(":", "."));
			// Unset old story mode
			unsetKc("Theatre of Blood Story Mode");
			unsetPb("Theatre of Blood Story Mode");
		}

		setKc(renamedBoss, kc);
		// We either already have the pb, or need to remember the boss for the upcoming pb
		if (lastPb > -1)
		{
			log.debug("Got out-of-order personal best for {}: {}", renamedBoss, lastPb);

			if (renamedBoss.contains("Theatre of Blood"))
			{
				// TOB team size isn't sent in the kill message, but can be computed from varbits
				int tobTeamSize = tobTeamSize();
				lastTeamSize = tobTeamSize == 1 ? "Solo" : (tobTeamSize + " players");
			}
			else if (renamedBoss.contains("Tombs of Amascut"))
			{
				// TOA team size isn't sent in the kill message, but can be computed from varbits
				int toaTeamSize = toaTeamSize();
				lastTeamSize = toaTeamSize == 1 ? "Solo" : (toaTeamSize + " players");
			}

			final double pb = getPb(renamedBoss);
			// If a raid with a team size, only update the pb if it is lower than the existing pb
			// so that the pb is the overall lowest of any team size
			if (lastTeamSize == null || pb == 0 || lastPb < pb)
			{
				log.debug("Setting overall pb (old: {})", pb);
				setPb(renamedBoss, lastPb);
			}
			if (lastTeamSize != null)
			{
				log.debug("Setting team size pb: {}", lastTeamSize);
				setPb(renamedBoss + " " + lastTeamSize, lastPb);
			}

			lastPb = -1;
			lastTeamSize = null;
		}
		else
		{
			// Remember the boss/tick so an immediately-following pb message can be matched.
			lastBossKill = renamedBoss;
			lastBossTime = client.getTickCount();
		}

		return;
	}

	matcher = DUEL_ARENA_WINS_PATTERN.matcher(message);
	if (matcher.find())
	{
		final int oldWins = getKc("Duel Arena Wins");
		final int wins = matcher.group(2).equals("one") ? 1 :
			Integer.parseInt(matcher.group(2).replace(",", ""));
		final String result = matcher.group(1);
		int winningStreak = getKc("Duel Arena Win Streak");
		int losingStreak = getKc("Duel Arena Lose Streak");
		// A win only counts toward the streak if the total actually increased.
		if (result.equals("won") && wins > oldWins)
		{
			losingStreak = 0;
			winningStreak += 1;
		}
		else if (result.equals("were defeated"))
		{
			losingStreak += 1;
			winningStreak = 0;
		}
		else
		{
			log.warn("unrecognized duel streak chat message: {}", message);
		}
		setKc("Duel Arena Wins", wins);
		setKc("Duel Arena Win Streak", winningStreak);
		setKc("Duel Arena Lose Streak", losingStreak);
	}

	matcher = DUEL_ARENA_LOSSES_PATTERN.matcher(message);
	if (matcher.find())
	{
		int losses = matcher.group(1).equals("one") ? 1 :
			Integer.parseInt(matcher.group(1).replace(",", ""));
		setKc("Duel Arena Losses", losses);
	}

	// Several different message formats can carry a personal best; try each pattern.
	matcher = KILL_DURATION_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}

	matcher = NEW_PB_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}

	matcher = RAIDS_PB_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}

	matcher = RAIDS_DURATION_PATTERN.matcher(message);
	if (matcher.find())
	{
		matchPb(matcher);
	}

	matcher = HS_PB_PATTERN.matcher(message);
	if (matcher.find())
	{
		// Floor time plus optional overall time; a "pb" group takes precedence over
		// the plain time when present.
		int floor = Integer.parseInt(matcher.group("floor"));
		String floortime = matcher.group("floortime");
		String floorpb = matcher.group("floorpb");
		String otime = matcher.group("otime");
		String opb = matcher.group("opb");
		String pb = MoreObjects.firstNonNull(floorpb, floortime);
		setPb("Hallowed Sepulchre Floor " + floor, timeStringToSeconds(pb));
		if (otime != null)
		{
			pb = MoreObjects.firstNonNull(opb, otime);
			setPb("Hallowed Sepulchre", timeStringToSeconds(pb));
		}
	}

	matcher = HS_KC_FLOOR_PATTERN.matcher(message);
	if (matcher.find())
	{
		int floor = Integer.parseInt(matcher.group(1));
		int kc = Integer.parseInt(matcher.group(2).replaceAll(",", ""));
		setKc("Hallowed Sepulchre Floor " + floor, kc);
	}

	matcher = HS_KC_GHC_PATTERN.matcher(message);
	if (matcher.find())
	{
		int kc = Integer.parseInt(matcher.group(1).replaceAll(",", ""));
		setKc("Hallowed Sepulchre", kc);
	}

	matcher = HUNTER_RUMOUR_KC_PATTERN.matcher(message);
	if (matcher.find())
	{
		int kc = Integer.parseInt(matcher.group(1).replaceAll(",", ""));
		setKc("Hunter Rumours", kc);
	}

	// A remembered boss kill only pairs with a pb message on the same tick; otherwise drop it.
	if (lastBossKill != null && lastBossTime != client.getTickCount())
	{
		lastBossKill = null;
		lastBossTime = -1;
	}

	matcher = COLLECTION_LOG_ITEM_PATTERN.matcher(message);
	if (matcher.find())
	{
		String item = matcher.group(1);
		int petId = findPet(item);

		if (petId != -1)
		{
			// Append the pet to the stored list only if it is not already recorded.
			final List<Integer> petList = new ArrayList<>(getPetList());
			if (!petList.contains(petId))
			{
				log.debug("New pet added: {}/{}", item, petId);
				petList.add(petId);
				setPetList(petList);
			}
		}
	}

	matcher = GUARDIANS_OF_THE_RIFT_PATTERN.matcher(message);
	if (matcher.find())
	{
		int kc = Integer.parseInt(matcher.group(1));
		setKc("Guardians of the Rift", kc);
	}
}
@Test
public void testDuelArenaLoss() {
    // A defeat message updates the win total; a loss message updates the loss total.
    chatCommandsPlugin.onChatMessage(
        new ChatMessage(null, TRADE, "", "You were defeated! You have won 1,909 duels.", null, 0));
    chatCommandsPlugin.onChatMessage(
        new ChatMessage(null, TRADE, "", "You have now lost 1999 duels.", null, 0));
    verify(configManager).setRSProfileConfiguration("killcount", "duel arena wins", 1909);
    verify(configManager).setRSProfileConfiguration("killcount", "duel arena losses", 1999);
}
@Udf
public String chr(@UdfParameter(
    description = "Decimal codepoint") final Integer decimalCode) {
  // Null in, null out; invalid codepoints also map to null rather than throwing.
  if (decimalCode == null || !Character.isValidCodePoint(decimalCode)) {
    return null;
  }
  // Supplementary-plane codepoints expand to a surrogate pair (two chars).
  return String.valueOf(Character.toChars(decimalCode));
}
// U+10FFFF (1,114,111) is the maximum Unicode codepoint; it lies outside the BMP and
// is therefore encoded as a surrogate pair of two chars.
@Test
public void shouldReturnTwoCharsForMaxUnicodeDecimal() {
    final String result = udf.chr(1_114_111);
    assertThat(result.codePointAt(0), is(1_114_111));
    assertThat(result.toCharArray().length, is(2));
}
/**
 * Looks up the project ALM binding with the given UUID using a short-lived,
 * non-batch database session.
 *
 * @return the binding, or empty when no row matches
 */
public Optional<ProjectAlmSettingDto> findProjectBindingByUuid(String uuid) {
    try (DbSession session = dbClient.openSession(false)) {
        return dbClient.projectAlmSettingDao().selectByUuid(session, uuid);
    }
}
@Test
void findProjectBindingByUuid_whenResult_returnsIt() {
    // The DAO's result should be passed through untouched.
    final ProjectAlmSettingDto expected = mock(ProjectAlmSettingDto.class);
    when(dbClient.projectAlmSettingDao().selectByUuid(dbSession, UUID)).thenReturn(Optional.of(expected));
    assertThat(underTest.findProjectBindingByUuid(UUID)).contains(expected);
}
/**
 * Writes {@code length} ints from {@code values} (starting at {@code offset}) into
 * {@code bytes} beginning at byte offset {@code index}, in platform byte order.
 *
 * @return the byte index just past the last int written
 */
public static int setInts(byte[] bytes, int index, int[] values, int offset, int length) {
    requireNonNull(bytes, "bytes is null");
    requireNonNull(values, "values is null");
    // Bounds-check the destination byte range and the source int range up front.
    checkValidRange(index, length * ARRAY_INT_INDEX_SCALE, bytes.length);
    checkValidRange(offset, length, values.length);
    for (int i = offset; i < offset + length; i++) {
        // Unsafe write relative to the array's base offset; index advances 4 bytes per int.
        unsafe.putInt(bytes, (long) index + ARRAY_BYTE_BASE_OFFSET, values[i]);
        index += ARRAY_INT_INDEX_SCALE;
    }
    return index;
}
// Writes POSITIONS_PER_PAGE sequential ints and verifies both the returned end index
// and the copied contents (read back via getInt).
@Test
public static void testSetInts() {
    byte[] destination = new byte[POSITIONS_PER_PAGE * ARRAY_INT_INDEX_SCALE];
    int copiedBytes = setInts(destination, 0, IntStream.range(0, POSITIONS_PER_PAGE).toArray(), 0, POSITIONS_PER_PAGE);
    assertEquals(copiedBytes, POSITIONS_PER_PAGE * ARRAY_INT_INDEX_SCALE);
    assertCopied(
        IntStream.range(0, POSITIONS_PER_PAGE).boxed().collect(toImmutableList()),
        destination,
        ARRAY_INT_INDEX_SCALE,
        ByteArrays::getInt);
}
/**
 * Sends the given message to the SeaTunnel master node and returns the decoded reply.
 *
 * @param msg the message to send; must not be null
 */
public String printMessageToMaster(@NonNull String msg) {
    return hazelcastClient.requestOnMasterAndDecodeResponse(
            SeaTunnelPrintMessageCodec.encodeRequest(msg),
            SeaTunnelPrintMessageCodec::decodeResponse);
}
@Test
public void testSayHello() {
    // The master should echo the message back unchanged.
    final String msg = "Hello world";
    final SeaTunnelClient client = createSeaTunnelClient();
    final String echoed = client.printMessageToMaster(msg);
    Assertions.assertEquals(msg, echoed);
}
/**
 * Always returns the single shared context; all lookup parameters are ignored.
 */
@Override
public LoggerContext getContext(String fqcn, ClassLoader loader, Object externalContext, boolean currentContext) {
    return context;
}
// Regardless of the lookup arguments — including the URI/name overload — the factory
// must hand back the same shared context instance.
@Test
public void testGetContextAlwaysReturnsTheSameObject() {
    assertThat(contextFactory.getContext("", ClassLoader.getSystemClassLoader(), null, false))
        .isEqualTo(contextFactory.getContext("someRandomValue", null, null, false))
        .isEqualTo(contextFactory.getContext("someOtherRandomValue",
            ClassLoader.getSystemClassLoader(), null, false, URI.create("foo"), "name"));
}
private Object buildErrorResponse(Throwable ex, Request request) {
    // I/O failures are propagated as-is; everything else becomes a synthetic 500 response.
    if (ex instanceof IOException) {
        return ex;
    }
    return new Builder()
            .code(HttpStatus.SC_INTERNAL_SERVER_ERROR)
            .message(ex.getMessage())
            .protocol(Protocol.HTTP_1_1)
            .request(request)
            .build();
}
// Invokes the private buildErrorResponse(Throwable, Request) via reflection and checks
// that a generic exception is wrapped in a synthetic 500 response.
// NOTE(review): the lookup uses Exception.class while the target parameter type is
// Throwable, and the assertions are skipped entirely when the method is not found —
// the test passes vacuously in that case; confirm ReflectUtils.findMethod semantics.
@Test
public void buildErrorResponseTest() throws IOException {
    Optional<Method> method = ReflectUtils.findMethod(OkHttpClientInterceptor.class,
        "buildErrorResponse", new Class[] {Exception.class, Request.class});
    Request request = createRequest(url);
    Exception ex = new Exception("error");
    if (method.isPresent()) {
        Optional<Object> response = ReflectUtils
            .invokeMethod(interceptor, method.get(), new Object[] {ex, request});
        Assert.assertEquals(HttpStatus.SC_INTERNAL_SERVER_ERROR, ((Response) response.get()).code());
    }
}
@Override
public String toString() {
    // Renders as: contains(<field>, '<value>')
    return new StringBuilder("contains(")
            .append(fieldName)
            .append(", '")
            .append(value)
            .append("')")
            .toString();
}
@Test
void testToString() {
    // Field and value should be rendered into the contains(...) template.
    final var condition = new StringContains("name", "Alice");
    assertThat(condition).hasToString("contains(name, 'Alice')");
}
/**
 * Fallback converter bridging Saxon's {@code NodeInfo}/{@code NodeOverNodeInfo} tree
 * model to Camel's DOM-based converters by delegating to registered Node/NodeList
 * converters. Returns {@code null} when no suitable delegate converter is registered.
 */
@Converter(fallback = true)
public static <T> T convertTo(Class<T> type, Exchange exchange, Object value, TypeConverterRegistry registry) {
    if (NodeInfo.class.isAssignableFrom(value.getClass())) {
        // use a fallback type converter so we can convert the embedded body if the value is NodeInfo
        NodeInfo ni = (NodeInfo) value;
        // first try to find a Converter for Node
        TypeConverter tc = registry.lookup(type, Node.class);
        if (tc != null) {
            Node node = NodeOverNodeInfo.wrap(ni);
            return tc.convertTo(type, exchange, node);
        }
        // if this does not exist we can also try NodeList (there are some type converters for that) as
        // the default Xerces Node implementation also implements NodeList.
        tc = registry.lookup(type, NodeList.class);
        if (tc != null) {
            List<NodeInfo> nil = new LinkedList<>();
            nil.add(ni);
            return tc.convertTo(type, exchange, toDOMNodeList(nil));
        }
    } else if (List.class.isAssignableFrom(value.getClass())) {
        // for a list input, collect only the NodeInfo elements and convert them as one NodeList
        TypeConverter tc = registry.lookup(type, NodeList.class);
        if (tc != null) {
            List<NodeInfo> lion = new LinkedList<>();
            for (Object o : (List<?>) value) {
                if (o instanceof NodeInfo) {
                    lion.add((NodeInfo) o);
                }
            }
            if (!lion.isEmpty()) {
                NodeList nl = toDOMNodeList(lion);
                return tc.convertTo(type, exchange, nl);
            }
        }
    } else if (NodeOverNodeInfo.class.isAssignableFrom(value.getClass())) {
        // NodeOverNode info is a read-only Node implementation from Saxon. In contrast to the JDK
        // com.sun.org.apache.xerces.internal.dom.NodeImpl class it does not implement NodeList, but
        // many Camel type converters are based on that interface. Therefore we convert to NodeList and
        // try type conversion in the fallback type converter.
        TypeConverter tc = registry.lookup(type, NodeList.class);
        if (tc != null) {
            List<Node> domNodeList = new LinkedList<>();
            domNodeList.add((NodeOverNodeInfo) value);
            return tc.convertTo(type, exchange, new DOMNodeList(domNodeList));
        }
    }
    // no applicable delegate converter found
    return null;
}
@Test
public void convertToNodeList() {
    final List<NodeInfo> saxonNodes = new LinkedList<>();
    saxonNodes.add(doc);
    final NodeList nodeList = context.getTypeConverter().convertTo(NodeList.class, exchange, saxonNodes);
    assertNotNull(nodeList);
    assertEquals(1, nodeList.getLength());
    // The converted DOM node list should still serialize to the original content.
    final String serialized = context.getTypeConverter().convertTo(String.class, exchange, nodeList);
    assertEquals(CONTENT, serialized);
}
/**
 * Associates the LwM2M registration with this client and refreshes all state derived
 * from it. The registration field is assigned first because the derived-state helpers
 * below read it (directly or via {@code this}).
 */
public void setRegistration(Registration registration) {
    this.registration = registration;
    // Derive the content formats this client supports from the registration.
    this.clientSupportContentFormats = clientSupportContentFormat(registration);
    this.defaultContentFormat = calculateDefaultContentFormat(registration);
    // Rebuild the supported-object model for the new registration.
    this.setSupportedClientObjects();
}
// A minimal registration (no object links) must not cause the derived-state refresh in
// setRegistration to throw.
@Test
public void setRegistration() {
    LwM2mClient client = new LwM2mClient("nodeId", "testEndpoint");
    Registration registration = new Registration
        .Builder("testId", "testEndpoint", new IpPeer(new InetSocketAddress(1000)),
            EndpointUriUtil.createUri("coap://localhost:5685"))
        .objectLinks(new Link[0])
        .build();
    Assertions.assertDoesNotThrow(() -> client.setRegistration(registration));
}
/** @return the shared singleton comparator instance */
public static ChronoLocalDateTimeComparator getInstance() {
  return INSTANCE;
}
@Test
void should_have_one_instance() {
    // getInstance() must always hand back the same singleton instance.
    assertThat(ChronoLocalDateTimeComparator.getInstance()).isSameAs(comparator);
}
/**
 * Resolves this bean path against the given bean and returns the value at the path;
 * resolution errors are not ignored (third argument {@code false}).
 */
public Object get(final Object bean) {
    return get(this.patternParts, bean, false);
}
@Test
public void beanPathTest3() {
    // Bracketed quoted keys and numeric indices should parse into flat path parts.
    final BeanPath path = new BeanPath("['userInfo']['examInfoDict'][0]['id']");
    assertEquals("userInfo", path.patternParts.get(0));
    assertEquals("examInfoDict", path.patternParts.get(1));
    assertEquals("0", path.patternParts.get(2));
    assertEquals("id", path.patternParts.get(3));
}
/** Delegates to the wrapped principal's name. */
@Override
public String getName() {
  return principal.getName();
}
// Wrapping must not alter the underlying principal's name, for both principal kinds.
@Test
public void shouldReturnName() {
    assertThat(wrappedKsqlPrincipal.getName(), is(KSQL_PRINCIPAL_NAME));
    assertThat(wrappedOtherPrincipal.getName(), is(OTHER_PRINCIPAL_NAME));
}
@Override
public String doSharding(final Collection<String> availableTargetNames, final PreciseShardingValue<Comparable<?>> shardingValue) {
    ShardingSpherePreconditions.checkNotNull(shardingValue.getValue(), NullShardingValueException::new);
    // Map the parsed date to a shard index and resolve the matching target table name.
    final String suffix = String.valueOf(doSharding(parseDate(shardingValue.getValue())));
    return ShardingAutoTableAlgorithmUtils
            .findMatchedTargetName(availableTargetNames, suffix, shardingValue.getDataNodeInfo())
            .orElse(null);
}
@Test
void assertPreciseDoSharding() {
    // A timestamp one second into 2020 should map to the second shard.
    final List<String> targets = Arrays.asList("t_order_0", "t_order_1", "t_order_2", "t_order_3");
    final PreciseShardingValue<Comparable<?>> value =
            new PreciseShardingValue<>("t_order", "create_time", DATA_NODE_INFO, "2020-01-01 00:00:01");
    assertThat(shardingAlgorithm.doSharding(targets, value), is("t_order_1"));
}
/**
 * Creates a service identified by the given configuration prefix; service-local
 * configuration keys are looked up under this prefix.
 */
public BaseService(String prefix) {
  this.prefix = prefix;
}
// Exercises BaseService wiring: prefix, empty dependencies, prefixed config lookup,
// and that init() trims "server.<prefix>." keys into the service-local config view.
@Test
public void baseService() throws Exception {
    BaseService service = new MyService();
    assertNull(service.getInterface());
    assertEquals(service.getPrefix(), "myservice");
    assertEquals(service.getServiceDependencies().length, 0);
    Server server = Mockito.mock(Server.class);
    Configuration conf = new Configuration(false);
    conf.set("server.myservice.foo", "FOO");
    // A key under a different prefix must not leak into this service's config.
    conf.set("server.myservice1.bar", "BAR");
    Mockito.when(server.getConfig()).thenReturn(conf);
    Mockito.when(server.getPrefixedName("myservice.foo")).thenReturn("server.myservice.foo");
    Mockito.when(server.getPrefixedName("myservice.")).thenReturn("server.myservice.");
    service.init(server);
    assertEquals(service.getPrefixedName("foo"), "server.myservice.foo");
    assertEquals(service.getServiceConfig().size(), 1);
    assertEquals(service.getServiceConfig().get("foo"), "FOO");
    assertTrue(MyService.INIT);
}
@Override
public void deleteFile(String path) {
    // Parse the OSS URI to obtain bucket and object key, then delete the object.
    final OSSURI uri = new OSSURI(path);
    client().deleteObject(uri.bucket(), uri.key());
}
// Writes random data to a random OSS location, then verifies deleteFile removes it.
@Test
public void testDeleteFile() throws IOException {
    String location = randomLocation();
    int dataSize = 1024 * 10;
    byte[] data = randomData(dataSize);
    OutputFile out = fileIO().newOutputFile(location);
    writeOSSData(out, data);
    InputFile in = fileIO().newInputFile(location);
    assertThat(in.exists()).as("OSS file should exist").isTrue();
    fileIO().deleteFile(in);
    assertThat(fileIO().newInputFile(location).exists()).as("OSS file should not exist").isFalse();
}
/**
 * Returns the top {@code size} records from {@code recordsMap}, sorted by the
 * configured order-by comparator. Uses a full sort when everything fits in the result,
 * otherwise a bounded heap.
 *
 * @param recordsMap all candidate records keyed by group key
 * @param size maximum number of records to return
 */
@VisibleForTesting
List<Record> getSortedTopRecords(Map<Key, Record> recordsMap, int size) {
    int numRecords = recordsMap.size();
    if (numRecords == 0) {
        return Collections.emptyList();
    }
    if (numRecords <= size) {
        // Use quick sort if all the records are top records
        IntermediateRecord[] intermediateRecords = new IntermediateRecord[numRecords];
        int index = 0;
        for (Map.Entry<Key, Record> entry : recordsMap.entrySet()) {
            intermediateRecords[index++] = getIntermediateRecord(entry.getKey(), entry.getValue());
        }
        Arrays.sort(intermediateRecords, _intermediateRecordComparator);
        Record[] sortedTopRecords = new Record[numRecords];
        for (int i = 0; i < numRecords; i++) {
            sortedTopRecords[i] = intermediateRecords[i]._record;
        }
        return Arrays.asList(sortedTopRecords);
    } else {
        // Use heap sort if only partial records are top records
        Comparator<IntermediateRecord> comparator = _intermediateRecordComparator.reversed();
        IntermediateRecord[] topRecordsHeap = getTopRecordsHeap(recordsMap, size, comparator);
        Record[] sortedTopRecords = new Record[size];
        // Repeatedly extract the heap root (the worst remaining top record under the
        // reversed comparator), fill the output array from the back, and restore the heap.
        while (size-- > 0) {
            sortedTopRecords[size] = topRecordsHeap[0]._record;
            topRecordsHeap[0] = topRecordsHeap[size];
            downHeap(topRecordsHeap, size, 0, comparator);
        }
        return Arrays.asList(sortedTopRecords);
    }
}
// Exercises getSortedTopRecords across ordering variants: single dimension asc, multi-column
// tie-breaking, aggregation columns (SUM/MAX), object-typed intermediates (AVG),
// non-comparable intermediates (DISTINCTCOUNT), and post-aggregation expressions —
// each also checked with trim-to-1 where applicable.
@Test public void testSortTopRecords() { // d1 asc TableResizer tableResizer = new TableResizer(DATA_SCHEMA, QueryContextConverterUtils.getQueryContext(QUERY_PREFIX + "d1")); Map<Key, Record> recordsMap = new HashMap<>(_recordsMap); List<Record> sortedRecords = tableResizer.getSortedTopRecords(recordsMap, TRIM_TO_SIZE); assertEquals(sortedRecords.size(), TRIM_TO_SIZE); assertEquals(sortedRecords.get(0), _records.get(0)); // a, b assertEquals(sortedRecords.get(1), _records.get(1)); // d1 asc - trim to 1 recordsMap = new HashMap<>(_recordsMap); sortedRecords = tableResizer.getSortedTopRecords(recordsMap, 1); assertEquals(sortedRecords.get(0), _records.get(0)); // a // d1 asc, d3 desc (tie breaking with 2nd comparator) tableResizer = new TableResizer(DATA_SCHEMA, QueryContextConverterUtils.getQueryContext(QUERY_PREFIX + "d1, d3 DESC")); recordsMap = new HashMap<>(_recordsMap); sortedRecords = tableResizer.getSortedTopRecords(recordsMap, TRIM_TO_SIZE); assertEquals(sortedRecords.size(), TRIM_TO_SIZE); assertEquals(sortedRecords.get(0), _records.get(0)); // a, b, c (300) assertEquals(sortedRecords.get(1), _records.get(1)); assertEquals(sortedRecords.get(2), _records.get(4)); // d1 asc, d3 desc (tie breaking with 2nd comparator) - trim to 1 recordsMap = new HashMap<>(_recordsMap); sortedRecords = tableResizer.getSortedTopRecords(recordsMap, 1); assertEquals(sortedRecords.size(), 1); assertEquals(sortedRecords.get(0), _records.get(0)); // a // d1 asc, sum(m1) desc, max(m2) desc tableResizer = new TableResizer(DATA_SCHEMA, QueryContextConverterUtils.getQueryContext(QUERY_PREFIX + "d1, SUM(m1) DESC, max(m2) DESC")); recordsMap = new HashMap<>(_recordsMap); sortedRecords = tableResizer.getSortedTopRecords(recordsMap, TRIM_TO_SIZE); assertEquals(sortedRecords.size(), TRIM_TO_SIZE); assertEquals(sortedRecords.get(0), _records.get(0)); // a, b, c (30, 300) assertEquals(sortedRecords.get(1), _records.get(1)); assertEquals(sortedRecords.get(2), _records.get(2)); // d1 asc, 
sum(m1) desc, max(m2) desc - trim to 1 recordsMap = new HashMap<>(_recordsMap); sortedRecords = tableResizer.getSortedTopRecords(recordsMap, 1); assertEquals(sortedRecords.size(), 1); assertEquals(sortedRecords.get(0), _records.get(0)); // a // avg(m4) asc (object type) tableResizer = new TableResizer(DATA_SCHEMA, QueryContextConverterUtils.getQueryContext(QUERY_PREFIX + "AVG(m4)")); recordsMap = new HashMap<>(_recordsMap); sortedRecords = tableResizer.getSortedTopRecords(recordsMap, TRIM_TO_SIZE); assertEquals(sortedRecords.size(), TRIM_TO_SIZE); assertEquals(sortedRecords.get(0), _records.get(4)); // 2, 3, 3.33 assertEquals(sortedRecords.get(1), _records.get(3)); assertEquals(sortedRecords.get(2), _records.get(1)); // distinctcount(m3) desc, d1 asc (non-comparable intermediate result) tableResizer = new TableResizer(DATA_SCHEMA, QueryContextConverterUtils.getQueryContext(QUERY_PREFIX + "DISTINCTCOUNT(m3) DESC, d1")); recordsMap = new HashMap<>(_recordsMap); sortedRecords = tableResizer.getSortedTopRecords(recordsMap, TRIM_TO_SIZE); assertEquals(sortedRecords.size(), TRIM_TO_SIZE); assertEquals(sortedRecords.get(0), _records.get(4)); // 4, 3, 2 (b) assertEquals(sortedRecords.get(1), _records.get(3)); assertEquals(sortedRecords.get(2), _records.get(1)); // d2 / (distinctcount(m3) + 1) asc, d1 desc (post-aggregation) tableResizer = new TableResizer(DATA_SCHEMA, QueryContextConverterUtils.getQueryContext(QUERY_PREFIX + "d2 / (DISTINCTCOUNT(m3) + 1), d1 DESC")); recordsMap = new HashMap<>(_recordsMap); sortedRecords = tableResizer.getSortedTopRecords(recordsMap, TRIM_TO_SIZE); assertEquals(sortedRecords.size(), TRIM_TO_SIZE); assertEquals(sortedRecords.get(0), _records.get(1)); // 3.33, 12.5, 5 assertEquals(sortedRecords.get(1), _records.get(0)); assertEquals(sortedRecords.get(2), _records.get(3)); }
/**
 * Returns a deserializer that parses an ICMPv6 header (type, code, checksum) and delegates
 * the remaining bytes to a type-specific payload deserializer, falling back to a raw
 * {@code Data} payload for unknown ICMPv6 types.
 */
public static Deserializer<ICMP6> deserializer() {
    return (data, offset, length) -> {
        checkInput(data, offset, length, HEADER_LENGTH);

        ICMP6 icmp6 = new ICMP6();
        ByteBuffer bb = ByteBuffer.wrap(data, offset, length);
        icmp6.icmpType = bb.get();
        icmp6.icmpCode = bb.get();
        icmp6.checksum = bb.getShort();

        // Single map lookup instead of containsKey + get (avoids a redundant hash lookup;
        // the map never stores a deserializer for an unknown type, so null means "unknown").
        Deserializer<? extends IPacket> deserializer = TYPE_DESERIALIZER_MAP.get(icmp6.icmpType);
        if (deserializer == null) {
            deserializer = Data.deserializer();
        }
        icmp6.payload = deserializer.deserialize(data, bb.position(), bb.limit() - bb.position());
        icmp6.payload.setParent(icmp6);
        return icmp6;
    };
}
// Shared harness check: the ICMP6 deserializer must reject null/truncated input cleanly.
@Test public void testDeserializeBadInput() throws Exception { PacketTestUtils.testDeserializeBadInput(ICMP6.deserializer()); }
/**
 * Dispatches GAMEMESSAGE/SPAM chat lines to the matching charged-item handler: each branch
 * updates the tracked charge count for one item (dodgy necklace, amulets of chemistry/bounty,
 * binding necklace, ring of forging, chronicle, slayer bracelets, blood essence, bracelet of
 * clay) and fires a notification on break messages. Matchers are evaluated eagerly up front;
 * branches are mutually exclusive via the if/else-if chain.
 *
 * NOTE(review): the binding-necklace "used" branch dereferences the EQUIPMENT container
 * without a null check, unlike the ring-of-forging branch (which returns when equipment is
 * null) — confirm the container is guaranteed non-null when that message fires.
 */
@Subscribe public void onChatMessage(ChatMessage event) { if (event.getType() == ChatMessageType.GAMEMESSAGE || event.getType() == ChatMessageType.SPAM) { String message = Text.removeTags(event.getMessage()); Matcher dodgyCheckMatcher = DODGY_CHECK_PATTERN.matcher(message); Matcher dodgyProtectMatcher = DODGY_PROTECT_PATTERN.matcher(message); Matcher dodgyBreakMatcher = DODGY_BREAK_PATTERN.matcher(message); Matcher bindingNecklaceCheckMatcher = BINDING_CHECK_PATTERN.matcher(message); Matcher bindingNecklaceUsedMatcher = BINDING_USED_PATTERN.matcher(message); Matcher ringOfForgingCheckMatcher = RING_OF_FORGING_CHECK_PATTERN.matcher(message); Matcher amuletOfChemistryCheckMatcher = AMULET_OF_CHEMISTRY_CHECK_PATTERN.matcher(message); Matcher amuletOfChemistryUsedMatcher = AMULET_OF_CHEMISTRY_USED_PATTERN.matcher(message); Matcher amuletOfChemistryBreakMatcher = AMULET_OF_CHEMISTRY_BREAK_PATTERN.matcher(message); Matcher amuletOfBountyCheckMatcher = AMULET_OF_BOUNTY_CHECK_PATTERN.matcher(message); Matcher amuletOfBountyUsedMatcher = AMULET_OF_BOUNTY_USED_PATTERN.matcher(message); Matcher chronicleAddMatcher = CHRONICLE_ADD_PATTERN.matcher(message); Matcher chronicleUseAndCheckMatcher = CHRONICLE_USE_AND_CHECK_PATTERN.matcher(message); Matcher slaughterActivateMatcher = BRACELET_OF_SLAUGHTER_ACTIVATE_PATTERN.matcher(message); Matcher slaughterCheckMatcher = BRACELET_OF_SLAUGHTER_CHECK_PATTERN.matcher(message); Matcher expeditiousActivateMatcher = EXPEDITIOUS_BRACELET_ACTIVATE_PATTERN.matcher(message); Matcher expeditiousCheckMatcher = EXPEDITIOUS_BRACELET_CHECK_PATTERN.matcher(message); Matcher bloodEssenceCheckMatcher = BLOOD_ESSENCE_CHECK_PATTERN.matcher(message); Matcher bloodEssenceExtractMatcher = BLOOD_ESSENCE_EXTRACT_PATTERN.matcher(message); Matcher braceletOfClayCheckMatcher = BRACELET_OF_CLAY_CHECK_PATTERN.matcher(message); if (message.contains(RING_OF_RECOIL_BREAK_MESSAGE)) { notifier.notify(config.recoilNotification(), "Your Ring of Recoil has shattered"); } 
// Dodgy necklace, amulet of chemistry/bounty, and binding necklace handling.
else if (dodgyBreakMatcher.find()) { notifier.notify(config.dodgyNotification(), "Your dodgy necklace has crumbled to dust."); updateDodgyNecklaceCharges(MAX_DODGY_CHARGES); } else if (dodgyCheckMatcher.find()) { updateDodgyNecklaceCharges(Integer.parseInt(dodgyCheckMatcher.group(1))); } else if (dodgyProtectMatcher.find()) { updateDodgyNecklaceCharges(Integer.parseInt(dodgyProtectMatcher.group(1))); } else if (amuletOfChemistryCheckMatcher.find()) { updateAmuletOfChemistryCharges(Integer.parseInt(amuletOfChemistryCheckMatcher.group(1))); } else if (amuletOfChemistryUsedMatcher.find()) { final String match = amuletOfChemistryUsedMatcher.group(1); int charges = 1; if (!match.equals("one")) { charges = Integer.parseInt(match); } updateAmuletOfChemistryCharges(charges); } else if (amuletOfChemistryBreakMatcher.find()) { notifier.notify(config.amuletOfChemistryNotification(), "Your amulet of chemistry has crumbled to dust."); updateAmuletOfChemistryCharges(MAX_AMULET_OF_CHEMISTRY_CHARGES); } else if (amuletOfBountyCheckMatcher.find()) { updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyCheckMatcher.group(1))); } else if (amuletOfBountyUsedMatcher.find()) { updateAmuletOfBountyCharges(Integer.parseInt(amuletOfBountyUsedMatcher.group(1))); } else if (message.equals(AMULET_OF_BOUNTY_BREAK_TEXT)) { updateAmuletOfBountyCharges(MAX_AMULET_OF_BOUNTY_CHARGES); } else if (message.contains(BINDING_BREAK_TEXT)) { notifier.notify(config.bindingNotification(), BINDING_BREAK_TEXT); // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync updateBindingNecklaceCharges(MAX_BINDING_CHARGES + 1); } else if (bindingNecklaceUsedMatcher.find()) { final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT); if (equipment.contains(ItemID.BINDING_NECKLACE)) { updateBindingNecklaceCharges(getItemCharges(ItemChargeConfig.KEY_BINDING_NECKLACE) - 1); } } else if (bindingNecklaceCheckMatcher.find()) { final String match = 
// Ring of forging and chronicle handling ("one" is spelled out in some messages).
bindingNecklaceCheckMatcher.group(1); int charges = 1; if (!match.equals("one")) { charges = Integer.parseInt(match); } updateBindingNecklaceCharges(charges); } else if (ringOfForgingCheckMatcher.find()) { final String match = ringOfForgingCheckMatcher.group(1); int charges = 1; if (!match.equals("one")) { charges = Integer.parseInt(match); } updateRingOfForgingCharges(charges); } else if (message.equals(RING_OF_FORGING_USED_TEXT) || message.equals(RING_OF_FORGING_VARROCK_PLATEBODY)) { final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY); final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT); // Determine if the player smelted with a Ring of Forging equipped. if (equipment == null) { return; } if (equipment.contains(ItemID.RING_OF_FORGING) && (message.equals(RING_OF_FORGING_USED_TEXT) || inventory.count(ItemID.IRON_ORE) > 1)) { int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_RING_OF_FORGING) - 1, 0, MAX_RING_OF_FORGING_CHARGES); updateRingOfForgingCharges(charges); } } else if (message.equals(RING_OF_FORGING_BREAK_TEXT)) { notifier.notify(config.ringOfForgingNotification(), "Your ring of forging has melted."); // This chat message triggers before the used message so add 1 to the max charges to ensure proper sync updateRingOfForgingCharges(MAX_RING_OF_FORGING_CHARGES + 1); } else if (chronicleAddMatcher.find()) { final String match = chronicleAddMatcher.group(1); if (match.equals("one")) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1); } else { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(match)); } } else if (chronicleUseAndCheckMatcher.find()) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, Integer.parseInt(chronicleUseAndCheckMatcher.group(1))); } else if (message.equals(CHRONICLE_ONE_CHARGE_TEXT)) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1); } else if (message.equals(CHRONICLE_EMPTY_TEXT) || message.equals(CHRONICLE_NO_CHARGES_TEXT)) { 
// Slayer bracelets, blood essence, and bracelet of clay handling.
setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 0); } else if (message.equals(CHRONICLE_FULL_TEXT)) { setItemCharges(ItemChargeConfig.KEY_CHRONICLE, 1000); } else if (slaughterActivateMatcher.find()) { final String found = slaughterActivateMatcher.group(1); if (found == null) { updateBraceletOfSlaughterCharges(MAX_SLAYER_BRACELET_CHARGES); notifier.notify(config.slaughterNotification(), BRACELET_OF_SLAUGHTER_BREAK_TEXT); } else { updateBraceletOfSlaughterCharges(Integer.parseInt(found)); } } else if (slaughterCheckMatcher.find()) { updateBraceletOfSlaughterCharges(Integer.parseInt(slaughterCheckMatcher.group(1))); } else if (expeditiousActivateMatcher.find()) { final String found = expeditiousActivateMatcher.group(1); if (found == null) { updateExpeditiousBraceletCharges(MAX_SLAYER_BRACELET_CHARGES); notifier.notify(config.expeditiousNotification(), EXPEDITIOUS_BRACELET_BREAK_TEXT); } else { updateExpeditiousBraceletCharges(Integer.parseInt(found)); } } else if (expeditiousCheckMatcher.find()) { updateExpeditiousBraceletCharges(Integer.parseInt(expeditiousCheckMatcher.group(1))); } else if (bloodEssenceCheckMatcher.find()) { updateBloodEssenceCharges(Integer.parseInt(bloodEssenceCheckMatcher.group(1))); } else if (bloodEssenceExtractMatcher.find()) { updateBloodEssenceCharges(getItemCharges(ItemChargeConfig.KEY_BLOOD_ESSENCE) - Integer.parseInt(bloodEssenceExtractMatcher.group(1))); } else if (message.contains(BLOOD_ESSENCE_ACTIVATE_TEXT)) { updateBloodEssenceCharges(MAX_BLOOD_ESSENCE_CHARGES); } else if (braceletOfClayCheckMatcher.find()) { updateBraceletOfClayCharges(Integer.parseInt(braceletOfClayCheckMatcher.group(1))); } else if (message.equals(BRACELET_OF_CLAY_USE_TEXT) || message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN)) { final ItemContainer equipment = client.getItemContainer(InventoryID.EQUIPMENT); // Determine if the player mined with a Bracelet of Clay equipped. 
if (equipment != null && equipment.contains(ItemID.BRACELET_OF_CLAY)) { final ItemContainer inventory = client.getItemContainer(InventoryID.INVENTORY); // Charge is not used if only 1 inventory slot is available when mining in Prifddinas boolean ignore = inventory != null && inventory.count() == 27 && message.equals(BRACELET_OF_CLAY_USE_TEXT_TRAHAEARN); if (!ignore) { int charges = Ints.constrainToRange(getItemCharges(ItemChargeConfig.KEY_BRACELET_OF_CLAY) - 1, 0, MAX_BRACELET_OF_CLAY_CHARGES); updateBraceletOfClayCharges(charges); } } } else if (message.equals(BRACELET_OF_CLAY_BREAK_TEXT)) { notifier.notify(config.braceletOfClayNotification(), "Your bracelet of clay has crumbled to dust"); updateBraceletOfClayCharges(MAX_BRACELET_OF_CLAY_CHARGES); } } }
// A chemistry-amulet break message with 2 doses remaining must reset the stored charge count to 5.
@Test public void testChemistryBreak2() { ChatMessage chatMessage = new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", BREAK_AMULET_OF_CHEMISTRY_2_DOSES, "", 0); itemChargePlugin.onChatMessage(chatMessage); verify(configManager).setRSProfileConfiguration(ItemChargeConfig.GROUP, ItemChargeConfig.KEY_AMULET_OF_CHEMISTRY, 5); }
/**
 * Returns true only when every condition in the list passes for the given context.
 * A condition that throws is logged (with flow context) and treated as failed;
 * evaluation stops at the first failing condition.
 */
boolean valid(Flow flow, List<Condition> list, ConditionContext conditionContext) {
    for (Condition condition : list) {
        boolean passed;
        try {
            passed = condition.test(conditionContext);
        } catch (Exception e) {
            logException(flow, condition, conditionContext, e);
            passed = false;
        }
        if (!passed) {
            return false;
        }
    }
    return true;
}
// Both a flow-scoped and a namespace-scoped condition match the mocked execution,
// so valid(...) must return true.
@Test void valid() { Flow flow = TestsUtils.mockFlow(); Execution execution = TestsUtils.mockExecution(flow, ImmutableMap.of()); RunContext runContext = runContextFactory.of(flow, execution); ConditionContext conditionContext = conditionService.conditionContext(runContext, flow, execution); List<Condition> conditions = Arrays.asList( ExecutionFlowCondition.builder() .namespace(flow.getNamespace()) .flowId(flow.getId()) .build(), ExecutionNamespaceCondition.builder() .namespace(flow.getNamespace()) .build() ); boolean valid = conditionService.valid(flow, conditions, conditionContext); assertThat(valid, is(true)); }
/**
 * Compresses a URL into the Eddystone-URL byte encoding: one byte for the
 * protocol (+optional "www."), literal hostname/subdomain bytes, a single
 * encoded byte for a recognized TLD (optionally swallowing the following "/"),
 * then any remaining slash and path copied verbatim.
 *
 * @throws MalformedURLException if the input is null or does not match the URL regex
 */
public static byte[] compress(String urlString) throws MalformedURLException { byte[] compressedBytes = null; if (urlString != null) { // Figure the compressed bytes can't be longer than the original string. byte[] byteBuffer = new byte[urlString.length()]; int byteBufferIndex = 0; Arrays.fill(byteBuffer, (byte) 0x00); Pattern urlPattern = Pattern.compile(EDDYSTONE_URL_REGEX); Matcher urlMatcher = urlPattern.matcher(urlString); if (urlMatcher.matches()) { // www. String wwwdot = urlMatcher.group(EDDYSTONE_URL_WWW_GROUP); boolean haswww = (wwwdot != null); // Protocol. String rawProtocol = urlMatcher.group(EDDYSTONE_URL_PROTOCOL_GROUP); String protocol = rawProtocol.toLowerCase(); if (protocol.equalsIgnoreCase(URL_PROTOCOL_HTTP)) { byteBuffer[byteBufferIndex] = (haswww ? EDDYSTONE_URL_PROTOCOL_HTTP_WWW : EDDYSTONE_URL_PROTOCOL_HTTP); } else { byteBuffer[byteBufferIndex] = (haswww ? EDDYSTONE_URL_PROTOCOL_HTTPS_WWW : EDDYSTONE_URL_PROTOCOL_HTTPS); } byteBufferIndex++; // Fully-qualified domain name (FQDN). This includes the hostname and any other components after the dots // but BEFORE the first single slash in the URL. byte[] hostnameBytes = urlMatcher.group(EDDYSTONE_URL_FQDN_GROUP).getBytes(); String rawHostname = new String(hostnameBytes); String hostname = rawHostname.toLowerCase(); String[] domains = hostname.split(Pattern.quote(".")); boolean consumedSlash = false; if (domains != null) { // Write the hostname/subdomains prior to the last one. If there's only one (e. g. http://localhost) // then that's the only thing to write out. byte[] periodBytes = {'.'}; int writableDomainsCount = (domains.length == 1 ? 1 : domains.length - 1); for (int domainIndex = 0; domainIndex < writableDomainsCount; domainIndex++) { // Write out leading period, if necessary. 
if (domainIndex > 0) { System.arraycopy(periodBytes, 0, byteBuffer, byteBufferIndex, periodBytes.length); byteBufferIndex += periodBytes.length; } byte[] domainBytes = domains[domainIndex].getBytes(); int domainLength = domainBytes.length; System.arraycopy(domainBytes, 0, byteBuffer, byteBufferIndex, domainLength); byteBufferIndex += domainLength; } // Is the TLD one that we can encode? if (domains.length > 1) { String tld = "." + domains[domains.length - 1]; String slash = urlMatcher.group(EDDYSTONE_URL_SLASH_GROUP); String encodableTLDCandidate = (slash == null ? tld : tld + slash); byte encodedTLDByte = encodedByteForTopLevelDomain(encodableTLDCandidate); if (encodedTLDByte != TLD_NOT_ENCODABLE) { byteBuffer[byteBufferIndex++] = encodedTLDByte; consumedSlash = (slash != null); } else { byte[] tldBytes = tld.getBytes(); int tldLength = tldBytes.length; System.arraycopy(tldBytes, 0, byteBuffer, byteBufferIndex, tldLength); byteBufferIndex += tldLength; } } } // Optional slash (only if the TLD encoding above did not already consume it). if (! consumedSlash) { String slash = urlMatcher.group(EDDYSTONE_URL_SLASH_GROUP); if (slash != null) { int slashLength = slash.length(); System.arraycopy(slash.getBytes(), 0, byteBuffer, byteBufferIndex, slashLength); byteBufferIndex += slashLength; } } // Path. String path = urlMatcher.group(EDDYSTONE_URL_PATH_GROUP); if (path != null) { int pathLength = path.length(); System.arraycopy(path.getBytes(), 0, byteBuffer, byteBufferIndex, pathLength); byteBufferIndex += pathLength; } // Copy the result (trim the working buffer to the bytes actually written). compressedBytes = new byte[byteBufferIndex]; System.arraycopy(byteBuffer, 0, compressedBytes, 0, compressedBytes.length); } else { throw new MalformedURLException(); } } else { throw new MalformedURLException(); } return compressedBytes; }
// "http://" encodes to 0x02, ".com/" (TLD + consumed slash) to 0x00, path bytes copied literally.
@Test public void testCompressWithTrailingSlash() throws MalformedURLException { String testURL = "http://google.com/123"; byte[] expectedBytes = {0x02, 'g', 'o', 'o', 'g', 'l', 'e', 0x00, '1', '2', '3'}; assertTrue(Arrays.equals(expectedBytes, UrlBeaconUrlCompressor.compress(testURL))); }
/**
 * Returns the imported classes for the given test, serving from the per-test cache when
 * possible. On a miss, classes are imported either through the location-keyed FOREVER cache
 * or freshly (lazy import), then memoized per test class.
 */
JavaClasses getClassesToAnalyzeFor(Class<?> testClass, ClassAnalysisRequest classAnalysisRequest) {
    checkNotNull(testClass);
    checkNotNull(classAnalysisRequest);

    // Single lookup instead of containsKey + get: avoids a redundant lookup and a
    // check-then-act gap between the two calls (cached values are never null).
    JavaClasses cached = cachedByTest.get(testClass);
    if (cached != null) {
        return cached;
    }

    LocationsKey locations = RequestedLocations.by(classAnalysisRequest, testClass).asKey();
    JavaClasses classes = classAnalysisRequest.getCacheMode() == FOREVER
            ? cachedByLocations.getUnchecked(locations).get()
            : new LazyJavaClasses(locations.locations, locations.importOptionTypes).get();
    cachedByTest.put(testClass, classes);
    return classes;
}
// Location providers declared on the test class must contribute their classes to the import;
// a provider derived from the annotated test class must scope the import to that class only.
@Test public void get_all_classes_by_LocationProvider() { JavaClasses classes = cache.getClassesToAnalyzeFor(TestClass.class, new TestAnalysisRequest() .withPackagesRoots(ClassCacheTest.class) .withLocationProviders(TestLocationProviderOfClass_String.class, TestLocationProviderOfClass_Rule.class)); assertThatTypes(classes).contain(String.class, Rule.class, getClass()); classes = cache.getClassesToAnalyzeFor(TestClassWithLocationProviderUsingTestClass.class, analyzeLocation(LocationOfClass.Provider.class)); assertThatTypes(classes).contain(String.class); assertThatTypes(classes).doNotContain(getClass()); }
/**
 * Looks up the service-to-application mapping for the URL's service interface and registers
 * the listener for future mapping changes. Returns an empty set when no metadata report is
 * configured for the resolved registry cluster.
 */
@Override public Set<String> getAndListen(URL url, MappingListener mappingListener) { String serviceInterface = url.getServiceInterface(); // randomly pick one metadata report is ok for it's guaranteed all metadata report will have the same mapping // data. String registryCluster = getRegistryCluster(url); MetadataReport metadataReport = metadataReportInstance.getMetadataReport(registryCluster); if (metadataReport == null) { return Collections.emptySet(); } return metadataReport.getServiceAppMapping(serviceInterface, mappingListener, url); }
// Placeholder: getAndListen() has no coverage yet — needs a mocked MetadataReport returning
// a mapping set plus the empty-set path when no report is configured.
@Test void testGetAndListen() { // TODO }
// Test-only accessor for the failed "retrieve multiple apps" counter metric.
@VisibleForTesting public int getMultipleAppsFailedRetrieved() { return numMultipleAppsFailedRetrieved.value(); }
// A failing getApplicationsReport() call must bump the multiple-apps-failed counter by exactly 1.
// NOTE(review): method name has a typo ("Muliple") — renaming would be cosmetic; confirm before changing.
@Test public void testMulipleAppsReportFailed() { long totalBadbefore = metrics.getMultipleAppsFailedRetrieved(); badSubCluster.getApplicationsReport(); Assert.assertEquals(totalBadbefore + 1, metrics.getMultipleAppsFailedRetrieved()); }
// URI scheme handled by this filesystem: Google Cloud Storage ("gs").
@Override public String getScheme() { return "gs"; }
// The scheme must be "gs" regardless of whether the path has a trailing slash.
@Test public void testGetScheme() { // Tests gcs paths. assertEquals("gs", toResourceIdentifier("gs://my_bucket/tmp dir/").getScheme()); // Tests bucket with no ending '/'. assertEquals("gs", toResourceIdentifier("gs://my_bucket").getScheme()); }
/**
 * Reconstructs a SupportedVersionRange from its serialized map form; throws (via
 * {@code valueOrThrow}) when either the min or max key is missing.
 */
public static SupportedVersionRange fromMap(Map<String, Short> versionRangeMap) {
    final short minVersion = BaseVersionRange.valueOrThrow(MIN_VERSION_KEY_LABEL, versionRangeMap);
    final short maxVersion = BaseVersionRange.valueOrThrow(MAX_VERSION_KEY_LABEL, versionRangeMap);
    return new SupportedVersionRange(minVersion, maxVersion);
}
// Round-trip: toMap() must emit min_version/max_version entries and fromMap() must
// reconstruct an equal range.
@Test public void testFromToMap() { SupportedVersionRange versionRange = new SupportedVersionRange((short) 1, (short) 2); assertEquals(1, versionRange.min()); assertEquals(2, versionRange.max()); Map<String, Short> versionRangeMap = versionRange.toMap(); assertEquals( mkMap(mkEntry("min_version", versionRange.min()), mkEntry("max_version", versionRange.max())), versionRangeMap); SupportedVersionRange newVersionRange = SupportedVersionRange.fromMap(versionRangeMap); assertEquals(1, newVersionRange.min()); assertEquals(2, newVersionRange.max()); assertEquals(versionRange, newVersionRange); }
/**
 * Issues a fresh token pair from a refresh token: validates the refresh token, extracts the
 * user id claim, loads the user (404 if absent), checks the user's status, and generates new
 * tokens from the user's claims keyed to the same refresh token.
 */
@Override public Token refreshToken(TokenRefreshRequest tokenRefreshRequest) { tokenService.verifyAndValidate(tokenRefreshRequest.getRefreshToken()); final String adminId = tokenService .getPayload(tokenRefreshRequest.getRefreshToken()) .get(TokenClaims.USER_ID.getValue()) .toString(); final UserEntity userEntityFromDB = userRepository .findById(adminId) .orElseThrow(UserNotFoundException::new); this.validateUserStatus(userEntityFromDB); return tokenService.generateToken( userEntityFromDB.getClaims(), tokenRefreshRequest.getRefreshToken() ); }
// An invalid refresh token must fail during verifyAndValidate and never reach the repository.
@Test void refreshToken_InvalidRefreshToken_ThrowsException() { // Given String refreshTokenString = "invalidRefreshToken"; TokenRefreshRequest tokenRefreshRequest = TokenRefreshRequest.builder() .refreshToken(refreshTokenString) .build(); // Mock the behavior of verifyAndValidate to throw an exception doThrow(RuntimeException.class).when(tokenService).verifyAndValidate(refreshTokenString); // When, Then & Verify assertThrows(RuntimeException.class, () -> userRefreshTokenService.refreshToken(tokenRefreshRequest)); // Verify that verifyAndValidate method was called with the expected argument verify(tokenService).verifyAndValidate(refreshTokenString); // Ensure no other interactions occurred verifyNoInteractions(userRepository); }
// Factory for the COUNTIF combiner (counts true Boolean inputs into a Long).
public static Combine.CombineFn<Boolean, ?, Long> combineFn() { return new CountIfFn(); }
// Merging two accumulators holding count 2 each must produce a single accumulator of 4.
@Test public void testMergeAccumulators() { Combine.CombineFn countIfFn = CountIf.combineFn(); List<long[]> accums = Arrays.asList(new long[] {2}, new long[] {2}); long[] accumulator = (long[]) countIfFn.mergeAccumulators(accums); assertEquals(4L, accumulator[0]); }
/**
 * Serializes the object to the stream, preferring a class-specific ObjectWriter when one is
 * registered and falling back to the default mapper otherwise.
 *
 * @throws IOException on serialization or stream failure
 */
public <T> void writeTo(T object, OutputStream entityStream) throws IOException {
    final ObjectWriter classSpecificWriter = objectWriterByClass.get(object.getClass());
    if (classSpecificWriter != null) {
        classSpecificWriter.writeValue(entityStream, object);
    } else {
        mapper.writeValue(entityStream, object);
    }
}
// Cross-codec compatibility: bytes produced by the Jackson codec must decode to an
// equivalent Application via the generic EntityBodyConverter.
@Test public void testApplicationJacksonEncodeXStreamDecode() throws Exception { // Encode ByteArrayOutputStream captureStream = new ByteArrayOutputStream(); codec.writeTo(APPLICATION_1, captureStream); byte[] encoded = captureStream.toByteArray(); // Decode InputStream source = new ByteArrayInputStream(encoded); Application decoded = (Application) new EntityBodyConverter().read(source, Application.class, MediaType.APPLICATION_JSON_TYPE); assertTrue(EurekaEntityComparators.equal(decoded, APPLICATION_1)); }
/**
 * Deletes the ZooKeeper node at {@code key} (guaranteed, recursively including children),
 * wrapping any Curator failure in a ShenyuException.
 */
public void delete(final String key) { try { client.delete().guaranteed().deletingChildrenIfNeeded().forPath(key); } catch (Exception e) { throw new ShenyuException(e); } }
// Without a stubbed delete chain the client must wrap the failure in ShenyuException;
// with the full Curator builder chain mocked, delete must complete without throwing.
@Test void delete() throws Exception { assertThrows(ShenyuException.class, () -> client.delete("/test")); DeleteBuilder deleteBuilder = mock(DeleteBuilder.class); when(curatorFramework.delete()).thenReturn(deleteBuilder); ChildrenDeletable childrenDeletable = mock(ChildrenDeletable.class); when(deleteBuilder.guaranteed()).thenReturn(childrenDeletable); BackgroundVersionable backgroundVersionable = mock(BackgroundVersionable.class); when(childrenDeletable.deletingChildrenIfNeeded()).thenReturn(backgroundVersionable); doNothing().when(backgroundVersionable).forPath(anyString()); assertDoesNotThrow(() -> client.delete("/test")); }
/**
 * Builds a reproducible layer tar as a Blob: converts each file entry to a tar entry with
 * normalized permissions/ownership/timestamps, de-duplicates entries and synthesizes parent
 * directories, sorts by extraction path, and streams the result.
 *
 * @throws IOException if reading a source file fails
 */
public Blob build() throws IOException { UniqueTarArchiveEntries uniqueTarArchiveEntries = new UniqueTarArchiveEntries(); // Adds all the layer entries as tar entries. for (FileEntry layerEntry : layerEntries) { // Adds the entries to uniqueTarArchiveEntries, which makes sure all entries are unique and // adds parent directories for each extraction path. TarArchiveEntry entry = new TarArchiveEntry( layerEntry.getSourceFile(), layerEntry.getExtractionPath().toString()); // Sets the entry's permissions by masking out the permission bits from the entry's mode (the // lowest 9 bits) then using a bitwise OR to set them to the layerEntry's permissions. entry.setMode((entry.getMode() & ~0777) | layerEntry.getPermissions().getPermissionBits()); setUserAndGroup(entry, layerEntry); clearTimeHeaders(entry, layerEntry.getModificationTime()); uniqueTarArchiveEntries.add(entry); } // Gets the entries sorted by extraction path. List<TarArchiveEntry> sortedFilesystemEntries = uniqueTarArchiveEntries.getSortedEntries(); Set<String> names = new HashSet<>(); // Adds all the files to a tar stream; the Verify guards the uniqueness invariant. TarStreamBuilder tarStreamBuilder = new TarStreamBuilder(); for (TarArchiveEntry entry : sortedFilesystemEntries) { Verify.verify(!names.contains(entry.getName())); names.add(entry.getName()); tarStreamBuilder.addTarArchiveEntry(entry); } return Blobs.from(tarStreamBuilder::writeAsTarArchiveTo, false); }
// Builds a layer from a directory plus two copies of blobA, writes the tar to disk, and
// verifies the exact entry order: synthesized parent directories first, then files sorted
// by extraction path, with duplicate source content allowed at distinct paths.
@Test public void testBuild() throws URISyntaxException, IOException { Path layerDirectory = Paths.get(Resources.getResource("core/layer").toURI()); Path blobA = Paths.get(Resources.getResource("core/blobA").toURI()); ReproducibleLayerBuilder layerBuilder = new ReproducibleLayerBuilder( ImmutableList.copyOf( FileEntriesLayer.builder() .addEntryRecursive( layerDirectory, AbsoluteUnixPath.get("/extract/here/apple/layer")) .addEntry(blobA, AbsoluteUnixPath.get("/extract/here/apple/blobA")) .addEntry(blobA, AbsoluteUnixPath.get("/extract/here/banana/blobA")) .build() .getEntries())); // Writes the layer tar to a temporary file. Blob unwrittenBlob = layerBuilder.build(); Path temporaryFile = temporaryFolder.newFile().toPath(); try (OutputStream temporaryFileOutputStream = new BufferedOutputStream(Files.newOutputStream(temporaryFile))) { unwrittenBlob.writeTo(temporaryFileOutputStream); } // Reads the file back. try (TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(Files.newInputStream(temporaryFile))) { verifyNextTarArchiveEntryIsDirectory(tarArchiveInputStream, "extract/"); verifyNextTarArchiveEntryIsDirectory(tarArchiveInputStream, "extract/here/"); verifyNextTarArchiveEntryIsDirectory(tarArchiveInputStream, "extract/here/apple/"); verifyNextTarArchiveEntry(tarArchiveInputStream, "extract/here/apple/blobA", blobA); verifyNextTarArchiveEntryIsDirectory(tarArchiveInputStream, "extract/here/apple/layer/"); verifyNextTarArchiveEntryIsDirectory(tarArchiveInputStream, "extract/here/apple/layer/a/"); verifyNextTarArchiveEntryIsDirectory(tarArchiveInputStream, "extract/here/apple/layer/a/b/"); verifyNextTarArchiveEntry( tarArchiveInputStream, "extract/here/apple/layer/a/b/bar", Paths.get(Resources.getResource("core/layer/a/b/bar").toURI())); verifyNextTarArchiveEntryIsDirectory(tarArchiveInputStream, "extract/here/apple/layer/c/"); verifyNextTarArchiveEntry( tarArchiveInputStream, "extract/here/apple/layer/c/cat", 
Paths.get(Resources.getResource("core/layer/c/cat").toURI())); verifyNextTarArchiveEntry( tarArchiveInputStream, "extract/here/apple/layer/foo", Paths.get(Resources.getResource("core/layer/foo").toURI())); verifyNextTarArchiveEntryIsDirectory(tarArchiveInputStream, "extract/here/banana/"); verifyNextTarArchiveEntry(tarArchiveInputStream, "extract/here/banana/blobA", blobA); } }
/**
 * Lexicographically compares two lists, treating the shorter list as if it were padded with
 * {@code fillValue} up to the longer list's length. Returns the first non-zero element
 * comparison, or 0 when every (possibly padded) pair is equal.
 */
static <T extends Comparable<? super T>> int compareListWithFillValue(
    List<T> left, List<T> right, T fillValue) {
  final int paddedLength = Math.max(left.size(), right.size());
  for (int index = 0; index < paddedLength; index++) {
    // Out-of-range positions read as the fill value.
    T leftValue = (index < left.size()) ? left.get(index) : fillValue;
    T rightValue = (index < right.size()) ? right.get(index) : fillValue;
    int comparison = leftValue.compareTo(rightValue);
    if (comparison != 0) {
      return comparison;
    }
  }
  return 0;
}
// Two empty lists compare equal: only fill values would be compared, and they are identical.
@Test public void compareWithFillValue_bothEmptyListWithFillValueEqualToZero_returnsZero() { assertThat( ComparisonUtility.compareListWithFillValue( Lists.newArrayList(), Lists.newArrayList(), 0)) .isEqualTo(0); }
@Udf(description = "Returns the hyperbolic cosine of an INT value")
public Double cosh(
    @UdfParameter(
        value = "value",
        description = "The value in radians to get the hyperbolic cosine of."
    ) final Integer value
) {
  // Widen to Double (preserving null) and delegate to the Double overload.
  final Double widened = (value == null) ? null : value.doubleValue();
  return cosh(widened);
}
// Every numeric overload must propagate a null input as a null result.
@Test public void shouldHandleNull() { assertThat(udf.cosh((Integer) null), is(nullValue())); assertThat(udf.cosh((Long) null), is(nullValue())); assertThat(udf.cosh((Double) null), is(nullValue())); }
// Returns a new resource of the same concrete type whose value is scaled by the multiplier
// (this object is immutable; create(...) builds the result).
public T multiply(BigDecimal multiplier) { return create(value.multiply(multiplier)); }
// Multiplying a resource by a negative integer must be rejected with IllegalArgumentException.
@Test void testMutiplyNegativeInteger() { final Resource resource = new TestResource(0.3); final int by = -2; assertThatThrownBy(() -> resource.multiply(by)) .isInstanceOf(IllegalArgumentException.class); }
public static int lowerHashCode(String text) { if (text == null) { return 0; } // return text.toLowerCase().hashCode(); int h = 0; for (int i = 0; i < text.length(); ++i) { char ch = text.charAt(i); if (ch >= 'A' && ch <= 'Z') { ch = (char) (ch + 32); } h = 31 * h + ch; } return h; }
// null and "" both hash to 0; "Bar" must hash identically to "bar" (97299).
@Test public void testLowerHashCode() { Assert.assertEquals(0, StringUtils.lowerHashCode("")); Assert.assertEquals(0, StringUtils.lowerHashCode(null)); Assert.assertEquals(97299, StringUtils.lowerHashCode("Bar")); }
/**
 * FEEL "met by" range predicate: true when range1's closed low endpoint equals range2's
 * closed high endpoint. Null ranges and endpoints that are not mutually comparable
 * (compareTo throws) produce an InvalidParametersEvent error result instead of throwing.
 */
public FEELFnResult<Boolean> invoke(@ParameterName( "range1" ) Range range1, @ParameterName( "range2" ) Range range2) { if ( range1 == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range1", "cannot be null")); } if ( range2 == null ) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range2", "cannot be null")); } try { boolean result = range1.getLowBoundary() == Range.RangeBoundary.CLOSED && range2.getHighBoundary() == Range.RangeBoundary.CLOSED && range1.getLowEndPoint().compareTo(range2.getHighEndPoint()) == 0; return FEELFnResult.ofResult( result ); } catch( Exception e ) { // points are not comparable return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range1", "cannot be compared to range2")); } }
// A String-endpoint range compared against an Integer-endpoint range is not comparable:
// the function must return an InvalidParametersEvent error rather than throw.
@Test void invokeParamsCantBeCompared() { FunctionTestUtil.assertResultError( metByFunction.invoke( new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED ), new RangeImpl( Range.RangeBoundary.CLOSED, 1, 2, Range.RangeBoundary.CLOSED ) ), InvalidParametersEvent.class ); }
/**
 * Loads (and caches) a singleton HoodieRecordMerger instance for the given class name via
 * reflection, wrapping instantiation failures in a HoodieException.
 *
 * NOTE(review): this is double-checked locking over INSTANCE_CACHE; its safety depends on
 * how INSTANCE_CACHE is declared (e.g. a concurrent map) — declaration is outside this
 * block, confirm before assuming thread-safety of the unsynchronized first read.
 */
public static HoodieRecordMerger loadRecordMerger(String mergerClass) { try { HoodieRecordMerger recordMerger = (HoodieRecordMerger) INSTANCE_CACHE.get(mergerClass); if (null == recordMerger) { synchronized (HoodieRecordMerger.class) { recordMerger = (HoodieRecordMerger) INSTANCE_CACHE.get(mergerClass); if (null == recordMerger) { recordMerger = (HoodieRecordMerger) ReflectionUtils.loadClass(mergerClass, new Object[] {}); INSTANCE_CACHE.put(mergerClass, recordMerger); } } } return recordMerger; } catch (HoodieException e) { throw new HoodieException("Unable to instantiate hoodie merge class ", e); } }
/** Loading the same merger class twice must return the identical cached instance. */
@Test
void loadHoodieMerge() {
    final String mergerClassName = HoodieAvroRecordMerger.class.getName();
    final HoodieRecordMerger first = HoodieRecordUtils.loadRecordMerger(mergerClassName);
    final HoodieRecordMerger second = HoodieRecordUtils.loadRecordMerger(mergerClassName);
    assertEquals(first.getClass().getName(), mergerClassName);
    assertEquals(first, second);
}
/**
 * Stores {@code value} for {@code key} unless an existing entry came from a
 * strictly higher-priority source. Equal or higher priority overwrites, and the
 * configuration hash is invalidated on every accepted change.
 *
 * @param key the property key
 * @param value the value to store; may be null
 * @param source where this value originates from
 */
public void put(PropertyKey key, Object value, Source source) {
    boolean keyAbsent = !mUserProps.containsKey(key);
    if (keyAbsent || source.compareTo(getSource(key)) >= 0) {
        mUserProps.put(key, Optional.ofNullable(value));
        mSources.put(key, source);
        mHash.markOutdated();
    }
}
/** Verifies source-priority semantics of put: lower loses, equal and higher win. */
@Test
public void put() {
    // Fresh keys accept any source.
    mProperties.put(mKeyWithValue, "value1", Source.SYSTEM_PROPERTY);
    mProperties.put(mKeyWithoutValue, "value2", Source.SYSTEM_PROPERTY);
    assertEquals("value1", mProperties.get(mKeyWithValue));
    assertEquals("value2", mProperties.get(mKeyWithoutValue));
    // A lower-priority source must not overwrite an existing value.
    mProperties.put(mKeyWithValue, "valueLowerPriority", Source.siteProperty(""));
    assertEquals("value1", mProperties.get(mKeyWithValue));
    // An equal-priority source overwrites.
    mProperties.put(mKeyWithValue, "valueSamePriority", Source.SYSTEM_PROPERTY);
    assertEquals("valueSamePriority", mProperties.get(mKeyWithValue));
    // A higher-priority source overwrites.
    mProperties.put(mKeyWithValue, "valueHigherPriority", Source.RUNTIME);
    assertEquals("valueHigherPriority", mProperties.get(mKeyWithValue));
}
/**
 * Persists a new trade history record built from the request payload.
 *
 * @param request trade details (buyer, seller, product, pricing, coupon ids)
 * @return the generated id of the stored trade history row
 */
@Transactional
public Long saveTradeHistory(final TradeHistoryCreateRequest request) {
    final TradeHistory tradeHistory = new TradeHistory(
            request.buyerId(),
            request.sellerId(),
            request.productId(),
            request.productOriginPrice(),
            request.productDiscountPrice(),
            request.usingCouponIds());
    return tradeHistoryRepository.save(tradeHistory).getId();
}
// Saves a trade history and checks the generated id.
// (Method name is Korean for "saves a trade history".)
@Test
void 거래_내역을_저장한다() {
    // given
    TradeHistoryCreateRequest request = new TradeHistoryCreateRequest(
            1L, 2L, 1L, 10000, 10, List.of()
    );

    // when
    Long result = memberService.saveTradeHistory(request);

    // then: first row in a fresh store gets id 1
    assertThat(result).isEqualTo(1L);
}
public static URI createURIWithQuery(URI uri, String query) throws URISyntaxException { ObjectHelper.notNull(uri, "uri"); // assemble string as new uri and replace parameters with the query // instead String s = uri.toString(); String before = StringHelper.before(s, "?"); if (before == null) { before = StringHelper.before(s, "#"); } if (before != null) { s = before; } if (query != null) { s = s + "?" + query; } if (!s.contains("#") && uri.getFragment() != null) { s = s + "#" + uri.getFragment(); } return new URI(s); }
/** A null query must leave a fragment-only URI unchanged. */
@Test
public void testCreateURIWithQueryHasOneFragment() throws Exception {
    final URI input = new URI("smtp://localhost#fragmentOne");
    final URI result = URISupport.createURIWithQuery(input, null);
    assertNotNull(result);
    assertEquals("smtp://localhost#fragmentOne", result.toString());
}
/**
 * REST endpoint: returns a single suspended job by id as a {@link JobResponse}.
 * getSuspendedJobById is expected to surface a not-found error (404) when the
 * job does not exist — TODO confirm against its implementation.
 */
@ApiOperation(value = "Get a single suspended job", tags = { "Jobs" })
@ApiResponses(value = {
        @ApiResponse(code = 200, message = "Indicates the suspended job exists and is returned."),
        @ApiResponse(code = 404, message = "Indicates the requested job does not exist.")
})
@GetMapping(value = "/management/suspended-jobs/{jobId}", produces = "application/json")
public JobResponse getSuspendedJob(@ApiParam(name = "jobId") @PathVariable String jobId) {
    Job job = getSuspendedJobById(jobId);
    return restResponseFactory.createSuspendedJobResponse(job);
}
/**
 * End-to-end check of the suspended-job REST resource: suspend a process
 * instance so its timer job becomes a suspended job, then verify the JSON
 * representation, the url field, and tenant propagation.
 */
@Test
@Deployment(resources = { "org/flowable/rest/service/api/management/JobResourceTest.testTimerProcess.bpmn20.xml" })
public void testGetSuspendedJob() throws Exception {
    // Initially the timer job exists and nothing is suspended.
    ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("timerProcess");
    Job timerJob = managementService.createTimerJobQuery().processInstanceId(processInstance.getId()).singleResult();
    assertThat(timerJob).isNotNull();
    Job suspendedJob = managementService.createSuspendedJobQuery().processInstanceId(processInstance.getId()).singleResult();
    assertThat(suspendedJob).isNull();
    // Suspending the process instance moves the timer job to the suspended-job table.
    runtimeService.suspendProcessInstanceById(processInstance.getId());
    timerJob = managementService.createTimerJobQuery().processInstanceId(processInstance.getId()).singleResult();
    assertThat(timerJob).isNull();
    suspendedJob = managementService.createSuspendedJobQuery().processInstanceId(processInstance.getId()).singleResult();
    assertThat(suspendedJob).isNotNull();
    // GET the suspended job over REST and verify the serialized fields.
    CloseableHttpResponse response = executeRequest(
            new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_SUSPENDED_JOB, suspendedJob.getId())),
            HttpStatus.SC_OK);
    JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
    closeResponse(response);
    assertThat(responseNode).isNotNull();
    assertThatJson(responseNode)
            .when(Option.IGNORING_EXTRA_FIELDS)
            .isEqualTo("{"
                    + "id: '" + suspendedJob.getId() + "',"
                    + "correlationId: '" + suspendedJob.getCorrelationId() + "',"
                    + "exceptionMessage: " + suspendedJob.getExceptionMessage() + ","
                    + "executionId: '" + suspendedJob.getExecutionId() + "',"
                    + "processDefinitionId: '" + suspendedJob.getProcessDefinitionId() + "',"
                    + "processInstanceId: '" + suspendedJob.getProcessInstanceId() + "',"
                    + "elementId: 'escalationTimer',"
                    + "elementName: 'Escalation',"
                    + "handlerType: 'trigger-timer',"
                    + "retries: " + suspendedJob.getRetries() + ","
                    + "dueDate: " + new TextNode(getISODateStringWithTZ(suspendedJob.getDuedate())) + ","
                    + "tenantId: ''"
                    + "}");
    assertThat(responseNode.path("url").asText(null))
            .endsWith(RestUrls.createRelativeResourceUrl(RestUrls.URL_SUSPENDED_JOB, suspendedJob.getId()));
    // Set tenant on deployment
    managementService.executeCommand(new ChangeDeploymentTenantIdCmd(deploymentId, "myTenant"));
    // The REST representation must now carry the new tenant id.
    response = executeRequest(new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_SUSPENDED_JOB, suspendedJob.getId())),
            HttpStatus.SC_OK);
    responseNode = objectMapper.readTree(response.getEntity().getContent());
    closeResponse(response);
    assertThat(responseNode).isNotNull();
    assertThatJson(responseNode)
            .when(Option.IGNORING_EXTRA_FIELDS)
            .isEqualTo("{"
                    + "tenantId: 'myTenant'"
                    + "}");
}
/**
 * REST endpoint listing timeline domains for an owner. When no owner query
 * parameter is given, falls back to the caller's short user name; an anonymous
 * request without an owner is rejected with 400 (BadRequestException).
 */
@GET
@Path("/domain")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8 /* , MediaType.APPLICATION_XML */})
public TimelineDomains getDomains(
    @Context HttpServletRequest req,
    @Context HttpServletResponse res,
    @QueryParam("owner") String owner) {
  init(res);
  owner = parseStr(owner);
  UserGroupInformation callerUGI = getUser(req);
  if (owner == null || owner.length() == 0) {
    if (callerUGI == null) {
      throw new BadRequestException("Domain owner is not specified.");
    } else {
      // By default it's going to list the caller's domains
      owner = callerUGI.getShortUserName();
    }
  }
  try {
    return timelineDataManager.getDomains(owner, callerUGI);
  } catch (Exception e) {
    // Any retrieval failure is logged and surfaced as a 500.
    LOG.error("Error getting domains", e);
    throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
  }
}
/** Lists the domains of owner_1 and verifies content type, count, and ordering. */
@Test
void testGetDomains() throws Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("timeline").path("domain")
            .queryParam("owner", "owner_1")
            .accept(MediaType.APPLICATION_JSON)
            .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
            response.getType().toString());
    TimelineDomains domains = response.getEntity(TimelineDomains.class);
    assertEquals(2, domains.getDomains().size());
    // Expected order: domain_id_4 first, then domain_id_1.
    String[] expectedIds = { "domain_id_4", "domain_id_1" };
    for (int i = 0; i < domains.getDomains().size(); ++i) {
        verifyDomain(domains.getDomains().get(i), expectedIds[i]);
    }
}
@Override public OptionalLong apply(OptionalLong previousSendTimeNs) { long delayNs; if (previousGlobalFailures > 0) { // If there were global failures (like a response timeout), we want to wait for the // full backoff period. delayNs = backoff.backoff(previousGlobalFailures); } else if ((numReadyRequests > MAX_ASSIGNMENTS_PER_REQUEST) && !hasInflightRequests) { // If there were no previous failures, and we have lots of requests, send it as soon // as possible. delayNs = 0; } else { // Otherwise, use the standard delay period. This helps to promote batching, which // reduces load on the controller. delayNs = backoff.initialInterval(); } long newSendTimeNs = nowNs + delayNs; if (previousSendTimeNs.isPresent() && previousSendTimeNs.getAsLong() < newSendTimeNs) { // If the previous send time was before the new one we calculated, go with the // previous one. return previousSendTimeNs; } // Otherwise, return our new send time. return OptionalLong.of(newSendTimeNs); }
/** Overloaded (ready > max per request) and idle => deadline is immediately (0). */
@Test
public void scheduleImmediatelyWhenOverloaded() {
    AssignmentsManagerDeadlineFunction function = new AssignmentsManagerDeadlineFunction(
            BACKOFF, 0, 0, false, MAX_ASSIGNMENTS_PER_REQUEST + 1);
    OptionalLong previousSendTime = OptionalLong.of(BACKOFF.initialInterval() / 2);
    assertEquals(OptionalLong.of(0), function.apply(previousSendTime));
}
/**
 * Publishes the incoming rule-engine message to Kafka. The message is first
 * acknowledged when the node is configured to do so; if producer initialization
 * failed earlier, the message is routed straight to the failure chain.
 */
@Override
public void onMsg(TbContext ctx, TbMsg msg) {
    // Resolve topic (and later the key) from message-based patterns.
    String topic = TbNodeUtils.processPattern(config.getTopicPattern(), msg);
    String keyPattern = config.getKeyPattern();
    var tbMsg = ackIfNeeded(ctx, msg);
    try {
        if (initError != null) {
            // Producer never came up: fail fast with the stored init error.
            ctx.tellFailure(tbMsg, new RuntimeException("Failed to initialize Kafka rule node producer: " + initError.getMessage()));
        } else {
            // Publish on the external-call executor so the calling thread stays free.
            ctx.getExternalCallExecutor().executeAsync(() -> {
                publish(
                        ctx,
                        tbMsg,
                        topic,
                        // Empty/absent key pattern => publish with a null key.
                        keyPattern == null || keyPattern.isEmpty() ? null : TbNodeUtils.processPattern(config.getKeyPattern(), tbMsg)
                );
                return null;
            });
        }
    } catch (Exception e) {
        ctx.tellFailure(tbMsg, e);
    }
}
/** With a stored init error, onMsg must route the message to the failure chain. */
@Test
public void givenInitErrorIsNotNull_whenOnMsg_thenTellFailure() {
    // GIVEN: a node whose Kafka producer failed to initialize.
    String errorMsg = "Error during kafka initialization!";
    ReflectionTestUtils.setField(node, "config", config);
    ReflectionTestUtils.setField(node, "initError", new ThingsboardKafkaClientError(errorMsg));

    // WHEN: a telemetry message arrives.
    TbMsg msg = TbMsg.newMsg(TbMsgType.POST_TELEMETRY_REQUEST, DEVICE_ID, TbMsgMetaData.EMPTY, TbMsg.EMPTY_JSON_OBJECT);
    node.onMsg(ctxMock, msg);

    // THEN: tellFailure is called with the init error wrapped in a RuntimeException.
    ArgumentCaptor<Throwable> actualError = ArgumentCaptor.forClass(Throwable.class);
    then(ctxMock).should().tellFailure(eq(msg), actualError.capture());
    assertThat(actualError.getValue())
            .isInstanceOf(RuntimeException.class)
            .hasMessage("Failed to initialize Kafka rule node producer: " + errorMsg);
}
/**
 * Asynchronously applies {@code deviceUpdater} to the device with the given id on
 * {@code account} (no-op when the device is absent) and delegates persistence to
 * {@code updateAsync}.
 *
 * @param account the account to update
 * @param deviceId id of the device to modify
 * @param deviceUpdater mutation to apply to the device
 * @return future completing with the updated account
 */
public CompletableFuture<Account> updateDeviceAsync(final Account account, final byte deviceId,
    final Consumer<Device> deviceUpdater) {

  return updateAsync(account, a -> {
    a.getDevice(deviceId).ifPresent(deviceUpdater);

    // assume that all updaters passed to the public method actually modify the device
    return true;
  });
}
/**
 * updateDeviceAsync must invoke the updater exactly once for an existing device id
 * and never for an unknown device id; device mutations must be persisted.
 */
@Test
void testUpdateDeviceAsync() {
    final UUID uuid = UUID.randomUUID();
    Account account = AccountsHelper.generateTestAccount("+14152222222", uuid, UUID.randomUUID(), new ArrayList<>(), new byte[UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH]);

    // The store hands back a fresh copy of the account on each async lookup.
    when(accounts.getByAccountIdentifierAsync(uuid)).thenReturn(CompletableFuture.completedFuture(
            Optional.of(AccountsHelper.generateTestAccount("+14152222222", uuid, UUID.randomUUID(), new ArrayList<>(), new byte[UnidentifiedAccessUtil.UNIDENTIFIED_ACCESS_KEY_LENGTH]))));

    assertTrue(account.getDevices().isEmpty());

    Device enabledDevice = new Device();
    enabledDevice.setFetchesMessages(true);
    enabledDevice.setLastSeen(System.currentTimeMillis());
    final byte deviceId = account.getNextDeviceId();
    enabledDevice.setId(deviceId);
    account.addDevice(enabledDevice);

    @SuppressWarnings("unchecked")
    Consumer<Device> deviceUpdater = mock(Consumer.class);
    @SuppressWarnings("unchecked")
    Consumer<Device> unknownDeviceUpdater = mock(Consumer.class);

    // Updater for an existing device runs; a second update renames the device.
    account = accountsManager.updateDeviceAsync(account, deviceId, deviceUpdater).join();
    account = accountsManager.updateDeviceAsync(account, deviceId, d -> d.setName("deviceName".getBytes(StandardCharsets.UTF_8))).join();

    assertArrayEquals("deviceName".getBytes(StandardCharsets.UTF_8), account.getDevice(deviceId).orElseThrow().getName());

    verify(deviceUpdater, times(1)).accept(any(Device.class));

    // Updater for a device id that does not exist must never be invoked.
    accountsManager.updateDeviceAsync(account, account.getNextDeviceId(), unknownDeviceUpdater).join();

    verify(unknownDeviceUpdater, never()).accept(any(Device.class));
}
/**
 * @return the current count of failed getRMNodeLabels retrievals recorded by this
 *         metrics object
 */
public int getRMNodeLabelsFailedRetrieved() {
    return numGetRMNodeLabelsFailedRetrieved.value();
}
/** Each reported RMNodeLabels failure must bump the failure counter by exactly one. */
@Test
public void testGetRMNodeLabelsRetrievedFailed() {
    long failuresBefore = metrics.getRMNodeLabelsFailedRetrieved();
    badSubCluster.getRMNodeLabelsFailed();
    Assert.assertEquals(failuresBefore + 1, metrics.getRMNodeLabelsFailedRetrieved());
}
/**
 * Buffers the upstream flowable into batches by time and/or size, runs the
 * batches through the sub-transformation in parallel, and returns a blocking
 * iterable over the per-batch results. Empty batches and empty sub-trans
 * results are dropped along the way.
 */
@Override
public Iterable<Result> buffer( Flowable<I> flowable ) {
  // Pick the buffering strategy: time+size, time-only, or size-only.
  Flowable<List<I>> buffer = millis > 0
    ? batchSize > 0 ? flowable.buffer( millis, MILLISECONDS, Schedulers.io(), batchSize, ArrayList::new, true )
    : flowable.buffer( millis, MILLISECONDS )
    : flowable.buffer( batchSize );
  return buffer
    // Fan out batches across rails; use the shared pool when one is configured.
    .parallel( parallelism, rxBatchCount )
    .runOn( sharedStreamingBatchPoolSize > 0 ? Schedulers.from( sharedStreamingBatchPool ) : Schedulers.io(), rxBatchCount )
    .filter( list -> !list.isEmpty() )
    .map( this.bufferFilter ) // apply any filtering for data that should no longer be processed
    .filter( list -> !list.isEmpty() ) // ensure at least one record is left before sending to subtrans
    .map( this::sendBufferToSubtrans )
    .filter( Optional::isPresent )
    .map( Optional::get )
    .sequential()
    // Fail the stream on an errored result, then hand results to post-processing.
    .doOnNext( this::failOnError )
    .doOnNext( postProcessor )
    .map( Map.Entry::getValue )
    .blockingIterable();
}
/** A sub-trans execution yielding Optional.empty() must be dropped, not thrown. */
@Test
public void emptyResultShouldNotThrowException() throws KettleException {
    when( subtransExecutor.execute( any() ) ).thenReturn( Optional.empty() );
    when( subtransExecutor.getPrefetchCount() ).thenReturn( 10 );
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta( new ValueMetaString( "field" ) );
    FixedTimeStreamWindow<List> window = new FixedTimeStreamWindow<>( subtransExecutor, rowMeta, 0, 2, 1 );
    Iterable<Result> results = window.buffer( Flowable.fromIterable( singletonList( asList( "v1", "v2" ) ) ) );
    // Consuming the (empty) iterable must not raise.
    results.forEach( result -> { } );
}
/**
 * Builds the backing executor: a cached (unbounded, on-demand) pool when
 * {@code executorSize} is 0, otherwise a fixed pool of that size. Worker
 * threads are named via {@code nameFormat}.
 */
private ExecutorService getExecutorService(final int executorSize, final String nameFormat) {
    ThreadFactory threadFactory = ExecutorThreadFactoryBuilder.build(nameFormat);
    if (executorSize == 0) {
        return Executors.newCachedThreadPool(threadFactory);
    }
    return Executors.newFixedThreadPool(executorSize, threadFactory);
}
/**
 * With a single-thread pool the worker thread is reused; the transmittable
 * thread-local must reflect the value captured at submission time, not a stale
 * value left on the reused thread.
 */
@Test
void assertThreadLocalValueChangedForReusedThread() {
    AtomicBoolean finished = new AtomicBoolean(false);
    // Pool of size 1 guarantees the same worker thread across submits.
    ExecutorService executorService = new ExecutorServiceManager(1).getExecutorService();
    executorService.submit(() -> {
        TRANSMITTABLE_THREAD_LOCAL.set("foo");
        // A task submitted from the worker thread sees the worker's value.
        executorService.submit(() -> assertThat(TRANSMITTABLE_THREAD_LOCAL.get(), is("foo")));
    });
    TRANSMITTABLE_THREAD_LOCAL.set("bar");
    executorService.submit(() -> {
        assertValueChangedInConcurrencyThread();
        finished.set(true);
    });
    assertTimeout(Duration.ofSeconds(1L), () -> assertFinished(finished));
}
public RunResponse restartDirectly( RunResponse restartStepInfo, RunRequest runRequest, boolean blocking) { WorkflowInstance instance = restartStepInfo.getInstance(); String stepId = restartStepInfo.getStepId(); validateStepId(instance, stepId, Actions.StepInstanceAction.RESTART); StepInstance stepInstance = getStepInstanceAndValidate(instance, stepId, runRequest.getRestartConfig()); // prepare payload and then add to db StepAction stepAction = StepAction.createRestart(stepInstance, runRequest); saveAction(stepInstance, stepAction); if (blocking) { return waitResponseWithTimeout(stepInstance, stepAction); } else { return RunResponse.from(stepInstance, stepAction.toTimelineEvent()); } }
/**
 * Non-blocking restart of a failed step (run 2 in the aggregated view) must
 * return the restarted step's identity immediately and publish a wake-up event.
 */
@Test
public void testRestartDirectlyFromAggregatedView() {
    // Prepare a failed, retryable step attempt on run 2 of the workflow.
    stepInstance.getRuntimeState().setStatus(StepInstance.Status.USER_FAILED);
    stepInstance.getStepRetry().setRetryable(true);
    stepInstance.setWorkflowRunId(2);
    instance
        .getAggregatedInfo()
        .getStepAggregatedViews()
        .put("job1", StepAggregatedView.builder().workflowRunId(2L).build());
    ((TypedStep) stepInstance.getDefinition()).setFailureMode(FailureMode.FAIL_IMMEDIATELY);
    stepInstanceDao.insertOrUpsertStepInstance(stepInstance, true);
    RunResponse restartStepInfo = RunResponse.builder().instance(instance).stepId("job1").build();
    RunRequest runRequest =
        RunRequest.builder()
            .requester(user)
            .currentPolicy(RunPolicy.RESTART_FROM_SPECIFIC)
            .stepRunParams(
                Collections.singletonMap(
                    "job1",
                    Collections.singletonMap(
                        "foo", ParamDefinition.buildParamDefinition("foo", "bar"))))
            .build();
    // Non-blocking restart returns without waiting for the step to react.
    RunResponse response = actionDao.restartDirectly(restartStepInfo, runRequest, false);
    Assert.assertEquals("sample-dag-test-3", response.getWorkflowId());
    Assert.assertEquals(1, response.getWorkflowInstanceId());
    Assert.assertEquals(1, response.getWorkflowRunId());
    Assert.assertEquals("job1", response.getStepId());
    Assert.assertEquals(2L, response.getStepAttemptId().longValue());
    Assert.assertEquals(
        "User [tester] take action [RESTART] on the step",
        response.getTimelineEvent().getMessage());
    // The restart must wake up the step instance via the event publisher.
    Mockito.verify(publisher, Mockito.times(1)).publish(any(StepInstanceWakeUpEvent.class));
}
/**
 * Returns the number of bytes the signed ordered-code encoding of {@code n}
 * occupies. For negative n, {@code ~n} (== -n - 1) maps to a non-negative value
 * with the mirrored bit magnitude, so both signs share one BITS_TO_LENGTH lookup
 * keyed by significant-bit count (log2Floor(x) + 1).
 */
int getSignedEncodingLength(long n) {
  return BITS_TO_LENGTH[log2Floor(n < 0 ? ~n : n) + 1];
}
/**
 * Exercises getSignedEncodingLength at every length boundary, symmetric in sign.
 */
@Test
public void testGetSignedEncodingLength() {
    OrderedCode orderedCode = new OrderedCode();
    assertEquals(10, orderedCode.getSignedEncodingLength(Long.MIN_VALUE));
    assertEquals(10, orderedCode.getSignedEncodingLength(~(1L << 62)));
    assertEquals(9, orderedCode.getSignedEncodingLength(~(1L << 62) + 1));
    assertEquals(3, orderedCode.getSignedEncodingLength(-8193));
    assertEquals(2, orderedCode.getSignedEncodingLength(-8192));
    assertEquals(2, orderedCode.getSignedEncodingLength(-65));
    assertEquals(1, orderedCode.getSignedEncodingLength(-64));
    assertEquals(1, orderedCode.getSignedEncodingLength(-2));
    assertEquals(1, orderedCode.getSignedEncodingLength(-1));
    assertEquals(1, orderedCode.getSignedEncodingLength(0));
    assertEquals(1, orderedCode.getSignedEncodingLength(1));
    assertEquals(1, orderedCode.getSignedEncodingLength(63));
    assertEquals(2, orderedCode.getSignedEncodingLength(64));
    assertEquals(2, orderedCode.getSignedEncodingLength(8191));
    assertEquals(3, orderedCode.getSignedEncodingLength(8192));
    // BUG FIX: the "- 1" belongs inside the argument. The original line read
    // getSignedEncodingLength((1L << 62)) - 1, i.e. 10 - 1, which passed by
    // coincidence while testing the wrong value. (1L << 62) - 1 is the intended
    // positive mirror of ~(1L << 62) + 1 above, and also encodes in 9 bytes.
    assertEquals(9, orderedCode.getSignedEncodingLength((1L << 62) - 1));
    assertEquals(10, orderedCode.getSignedEncodingLength(1L << 62));
    assertEquals(10, orderedCode.getSignedEncodingLength(Long.MAX_VALUE));
}
/**
 * Accepts one element into the sink: applies backpressure by flushing while the
 * buffer is at capacity, converts the element, appends it to the buffer, and
 * triggers a flush if the flush thresholds are met.
 *
 * @param element the incoming element
 * @param context conversion context
 * @throws IOException on flush failure
 * @throws InterruptedException if blocked flushing is interrupted
 */
@Override
public void write(InputT element, Context context) throws IOException, InterruptedException {
  // Block (by flushing) until there is room for one more buffered request entry.
  while (bufferedRequestEntries.size() >= maxBufferedRequests) {
    flush();
  }
  addEntryToBuffer(elementConverter.apply(element, context), false);
  nonBlockingFlush();
}
/**
 * After the buffer timeout fires and flushes, a subsequent write must be able to
 * register a fresh timeout immediately, flushing again one interval later.
 */
@Test
public void testThatOnExpiryOfAnOldTimeoutANewOneMayBeRegisteredImmediately() throws Exception {
    AsyncSinkWriterImpl sink =
            new AsyncSinkWriterImplBuilder()
                    .context(sinkInitContext)
                    .maxBatchSize(10)
                    .maxInFlightRequests(20)
                    .maxBatchSizeInBytes(10_000)
                    .maxTimeInBufferMS(100)
                    .maxRecordSizeInBytes(10_000)
                    .simulateFailures(true)
                    .build();
    TestProcessingTimeService tpts = sinkInitContext.getTestProcessingTimeService();
    tpts.setCurrentTime(0L);
    sink.write("1");
    // First buffer timeout fires at t=100 and flushes the first record.
    tpts.setCurrentTime(100L);
    assertThat(res.size()).isEqualTo(1);
    sink.write("2");
    // The newly registered timeout fires at t=200 and flushes the second record.
    tpts.setCurrentTime(200L);
    assertThat(res.size()).isEqualTo(2);
}
/**
 * Convenience negation of {@code isEmpty(byte[])}.
 *
 * @param data the byte array to check; may be null
 * @return true when {@code isEmpty(data)} is false (presumably: data is non-null
 *         and has at least one byte — confirm against isEmpty's contract)
 */
public static boolean isNotEmpty(byte[] data) {
    return !isEmpty(data);
}
/** Non-empty content yields true; bytes of the empty string yield false. */
@Test
void isNotEmpty() {
    final byte[] nonEmptyBytes = ByteUtils.toBytes("google");
    assertTrue(ByteUtils.isNotEmpty(nonEmptyBytes));
    final byte[] emptyBytes = ByteUtils.toBytes("");
    assertFalse(ByteUtils.isNotEmpty(emptyBytes));
}
/**
 * Static factory for a UClassType from a fully qualified class name (e.g.
 * "java.lang.String") and its generic type arguments. The argument list is
 * defensively copied into an ImmutableList.
 *
 * @param fullyQualifiedClass the binary name of the class
 * @param typeArguments generic type arguments, possibly empty
 * @return the AutoValue-backed UClassType instance
 */
public static UClassType create(CharSequence fullyQualifiedClass, List<UType> typeArguments) {
  return new AutoValue_UClassType(
      StringName.of(fullyQualifiedClass), ImmutableList.copyOf(typeArguments));
}
/** A UClassType for java.lang.String must unify with the compiler's String type. */
@Test
public void unifies() {
    final UClassType stringUType = UClassType.create("java.lang.String");
    assertThat(stringUType.unify(Symtab.instance(context).stringType, unifier)).isNotNull();
}
/**
 * Determines whether {@code path} represents a directory in the object store:
 * the root is always a directory; otherwise the path is a directory if a
 * folder-marker object exists for it, or if anything is listed beneath it.
 *
 * @param path the store path to check
 * @return true if the path is a directory
 * @throws IOException on store access failure
 */
@Override
public boolean isDirectory(String path) throws IOException {
  // Root is always a folder
  if (isRoot(path) || path.equals(PATH_SEPARATOR)) {
    return true;
  }
  // An object stored under the folder-marker key (convertToFolderName appends the
  // folder suffix — confirm against its implementation) means the directory exists.
  String keyAsFolder = convertToFolderName(stripPrefixIfPresent(path));
  if (getObjectStatus(keyAsFolder) != null) {
    return true;
  }
  // Fall back: a non-null listing under the path implies an implicit directory.
  return getObjectListingChunkForPath(path, true) != null;
}
/** The root path must always be reported as a directory. */
@Test
public void testIsDirectory() throws IOException {
    final boolean rootIsDirectory = mCOSUnderFileSystem.isDirectory("/");
    Assert.assertTrue(rootIsDirectory);
}