focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Counts the dictionary data rows that belong to the given dictionary type.
 *
 * @param dictType dictionary type to count rows for
 * @return number of matching rows
 */
@Override
public long getDictDataCountByDictType(String dictType) {
    // Pure delegation: the mapper performs the COUNT query.
    final long count = dictDataMapper.selectCountByDictType(dictType);
    return count;
}
@Test public void testGetDictDataCountByDictType() { // mock 数据 dictDataMapper.insert(randomDictDataDO(o -> o.setDictType("yunai"))); dictDataMapper.insert(randomDictDataDO(o -> o.setDictType("tudou"))); dictDataMapper.insert(randomDictDataDO(o -> o.setDictType("yunai"))); // 准备参数 String dictType = "yunai"; // 调用 long count = dictDataService.getDictDataCountByDictType(dictType); // 校验 assertEquals(2L, count); }
/**
 * Completes the oldest pending receive() future with either the given message
 * or the given exception. A no-op when no receive is pending.
 *
 * <p>Exactly one of {@code message}/{@code exception} is expected to be
 * non-null; a null message with a null exception is treated as an error and
 * completes the future exceptionally.
 */
void notifyPendingReceivedCallback(final Message<T> message, Exception exception) {
    // Fast path: nobody is waiting for a message.
    if (pendingReceives.isEmpty()) {
        return;
    }
    // fetch receivedCallback from queue
    final CompletableFuture<Message<T>> receivedFuture = nextPendingReceive();
    if (receivedFuture == null) {
        // Raced with another consumer of the pending-receive queue.
        return;
    }
    if (exception != null) {
        // Completion is dispatched on the pinned executor, not the calling thread.
        internalPinnedExecutor.execute(() -> receivedFuture.completeExceptionally(exception));
        return;
    }
    if (message == null) {
        IllegalStateException e = new IllegalStateException("received message can't be null");
        internalPinnedExecutor.execute(() -> receivedFuture.completeExceptionally(e));
        return;
    }
    if (getCurrentReceiverQueueSize() == 0) {
        // Zero-queue mode: track the message but do not issue new permits.
        // call interceptor and complete received callback
        trackMessage(message);
        interceptAndComplete(message, receivedFuture);
        return;
    }
    // increase permits for available message-queue
    messageProcessed(message);
    // call interceptor and complete received callback
    interceptAndComplete(message, receivedFuture);
}
/** Invoking the callback with an empty pending-receive queue must be a silent no-op. */
@Test(invocationTimeOut = 1000)
public void testNotifyPendingReceivedCallback_EmptyQueueNotThrowsException() {
    consumer.notifyPendingReceivedCallback(null, null);
}
/**
 * Convenience overload that forwards the item to {@link #tryProcess(int, Object)}
 * on ordinal 0.
 *
 * @param item the item to process
 * @return whatever {@code tryProcess(0, item)} returns
 * @throws Exception propagated from the delegate
 */
protected boolean tryProcess0(@Nonnull Object item) throws Exception {
    return tryProcess(0, item);
}
@Test public void when_tryProcess0_then_delegatesToTryProcess() throws Exception { // When boolean done = p.tryProcess0(MOCK_ITEM); // Then assertTrue(done); p.validateReceptionOfItem(ORDINAL_0, MOCK_ITEM); }
public static long parseRelativeTime(String relTime) throws IOException { if (relTime.length() < 2) { throw new IOException("Unable to parse relative time value of " + relTime + ": too short"); } String ttlString = relTime.substring(0, relTime.length()-1); long ttl; try { ttl = Long.parseLong(ttlString); } catch (NumberFormatException e) { throw new IOException("Unable to parse relative time value of " + relTime + ": " + ttlString + " is not a number"); } if (relTime.endsWith("s")) { // pass } else if (relTime.endsWith("m")) { ttl *= 60; } else if (relTime.endsWith("h")) { ttl *= 60*60; } else if (relTime.endsWith("d")) { ttl *= 60*60*24; } else { throw new IOException("Unable to parse relative time value of " + relTime + ": unknown time unit " + relTime.charAt(relTime.length() - 1)); } return ttl*1000; }
@Test(timeout = 5000)
public void testRelativeTimeConversion() throws Exception {
    // Each invalid input must throw; the AssertionError after the call guards
    // against the parse silently succeeding (the original try blocks passed
    // even when no exception was thrown).
    try {
        DFSUtil.parseRelativeTime("1");
        throw new AssertionError("Expected IOException for a value that is too short");
    } catch (IOException e) {
        assertExceptionContains("too short", e);
    }
    try {
        DFSUtil.parseRelativeTime("1z");
        throw new AssertionError("Expected IOException for an unknown time unit");
    } catch (IOException e) {
        assertExceptionContains("unknown time unit", e);
    }
    try {
        DFSUtil.parseRelativeTime("yyz");
        throw new AssertionError("Expected IOException for a non-numeric value");
    } catch (IOException e) {
        assertExceptionContains("is not a number", e);
    }
    // Valid inputs: seconds, minutes, hours and days converted to milliseconds.
    assertEquals(61 * 1000, DFSUtil.parseRelativeTime("61s"));
    assertEquals(61 * 60 * 1000, DFSUtil.parseRelativeTime("61m"));
    assertEquals(0, DFSUtil.parseRelativeTime("0s"));
    assertEquals(25 * 60 * 60 * 1000, DFSUtil.parseRelativeTime("25h"));
    assertEquals(4 * 24 * 60 * 60 * 1000L, DFSUtil.parseRelativeTime("4d"));
    assertEquals(999 * 24 * 60 * 60 * 1000L, DFSUtil.parseRelativeTime("999d"));
}
/**
 * Converts a SAP HANA column definition into a SeaTunnel {@link Column}.
 *
 * <p>Array columns (column type ending in " ARRAY") are handled first by
 * recursively converting the element type and wrapping it in the matching
 * {@link ArrayType}; all other types are mapped through the switch below.
 *
 * @param typeDefine the HANA column definition (mutated in place for arrays)
 * @return the converted SeaTunnel column
 */
@Override
public Column convert(BasicTypeDefine typeDefine) {
    PhysicalColumn.PhysicalColumnBuilder builder =
            PhysicalColumn.builder()
                    .name(typeDefine.getName())
                    .sourceType(typeDefine.getColumnType())
                    .nullable(typeDefine.isNullable())
                    .defaultValue(typeDefine.getDefaultValue())
                    .comment(typeDefine.getComment());
    String hanaType = typeDefine.getDataType().toUpperCase();
    if (typeDefine.getColumnType().endsWith(" ARRAY")) {
        // Strip the ARRAY suffix and convert the element type recursively.
        typeDefine.setColumnType(typeDefine.getColumnType().replace(" ARRAY", ""));
        typeDefine.setDataType(removeColumnSizeIfNeed(typeDefine.getColumnType()));
        Column arrayColumn = convert(typeDefine);
        SeaTunnelDataType<?> newType;
        // Map the element's SQL type to the corresponding array type.
        switch (arrayColumn.getDataType().getSqlType()) {
            case STRING:
                newType = ArrayType.STRING_ARRAY_TYPE;
                break;
            case BOOLEAN:
                newType = ArrayType.BOOLEAN_ARRAY_TYPE;
                break;
            case TINYINT:
                newType = ArrayType.BYTE_ARRAY_TYPE;
                break;
            case SMALLINT:
                newType = ArrayType.SHORT_ARRAY_TYPE;
                break;
            case INT:
                newType = ArrayType.INT_ARRAY_TYPE;
                break;
            case BIGINT:
                newType = ArrayType.LONG_ARRAY_TYPE;
                break;
            case FLOAT:
                newType = ArrayType.FLOAT_ARRAY_TYPE;
                break;
            case DOUBLE:
                newType = ArrayType.DOUBLE_ARRAY_TYPE;
                break;
            case DATE:
                newType = ArrayType.LOCAL_DATE_ARRAY_TYPE;
                break;
            case TIME:
                newType = ArrayType.LOCAL_TIME_ARRAY_TYPE;
                break;
            case TIMESTAMP:
                newType = ArrayType.LOCAL_DATE_TIME_ARRAY_TYPE;
                break;
            default:
                throw CommonError.unsupportedDataType(
                        "SeaTunnel",
                        arrayColumn.getDataType().getSqlType().toString(),
                        typeDefine.getName());
        }
        // Rebuild the column with the array type and restore the " ARRAY" suffix
        // on the source type.
        return new PhysicalColumn(
                arrayColumn.getName(),
                newType,
                arrayColumn.getColumnLength(),
                arrayColumn.getScale(),
                arrayColumn.isNullable(),
                arrayColumn.getDefaultValue(),
                arrayColumn.getComment(),
                arrayColumn.getSourceType() + " ARRAY",
                arrayColumn.getOptions());
    }
    switch (hanaType) {
        case HANA_BINARY:
        case HANA_VARBINARY:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            // Missing/zero length falls back to the maximum binary length.
            if (typeDefine.getLength() == null || typeDefine.getLength() == 0) {
                builder.columnLength(MAX_BINARY_LENGTH);
            } else {
                builder.columnLength(typeDefine.getLength());
            }
            break;
        case HANA_BOOLEAN:
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        case HANA_VARCHAR:
        case HANA_ALPHANUM:
        case HANA_CLOB:
        case HANA_NCLOB:
        case HANA_TEXT:
        case HANA_BINTEXT:
            builder.dataType(BasicType.STRING_TYPE);
            // Missing/zero length falls back to the maximum LOB length.
            if (typeDefine.getLength() == null || typeDefine.getLength() == 0) {
                builder.columnLength(MAX_LOB_LENGTH);
            } else {
                builder.columnLength(typeDefine.getLength());
            }
            break;
        case HANA_NVARCHAR:
        case HANA_SHORTTEXT:
            builder.dataType(BasicType.STRING_TYPE);
            // NVARCHAR length is in characters; convert to a 4-byte-per-char budget.
            builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength()));
            break;
        case HANA_DATE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case HANA_TIME:
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            builder.scale(0);
            break;
        case HANA_SECONDDATE:
            // SECONDDATE has second precision, hence scale 0.
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            builder.scale(0);
            break;
        case HANA_TIMESTAMP:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            if (typeDefine.getScale() == null) {
                builder.scale(TIMESTAMP_DEFAULT_SCALE);
            } else {
                builder.scale(typeDefine.getScale());
            }
            break;
        case HANA_BLOB:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(typeDefine.getLength());
            break;
        case HANA_TINYINT:
        case HANA_SMALLINT:
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case HANA_INTEGER:
            builder.dataType(BasicType.INT_TYPE);
            break;
        case HANA_BIGINT:
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case HANA_DECIMAL:
            Integer scale = typeDefine.getScale();
            long precision =
                    typeDefine.getLength() != null
                            ? typeDefine.getLength().intValue()
                            : MAX_PRECISION - 4;
            if (scale == null) {
                builder.dataType(new DecimalType((int) precision, MAX_SCALE));
                builder.columnLength(precision);
                builder.scale(MAX_SCALE);
            } else if (scale < 0) {
                // Negative scale widens the integer part; pick the smallest
                // integral type that can hold the resulting precision.
                int newPrecision = (int) (precision - scale);
                if (newPrecision == 1) {
                    builder.dataType(BasicType.SHORT_TYPE);
                } else if (newPrecision <= 9) {
                    builder.dataType(BasicType.INT_TYPE);
                } else if (newPrecision <= 18) {
                    builder.dataType(BasicType.LONG_TYPE);
                } else if (newPrecision < 38) {
                    builder.dataType(new DecimalType(newPrecision, 0));
                    builder.columnLength((long) newPrecision);
                } else {
                    builder.dataType(new DecimalType(DEFAULT_PRECISION, 0));
                    builder.columnLength((long) DEFAULT_PRECISION);
                }
            } else {
                builder.dataType(new DecimalType((int) precision, scale));
                builder.columnLength(precision);
                builder.scale(scale);
            }
            break;
        case HANA_SMALLDECIMAL:
            if (typeDefine.getPrecision() == null) {
                builder.dataType(new DecimalType(DEFAULT_PRECISION, MAX_SMALL_DECIMAL_SCALE));
                builder.columnLength((long) DEFAULT_PRECISION);
                builder.scale(MAX_SMALL_DECIMAL_SCALE);
            } else {
                builder.dataType(
                        new DecimalType(
                                typeDefine.getPrecision().intValue(), MAX_SMALL_DECIMAL_SCALE));
                builder.columnLength(typeDefine.getPrecision());
                builder.scale(MAX_SMALL_DECIMAL_SCALE);
            }
            break;
        case HANA_REAL:
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case HANA_DOUBLE:
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        case HANA_ST_POINT:
        case HANA_ST_GEOMETRY:
            // Spatial types are passed through as raw bytes.
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError(
                    DatabaseIdentifier.SAP_HANA, hanaType, typeDefine.getName());
    }
    return builder.build();
}
/** Verifies conversion of HANA binary and large-object types. */
@Test
public void testConvertBytes() {
    // BLOB maps to a raw byte array.
    BasicTypeDefine<Object> typeDefine =
            BasicTypeDefine.builder().name("test").columnType("BLOB").dataType("BLOB").build();
    Column column = SapHanaTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(PrimitiveByteArrayType.INSTANCE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
    // CLOB maps to a string.
    typeDefine =
            BasicTypeDefine.builder().name("test").columnType("CLOB").dataType("CLOB").build();
    column = SapHanaTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
    // NCLOB with an explicit length keeps that length.
    typeDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("NCLOB")
                    .dataType("NCLOB")
                    .length(10L)
                    .build();
    column = SapHanaTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType());
    Assertions.assertEquals(10, column.getColumnLength());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
    // TEXT maps to a string.
    typeDefine =
            BasicTypeDefine.builder().name("test").columnType("TEXT").dataType("TEXT").build();
    column = SapHanaTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
    // BINTEXT also maps to a string.
    typeDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("BINTEXT")
                    .dataType("BINTEXT")
                    .build();
    column = SapHanaTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
    // VARBINARY maps to a raw byte array.
    typeDefine =
            BasicTypeDefine.builder()
                    .name("test")
                    .columnType("VARBINARY")
                    .dataType("VARBINARY")
                    .build();
    column = SapHanaTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(PrimitiveByteArrayType.INSTANCE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
}
/**
 * Creates an L2 modification instruction that rewrites the VLAN id.
 *
 * @param vlanId the VLAN id to set; must not be null
 * @return the VLAN-id modification instruction
 */
public static L2ModificationInstruction modVlanId(VlanId vlanId) {
    checkNotNull(vlanId, "VLAN id cannot be null");
    return new L2ModificationInstruction.ModVlanIdInstruction(vlanId);
}
/** Checks that modVlanId() produces a VLAN-id instruction carrying the given id. */
@Test
public void testModVlanIdMethod() {
    final Instruction instruction = Instructions.modVlanId(vlanId1);
    // Renamed from "modEtherInstruction": this is a VLAN-id instruction, not an
    // Ethernet one (name was a copy-paste leftover).
    final L2ModificationInstruction.ModVlanIdInstruction modVlanIdInstruction =
            checkAndConvert(instruction,
                            Instruction.Type.L2MODIFICATION,
                            L2ModificationInstruction.ModVlanIdInstruction.class);
    assertThat(modVlanIdInstruction.vlanId(), is(equalTo(vlanId1)));
    assertThat(modVlanIdInstruction.subtype(),
               is(equalTo(L2ModificationInstruction.L2SubType.VLAN_ID)));
}
/**
 * Sends an UPDATE_GLOBAL_WHITE_ADDRS_CONFIG command to the given broker and
 * throws unless the broker answers with SUCCESS.
 *
 * @param addr             broker address
 * @param globalWhiteAddrs new global white address list
 * @param aclFileFullPath  ACL file to update on the broker
 * @param timeoutMillis    request timeout
 */
public void updateGlobalWhiteAddrsConfig(final String addr, final String globalWhiteAddrs,
    String aclFileFullPath, final long timeoutMillis)
    throws RemotingException, MQBrokerException, InterruptedException, MQClientException {
    final UpdateGlobalWhiteAddrsConfigRequestHeader requestHeader =
        new UpdateGlobalWhiteAddrsConfigRequestHeader();
    requestHeader.setGlobalWhiteAddrs(globalWhiteAddrs);
    requestHeader.setAclFileFullPath(aclFileFullPath);
    final RemotingCommand request =
        RemotingCommand.createRequestCommand(RequestCode.UPDATE_GLOBAL_WHITE_ADDRS_CONFIG, requestHeader);
    final RemotingCommand response = this.remotingClient.invokeSync(
        MixAll.brokerVIPChannel(this.clientConfig.isVipChannelEnabled(), addr), request, timeoutMillis);
    assert response != null;
    // Anything other than SUCCESS is surfaced as a client exception.
    if (response.getCode() == ResponseCode.SUCCESS) {
        return;
    }
    throw new MQClientException(response.getCode(), response.getRemark());
}
// NOTE(review): no broker response is mocked here, so the call is expected to
// trip an assertion inside updateGlobalWhiteAddrsConfig — presumably the
// `assert response != null` check; confirm against the mock setup.
@Test(expected = AssertionError.class)
public void testUpdateGlobalWhiteAddrsConfig() throws MQBrokerException, RemotingException, InterruptedException, MQClientException {
    mqClientAPI.updateGlobalWhiteAddrsConfig(defaultNsAddr, "", "", defaultTimeout);
}
/**
 * Converts a string to a boolean by membership in the recognized truthy set.
 * Blank or null input yields {@code false}.
 *
 * @param valueStr string to interpret; trimmed and lower-cased before lookup
 * @return true if the normalized value is in TRUE_SET, false otherwise
 */
public static boolean toBoolean(String valueStr) {
    // Guard clause: blank input can never be truthy.
    if (!StrUtil.isNotBlank(valueStr)) {
        return false;
    }
    return TRUE_SET.contains(valueStr.trim().toLowerCase());
}
@Test
public void toBooleanTest() {
    // Recognized truthy spellings (case-insensitive, including Chinese variants).
    assertTrue(BooleanUtil.toBoolean("true"));
    assertTrue(BooleanUtil.toBoolean("yes"));
    assertTrue(BooleanUtil.toBoolean("t"));
    assertTrue(BooleanUtil.toBoolean("OK"));
    assertTrue(BooleanUtil.toBoolean("1"));
    assertTrue(BooleanUtil.toBoolean("On"));
    assertTrue(BooleanUtil.toBoolean("是"));
    assertTrue(BooleanUtil.toBoolean("对"));
    assertTrue(BooleanUtil.toBoolean("真"));
    // Everything else — including blank input — is false.
    assertFalse(BooleanUtil.toBoolean("false"));
    assertFalse(BooleanUtil.toBoolean("6455434"));
    assertFalse(BooleanUtil.toBoolean(""));
}
/**
 * Computes the strongly connected components of a directed graph. The visiting
 * stack, low-link array and visit-order indices follow a Tarjan-style scheme;
 * the traversal itself happens in {@code dfs}.
 *
 * @param numVertex number of vertices, labelled 0..numVertex-1
 * @param outEdges  adjacency list: outEdges.get(v) holds v's successors
 * @return the set of SCCs, each a set of vertex ids
 */
static Set<Set<Integer>> computeStronglyConnectedComponents(
        final int numVertex, final List<List<Integer>> outEdges) {
    final Set<Set<Integer>> stronglyConnectedComponents = new HashSet<>();
    // a vertex will be added into this stack when it is visited for the first time
    final Deque<Integer> visitingStack = new ArrayDeque<>(numVertex);
    final boolean[] onVisitingStack = new boolean[numVertex];
    // stores the order that a vertex is visited for the first time, -1 indicates it is not
    // visited yet
    final int[] vertexIndices = new int[numVertex];
    Arrays.fill(vertexIndices, -1);
    // AtomicInteger serves only as a mutable int holder shared with dfs,
    // not for thread-safety.
    final AtomicInteger indexCounter = new AtomicInteger(0);
    final int[] vertexLowLinks = new int[numVertex];
    // Start a traversal from every still-unvisited vertex so disconnected
    // parts of the graph are covered too.
    for (int vertex = 0; vertex < numVertex; vertex++) {
        if (!isVisited(vertex, vertexIndices)) {
            dfs(
                    vertex,
                    outEdges,
                    vertexIndices,
                    vertexLowLinks,
                    visitingStack,
                    onVisitingStack,
                    indexCounter,
                    stronglyConnectedComponents);
        }
    }
    return stronglyConnectedComponents;
}
@Test
void testLargeGraph() {
    // A single directed cycle over all vertices must collapse into exactly one SCC.
    final int vertexCount = 100000;
    final List<List<Integer>> adjacency = new ArrayList<>();
    for (int vertex = 0; vertex < vertexCount; vertex++) {
        adjacency.add(Collections.singletonList((vertex + 1) % vertexCount));
    }
    final Set<Set<Integer>> components =
            computeStronglyConnectedComponents(vertexCount, adjacency);
    final Set<Integer> allVertices =
            IntStream.range(0, vertexCount).boxed().collect(Collectors.toSet());
    assertThat(components).isEqualTo(Collections.singleton(allVertices));
}
/**
 * Shuts down the flush executor, waits up to five minutes for queued work to
 * drain, closes the data file channel and finally releases the partition index.
 */
@Override
public void release() {
    try {
        // Stop accepting new flush tasks, then wait for in-flight ones to finish.
        ioExecutor.shutdown();
        if (!ioExecutor.awaitTermination(5L, TimeUnit.MINUTES)) {
            throw new TimeoutException("Timeout to shutdown the flush thread.");
        }
        dataFileChannel.close();
    } catch (Exception e) {
        // NOTE(review): rethrow propagates the exception, so on failure the
        // partitionFileIndex.release() below is skipped — confirm intended.
        ExceptionUtils.rethrow(e);
    }
    partitionFileIndex.release();
}
@Test
void testRelease() {
    // Releasing the writer must cascade into a release of its partition index.
    final AtomicBoolean indexReleased = new AtomicBoolean(false);
    final TestingProducerMergedPartitionFileIndex index =
            new TestingProducerMergedPartitionFileIndex.Builder()
                    .setIndexFilePath(new File(tempFolder.toFile(), "testIndex").toPath())
                    .setReleaseRunnable(() -> indexReleased.set(true))
                    .build();
    final ProducerMergedPartitionFileWriter writer =
            new ProducerMergedPartitionFileWriter(
                    new File(tempFolder.toFile(), "testFile1").toPath(), index);
    writer.release();
    assertThat(indexReleased).isTrue();
}
/**
 * Validates a tenant profile: name, presence of profile data/configuration,
 * default-profile uniqueness and — for isolated rule-engine profiles — the
 * queue configuration (main queue present, per-queue validity, unique names).
 *
 * @throws DataValidationException on any violated constraint
 */
@Override
protected void validateDataImpl(TenantId tenantId, TenantProfile tenantProfile) {
    validateString("Tenant profile name", tenantProfile.getName());
    if (tenantProfile.getProfileData() == null) {
        throw new DataValidationException("Tenant profile data should be specified!");
    }
    if (tenantProfile.getProfileData().getConfiguration() == null) {
        throw new DataValidationException("Tenant profile data configuration should be specified!");
    }
    if (tenantProfile.isDefault()) {
        // Only one default profile may exist per tenant.
        TenantProfile defaultTenantProfile = tenantProfileService.findDefaultTenantProfile(tenantId);
        if (defaultTenantProfile != null && !defaultTenantProfile.getId().equals(tenantProfile.getId())) {
            throw new DataValidationException("Another default tenant profile is present!");
        }
    }
    if (tenantProfile.isIsolatedTbRuleEngine()) {
        List<TenantProfileQueueConfiguration> queueConfiguration = tenantProfile.getProfileData().getQueueConfiguration();
        if (queueConfiguration == null) {
            throw new DataValidationException("Tenant profile data queue configuration should be specified!");
        }
        Optional<TenantProfileQueueConfiguration> mainQueueConfig = queueConfiguration
                .stream()
                .filter(q -> q.getName().equals(DataConstants.MAIN_QUEUE_NAME))
                .findAny();
        if (mainQueueConfig.isEmpty()) {
            throw new DataValidationException("Main queue configuration should be specified!");
        }
        queueConfiguration.forEach(this::validateQueueConfiguration);
        // Set.add returns false on duplicates — replaces the previous
        // contains-then-add double lookup.
        Set<String> queueNames = new HashSet<>(queueConfiguration.size());
        queueConfiguration.forEach(q -> {
            String name = q.getName();
            if (!queueNames.add(name)) {
                throw new DataValidationException(String.format("Queue configuration name '%s' already present!", name));
            }
        });
    }
}
@Test
void testValidateNameInvocation() {
    // Build a minimal valid profile: a name plus profile data with a configuration.
    final TenantProfile profile = new TenantProfile();
    profile.setName("Sandbox");
    final TenantProfileData profileData = new TenantProfileData();
    profileData.setConfiguration(new DefaultTenantProfileConfiguration());
    profile.setProfileData(profileData);
    validator.validateDataImpl(tenantId, profile);
    // The name check must have been delegated to validateString.
    verify(validator).validateString("Tenant profile name", profile.getName());
}
/**
 * Builds the timestamped key-value store. A supplied store that does not
 * already speak the timestamped byte format is wrapped in an adapter
 * (persistent) or marker (in-memory); the metered store is always outermost.
 */
@Override
public TimestampedKeyValueStore<K, V> build() {
    KeyValueStore<Bytes, byte[]> inner = storeSupplier.get();
    if (!(inner instanceof TimestampedBytesStore)) {
        inner = inner.persistent()
                ? new KeyValueToTimestampedKeyValueByteStoreAdapter(inner)
                : new InMemoryTimestampedKeyValueStoreMarker(inner);
    }
    return new MeteredTimestampedKeyValueStore<>(
            maybeWrapCaching(maybeWrapLogging(inner)),
            storeSupplier.metricsScope(),
            time,
            keySerde,
            valueSerde);
}
@Test
public void shouldHaveMeteredStoreAsOuterStore() {
    setUp();
    final TimestampedKeyValueStore<String, String> built = builder.build();
    // The metered wrapper must be the outermost layer of the store stack.
    assertThat(built, instanceOf(MeteredTimestampedKeyValueStore.class));
}
/** Returns the composer holding the word currently being typed. */
protected WordComposer getCurrentComposedWord() {
    return mWord;
}
/**
 * With suggestions-restart disabled, moving the cursor into previously typed
 * text must start a fresh (empty) composed word at the cursor instead of
 * restarting composition on the existing word.
 */
@Test
public void testSuggestionsRestartHappyPathWhenDisabled() {
    simulateFinishInputFlow();
    SharedPrefsHelper.setPrefsValue(R.string.settings_key_allow_suggestions_restart, false);
    simulateOnStartInputFlow();
    mAnySoftKeyboardUnderTest.simulateTextTyping("hell yes");
    Assert.assertEquals(
            "hell yes", getCurrentTestInputConnection().getCurrentTextInInputConnection());
    mAnySoftKeyboardUnderTest.resetMockCandidateView();
    // Move the cursor into the middle of "hell"; composition must NOT restart.
    mAnySoftKeyboardUnderTest.moveCursorToPosition(2, true);
    verifySuggestions(true);
    Assert.assertEquals(
            "", mAnySoftKeyboardUnderTest.getCurrentComposedWord().getTypedWord().toString());
    Assert.assertEquals(0, mAnySoftKeyboardUnderTest.getCurrentComposedWord().cursorPosition());
    Assert.assertEquals(2, getCurrentTestInputConnection().getCurrentStartPosition());
    // Typing at the cursor starts a brand-new composed word ("r", then "rd").
    mAnySoftKeyboardUnderTest.simulateKeyPress('r');
    Assert.assertEquals(
            "herll yes", getCurrentTestInputConnection().getCurrentTextInInputConnection());
    verifySuggestions(true, "r");
    Assert.assertEquals(3, getCurrentTestInputConnection().getCurrentStartPosition());
    Assert.assertEquals(
            "r", mAnySoftKeyboardUnderTest.getCurrentComposedWord().getTypedWord().toString());
    mAnySoftKeyboardUnderTest.simulateKeyPress('d');
    Assert.assertEquals(
            "herdll yes", getCurrentTestInputConnection().getCurrentTextInInputConnection());
    verifySuggestions(true, "rd");
    Assert.assertEquals(4, getCurrentTestInputConnection().getCurrentStartPosition());
    Assert.assertEquals(
            "rd", mAnySoftKeyboardUnderTest.getCurrentComposedWord().getTypedWord().toString());
}
/**
 * Returns the candidate instants to check for conflicts against the current
 * instant, choosing the table-service path for clustering/compaction instants
 * and the regular-commit path otherwise.
 */
@Override
public Stream<HoodieInstant> getCandidateInstants(HoodieTableMetaClient metaClient, HoodieInstant currentInstant,
                                                  Option<HoodieInstant> lastSuccessfulInstant) {
    final HoodieActiveTimeline activeTimeline = metaClient.reloadActiveTimeline();
    final boolean isTableServiceInstant =
            ClusteringUtils.isClusteringInstant(activeTimeline, currentInstant)
                    || COMPACTION_ACTION.equals(currentInstant.getAction());
    return isTableServiceInstant
            ? getCandidateInstantsForTableServicesCommits(activeTimeline, currentInstant)
            : getCandidateInstantsForNonTableServicesCommits(activeTimeline, currentInstant);
}
@Test public void testConcurrentWritesWithInterleavingSuccessfulReplace() throws Exception { createCommit(metaClient.createNewInstantTime(), metaClient); HoodieActiveTimeline timeline = metaClient.getActiveTimeline(); // consider commits before this are all successful Option<HoodieInstant> lastSuccessfulInstant = timeline.getCommitsTimeline().filterCompletedInstants().lastInstant(); // writer 1 starts String currentWriterInstant = metaClient.createNewInstantTime(); createInflightCommit(currentWriterInstant, metaClient); // TODO: Remove sleep stmt once the modified times issue is fixed. // Sleep thread for at least 1sec for consecutive commits that way they do not have two commits modified times falls on the same millisecond. Thread.sleep(1000); // replace 1 gets scheduled and finished String newInstantTime = metaClient.createNewInstantTime(); createReplace(newInstantTime, WriteOperationType.INSERT_OVERWRITE, metaClient); Option<HoodieInstant> currentInstant = Option.of(new HoodieInstant(HoodieInstant.State.INFLIGHT, HoodieTimeline.COMMIT_ACTION, currentWriterInstant)); PreferWriterConflictResolutionStrategy strategy = new PreferWriterConflictResolutionStrategy(); HoodieCommitMetadata currentMetadata = createCommitMetadata(currentWriterInstant); List<HoodieInstant> candidateInstants = strategy.getCandidateInstants(metaClient, currentInstant.get(), lastSuccessfulInstant).collect( Collectors.toList()); // writer 1 conflicts with replace 1 Assertions.assertEquals(1, candidateInstants.size()); ConcurrentOperation thatCommitOperation = new ConcurrentOperation(candidateInstants.get(0), metaClient); ConcurrentOperation thisCommitOperation = new ConcurrentOperation(currentInstant.get(), currentMetadata); Assertions.assertTrue(strategy.hasConflict(thisCommitOperation, thatCommitOperation)); try { strategy.resolveConflict(null, thisCommitOperation, thatCommitOperation); Assertions.fail("Cannot reach here, should have thrown a conflict"); } catch (HoodieWriteConflictException 
e) { // expected } }
/**
 * Creates a match criterion for the given ODU signal type.
 *
 * @param signalType the ODU signal type to match on
 * @return the ODU signal-type criterion
 */
public static Criterion matchOduSignalType(OduSignalType signalType) {
    return new OduSignalTypeCriterion(signalType);
}
@Test
public void testMatchOduSignalTypeMethod() {
    // Build a criterion for ODU2 and verify type and payload survive conversion.
    final OduSignalType signalType = OduSignalType.ODU2;
    final Criterion criterion = Criteria.matchOduSignalType(signalType);
    final OduSignalTypeCriterion converted =
            checkAndConvert(criterion,
                            Criterion.Type.ODU_SIGTYPE,
                            OduSignalTypeCriterion.class);
    assertThat(converted.signalType(), is(equalTo(signalType)));
}
/**
 * Creates a TransformExecutorService that runs scheduled executors one at a
 * time on the given executor.
 *
 * @param executor the underlying executor to run work on
 * @return a serial transform executor service
 */
public static TransformExecutorService serial(ExecutorService executor) {
    return new SerialTransformExecutor(executor);
}
@Test
public void serialScheduleTwoWaitsForFirstToComplete() {
    @SuppressWarnings("unchecked")
    DirectTransformExecutor<Object> firstExecutor = mock(DirectTransformExecutor.class);
    @SuppressWarnings("unchecked")
    DirectTransformExecutor<Object> secondExecutor = mock(DirectTransformExecutor.class);
    TransformExecutorService serial = TransformExecutorServices.serial(executorService);
    // The first scheduled executor runs immediately.
    serial.schedule(firstExecutor);
    verify(firstExecutor).run();
    // The second is held back until the first is marked complete.
    serial.schedule(secondExecutor);
    verify(secondExecutor, never()).run();
    serial.complete(firstExecutor);
    verify(secondExecutor).run();
    serial.complete(secondExecutor);
}
/**
 * Extracts the Subject Alternative Names from the extensions, returning an
 * empty list when the extension block or the SAN entry is absent.
 */
public List<SubjectAlternativeName> getSubjectAlternativeNames() {
    return getExtensions()
            .map(ext -> GeneralNames.fromExtensions(ext, Extension.subjectAlternativeName))
            .map(SubjectAlternativeName::fromGeneralNames)
            .orElse(List.of());
}
@Test
void can_read_subject_alternative_names() {
    // Build a CSR carrying two DNS subject alternative names.
    final X500Principal subject = new X500Principal("CN=subject");
    final KeyPair keys = KeyUtils.generateKeypair(KeyAlgorithm.EC, 256);
    final SubjectAlternativeName firstSan = new SubjectAlternativeName(DNS, "san1.com");
    final SubjectAlternativeName secondSan = new SubjectAlternativeName(DNS, "san2.com");
    final Pkcs10Csr csr =
            Pkcs10CsrBuilder.fromKeypair(subject, keys, SignatureAlgorithm.SHA512_WITH_ECDSA)
                    .addSubjectAlternativeName(firstSan)
                    .addSubjectAlternativeName(secondSan)
                    .build();
    // Both names must be read back in registration order.
    assertEquals(List.of(firstSan, secondSan), csr.getSubjectAlternativeNames());
}
/**
 * Translates an {@link IOException} from the FTP layer into a typed
 * {@link BackgroundException}. The instanceof checks are ordered from most to
 * least specific; unmatched exceptions fall through to the generic I/O mapper.
 */
@Override
public BackgroundException map(final IOException e) {
    final StringBuilder buffer = new StringBuilder();
    this.append(buffer, e.getMessage());
    if(e instanceof FTPConnectionClosedException) {
        // Control connection was closed by the server.
        return new ConnectionRefusedException(buffer.toString(), e);
    }
    if(e instanceof FTPException) {
        // Protocol-level error; delegate to the reply-specific handler.
        return this.handle((FTPException) e, buffer);
    }
    if(e instanceof MalformedServerReplyException) {
        return new InteroperabilityException(buffer.toString(), e);
    }
    // Generic I/O failure.
    return new DefaultIOExceptionMappingService().map(e);
}
@Test
public void testSocketTimeout() {
    // Every map(...) overload must translate a socket timeout into a
    // ConnectionTimeoutException.
    assertEquals(ConnectionTimeoutException.class,
            new FTPExceptionMappingService()
                    .map(new SocketTimeoutException()).getClass());
    assertEquals(ConnectionTimeoutException.class,
            new FTPExceptionMappingService()
                    .map("message", new SocketTimeoutException()).getClass());
    assertEquals(ConnectionTimeoutException.class,
            new FTPExceptionMappingService()
                    .map("message", new SocketTimeoutException(),
                            new Path("/f", EnumSet.of(Path.Type.file))).getClass());
}
@Override public Set<String> requiredPermissions() { // When there are no streams the event processor will search in all streams so we need to require the // generic stream permission. if (streams().isEmpty()) { return Collections.singleton(RestPermissions.STREAMS_READ); } return streams().stream() .map(streamId -> String.join(":", RestPermissions.STREAMS_READ, streamId)) .collect(Collectors.toSet()); }
@Test
@MongoDBFixtures("aggregation-processors.json")
public void requiredPermissions() {
    // The fixture's event definition references two streams; a per-stream read
    // permission is expected for each of them.
    assertThat(dbService.get("54e3deadbeefdeadbeefaffe")).get().satisfies(definition ->
            assertThat(definition.config().requiredPermissions())
                    .containsOnly("streams:read:stream-a", "streams:read:stream-b"));
}
/**
 * Returns a new profile equal to this one minus {@code other}, subtracting
 * every resource field including extended resources.
 *
 * @param other the profile to subtract; must not be null and must not exceed
 *              this profile in any field
 * @return the difference profile; ANY/UNKNOWN act as absorbing sentinels
 */
public ResourceProfile subtract(final ResourceProfile other) {
    checkNotNull(other, "Cannot subtract with null resources");
    // ANY and UNKNOWN are sentinels: arithmetic involving them yields the sentinel.
    if (equals(ANY) || other.equals(ANY)) {
        return ANY;
    }
    if (this.equals(UNKNOWN) || other.equals(UNKNOWN)) {
        return UNKNOWN;
    }
    checkArgument(
            allFieldsNoLessThan(other),
            "Try to subtract an unmatched resource profile from this one.");
    Map<String, ExternalResource> resultExtendedResource = new HashMap<>(extendedResources);
    // NOTE(review): the compute lambda assumes every extended resource of `other`
    // is present here (oldResource non-null) — presumably guaranteed by
    // allFieldsNoLessThan; confirm.
    other.extendedResources.forEach(
            (String name, ExternalResource resource) ->
                    resultExtendedResource.compute(
                            name,
                            (ignored, oldResource) -> oldResource.subtract(resource)));
    return new ResourceProfile(
            cpuCores.subtract(other.cpuCores),
            taskHeapMemory.subtract(other.taskHeapMemory),
            taskOffHeapMemory.subtract(other.taskOffHeapMemory),
            managedMemory.subtract(other.managedMemory),
            networkMemory.subtract(other.networkMemory),
            resultExtendedResource);
}
/** Covers regular subtraction, underflow rejection and the ANY/UNKNOWN sentinels. */
@Test
void testSubtract() {
    final ResourceProfile rp1 =
            ResourceProfile.newBuilder()
                    .setCpuCores(1.0)
                    .setTaskHeapMemoryMB(100)
                    .setTaskOffHeapMemoryMB(100)
                    .setManagedMemoryMB(100)
                    .setNetworkMemoryMB(100)
                    .build();
    final ResourceProfile rp2 =
            ResourceProfile.newBuilder()
                    .setCpuCores(2.0)
                    .setTaskHeapMemoryMB(200)
                    .setTaskOffHeapMemoryMB(200)
                    .setManagedMemoryMB(200)
                    .setNetworkMemoryMB(200)
                    .build();
    final ResourceProfile rp3 =
            ResourceProfile.newBuilder()
                    .setCpuCores(3.0)
                    .setTaskHeapMemoryMB(300)
                    .setTaskOffHeapMemoryMB(300)
                    .setManagedMemoryMB(300)
                    .setNetworkMemoryMB(300)
                    .build();
    // Field-wise subtraction.
    assertThat(rp3.subtract(rp2)).isEqualTo(rp1);
    assertThat(rp2.subtract(rp1)).isEqualTo(rp1);
    // Subtracting a larger profile must be rejected.
    assertThatExceptionOfType(IllegalArgumentException.class)
            .as("The subtract should failed due to trying to subtract a larger resource")
            .isThrownBy(() -> rp1.subtract(rp2));
    // ANY and UNKNOWN act as absorbing sentinels.
    assertThat(ResourceProfile.ANY.subtract(rp3)).isEqualTo(ResourceProfile.ANY);
    assertThat(ResourceProfile.ANY.subtract(ResourceProfile.ANY))
            .isEqualTo(ResourceProfile.ANY);
    assertThat(rp3.subtract(ResourceProfile.ANY)).isEqualTo(ResourceProfile.ANY);
    assertThat(ResourceProfile.UNKNOWN.subtract(rp3)).isEqualTo(ResourceProfile.UNKNOWN);
    assertThat(rp3.subtract(ResourceProfile.UNKNOWN)).isEqualTo(ResourceProfile.UNKNOWN);
    assertThat(ResourceProfile.UNKNOWN.subtract(ResourceProfile.UNKNOWN))
            .isEqualTo(ResourceProfile.UNKNOWN);
}
/**
 * Parses MLSD reply lines (RFC 3659 facts) into a list of paths under the
 * given directory. Lines that cannot be parsed or have unsupported types are
 * skipped; if no line parses successfully the whole listing is rejected.
 *
 * @param directory the directory being listed
 * @param replies   raw MLSD reply lines
 * @return the parsed children
 * @throws FTPInvalidListException when not a single entry could be parsed
 */
@Override
public AttributedList<Path> read(final Path directory, final List<String> replies) throws FTPInvalidListException {
    final AttributedList<Path> children = new AttributedList<>();
    if(replies.isEmpty()) {
        return children;
    }
    // At least one entry successfully parsed
    boolean success = false;
    for(String line : replies) {
        final Map<String, Map<String, String>> file = this.parseFacts(line);
        if(null == file) {
            log.error(String.format("Error parsing line %s", line));
            continue;
        }
        for(Map.Entry<String, Map<String, String>> f : file.entrySet()) {
            final String name = f.getKey();
            // size -- Size in octets
            // modify -- Last modification time
            // create -- Creation time
            // type -- Entry type
            // unique -- Unique id of file/directory
            // perm -- File permissions, whether read, write, execute is allowed for the login id.
            // lang -- Language of the file name per IANA [11] registry.
            // media-type -- MIME media-type of file contents per IANA registry.
            // charset -- Character set per IANA registry (if not UTF-8)
            final Map<String, String> facts = f.getValue();
            if(!facts.containsKey("type")) {
                log.error(String.format("No type fact in line %s", line));
                continue;
            }
            // Determine the path kind from the mandatory "type" fact.
            final Path parsed;
            if("dir".equals(facts.get("type").toLowerCase(Locale.ROOT))) {
                parsed = new Path(directory, PathNormalizer.name(f.getKey()), EnumSet.of(Path.Type.directory));
            }
            else if("file".equals(facts.get("type").toLowerCase(Locale.ROOT))) {
                parsed = new Path(directory, PathNormalizer.name(f.getKey()), EnumSet.of(Path.Type.file));
            }
            else if(facts.get("type").toLowerCase(Locale.ROOT).matches("os\\.unix=slink:.*")) {
                parsed = new Path(directory, PathNormalizer.name(f.getKey()), EnumSet.of(Path.Type.file, Path.Type.symboliclink));
                // Parse symbolic link target in Type=OS.unix=slink:/foobar;Perm=;Unique=keVO1+4G4; foobar
                final String[] type = facts.get("type").split(":");
                if(type.length == 2) {
                    final String target = type[1];
                    // Absolute targets are normalized directly; relative targets
                    // are resolved against the listed directory.
                    if(target.startsWith(String.valueOf(Path.DELIMITER))) {
                        parsed.setSymlinkTarget(new Path(PathNormalizer.normalize(target), EnumSet.of(Path.Type.file)));
                    }
                    else {
                        parsed.setSymlinkTarget(new Path(PathNormalizer.normalize(String.format("%s/%s", directory.getAbsolute(), target)), EnumSet.of(Path.Type.file)));
                    }
                }
                else {
                    log.warn(String.format("Missing symbolic link target for type %s in line %s", facts.get("type"), line));
                    continue;
                }
            }
            else {
                log.warn(String.format("Ignored type %s in line %s", facts.get("type"), line));
                continue;
            }
            if(!success) {
                // A directory entry named like the listed directory itself is
                // suspicious and does not count as a successful parse.
                if(parsed.isDirectory() && directory.getName().equals(name)) {
                    log.warn(String.format("Possibly bogus response line %s", line));
                }
                else {
                    success = true;
                }
            }
            // Skip the self/parent pseudo-entries.
            if(name.equals(".") || name.equals("..")) {
                if(log.isDebugEnabled()) {
                    log.debug(String.format("Skip %s", name));
                }
                continue;
            }
            if(facts.containsKey("size")) {
                parsed.attributes().setSize(Long.parseLong(facts.get("size")));
            }
            if(facts.containsKey("unix.uid")) {
                parsed.attributes().setOwner(facts.get("unix.uid"));
            }
            if(facts.containsKey("unix.owner")) {
                parsed.attributes().setOwner(facts.get("unix.owner"));
            }
            if(facts.containsKey("unix.gid")) {
                parsed.attributes().setGroup(facts.get("unix.gid"));
            }
            if(facts.containsKey("unix.group")) {
                parsed.attributes().setGroup(facts.get("unix.group"));
            }
            if(facts.containsKey("unix.mode")) {
                parsed.attributes().setPermission(new Permission(facts.get("unix.mode")));
            }
            else if(facts.containsKey("perm")) {
                // Fall back to mapping the RFC 3659 "perm" fact onto user permissions.
                if(PreferencesFactory.get().getBoolean("ftp.parser.mlsd.perm.enable")) {
                    Permission.Action user = Permission.Action.none;
                    final String flags = facts.get("perm");
                    if(StringUtils.contains(flags, 'r') || StringUtils.contains(flags, 'l')) {
                        // RETR command may be applied to that object
                        // Listing commands, LIST, NLST, and MLSD may be applied
                        user = user.or(Permission.Action.read);
                    }
                    if(StringUtils.contains(flags, 'w') || StringUtils.contains(flags, 'm') || StringUtils.contains(flags, 'c')) {
                        user = user.or(Permission.Action.write);
                    }
                    if(StringUtils.contains(flags, 'e')) {
                        // CWD command naming the object should succeed
                        user = user.or(Permission.Action.execute);
                        if(parsed.isDirectory()) {
                            user = user.or(Permission.Action.read);
                        }
                    }
                    final Permission permission = new Permission(user, Permission.Action.none, Permission.Action.none);
                    parsed.attributes().setPermission(permission);
                }
            }
            if(facts.containsKey("modify")) {
                // Time values are always represented in UTC
                parsed.attributes().setModificationDate(this.parseTimestamp(facts.get("modify")));
            }
            if(facts.containsKey("create")) {
                // Time values are always represented in UTC
                parsed.attributes().setCreationDate(this.parseTimestamp(facts.get("create")));
            }
            children.add(parsed);
        }
    }
    if(!success) {
        throw new FTPInvalidListException(children);
    }
    return children;
}
// Verifies that an MLSD reply containing only a "pdir" (parent-directory) entry is
// rejected: such entries are skipped by the parser, leaving an empty listing.
@Test(expected = FTPInvalidListException.class)
public void testMlsdPdir() throws Exception {
    Path path = new Path(
            "/www", EnumSet.of(Path.Type.directory));
    String[] replies = new String[]{
            "Type=pdir;Perm=e;Unique=keVO1+d?3; ..", // skipped by the parser
    };
    new FTPMlsdListResponseReader().read(path, Arrays.asList(replies));
}
/**
 * Builds the result object from a flattened parameter map whose keys are
 * property paths (lists of step names).
 * <p>
 * Simple (non-Map) valid initial instances are returned unchanged; otherwise
 * the parameters are folded into a (possibly pre-populated) nested Map
 * structure, creating intermediate maps as needed.
 */
@SuppressWarnings("unchecked")
@Override
protected Object createObject(ValueWrapper<Object> initialInstance, String className, Map<List<String>, Object> params, ClassLoader classLoader) {
    // simple types: a valid non-Map initial value is the final result
    if (initialInstance.isValid() && !(initialInstance.getValue() instanceof Map)) {
        return initialInstance.getValue();
    }
    Map<String, Object> toReturn = (Map<String, Object>) initialInstance.orElseGet(HashMap::new);
    for (Map.Entry<List<String>, Object> listObjectEntry : params.entrySet()) {
        // direct mapping (empty path) already considered above
        if (listObjectEntry.getKey().isEmpty()) {
            continue;
        }
        // walk/create nested maps for all steps but the last, then store the value
        List<String> allSteps = listObjectEntry.getKey();
        List<String> steps = allSteps.subList(0, allSteps.size() - 1);
        String lastStep = allSteps.get(allSteps.size() - 1);
        Map<String, Object> targetMap = toReturn;
        for (String step : steps) {
            targetMap = (Map<String, Object>) targetMap.computeIfAbsent(step, k -> new HashMap<>());
        }
        targetMap.put(lastStep, listObjectEntry.getValue());
    }
    return toReturn;
}
// A direct-mapped (empty-path) null simple value must produce a null object.
@Test
public void createObject_directMappingSimpleTypeNull() {
    Map<List<String>, Object> params = new HashMap<>();
    params.put(List.of(), null);

    ValueWrapper<Object> initialInstance = runnerHelper.getDirectMapping(params);
    Object objectRaw = runnerHelper.createObject(initialInstance, String.class.getCanonicalName(), params, getClass().getClassLoader());
    assertThat(objectRaw).isNull();
}
/**
 * Validates that a physical field type is compatible with its logical table
 * field type, producing a {@link ValidationException} describing the mismatch.
 *
 * <p>For sources the physical type must be assignable to the logical type; for
 * sinks the direction is reversed (logical into physical). Only the wording of
 * the error ("TableSource return type" vs. "TableSink consumed type") differs.
 */
public static void checkPhysicalLogicalTypeCompatible(
        LogicalType physicalFieldType,
        LogicalType logicalFieldType,
        String physicalFieldName,
        String logicalFieldName,
        boolean isSource) {
    // Shared message template; the final placeholder names the mismatching side.
    final String messageTemplate =
            "Type %s of table field '%s' does not match with "
                    + "the physical type %s of the '%s' field of the %s.";
    if (isSource) {
        checkIfCompatible(
                physicalFieldType,
                logicalFieldType,
                cause ->
                        new ValidationException(
                                String.format(
                                        messageTemplate,
                                        logicalFieldType,
                                        logicalFieldName,
                                        physicalFieldType,
                                        physicalFieldName,
                                        "TableSource return type"),
                                cause));
    } else {
        checkIfCompatible(
                logicalFieldType,
                physicalFieldType,
                cause ->
                        new ValidationException(
                                String.format(
                                        messageTemplate,
                                        logicalFieldType,
                                        logicalFieldName,
                                        physicalFieldType,
                                        physicalFieldName,
                                        "TableSink consumed type"),
                                cause));
    }
}
// Sink-side compatibility check over every column of a legacy (TypeInformation-based) schema.
@Test
void testCheckPhysicalLogicalTypeCompatible() {
    TableSchema tableSchema =
            TableSchema.builder()
                    .field("a", DataTypes.VARCHAR(2))
                    .field("b", DataTypes.DECIMAL(20, 2))
                    .build();
    TableSink tableSink = new TestTableSink(tableSchema);
    LegacyTypeInformationType legacyDataType =
            (LegacyTypeInformationType) tableSink.getConsumedDataType().getLogicalType();

    // Expand the tuple's physical types into a resolved schema to compare column by column.
    TypeInformation legacyTypeInfo = ((TupleTypeInfo) legacyDataType.getTypeInformation()).getTypeAt(1);
    DataType physicalType = TypeConversions.fromLegacyInfoToDataType(legacyTypeInfo);
    ResolvedSchema physicSchema = DataTypeUtils.expandCompositeTypeToSchema(physicalType);
    DataType[] logicalDataTypes = tableSchema.getFieldDataTypes();
    List<DataType> physicalDataTypes = physicSchema.getColumnDataTypes();
    for (int i = 0; i < logicalDataTypes.length; i++) {
        TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
                physicalDataTypes.get(i).getLogicalType(),
                logicalDataTypes[i].getLogicalType(),
                "physicalField",
                "logicalField",
                false);
    }
}
/** Notifies registered function listeners of a "logout" call (no arguments). */
public void callLogout() {
    call("logout", null);
}
// logout() must be surfaced to function listeners as a "logout" function call.
@Test
public void callLogout() {
    SensorsDataAPI sensorsDataAPI = SAHelper.initSensors(mApplication);
    sensorsDataAPI.addFunctionListener(new SAFunctionListener() {
        @Override
        public void call(String function, JSONObject args) {
            Assert.assertEquals("logout", function);
        }
    });
    sensorsDataAPI.logout();
}
/**
 * Serializes a row of values to a single UTF-8 CSV record (without the
 * trailing record separator).
 *
 * @param topic the topic (unused by the CSV format)
 * @param data  the column values; null yields a null payload
 * @return UTF-8 bytes of the CSV record, or null when {@code data} is null
 * @throws SerializationException wrapping any failure during printing
 */
@Override
public byte[] serialize(final String topic, final List<?> data) {
    if (data == null) {
        return null;
    }

    // try-with-resources: the original code never closed the printer; closing
    // also flushes any buffered output before the writer is read.
    try (StringWriter stringWriter = new StringWriter();
         CSVPrinter csvPrinter = new CSVPrinter(stringWriter, csvFormat)) {
        csvPrinter.printRecord(() -> new FieldIterator(data, schema));
        final String result = stringWriter.toString();
        // Strip the trailing record separator appended by printRecord.
        // NOTE(review): assumes a 2-character (CRLF) separator — confirm csvFormat.
        return result.substring(0, result.length() - 2).getBytes(StandardCharsets.UTF_8);
    } catch (final Exception e) {
        throw new SerializationException("Error serializing CSV message", e);
    }
}
// Negative decimals keep their trailing zeros (scale) and are quoted in CSV output.
@Test
public void shouldSerializeNegativeDecimalWithAsStringWithPaddedZeros() {
    // Given:
    givenSingleColumnSerializer(SqlTypes.decimal(4, 3));
    final List<?> values = Collections.singletonList(new BigDecimal("-1.120"));

    // When:
    final byte[] bytes = serializer.serialize("", values);

    // Then:
    assertThat(new String(bytes, StandardCharsets.UTF_8), is("\"-1.120\""));
}
/**
 * Derives a filesystem-safe file name from an arbitrary string: strips accents,
 * keeps only characters present in {@code validChars}, collapses leading and
 * repeated spaces, and trims. Falls back to a random name when nothing
 * survives; over-long names are truncated and suffixed with an MD5 of the full
 * name so distinct titles stay distinct.
 */
public static String generateFileName(String string) {
    string = StringUtils.stripAccents(string);
    StringBuilder buf = new StringBuilder();
    for (int i = 0; i < string.length(); i++) {
        char c = string.charAt(i);
        // skip a space at the start or immediately after another space
        if (Character.isSpaceChar(c)
                && (buf.length() == 0 || Character.isSpaceChar(buf.charAt(buf.length() - 1)))) {
            continue;
        }
        if (ArrayUtils.contains(validChars, c)) {
            buf.append(c);
        }
    }
    String filename = buf.toString().trim();
    if (TextUtils.isEmpty(filename)) {
        // nothing survived filtering — use a random name instead of an empty one
        return randomString(8);
    } else if (filename.length() >= MAX_FILENAME_LENGTH) {
        // truncate and append "_<md5(fullname)>" to keep long names unique
        return filename.substring(0, MAX_FILENAME_LENGTH - MD5_HEX_LENGTH - 1) + "_" + md5(filename);
    } else {
        return filename;
    }
}
// The apostrophe is dropped (not replaced) and trailing dots/spaces are stripped.
@Test
public void testFeedTitleContainsApostrophe() {
    String result = FileNameGenerator.generateFileName("Feed's Title ...");
    assertEquals("Feeds Title", result);
}
/**
 * TINYINT subtraction operator. The long difference is range-checked into the
 * signed byte range [-128, 127]; overflow is surfaced as a PrestoException
 * with NUMERIC_VALUE_OUT_OF_RANGE, preserving the original cause.
 */
@ScalarOperator(SUBTRACT)
@SqlType(StandardTypes.TINYINT)
public static long subtract(@SqlType(StandardTypes.TINYINT) long left, @SqlType(StandardTypes.TINYINT) long right)
{
    try {
        // checkedCast throws IllegalArgumentException when the result leaves the byte range
        return SignedBytes.checkedCast(left - right);
    }
    catch (IllegalArgumentException e) {
        throw new PrestoException(NUMERIC_VALUE_OUT_OF_RANGE, format("tinyint subtraction overflow: %s - %s", left, right), e);
    }
}
// Covers zero, positive, negative results and the MIN_VALUE - 1 overflow case.
@Test
public void testSubtract()
{
    assertFunction("TINYINT'37' - TINYINT'37'", TINYINT, (byte) 0);
    assertFunction("TINYINT'37' - TINYINT'17'", TINYINT, (byte) (37 - 17));
    assertFunction("TINYINT'17' - TINYINT'37'", TINYINT, (byte) (17 - 37));
    assertFunction("TINYINT'17' - TINYINT'17'", TINYINT, (byte) 0);
    assertNumericOverflow(format("TINYINT'%s' - TINYINT'1'", Byte.MIN_VALUE), "tinyint subtraction overflow: -128 - 1");
}
/**
 * Validates a project ID, rejecting IDs that are too short, too long, or that
 * contain characters outside the allowed set (letters, numbers, hyphens,
 * single quotes, colon, dot, exclamation point).
 *
 * @throws IllegalArgumentException when any constraint is violated
 */
public static void checkValidProjectId(String idToCheck) {
    final int idLength = idToCheck.length();
    if (idLength < MIN_PROJECT_ID_LENGTH) {
        throw new IllegalArgumentException("Project ID " + idToCheck + " cannot be empty.");
    }
    if (idLength > MAX_PROJECT_ID_LENGTH) {
        throw new IllegalArgumentException(
                "Project ID "
                        + idToCheck
                        + " cannot be longer than "
                        + MAX_PROJECT_ID_LENGTH
                        + " characters.");
    }
    if (ILLEGAL_PROJECT_CHARS.matcher(idToCheck).find()) {
        throw new IllegalArgumentException(
                "Project ID "
                        + idToCheck
                        + " is not a valid ID. Only letters, numbers, hyphens, single quotes, colon, dot and"
                        + " exclamation points are allowed.");
    }
}
// An ID exercising the full allowed character set must pass without throwing.
@Test
public void testCheckValidProjectIdWhenIdIsValid() {
    checkValidProjectId("'project-id-9'!");
}
/**
 * Recursively converts a Java object into its Ruby counterpart.
 *
 * <p>null maps to Ruby nil; classes with a registered converter use it
 * directly, anything else goes through the slower fallback conversion.
 */
public static IRubyObject deep(final Ruby runtime, final Object input) {
    if (input == null) {
        return runtime.getNil();
    }
    final Class<?> inputClass = input.getClass();
    final Rubyfier.Converter found = CONVERTER_MAP.get(inputClass);
    return found != null
            ? found.convert(runtime, input)
            : fallbackConvert(runtime, input, inputClass);
}
// A Java String converts to a RubyString with identical content.
@Test
public void testDeepWithString() {
    Object result = Rubyfier.deep(RubyUtil.RUBY, "foo");
    assertEquals(RubyString.class, result.getClass());
    assertEquals("foo", result.toString());
}
/**
 * Speculates a bucket number for a table from its most recent partitions.
 *
 * <p>Falls back to a backend-count-based number when there are too few
 * partitions, auto distribution is disabled, or no recent partition has
 * imported data; otherwise sizes buckets from the largest recent partition
 * (one bucket per AUTO_DISTRIBUTION_UNIT of data).
 */
public static int calAvgBucketNumOfRecentPartitions(OlapTable olapTable, int recentPartitionNum,
                                                    boolean enableAutoTabletDistribution) {
    // 1. If the partition is less than recentPartitionNum, use backendNum to speculate the bucketNum
    //    Or the Config.enable_auto_tablet_distribution is disabled
    int bucketNum = 0;
    if (olapTable.getPartitions().size() < recentPartitionNum || !enableAutoTabletDistribution) {
        bucketNum = CatalogUtils.calBucketNumAccordingToBackends();
        // If table is not partitioned, the bucketNum should be at least DEFAULT_UNPARTITIONED_TABLE_BUCKET_NUM
        if (!olapTable.getPartitionInfo().isPartitioned()) {
            bucketNum = bucketNum > FeConstants.DEFAULT_UNPARTITIONED_TABLE_BUCKET_NUM ?
                    bucketNum : FeConstants.DEFAULT_UNPARTITIONED_TABLE_BUCKET_NUM;
        }
        return bucketNum;
    }

    // 2. If the partitions have not imported any data yet (visible version still 1),
    //    use backendNum to speculate the bucketNum
    List<Partition> partitions = (List<Partition>) olapTable.getRecentPartitions(recentPartitionNum);
    boolean dataImported = true;
    for (Partition partition : partitions) {
        if (partition.getVisibleVersion() == 1) {
            dataImported = false;
            break;
        }
    }

    bucketNum = CatalogUtils.calBucketNumAccordingToBackends();
    if (!dataImported) {
        return bucketNum;
    }

    // 3. Use the max data size of the recent partitions to speculate the bucketNum,
    //    regarding one tablet as holding AUTO_DISTRIBUTION_UNIT (1GB) of data.
    long maxDataSize = 0;
    for (Partition partition : partitions) {
        maxDataSize = Math.max(maxDataSize, partition.getDataSize());
    }
    // ceil-divide the max partition size by the per-tablet unit
    long speculateTabletNum = (maxDataSize + FeConstants.AUTO_DISTRIBUTION_UNIT - 1) / FeConstants.AUTO_DISTRIBUTION_UNIT;
    // speculateTabletNum may be not accurate, so take the LARGER of the backend-based
    // number and the size-based number (the result can exceed calBucketNumAccordingToBackends()).
    bucketNum = (int) Math.max(bucketNum, speculateTabletNum);
    if (bucketNum == 0) {
        bucketNum = 1;
    }

    return bucketNum;
}
// With imported data, the bucket number is derived from partition size (2GB -> 2 buckets).
@Test
public void testCalAvgBucketNumOfRecentPartitions_CalculateByDataSize() {
    List<Partition> partitions = new ArrayList<>();
    partitions.add(partition);
    when(olapTable.getPartitions()).thenReturn(partitions);
    when(olapTable.getRecentPartitions(anyInt())).thenReturn(partitions);
    when(partition.getVisibleVersion()).thenReturn(2L);
    when(partition.getDataSize()).thenReturn(2L * FeConstants.AUTO_DISTRIBUTION_UNIT);

    int bucketNum = CatalogUtils.calAvgBucketNumOfRecentPartitions(olapTable, 1, true);
    assertEquals(2, bucketNum); // 2 tablets based on 2GB size
}
T getFunction(final List<SqlArgument> arguments) { // first try to get the candidates without any implicit casting Optional<T> candidate = findMatchingCandidate(arguments, false); if (candidate.isPresent()) { return candidate.get(); } else if (!supportsImplicitCasts) { throw createNoMatchingFunctionException(arguments); } // if none were found (candidate isn't present) try again with implicit casting candidate = findMatchingCandidate(arguments, true); if (candidate.isPresent()) { return candidate.get(); } throw createNoMatchingFunctionException(arguments); }
// A null argument in the vararg position must still resolve the vararg overload.
@Test
public void shouldFindMiddleVarargWithSomeNullValues() {
    // Given:
    givenFunctions(
        function(EXPECTED, 1, INT, STRING_VARARGS, STRING)
    );

    // When:
    final KsqlScalarFunction fun = udfIndex.getFunction(Arrays.asList(
        SqlArgument.of(SqlTypes.INTEGER), null, SqlArgument.of(SqlTypes.STRING)
    ));

    // Then:
    assertThat(fun.name(), equalTo(EXPECTED));
}
/** @return the maximum number of Compute Engine worker threads. */
@Override
public int getWorkerMaxCount() {
    return workerThreadCount;
}
// The max worker count is apparently a hard cap of 10, independent of the
// WorkerCountProvider value — NOTE(review): confirm against CeConfigurationImpl.
@Test
public void getWorkerMaxCount_returns_10_whichever_the_value_returned_by_WorkerCountProvider() {
    int value = randomValidWorkerCount();
    workerCountProvider.set(value);

    assertThat(new CeConfigurationImpl(EMPTY_CONFIGURATION, workerCountProvider).getWorkerMaxCount()).isEqualTo(10);
}
/** @return the stored serial number value, or null when unset. */
public String serial() {
    return get(SERIAL, null);
}
// A newly set serial must be read back unchanged.
@Test
public void testSetSerial() {
    SW_BDC.serial(SERIAL_NEW);
    assertEquals("Incorrect serial", SERIAL_NEW, SW_BDC.serial());
}
/**
 * Parses just enough of the XML stream to describe its root element: name,
 * namespace, attributes and namespace mappings, plus any "camel-k:" modelines
 * found in comments before the root element.
 *
 * <p>Parse problems are recorded on the returned {@link XmlStreamInfo}
 * ({@code information.problem}) rather than thrown; parsing stops at the first
 * child of the root element.
 */
public XmlStreamInfo information() throws IOException {
    if (information.problem != null) {
        // a previous call already failed — return the cached result
        return information;
    }

    if (XMLStreamConstants.START_DOCUMENT != reader.getEventType()) {
        information.problem = new IllegalStateException("Expected START_DOCUMENT");
        return information;
    }

    boolean skipComments = false;
    try {
        while (reader.hasNext()) {
            int ev = reader.next();
            switch (ev) {
                case XMLStreamConstants.COMMENT:
                    // only comments before the root element are scanned for modelines
                    if (!skipComments) {
                        String comment = reader.getText();
                        if (comment != null) {
                            comment.lines().map(String::trim).forEach(l -> {
                                if (l.startsWith("camel-k:")) {
                                    information.modelines.add(l);
                                }
                            });
                        }
                    }
                    break;
                case XMLStreamConstants.START_ELEMENT:
                    if (information.rootElementName != null) {
                        // only root element is checked. No need to parse more
                        return information;
                    }
                    skipComments = true;
                    information.rootElementName = reader.getLocalName();
                    information.rootElementNamespace = reader.getNamespaceURI();
                    // record the namespace declarations on the root element
                    for (int ns = 0; ns < reader.getNamespaceCount(); ns++) {
                        String prefix = reader.getNamespacePrefix(ns);
                        information.namespaceMapping.put(prefix == null ? "" : prefix, reader.getNamespaceURI(ns));
                    }
                    for (int at = 0; at < reader.getAttributeCount(); at++) {
                        QName qn = reader.getAttributeName(at);
                        String prefix = qn.getPrefix() == null ? "" : qn.getPrefix().trim();
                        String nsURI = qn.getNamespaceURI() == null ? "" : qn.getNamespaceURI().trim();
                        String value = reader.getAttributeValue(at);
                        String localPart = qn.getLocalPart();
                        if (nsURI.isEmpty() || prefix.isEmpty()) {
                            // according to XML spec, this attribute is not namespaced, not in default namespace
                            // https://www.w3.org/TR/xml-names/#defaulting
                            // > The namespace name for an unprefixed attribute name always has no value.
                            information.attributes.put(localPart, value);
                        } else {
                            // namespaced attributes are stored under both Clark notation and prefix:local
                            information.attributes.put("{" + nsURI + "}" + localPart, value);
                            information.attributes.put(prefix + ":" + localPart, value);
                        }
                    }
                    break;
                case XMLStreamConstants.END_ELEMENT:
                case XMLStreamConstants.END_DOCUMENT:
                    // reaching the end before any START_ELEMENT means an empty document
                    if (information.rootElementName == null) {
                        information.problem = new IllegalArgumentException("XML Stream is empty");
                        return information;
                    }
                    break;
                default:
                    break;
            }
        }
    } catch (XMLStreamException e) {
        information.problem = e;
        return information;
    }

    return information;
}
// A zero-byte stream must yield invalid XmlStreamInfo, not an exception.
@Test
public void emptyDocument() throws IOException {
    XmlStreamDetector detector = new XmlStreamDetector(new ByteArrayInputStream(new byte[0]));
    assertFalse(detector.information().isValid());
}
/**
 * Verifies the statement may be executed in the current transaction context;
 * throws {@link TableModifyInTransactionException} (naming the offending
 * table) when the prerequisites are not met.
 */
public void checkExecutePrerequisites(final ExecutionContext executionContext) {
    ShardingSpherePreconditions.checkState(isValidExecutePrerequisites(executionContext),
            () -> new TableModifyInTransactionException(getTableName(executionContext)));
}
// DDL (CREATE TABLE) inside an XA transaction must be rejected.
@Test
void assertCheckExecutePrerequisitesWhenExecuteDDLInXATransaction() {
    ExecutionContext executionContext = new ExecutionContext(
            new QueryContext(createMySQLCreateTableStatementContext(), "", Collections.emptyList(), new HintValueContext(), mockConnectionContext(),
                    mock(ShardingSphereMetaData.class)),
            Collections.emptyList(), mock(RouteContext.class));
    assertThrows(TableModifyInTransactionException.class,
            () -> new ProxySQLExecutor(JDBCDriverType.STATEMENT, databaseConnectionManager, mock(DatabaseConnector.class), mockQueryContext())
                    .checkExecutePrerequisites(executionContext));
}
/**
 * Creates an arrangement over the given candidate elements.
 *
 * @param datas the elements to arrange; stored by reference (not copied)
 */
public Arrangement(String[] datas) {
    this.datas = datas;
}
// Exercises the static count helpers: A(n, m) permutation counts and the total over all m.
@Test
public void arrangementTest() {
    long result = Arrangement.count(4, 2);
    assertEquals(12, result);

    result = Arrangement.count(4, 1);
    assertEquals(4, result);

    result = Arrangement.count(4, 0);
    assertEquals(1, result);

    long resultAll = Arrangement.countAll(4);
    assertEquals(64, resultAll);
}
/**
 * Tags a token by regex: returns the POS of the FIRST pattern in {@code REGEX}
 * that fully matches {@code word} ({@code REGEX} and {@code REGEX_POS} are
 * parallel arrays), or null when no pattern matches. Order matters.
 */
public static PennTreebankPOS tag(String word) {
    for (int i = 0; i < REGEX.length; i++) {
        if (REGEX[i].matcher(word).matches()) {
            return REGEX_POS[i];
        }
    }

    return null;
}
// Numbers (plain, decimal, grouped) tag as CD; phone numbers, URLs and emails as NN.
@Test
public void testTag() {
    System.out.println("tag");
    assertEquals(PennTreebankPOS.CD, RegexPOSTagger.tag("123"));
    assertEquals(PennTreebankPOS.CD, RegexPOSTagger.tag("1234567890"));
    assertEquals(PennTreebankPOS.CD, RegexPOSTagger.tag("123.45"));
    assertEquals(PennTreebankPOS.CD, RegexPOSTagger.tag("1,234"));
    assertEquals(PennTreebankPOS.CD, RegexPOSTagger.tag("1,234.5678"));
    assertEquals(PennTreebankPOS.NN, RegexPOSTagger.tag("914-544-3333"));
    assertEquals(PennTreebankPOS.NN, RegexPOSTagger.tag("544-3333"));
    assertEquals(PennTreebankPOS.NN, RegexPOSTagger.tag("x123"));
    assertEquals(PennTreebankPOS.NN, RegexPOSTagger.tag("x123"));
    assertEquals(PennTreebankPOS.NN, RegexPOSTagger.tag("http://www.msnbc.msn.com/id/42231726/?GT1=43001"));
    assertEquals(PennTreebankPOS.NN, RegexPOSTagger.tag("ftp://www.msnbc.msn.com/id/42231726/?GT1=43001"));
    assertEquals(PennTreebankPOS.NN, RegexPOSTagger.tag("nobody@usc.edu"));
    assertEquals(PennTreebankPOS.NN, RegexPOSTagger.tag("no.body@usc.edu.cn"));
    assertEquals(PennTreebankPOS.NN, RegexPOSTagger.tag("no_body@usc.edu.cn"));
}
/** Parses the schema string into table elements using a one-shot SchemaParser. */
public static TableElements parse(final String schema, final TypeRegistry typeRegistry) {
    return new SchemaParser(typeRegistry).parse(schema);
}
// A reserved word used as a field name must fail with a position-annotated parse error.
@Test
public void shouldThrowOnReservedWord() {
    // Given:
    final String schema = "CREATE INTEGER";

    // When:
    final Exception e = assertThrows(
        KsqlException.class,
        () -> parser.parse(schema)
    );

    // Then:
    assertThat(e.getMessage(), containsString(
        "Error parsing schema \"CREATE INTEGER\" at 1:1: extraneous input 'CREATE' "));
}
/**
 * Processes one partition of the change stream by delegating entirely to the
 * shared {@code queryChangeStreamAction}; this method only adds debug logging.
 */
@ProcessElement
public ProcessContinuation processElement(
    @Element PartitionMetadata partition,
    RestrictionTracker<TimestampRange, com.google.cloud.Timestamp> tracker,
    OutputReceiver<DataChangeRecord> receiver,
    ManualWatermarkEstimator<Instant> watermarkEstimator,
    BundleFinalizer bundleFinalizer) {

  final String token = partition.getPartitionToken();
  LOG.debug("[{}] Processing element with restriction {}", token, tracker.currentRestriction());

  return queryChangeStreamAction.run(
      partition, tracker, receiver, watermarkEstimator, bundleFinalizer);
}
// The DoFn must delegate to queryChangeStreamAction only; record-level actions stay untouched.
@Test
public void testQueryChangeStreamMode() {
    when(queryChangeStreamAction.run(any(), any(), any(), any(), any()))
        .thenReturn(ProcessContinuation.stop());

    final ProcessContinuation result =
        doFn.processElement(partition, tracker, receiver, watermarkEstimator, bundleFinalizer);

    assertEquals(ProcessContinuation.stop(), result);
    verify(queryChangeStreamAction)
        .run(partition, tracker, receiver, watermarkEstimator, bundleFinalizer);

    verify(dataChangeRecordAction, never()).run(any(), any(), any(), any(), any());
    verify(heartbeatRecordAction, never()).run(any(), any(), any(), any());
    verify(childPartitionsRecordAction, never()).run(any(), any(), any(), any());
    verify(tracker, never()).tryClaim(any());
}
/** @return the connector's static configuration definition. */
@Override
public ConfigDef config() {
    return CONFIG_DEF;
}
// The sink properties must validate cleanly against the connector's ConfigDef.
@Test
public void testConnectorConfigValidation() {
    List<ConfigValue> configValues = connector.config().validate(sinkProperties);
    for (ConfigValue val : configValues) {
        assertEquals(0, val.errorMessages().size(), "Config property errors: " + val.errorMessages());
    }
}
/**
 * Returns a copy of this helper with the given grand-total filter applied;
 * the receiver is left unchanged (immutable builder style).
 */
@Override
public MongoPaginationHelper<T> grandTotalFilter(Bson grandTotalFilter) {
    return new DefaultMongoPaginationHelper<>(collection, filter, sort, perPage, includeGrandTotal,
            grandTotalFilter, collation);
}
// The grand total honors the filter identically for both page() overloads.
@Test
void testGrandTotalFilter() {
    final Bson filter = Filters.in("name", "A", "B", "C");
    assertThat(paginationHelper.includeGrandTotal(true).grandTotalFilter(filter).page(1).grandTotal())
            .isEqualTo(paginationHelper.includeGrandTotal(true).grandTotalFilter(filter).page(1, alwaysTrue()).grandTotal())
            .contains(3L);
}
protected Map<String, String[]> generateParameterMap(MultiValuedTreeMap<String, String> qs, ContainerConfig config) { Map<String, String[]> output; Map<String, List<String>> formEncodedParams = getFormUrlEncodedParametersMap(); if (qs == null) { // Just transform the List<String> values to String[] output = formEncodedParams.entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, (e) -> e.getValue().toArray(new String[0]))); } else { Map<String, List<String>> queryStringParams; if (config.isQueryStringCaseSensitive()) { queryStringParams = qs; } else { // If it's case insensitive, we check the entire map on every parameter queryStringParams = qs.entrySet().stream().parallel().collect( Collectors.toMap( Map.Entry::getKey, e -> getQueryParamValuesAsList(qs, e.getKey(), false) )); } // Merge formEncodedParams and queryStringParams Maps output = Stream.of(formEncodedParams, queryStringParams).flatMap(m -> m.entrySet().stream()) .collect( Collectors.toMap( Map.Entry::getKey, e -> e.getValue().toArray(new String[0]), // If a parameter is in both Maps, we merge the list of values (and ultimately transform to String[]) (formParam, queryParam) -> Stream.of(formParam, queryParam).flatMap(Stream::of).toArray(String[]::new) )); } return output; }
// Form-encoded and query-string values for the same key are concatenated, form values first.
@Test
void parameterMap_generateParameterMap_formEncodedAndQueryString() {
    AwsProxyHttpServletRequest request = new AwsProxyHttpServletRequest(formEncodedAndQueryString, mockContext, null, config);
    Map<String, String[]> paramMap = null;
    try {
        paramMap = request.generateParameterMap(request.getAwsProxyRequest().getMultiValueQueryStringParameters(), config);
    } catch (Exception e) {
        e.printStackTrace();
        fail("Could not generate parameter map");
    }

    // Combines form encoded parameters (one=four) with query string (one=two,three)
    // The order between them is not officially guaranteed (it could be four,two,three or two,three,four)
    // Current implementation gives form encoded parameters first
    assertArrayEquals(new String[]{"four", "two", "three"}, paramMap.get("one"));
    assertArrayEquals(new String[]{"six"}, paramMap.get("five"));
    assertArrayEquals(new String[]{"eight"}, paramMap.get("seven"));
    assertTrue(paramMap.size() == 3);
}
/**
 * Sets the mapper used to render each key/value pair as the printed string.
 *
 * @return this instance, for chaining
 * @throws NullPointerException if {@code mapper} is null
 */
public Printed<K, V> withKeyValueMapper(final KeyValueMapper<? super K, ? super V, String> mapper) {
    Objects.requireNonNull(mapper, "mapper can't be null");
    this.mapper = mapper;
    return this;
}
// A null mapper must be rejected eagerly with NullPointerException.
@Test
public void shouldThrowNullPointerExceptionIfMapperIsNull() {
    assertThrows(NullPointerException.class, () -> sysOutPrinter.withKeyValueMapper(null));
}
/**
 * Assembles the full plugin info for an authorization plugin: auth-config
 * settings, role settings (which depend on the plugin's capabilities), its
 * icon image, and the capabilities themselves.
 */
@Override
public AuthorizationPluginInfo pluginInfoFor(GoPluginDescriptor descriptor) {
    final String pluginId = descriptor.id();
    final Capabilities pluginCapabilities = capabilities(pluginId);
    final PluggableInstanceSettings authSettings = authConfigSettings(pluginId);
    final PluggableInstanceSettings roles = roleSettings(pluginId, pluginCapabilities);
    final Image icon = image(pluginId);
    return new AuthorizationPluginInfo(descriptor, authSettings, roles, icon, pluginCapabilities);
}
// The icon reported by the extension must be carried into the built plugin info.
@Test
public void shouldBuildPluginInfoWithImage() {
    GoPluginDescriptor descriptor = GoPluginDescriptor.builder().id("plugin1").build();
    Image icon = new Image("content_type", "data", "hash");

    when(extension.getIcon(descriptor.id())).thenReturn(icon);

    AuthorizationPluginInfo pluginInfo = new AuthorizationPluginInfoBuilder(extension).pluginInfoFor(descriptor);

    assertThat(pluginInfo.getImage(), is(icon));
}
/**
 * Executes a SHOW QUERIES statement, gathering query state from all hosts in
 * the cluster and dispatching to the extended or simple variant depending on
 * whether EXTENDED was requested.
 */
public static StatementExecutorResponse execute(
    final ConfiguredStatement<ListQueries> statement,
    final SessionProperties sessionProperties,
    final KsqlExecutionContext executionContext,
    final ServiceContext serviceContext
) {
  // Fan-out helper used to collect query status from the other hosts.
  final RemoteHostExecutor remoteHostExecutor = RemoteHostExecutor.create(
      statement,
      sessionProperties,
      executionContext,
      serviceContext.getKsqlClient()
  );
  if (statement.getStatement().getShowExtended()) {
    return executeExtended(statement, sessionProperties, executionContext, remoteHostExecutor);
  }
  return executeSimple(statement, executionContext, remoteHostExecutor);
}
// A remote host whose request throws must be reported as UNRESPONSIVE in extended output.
@Test
public void shouldIncludeUnresponsiveIfShowQueriesExtendedFutureThrowsException() {
    // Given
    when(sessionProperties.getInternalRequest()).thenReturn(false);
    final ConfiguredStatement<?> showQueries = engine.configure("SHOW QUERIES EXTENDED;");
    final PersistentQueryMetadata metadata = givenPersistentQuery("id", RUNNING_QUERY_STATE);
    when(mockKsqlEngine.getAllLiveQueries()).thenReturn(ImmutableList.of(metadata));
    when(mockKsqlEngine.getPersistentQueries()).thenReturn(ImmutableList.of(metadata));
    when(ksqlClient.makeKsqlRequest(any(), any(), any())).thenThrow(new KsqlRestClientException("error"));
    when(serviceContext.getKsqlClient()).thenReturn(ksqlClient);

    // When
    final Map<KsqlHostInfoEntity, KsqlQueryStatus> map = new HashMap<>();
    map.put(LOCAL_KSQL_HOST_INFO_ENTITY, KsqlQueryStatus.RUNNING);
    map.put(REMOTE_KSQL_HOST_INFO_ENTITY, KsqlQueryStatus.UNRESPONSIVE);

    // When
    final QueryDescriptionList queries = (QueryDescriptionList) CustomExecutors.LIST_QUERIES.execute(
        showQueries,
        sessionProperties,
        mockKsqlEngine,
        serviceContext
    ).getEntity().orElseThrow(IllegalStateException::new);

    // Then
    assertThat(queries.getQueryDescriptions(),
        containsInAnyOrder(QueryDescriptionFactory.forQueryMetadata(metadata, map)));
}
/** @return true when both a username and a password are configured (non-blank). */
public boolean isAuthCredentials() {
    return StringUtils.isNotBlank(this.username) && StringUtils.isNotBlank(this.password);
}
// A connection built with username/password must report auth credentials and echo all settings.
@Test
void authenticated_cluster_connection() {
    // when
    ClusterConnection connection = ClusterConnection.builder()
            .contactPoints("127.0.0.1")
            .port(29042)
            .localDatacenter("dc1")
            .username("will")
            .password("will-password")
            .build();

    // then
    assertThat(connection.isAuthCredentials()).isTrue();
    assertThat(connection.getContactPoints()).isEqualTo("127.0.0.1");
    assertThat(connection.getPort()).isEqualTo(29042);
    assertThat(connection.getUsername()).isEqualTo("will");
    assertThat(connection.getPassword()).isEqualTo("will-password");
    assertThat(connection.getLocalDatacenter()).isEqualTo("dc1");
}
@Udf(description = "Returns a new string encoded using the outputEncoding ")
public String encode(
    @UdfParameter(
        description = "The source string. If null, then function returns null.") final String str,
    @UdfParameter(
        description = "The input encoding."
            + " If null, then function returns null.") final String inputEncoding,
    @UdfParameter(
        description = "The output encoding."
            + " If null, then function returns null.") final String outputEncoding) {
  // Null in any argument propagates to a null result.
  if (str == null || inputEncoding == null || outputEncoding == null) {
    return null;
  }

  // Encoders are keyed by the lower-cased "<input><output>" pair, e.g. "hexutf8".
  final String encoderKey = inputEncoding.toLowerCase() + outputEncoding.toLowerCase();
  final Encode.Encoder found = ENCODER_MAP.get(encoderKey);
  if (found != null) {
    return found.apply(str);
  }
  throw new KsqlFunctionException("Supported input and output encodings are: "
      + "hex, utf8, ascii and base64");
}
// utf8 -> ascii keeps 7-bit text intact; non-ASCII characters become replacement chars.
@Test
public void shouldEncodeUtf8ToAscii() {
    assertThat(udf.encode("Example!", "utf8", "ascii"), is("Example!"));
    assertThat(udf.encode("Plant trees", "utf8", "ascii"), is("Plant trees"));
    assertThat(udf.encode("1 + 1 = 1", "utf8", "ascii"), is("1 + 1 = 1"));
    assertThat(udf.encode("Ελλάδα", "utf8", "ascii"), is("������������"));
    assertThat(udf.encode("Übermensch", "utf8", "ascii"), is("��bermensch"));
}
/**
 * Reads the entity by unmarshalling the input stream with the Camel DataFormat
 * registered for the request media type. Any unmarshalling failure is surfaced
 * as a 400 Bad Request.
 */
@Override
public T readFrom(
    Class<T> cls, Type type, Annotation[] anns, MediaType mt,
    MultivaluedMap<String, String> headers, InputStream is)
    throws IOException, WebApplicationException {
    DataFormat format = getValidDataFormat(mt);
    try {
        @SuppressWarnings("unchecked")
        T result = (T) format.unmarshal(null, is);
        return result;
    } catch (Exception ex) {
        throw new BadRequestException(ex);
    }
}
// A registered text/plain data format must unmarshal the stream into a Book.
@Test
public void testReadFrom() throws Exception {
    DataFormatProvider<Book> p = new DataFormatProvider<>();
    p.setFormat("text/plain", new TestDataFormat());
    ByteArrayInputStream bis = new ByteArrayInputStream("dataformat".getBytes());
    Book b = p.readFrom(Book.class, Book.class, new Annotation[] {}, MediaType.TEXT_PLAIN_TYPE,
                        new MetadataMap<String, String>(), bis);
    assertEquals("dataformat", b.getName());
}
/**
 * Deserializes the raw config XML into an editable config, invokes the
 * optional callback with it, then validates/preprocesses it into the runtime
 * config. Returns both views in a {@link GoConfigHolder}.
 */
public GoConfigHolder loadConfigHolder(final String content, Callback callback) throws Exception {
    LOGGER.debug("[Config Save] Loading config holder");
    final CruiseConfig configForEdit = deserializeConfig(content);
    if (callback != null) {
        callback.call(configForEdit);
    }
    final CruiseConfig validatedConfig = preprocessAndValidate(configForEdit);
    return new GoConfigHolder(validatedConfig, configForEdit);
}
// The hg "branch" attribute must survive loading for both pipeline materials and config repos.
@Test
void shouldLoadHgConfigWithBranchAttributePostSchemaVersion123() throws Exception {
    String content = config(
            """
                    <config-repos>
                       <config-repo id="Test" pluginId="cd.go.json">
                         <hg url="https://domain.com" branch="feature" />
                       </config-repo >
                     </config-repos>
                    <pipelines group="first">
                    <pipeline name="Test" template="test_template">
                      <materials>
                          <hg url="https://domain.com" branch="feature" />
                      </materials>
                     </pipeline>
                    </pipelines>
                    <templates>
                      <pipeline name="test_template">
                        <stage name="Functional">
                          <jobs>
                            <job name="Functional">
                              <tasks>
                                <exec command="echo" args="Hello World!!!" />
                              </tasks>
                             </job>
                          </jobs>
                        </stage>
                      </pipeline>
                    </templates>""", CONFIG_SCHEMA_VERSION);

    CruiseConfig config = xmlLoader.loadConfigHolder(content).config;
    PipelineConfig pipelineConfig = config.getPipelineConfigByName(new CaseInsensitiveString("Test"));
    assertThat(pipelineConfig.materialConfigs()).hasSize(1);
    assertThat(((HgMaterialConfig) pipelineConfig.materialConfigs().get(0)).getBranch()).isEqualTo("feature");
    assertThat(config.getConfigRepos()).hasSize(1);
    assertThat(((HgMaterialConfig) config.getConfigRepos().get(0).getRepo()).getBranch()).isEqualTo("feature");
}
public Database getDb() throws MetaNotFoundException { // get db Database db = GlobalStateMgr.getCurrentState().getDb(dbId); if (db == null) { throw new MetaNotFoundException("Database " + dbId + " already has been deleted"); } return db; }
// A deleted database must surface as MetaNotFoundException, not a null return.
@Test
public void testGetDbNotExists(@Mocked GlobalStateMgr globalStateMgr) {
    LoadJob loadJob = new BrokerLoadJob();
    Deencapsulation.setField(loadJob, "dbId", 1L);
    new Expectations() {
        {
            globalStateMgr.getDb(1L);
            minTimes = 0;
            result = null;
        }
    };

    try {
        loadJob.getDb();
        Assert.fail();
    } catch (MetaNotFoundException e) {
    }
}
/**
 * Admin handler returning every subscription group config, serialized by the
 * SubscriptionGroupManager. Responds with SYSTEM_ERROR when no groups exist or
 * the configured charset is unsupported.
 */
private RemotingCommand getAllSubscriptionGroup(ChannelHandlerContext ctx,
    RemotingCommand request) throws RemotingCommandException {
    final RemotingCommand response = RemotingCommand.createResponseCommand(null);
    String content = this.brokerController.getSubscriptionGroupManager().encode();
    if (content != null && content.length() > 0) {
        try {
            response.setBody(content.getBytes(MixAll.DEFAULT_CHARSET));
        } catch (UnsupportedEncodingException e) {
            LOGGER.error("UnsupportedEncodingException getAllSubscriptionGroup", e);

            response.setCode(ResponseCode.SYSTEM_ERROR);
            response.setRemark("UnsupportedEncodingException " + e.getMessage());
            return response;
        }
    } else {
        // an empty config is treated as an error so callers notice misconfiguration
        LOGGER.error("No subscription group in this broker, client:{} ", ctx.channel().remoteAddress());
        response.setCode(ResponseCode.SYSTEM_ERROR);
        response.setRemark("No subscription group in this broker");
        return response;
    }

    response.setCode(ResponseCode.SUCCESS);
    response.setRemark(null);

    return response;
}
// The admin processor must answer GET_ALL_SUBSCRIPTIONGROUP_CONFIG with SUCCESS.
@Test
public void testGetAllSubscriptionGroup() throws RemotingCommandException {
    RemotingCommand request = RemotingCommand.createRequestCommand(RequestCode.GET_ALL_SUBSCRIPTIONGROUP_CONFIG, null);
    RemotingCommand response = adminBrokerProcessor.processRequest(handlerContext, request);
    assertThat(response.getCode()).isEqualTo(ResponseCode.SUCCESS);
}
/**
 * Periodical entry point: triggers certificate renewal when the renewal policy
 * exists and the current certificate needs replacing, dispatching on the
 * policy's AUTOMATIC/MANUAL mode. Skipped entirely while the server is still
 * in preflight mode.
 */
@Override
public void doRun() {
    if (isServerInPreflightMode.get()) {
        // we don't want to automatically trigger CSRs during preflight, don't run it
        // if the preflight is still not finished or skipped
        LOG.debug("Datanode still in preflight mode, skipping cert renewal task");
        return;
    }

    // always check if there are any certificates that we can accept
    getRenewalPolicy()
            .filter(this::needsNewCertificate)
            .ifPresent(renewalPolicy -> {
                switch (renewalPolicy.mode()) {
                    case AUTOMATIC -> automaticRenewal();
                    case MANUAL -> manualRenewal();
                }
            });
}
// A certificate valid for 30 more days with a 3-month renewal window...
// NOTE(review): name says "far future" yet threshold P3M > 30 days — the assertion
// checks no CSR is triggered; confirm intended semantics of the renewal window.
@Test
void testExpiringInFarFuture() throws Exception {
    final DatanodeKeystore datanodeKeystore = datanodeKeystore(Duration.ofDays(30));
    final CsrRequester csrRequester = Mockito.mock(CsrRequester.class);
    final DataNodeCertRenewalPeriodical periodical = new DataNodeCertRenewalPeriodical(datanodeKeystore, autoRenewalPolicy("P3M"), csrRequester, () -> false);
    periodical.doRun();
    Mockito.verify(csrRequester, Mockito.never()).triggerCertificateSigningRequest();
}
/**
 * Decodes a raw Beats message: parses the payload as JSON and delegates to
 * {@code parseEvent}. Returns null (dropping the message) when the payload is
 * not parseable JSON or parses to nothing.
 */
@Nullable
@Override
public Message decode(@Nonnull RawMessage rawMessage) {
    final byte[] payload = rawMessage.getPayload();
    final JsonNode event;
    try {
        event = objectMapper.readTree(payload);
        if (event == null || event.isMissingNode()) {
            throw new IOException("null result");
        }
    } catch (IOException e) {
        // Pass the exception to the logger so the failure's cause and stack
        // trace are recorded — the original call dropped it, logging only the
        // raw message.
        LOG.error("Couldn't decode raw message {}", rawMessage, e);
        return null;
    }

    return parseEvent(event);
}
/** A generic beat's Kubernetes metadata must be flattened into beat_kubernetes_* fields. */
@Test
public void decodeMessagesHandlesGenericBeatWithKubernetes() throws Exception {
    final Message message = codec.decode(messageFromJson("generic-with-kubernetes.json"));
    assertThat(message).isNotNull();
    assertThat(message.getMessage()).isEqualTo("-");
    assertThat(message.getSource()).isEqualTo("unknown");
    assertThat(message.getTimestamp()).isEqualTo(new DateTime(2016, 4, 1, 0, 0, DateTimeZone.UTC));
    assertThat(message.getField("beats_type")).isEqualTo("beat");
    assertThat(message.getField("beat_foo")).isEqualTo("bar");
    assertThat(message.getField("beat_kubernetes_pod_name")).isEqualTo("testpod");
    assertThat(message.getField("beat_kubernetes_namespace")).isEqualTo("testns");
    assertThat(message.getField("beat_kubernetes_labels_labelkey")).isEqualTo("labelvalue");
}
/**
 * Resolves AWS credentials in priority order: explicit access/secret key from
 * the config, then the configured EC2 IAM role, then ECS task metadata.
 *
 * @throws NoCredentialsException when no credential source is available
 */
AwsCredentials credentials() {
    if (!StringUtil.isNullOrEmptyAfterTrim(awsConfig.getAccessKey())) {
        return AwsCredentials.builder()
            .setAccessKey(awsConfig.getAccessKey())
            .setSecretKey(awsConfig.getSecretKey())
            .build();
    }
    if (!StringUtil.isNullOrEmptyAfterTrim(ec2IamRole)) {
        return fetchCredentialsFromEc2();
    }
    if (environment.isRunningOnEcs()) {
        return fetchCredentialsFromEcs();
    }
    throw new NoCredentialsException();
}
/** With no access key and no IAM role, credentials fall back to the ECS metadata API. */
@Test
public void credentialsEcs() {
    // given
    AwsConfig awsConfig = AwsConfig.builder().build();
    given(awsMetadataApi.credentialsEcs()).willReturn(CREDENTIALS);
    given(environment.isRunningOnEcs()).willReturn(true);
    AwsCredentialsProvider credentialsProvider = new AwsCredentialsProvider(awsConfig, awsMetadataApi, environment);

    // when
    AwsCredentials credentials = credentialsProvider.credentials();

    // then
    assertEquals(CREDENTIALS, credentials);
}
/**
 * Applies the S3 service-level options captured by this properties object
 * (dual-stack, path-style access, ARN-region, transfer acceleration) to the
 * given client builder.
 *
 * @param builder the S3 client builder to configure
 */
public <T extends S3ClientBuilder> void applyServiceConfigurations(T builder) {
    builder
        .dualstackEnabled(isDualStackEnabled)
        .serviceConfiguration(
            S3Configuration.builder()
                .pathStyleAccessEnabled(isPathStyleAccess)
                .useArnRegionEnabled(isUseArnRegionEnabled)
                .accelerateModeEnabled(isAccelerationEnabled)
                .build());
}
/** Each property flag must be propagated into the S3Configuration handed to the builder. */
@Test
public void testApplyS3ServiceConfigurations() {
    Map<String, String> properties = Maps.newHashMap();
    properties.put(S3FileIOProperties.DUALSTACK_ENABLED, "true");
    properties.put(S3FileIOProperties.PATH_STYLE_ACCESS, "true");
    properties.put(S3FileIOProperties.USE_ARN_REGION_ENABLED, "true");
    // acceleration enabled has to be set to false if path style is true
    properties.put(S3FileIOProperties.ACCELERATION_ENABLED, "false");
    S3FileIOProperties s3FileIOProperties = new S3FileIOProperties(properties);
    S3ClientBuilder mockA = Mockito.mock(S3ClientBuilder.class);
    ArgumentCaptor<S3Configuration> s3ConfigurationCaptor = ArgumentCaptor.forClass(S3Configuration.class);
    Mockito.doReturn(mockA).when(mockA).dualstackEnabled(Mockito.anyBoolean());
    Mockito.doReturn(mockA).when(mockA).serviceConfiguration(Mockito.any(S3Configuration.class));
    s3FileIOProperties.applyServiceConfigurations(mockA);
    Mockito.verify(mockA).serviceConfiguration(s3ConfigurationCaptor.capture());
    S3Configuration s3Configuration = s3ConfigurationCaptor.getValue();
    assertThat(s3Configuration.pathStyleAccessEnabled())
        .as("s3 path style access enabled parameter should be set to true")
        .isTrue();
    assertThat(s3Configuration.useArnRegionEnabled())
        .as("s3 use arn region enabled parameter should be set to true")
        .isTrue();
    assertThat(s3Configuration.accelerateModeEnabled())
        .as("s3 acceleration mode enabled parameter should be set to true")
        .isFalse();
}
/**
 * Sends this request synchronously through the configured web3j service.
 *
 * @return the response, deserialized into this request's response type
 * @throws IOException on transport failure
 */
public T send() throws IOException {
    return web3jService.send(this, responseType);
}
/** eth_getUncleCountByBlockNumber must serialize to the expected JSON-RPC payload. */
@Test
public void testEthGetUncleCountByBlockNumber() throws Exception {
    web3j.ethGetUncleCountByBlockNumber(DefaultBlockParameter.valueOf(Numeric.toBigInt("0xe8")))
            .send();
    verifyResult(
            "{\"jsonrpc\":\"2.0\",\"method\":\"eth_getUncleCountByBlockNumber\","
                    + "\"params\":[\"0xe8\"],\"id\":1}");
}
/**
 * Decodes a PostgreSQL text-format array parameter (e.g. {"true","false"}) into a boolean[].
 * Binary parameter mode is rejected as unsupported.
 * Relies on decodeText stripping the surrounding quotes from each element —
 * Boolean.parseBoolean would return false for a still-quoted value (see the
 * sibling test asserting true for {"true","false"}).
 *
 * @param parameterBytes raw parameter bytes, interpreted as UTF-8 text
 * @param isBinary must be false; binary mode raises UnsupportedSQLOperationException
 * @return decoded boolean array
 */
public boolean[] decodeBoolArray(final byte[] parameterBytes, final boolean isBinary) {
    ShardingSpherePreconditions.checkState(!isBinary, () -> new UnsupportedSQLOperationException("binary mode"));
    String parameterValue = new String(parameterBytes, StandardCharsets.UTF_8);
    Collection<String> parameterElements = decodeText(parameterValue);
    boolean[] result = new boolean[parameterElements.size()];
    int index = 0;
    for (String each : parameterElements) {
        result[index++] = Boolean.parseBoolean(each);
    }
    return result;
}
/** Text-mode decoding of {"true","false"} must yield [true, false]. */
@Test
void assertParseBoolArrayNormalTextMode() {
    boolean[] actual = DECODER.decodeBoolArray("{\"true\",\"false\"}".getBytes(), false);
    assertThat(actual.length, is(2));
    assertTrue(actual[0]);
    assertFalse(actual[1]);
}
/**
 * Opens a resumable upload stream for the given file. A resumable upload is
 * initiated first (retried without the lock id if the server rejects it),
 * then the entity is PUT to the returned location with a Content-Range header.
 * On any error the upload is cancelled server-side before rethrowing.
 *
 * @param file     remote file to write
 * @param status   transfer status (length, lock id)
 * @param callback connection prompt callback (unused here)
 * @return output stream whose completion yields the resulting file metadata
 */
@Override
public HttpResponseOutputStream<File> write(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
    final DelayedHttpEntityCallable<File> command = new DelayedHttpEntityCallable<File>(file) {
        @Override
        public File call(final HttpEntity entity) throws BackgroundException {
            // Initiate a resumable upload
            String location;
            try {
                location = start(file, status);
            }
            catch(InteroperabilityException e) {
                // Retry without the lock id; only rethrow when no lock was requested at all.
                if(null == status.getLockId()) {
                    throw e;
                }
                location = start(file, status.withLockId(null));
            }
            final StoregateApiClient client = session.getClient();
            try {
                // Upload the file
                final HttpPut put = new HttpPut(location);
                put.setEntity(entity);
                final String header;
                if(status.getLength() == 0) {
                    // Touch
                    header = "*/0";
                }
                else {
                    final HttpRange range = HttpRange.byLength(0, status.getLength());
                    header = String.format("%d-%d/%d", range.getStart(), range.getEnd(), status.getLength());
                }
                put.addHeader(HttpHeaders.CONTENT_RANGE, String.format("bytes %s", header));
                final HttpResponse putResponse = client.getClient().execute(put);
                try {
                    switch(putResponse.getStatusLine().getStatusCode()) {
                        case HttpStatus.SC_OK:
                        case HttpStatus.SC_CREATED:
                            // Parse the returned file metadata and cache its id for later lookups.
                            final File result = new JSON().getContext(FileMetadata.class).readValue(
                                new InputStreamReader(putResponse.getEntity().getContent(), StandardCharsets.UTF_8), File.class);
                            fileid.cache(file, result.getId());
                            return result;
                        default:
                            throw new StoregateExceptionMappingService(fileid).map("Upload {0} failed",
                                new ApiException(putResponse.getStatusLine().getStatusCode(), putResponse.getStatusLine().getReasonPhrase(),
                                    Collections.emptyMap(), EntityUtils.toString(putResponse.getEntity())), file);
                    }
                }
                catch(BackgroundException e) {
                    // Cancel upload on error reply
                    cancel(file, location);
                    throw e;
                }
                finally {
                    EntityUtils.consume(putResponse.getEntity());
                }
            }
            catch(IOException e) {
                // Cancel upload on I/O failure
                cancel(file, location);
                throw new HttpExceptionMappingService().map("Upload {0} failed", e, file);
            }
        }

        @Override
        public long getContentLength() {
            return status.getLength();
        }
    };
    return this.write(file, status, command);
}
/**
 * Writing a locked file must fail without the lock id and succeed once the
 * transfer status carries it.
 */
@Test
public void testWriteWithLock() throws Exception {
    final StoregateIdProvider nodeid = new StoregateIdProvider(session);
    final Path room = new StoregateDirectoryFeature(session, nodeid).mkdir(
        new Path(String.format("/My files/%s", new AlphanumericRandomStringService().random()),
            EnumSet.of(Path.Type.directory, Path.Type.volume)), new TransferStatus());
    final byte[] content = RandomUtils.nextBytes(32769);
    final Path test = new StoregateTouchFeature(session, nodeid).touch(
        new Path(room, String.format("%s", new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.file)), new TransferStatus());
    final String lockId = new StoregateLockFeature(session, nodeid).lock(test);
    final TransferStatus status = new TransferStatus();
    status.setLength(content.length);
    final StoregateWriteFeature writer = new StoregateWriteFeature(session, nodeid);
    try {
        // Without the lock id the server must reject the upload.
        final HttpResponseOutputStream<File> out = writer.write(test, status, new DisabledConnectionCallback());
        out.close();
        fail();
    }
    catch(IOException e) {
        assertTrue(e.getCause() instanceof LockedException);
    }
    status.setLockId(lockId);
    final HttpResponseOutputStream<File> out = writer.write(test, status, new DisabledConnectionCallback());
    assertNotNull(out);
    new StreamCopier(status, status).transfer(new ByteArrayInputStream(content), out);
    out.close();
    new StoregateLockFeature(session, nodeid).unlock(test, lockId);
    new StoregateDeleteFeature(session, nodeid).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Stores a new router after validating it is non-null and carries a non-empty id,
 * then logs the creation.
 *
 * @param osRouter router to create; must not be null and must have an id
 */
@Override
public void createRouter(OsRouter osRouter) {
    checkNotNull(osRouter, ERR_NULL_ROUTER);
    checkArgument(!Strings.isNullOrEmpty(osRouter.getId()), ERR_NULL_ROUTER_ID);

    osRouterStore.createRouter(osRouter);
    log.info(String.format(MSG_ROUTER, deriveResourceName(osRouter), MSG_CREATED));
}
/** Creating the same router twice must be rejected with IllegalArgumentException. */
@Test(expected = IllegalArgumentException.class)
public void testCreateDuplicateRouter() {
    target.createRouter(ROUTER);
    target.createRouter(ROUTER);
}
/**
 * Registers the "response_example" action on the given controller, with the
 * required "controller" and "action" parameters used to locate a web service.
 *
 * @param controller web service controller to extend
 */
@Override
public void define(WebService.NewController controller) {
    WebService.NewAction action = controller
        .createAction("response_example")
        .setDescription("Display web service response example")
        .setResponseExample(getClass().getResource("response_example-example.json"))
        .setSince("4.4")
        .setHandler(this);

    action.createParam("controller")
        .setRequired(true)
        .setDescription("Controller of the web service")
        .setExampleValue("api/issues");

    action.createParam("action")
        .setRequired(true)
        .setDescription("Action of the web service")
        .setExampleValue("search");
}
/** The response_example action must render the registered example JSON for a known action. */
@Test
public void response_example() {
    MetricWs metricWs = new MetricWs();
    metricWs.define(context);

    newRequest()
        .setParam("controller", "api/metric")
        .setParam("action", "create")
        .execute()
        .assertJson(getClass(), "response_example.json");
}
/**
 * Constant-folds hours_sub(datetime, hours): subtracts the given number of hours.
 * Presumably createDatetimeOrNull yields a null constant when the result is not
 * representable — TODO confirm against ConstantOperator.
 */
@ConstantFunction(name = "hours_sub", argTypes = {DATETIME, INT}, returnType = DATETIME, isMonotonic = true)
public static ConstantOperator hoursSub(ConstantOperator date, ConstantOperator hour) {
    return ConstantOperator.createDatetimeOrNull(date.getDatetime().minusHours(hour.getInt()));
}
/** 2015-03-23T09:23:55 minus 10 hours is 2015-03-22T23:23:55. */
@Test
public void hoursSub() {
    assertEquals("2015-03-22T23:23:55",
            ScalarOperatorFunctions.hoursSub(O_DT_20150323_092355, O_INT_10).getDatetime().toString());
}
/**
 * Reads the next record, skipping entries the superclass rejects (returns null
 * for, e.g. filtered lines) until a record is produced or the end of the split
 * is reached.
 *
 * @param record reusable record instance
 * @return the next record, or null at end of input
 */
@Override
public OUT nextRecord(OUT record) throws IOException {
    OUT returnRecord = null;
    do {
        returnRecord = super.nextRecord(record);
    } while (returnRecord == null && !reachedEnd());

    return returnRecord;
}
/** A boolean field mask must project only the selected CSV columns into the tuple. */
@Test
void testReadSparseWithMask() {
    try {
        final String fileContent =
                "111&&222&&333&&444&&555&&666&&777&&888&&999&&000&&\n"
                        + "000&&999&&888&&777&&666&&555&&444&&333&&222&&111&&";
        final FileInputSplit split = createTempFile(fileContent);

        final TupleTypeInfo<Tuple3<Integer, Integer, Integer>> typeInfo =
                TupleTypeInfo.getBasicTupleTypeInfo(
                        Integer.class, Integer.class, Integer.class);
        final CsvInputFormat<Tuple3<Integer, Integer, Integer>> format =
                new TupleCsvInputFormat<>(
                        PATH,
                        typeInfo,
                        new boolean[] {true, false, false, true, false, false, false, true});
        format.setFieldDelimiter("&&");

        format.configure(new Configuration());
        format.open(split);

        Tuple3<Integer, Integer, Integer> result = new Tuple3<>();

        result = format.nextRecord(result);
        assertThat(result.f0).isEqualTo(Integer.valueOf(111));
        assertThat(result.f1).isEqualTo(Integer.valueOf(444));
        assertThat(result.f2).isEqualTo(Integer.valueOf(888));

        result = format.nextRecord(result);
        assertThat(result.f0).isEqualTo(Integer.valueOf(000));
        assertThat(result.f1).isEqualTo(Integer.valueOf(777));
        assertThat(result.f2).isEqualTo(Integer.valueOf(333));

        result = format.nextRecord(result);
        assertThat(result).isNull();
        assertThat(format.reachedEnd()).isTrue();
    } catch (Exception ex) {
        fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage());
    }
}
/**
 * Looks up a VPLS by name in the store.
 *
 * @param vplsName VPLS name, must not be null
 * @return the stored VPLS data, or null when unknown
 */
@Override
public VplsData getVpls(String vplsName) {
    requireNonNull(vplsName);
    return vplsStore.getVpls(vplsName);
}
/** A stored VPLS must be returned by name; an unknown name yields null. */
@Test
public void testGetVpls() {
    VplsData vplsData = VplsData.of(VPLS1);
    vplsData.state(ADDED);
    vplsStore.addVpls(vplsData);
    VplsData result = vplsManager.getVpls(VPLS1);
    assertEquals(vplsData, result);

    result = vplsManager.getVpls(VPLS2);
    assertNull(result);
}
/**
 * Triggers an on-demand firmware upgrade on the vOLT via a wrapped NETCONF RPC.
 * Expects {@code target} formatted as "image:pon-onu[,pon-onu...][:auto]"
 * (two or three colon-separated parts; each ONU as "ponlinkId-onuId" with
 * positive integers). Returns the device reply, or null on any validation
 * failure, when not the local master, or on NETCONF errors.
 *
 * @param target colon-separated upgrade specification
 * @return the NETCONF reply, or null on failure
 */
@Override
public String upgradeFirmwareOndemand(String target) {
    DriverHandler handler = handler();
    NetconfController controller = handler.get(NetconfController.class);
    MastershipService mastershipService = handler.get(MastershipService.class);
    DeviceId ncDeviceId = handler.data().deviceId();
    checkNotNull(controller, "Netconf controller is null");
    String reply = null;
    int count;

    // Only the master for this device may issue the upgrade.
    if (!mastershipService.isLocalMaster(ncDeviceId)) {
        log.warn("Not master for {} Use {} to execute command",
                 ncDeviceId,
                 mastershipService.getMasterFor(ncDeviceId));
        return null;
    }

    String[] data = target.split(COLON);
    if ((data.length < TWO) || (data.length > THREE)) {
        log.error("Invalid number of arguments");
        return null;
    }

    String[] onuList = data[SECOND_PART].split(COMMA);
    if (onuList.length == ZERO) {
        log.error("No ONU listed");
        return null;
    }

    // The optional third part is the reboot mode and must be "auto".
    if ((data.length > TWO) && (!AUTO.equals(data[THIRD_PART]))) {
        log.error("Invalid reboot-mode {}", data[THIRD_PART]);
        return null;
    }

    try {
        StringBuilder request = new StringBuilder();
        request.append(ANGLE_LEFT + ONDEMAND_FIRMWARE_UPGRADE + SPACE);
        request.append(VOLT_NE_NAMESPACE + ANGLE_RIGHT + NEW_LINE);
        request.append(buildStartTag(PARTICIPANT_LIST));

        // Each participant is a "ponlinkId-onuId" pair of positive integers.
        for (count = ZERO; count < onuList.length; count++) {
            String[] onuId = onuList[count].split(HYPHEN);
            if (onuId.length != TWO) {
                log.error("Invalid ONU identifier");
                return null;
            }

            try {
                int pon;
                pon = Integer.parseInt(onuId[FIRST_PART]);
                if (pon <= ZERO) {
                    log.error("Invalid integer for ponlink-id:{}", onuId[FIRST_PART]);
                    return null;
                }
                int onu;
                onu = Integer.parseInt(onuId[SECOND_PART]);
                if (onu <= ZERO) {
                    log.error("Invalid integer for onu-id:{}", onuId[SECOND_PART]);
                    return null;
                }
            } catch (NumberFormatException e) {
                log.error("Non-number input");
                return null;
            }

            request.append(buildStartTag(MEMBER))
                .append(buildStartTag(PONLINK_ID))
                .append(onuId[FIRST_PART])
                .append(buildEndTag(PONLINK_ID))
                .append(buildStartTag(ONU_ID))
                .append(onuId[SECOND_PART])
                .append(buildEndTag(ONU_ID))
                .append(buildEndTag(MEMBER));
        }
        request.append(buildEndTag(PARTICIPANT_LIST))
            .append(buildStartTag(IMAGE_NAME))
            .append(data[FIRST_PART])
            .append(buildEndTag(IMAGE_NAME));
        if (data.length == THREE) {
            request.append(buildStartTag(REBOOT_MODE))
                .append(data[THIRD_PART])
                .append(buildEndTag(REBOOT_MODE));
        }
        request.append(buildEndTag(ONDEMAND_FIRMWARE_UPGRADE));

        reply = controller
            .getDevicesMap()
            .get(ncDeviceId)
            .getSession()
            .doWrappedRpc(request.toString());
    } catch (NetconfException e) {
        log.error("Cannot communicate to device {} exception {}", ncDeviceId, e);
    }
    return reply;
}
/** Every well-formed target string must produce a non-null device reply. */
@Test
public void testValidOndemandFirmwareUpgrade() throws Exception {
    String reply;
    String target;

    for (int i = ZERO; i < VALID_ONDEMAND_FWDL_TCS.length; i++) {
        target = VALID_ONDEMAND_FWDL_TCS[i];
        currentKey = i;
        reply = voltConfig.upgradeFirmwareOndemand(target);
        assertNotNull("Incorrect response for VALID_ONDEMAND_FWDL_TCS", reply);
    }
}
/**
 * Builds a {@link JibContainerBuilder} for a WAR artifact: resolves the base
 * image (defaulting to "jetty"), applies program arguments, entrypoint, layers,
 * and the common container configuration options.
 *
 * @param processor produces the file entry layers for the WAR
 * @param commonCliOptions shared CLI options (registries, credentials, ...)
 * @param commonContainerConfigCliOptions container configuration options
 * @param logger console logger for progress output
 * @return configured container builder
 * @throws IOException if creating layers fails
 * @throws InvalidImageReferenceException if the base image reference is invalid
 */
public static JibContainerBuilder toJibContainerBuilder(
    ArtifactProcessor processor,
    CommonCliOptions commonCliOptions,
    CommonContainerConfigCliOptions commonContainerConfigCliOptions,
    ConsoleLogger logger)
    throws IOException, InvalidImageReferenceException {
  String baseImage = commonContainerConfigCliOptions.getFrom().orElse("jetty");
  JibContainerBuilder containerBuilder =
      ContainerBuilders.create(baseImage, Collections.emptySet(), commonCliOptions, logger);
  List<String> programArguments = commonContainerConfigCliOptions.getProgramArguments();
  // Reuse the fetched list instead of calling the getter a second time.
  if (!programArguments.isEmpty()) {
    containerBuilder.setProgramArguments(programArguments);
  }
  containerBuilder
      .setEntrypoint(computeEntrypoint(commonContainerConfigCliOptions))
      .setFileEntriesLayers(processor.createLayers())
      .setExposedPorts(commonContainerConfigCliOptions.getExposedPorts())
      .setVolumes(commonContainerConfigCliOptions.getVolumes())
      .setEnvironment(commonContainerConfigCliOptions.getEnvironment())
      .setLabels(commonContainerConfigCliOptions.getLabels());
  commonContainerConfigCliOptions.getUser().ifPresent(containerBuilder::setUser);
  commonContainerConfigCliOptions.getFormat().ifPresent(containerBuilder::setFormat);
  commonContainerConfigCliOptions.getCreationTime().ifPresent(containerBuilder::setCreationTime);
  return containerBuilder;
}
/** With a non-Jetty base image and no explicit entrypoint, the build plan entrypoint stays null. */
@Test
public void testToJibContainerBuilder_nonJettyBaseImageSpecifiedAndNoEntrypoint()
    throws IOException, InvalidImageReferenceException {
  JibContainerBuilder containerBuilder =
      WarFiles.toJibContainerBuilder(
          mockStandardWarExplodedProcessor,
          mockCommonCliOptions,
          mockCommonContainerConfigCliOptions,
          mockLogger);
  ContainerBuildPlan buildPlan = containerBuilder.toContainerBuildPlan();

  assertThat(mockCommonContainerConfigCliOptions.isJettyBaseimage()).isFalse();
  assertThat(buildPlan.getEntrypoint()).isNull();
}
/**
 * Rolls a single-pod Deployment: deletes its (first) pod and then waits for the
 * Deployment to become ready again. When no pod is found, nothing is deleted
 * and the method proceeds straight to the readiness wait.
 *
 * @param reconciliation reconciliation context for logging
 * @param namespace namespace of the Deployment
 * @param name name of the Deployment (also the Strimzi name label value)
 * @param operationTimeoutMs readiness timeout in milliseconds
 * @return future completing when the Deployment is ready again
 */
public Future<Void> singlePodDeploymentRollingUpdate(Reconciliation reconciliation, String namespace, String name, long operationTimeoutMs) {
    return podOperations.listAsync(namespace, Labels.EMPTY.withStrimziName(name))
            .compose(pods -> {
                if (pods != null && !pods.isEmpty()) {
                    return podOperations.deleteAsync(reconciliation, namespace, pods.get(0).getMetadata().getName(), true);
                } else {
                    LOGGER.warnCr(reconciliation, "No Pods were found for Deployment {} in namespace {}", name, namespace);
                    // We return success as there is nothing to roll
                    return Future.succeededFuture();
                }
            })
            .compose(ignored -> readiness(reconciliation, namespace, name, 1_000, operationTimeoutMs));
}
/** With no matching pod, the rolling update must succeed without any pod deletion. */
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
public void testSinglePodDeploymentRollingUpdateWithMissingPod(VertxTestContext context) {
    String depName = "my-dep";
    String podName = depName + "-123456";

    // Mock Pod handling
    KubernetesResourceList mockPodResourceList = mock(KubernetesResourceList.class);
    when(mockPodResourceList.getItems()).thenReturn(List.of());

    Resource mockPodResource = mock(Resource.class);

    NonNamespaceOperation mockPodNonNamespaceOp = mock(NonNamespaceOperation.class);
    when(mockPodNonNamespaceOp.list(any())).thenReturn(mockPodResourceList);
    when(mockPodNonNamespaceOp.withLabels(any())).thenReturn(mockPodNonNamespaceOp);
    when(mockPodNonNamespaceOp.withName(eq(podName))).thenReturn(mockPodResource);

    MixedOperation mockPods = mock(MixedOperation.class);
    when(mockPods.inNamespace(eq(NAMESPACE))).thenReturn(mockPodNonNamespaceOp);

    // Mock Deployment handling
    Resource mockDeploymentResource = mock(resourceType());
    when(mockDeploymentResource.get()).thenReturn(new Deployment());
    when(mockDeploymentResource.isReady()).thenReturn(true);

    NonNamespaceOperation mockDeploymentNonNamespaceOp = mock(NonNamespaceOperation.class);
    when(mockDeploymentNonNamespaceOp.withName(eq(depName))).thenReturn(mockDeploymentResource);

    MixedOperation mockDeployments = mock(MixedOperation.class);
    when(mockDeployments.inNamespace(eq(NAMESPACE))).thenReturn(mockDeploymentNonNamespaceOp);

    AppsAPIGroupDSL mockApps = mock(AppsAPIGroupDSL.class);
    when(mockApps.deployments()).thenReturn(mockDeployments);

    // Mock Kube Client
    KubernetesClient mockClient = mock(KubernetesClient.class);
    when(mockClient.pods()).thenReturn(mockPods);
    when(mockClient.apps()).thenReturn(mockApps);

    DeploymentOperator op = new DeploymentOperator(vertx, mockClient);

    Checkpoint async = context.checkpoint();
    op.singlePodDeploymentRollingUpdate(Reconciliation.DUMMY_RECONCILIATION, NAMESPACE, depName, 5_000)
            .onComplete(context.succeeding(v -> {
                verify(mockPodResource, never()).delete();
                async.flag();
            }));
}
/**
 * Returns the configured password. When no password has been set and this is an
 * anonymous login, the placeholder password from the
 * "connection.login.anon.pass" preference is returned instead.
 */
public String getPassword() {
    final boolean missing = StringUtils.isEmpty(password);
    if (missing && this.isAnonymousLogin()) {
        return PreferencesFactory.get().getProperty("connection.login.anon.pass");
    }
    return password;
}
/** Anonymous credentials with an empty password must fall back to the preference value. */
@Test
public void testAnonymous() {
    Credentials c = new Credentials("anonymous", "");
    assertEquals("cyberduck@example.net", c.getPassword());
}
/**
 * KSQL UDF: parses a formatted timestamp string into epoch milliseconds using a
 * cached per-pattern parser. Parse failures are rethrown as KsqlFunctionException
 * with the offending value and pattern in the message.
 */
@Udf(description = "Converts a string representation of a date in the given format"
    + " into the number of milliseconds since 1970-01-01 00:00:00 UTC/GMT."
    + " Single quotes in the timestamp format can be escaped with '',"
    + " for example: 'yyyy-MM-dd''T''HH:mm:ssX'."
    + " The system default time zone is used when no time zone is explicitly provided.")
public long stringToTimestamp(
    @UdfParameter(
        description = "The string representation of a date.") final String formattedTimestamp,
    @UdfParameter(
        description = "The format pattern should be in the format expected by"
            + " java.time.format.DateTimeFormatter.") final String formatPattern) {
  // NB: We do not perform a null here preferring to throw an exception as
  // there is no sentinel value for a "null" Date.
  try {
    final StringToTimestampParser timestampParser = parsers.get(formatPattern);
    return timestampParser.parse(formattedTimestamp);
  } catch (final ExecutionException | RuntimeException e) {
    throw new KsqlFunctionException("Failed to parse timestamp '" + formattedTimestamp
        + "' with formatter '" + formatPattern
        + "': " + e.getMessage(), e);
  }
}
/** A null format pattern must fail with a KsqlFunctionException naming the pattern. */
@Test
public void shouldThrowOnNullDateFormat() {
    final Exception e = assertThrows(
        KsqlFunctionException.class,
        () -> udf.stringToTimestamp("2021-12-01", null)
    );

    // Then:
    assertThat(e.getMessage(), Matchers.containsString("Failed to parse timestamp '2021-12-01' with formatter 'null'"));
}
/**
 * Returns a cache that retains nothing, implemented as a cache with a
 * zero-byte capacity.
 */
public static <K, V> Cache<K, V> noop() {
    return forMaximumBytes(0L);
}
/** The no-op cache must drop puts, never cache computeIfAbsent results, yet still compute them. */
@Test
public void testNoopCache() throws Exception {
    Cache<String, String> cache = Caches.noop();
    cache.put("key", "value");
    assertNull(cache.peek("key"));
    assertEquals("value", cache.computeIfAbsent("key", (unused) -> "value"));
    assertNull(cache.peek("key"));
}
/**
 * Returns true when at least one device identifier (androidId or oaid) is present.
 * The catch is purely defensive; on any unexpected failure the identifiers are
 * treated as unavailable and false is returned.
 */
public static boolean isGetDeviceInfo(String androidId, String oaid) {
    try {
        return !TextUtils.isEmpty(androidId) || !TextUtils.isEmpty(oaid);
    } catch (Exception e) {
        SALog.printStackTrace(e);
    }
    return false;
}
/** TODO(review): empty test — add assertions covering present/absent androidId and oaid combinations. */
@Test
public void isGetDeviceInfo() {
}
/**
 * Calls the delayed remote service and returns its reply; when the service
 * (or its circuit breaker) raises a RemoteServiceException, the exception
 * message is returned instead of propagating.
 */
public String delayedServiceResponse() {
    try {
        return this.delayedService.attemptRequest();
    } catch (RemoteServiceException e) {
        return e.getMessage();
    }
}
/** A service whose startup delay already elapsed must answer through the circuit breaker. */
@Test
void testDelayedRemoteResponseSuccess() {
    var delayedService = new DelayedRemoteService(System.nanoTime() - 2 * 1000 * 1000 * 1000, 2);
    var delayedServiceCircuitBreaker = new DefaultCircuitBreaker(delayedService, 3000, 1,
        2 * 1000 * 1000 * 1000);

    var monitoringService = new MonitoringService(delayedServiceCircuitBreaker, null);

    //Set time in past to make the server work
    var response = monitoringService.delayedServiceResponse();
    assertEquals(response, "Delayed service is working");
}
/**
 * Removes this broker's load data from the store, rate-limited so repeated
 * calls within tombstoneDelayInMillis are deduplicated. On removal failure the
 * previous timestamp is restored so the next call can retry immediately.
 */
@VisibleForTesting
protected void tombstone() {
    var now = System.currentTimeMillis();
    if (now - lastTombstonedAt < tombstoneDelayInMillis) {
        return;
    }
    var lastSuccessfulTombstonedAt = lastTombstonedAt;
    lastTombstonedAt = now; // dedup first
    brokerLoadDataStore.removeAsync(brokerId)
            .whenComplete((__, e) -> {
                        if (e != null) {
                            log.error("Failed to clean broker load data.", e);
                            // Roll back the timestamp so a retry is not suppressed by the dedup window.
                            lastTombstonedAt = lastSuccessfulTombstonedAt;
                        } else {
                            boolean debug = ExtensibleLoadManagerImpl.debug(conf, log);
                            if (debug) {
                                log.info("Cleaned broker load data.");
                            }
                        }
                    }
            );
}
/**
 * Tombstoning must fire only for terminal-transfer events (Releasing/Splitting/Owned
 * without error), must be deduplicated within the delay window, and must reset the
 * local broker load data each time the store removal actually runs.
 */
@Test
public void testTombstone() throws IllegalAccessException, InterruptedException {
    var target = spy(new BrokerLoadDataReporter(pulsar, broker, store));

    // Events that must NOT trigger a tombstone.
    target.handleEvent(bundle,
            new ServiceUnitStateData(ServiceUnitState.Assigning, broker, VERSION_ID_INIT), null);
    verify(store, times(0)).removeAsync(eq(broker));
    verify(target, times(0)).tombstone();

    target.handleEvent(bundle,
            new ServiceUnitStateData(ServiceUnitState.Deleted, broker, VERSION_ID_INIT), null);
    verify(store, times(0)).removeAsync(eq(broker));
    verify(target, times(0)).tombstone();

    target.handleEvent(bundle,
            new ServiceUnitStateData(ServiceUnitState.Init, broker, VERSION_ID_INIT), null);
    verify(store, times(0)).removeAsync(eq(broker));
    verify(target, times(0)).tombstone();

    target.handleEvent(bundle,
            new ServiceUnitStateData(ServiceUnitState.Free, broker, VERSION_ID_INIT), null);
    verify(store, times(0)).removeAsync(eq(broker));
    verify(target, times(0)).tombstone();

    // Releasing with an error must not tombstone either.
    target.handleEvent(bundle,
            new ServiceUnitStateData(ServiceUnitState.Releasing, "broker-2", broker, VERSION_ID_INIT),
            new RuntimeException());
    verify(store, times(0)).removeAsync(eq(broker));
    verify(target, times(0)).tombstone();

    // First successful Releasing: tombstone runs and clears local data.
    target.handleEvent(bundle,
            new ServiceUnitStateData(ServiceUnitState.Releasing, "broker-2", broker, VERSION_ID_INIT), null);
    Awaitility.waitAtMost(3, TimeUnit.SECONDS).untilAsserted(() -> {
        verify(target, times(1)).tombstone();
        verify(store, times(1)).removeAsync(eq(broker));
        var localData = (BrokerLoadData) FieldUtils.readDeclaredField(target, "localData", true);
        assertEquals(localData, new BrokerLoadData());
    });

    // Second Releasing inside the dedup window: tombstone is invoked but removal is skipped.
    target.handleEvent(bundle,
            new ServiceUnitStateData(ServiceUnitState.Releasing, "broker-2", broker, VERSION_ID_INIT), null);
    Awaitility.waitAtMost(3, TimeUnit.SECONDS).untilAsserted(() -> {
        verify(target, times(2)).tombstone();
        verify(store, times(1)).removeAsync(eq(broker));
        var localData = (BrokerLoadData) FieldUtils.readDeclaredField(target, "localData", true);
        assertEquals(localData, new BrokerLoadData());
    });

    // With the dedup delay disabled, Splitting and Owned each trigger a removal.
    FieldUtils.writeDeclaredField(target, "tombstoneDelayInMillis", 0, true);
    target.handleEvent(bundle,
            new ServiceUnitStateData(ServiceUnitState.Splitting, "broker-2", broker, VERSION_ID_INIT), null);
    Awaitility.waitAtMost(3, TimeUnit.SECONDS).untilAsserted(() -> {
        verify(target, times(3)).tombstone();
        verify(store, times(2)).removeAsync(eq(broker));
        var localData = (BrokerLoadData) FieldUtils.readDeclaredField(target, "localData", true);
        assertEquals(localData, new BrokerLoadData());
    });

    target.handleEvent(bundle,
            new ServiceUnitStateData(ServiceUnitState.Owned, broker, VERSION_ID_INIT), null);
    Awaitility.waitAtMost(3, TimeUnit.SECONDS).untilAsserted(() -> {
        verify(target, times(4)).tombstone();
        verify(store, times(3)).removeAsync(eq(broker));
        var localData = (BrokerLoadData) FieldUtils.readDeclaredField(target, "localData", true);
        assertEquals(localData, new BrokerLoadData());
    });
}
/**
 * Existence check: the root always exists; otherwise probe the file's
 * attributes and map a NotfoundException to false.
 *
 * @param file path to probe
 * @param listener progress listener forwarded to the attributes lookup
 * @return true when the path exists
 */
@Override
public boolean find(final Path file, final ListProgressListener listener) throws BackgroundException {
    if(file.isRoot()) {
        return true;
    }
    try {
        new SwiftAttributesFinderFeature(session).find(file, listener);
        return true;
    }
    catch(NotfoundException e) {
        return false;
    }
}
/** An existing Swift container must be found. */
@Test
public void testFindContainer() throws Exception {
    final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
    container.attributes().setRegion("IAD");
    assertTrue(new SwiftFindFeature(session).find(container));
}
/** Orders this stamp relative to another by delegating to the shared comparator. */
@Override
public int compareTo(DateTimeStamp dateTimeStamp) {
    return comparator.compare(this,dateTimeStamp);
}
/**
 * An earlier stamp must compare less than a later one.
 * NOTE(review): compareTo only guarantees the sign, not the exact value -1;
 * asserting -1 couples this test to the comparator implementation.
 */
@Test
void testCompareLessThan() {
    DateTimeStamp smaller = new DateTimeStamp("2018-04-04T09:10:00.586-0100");
    DateTimeStamp greater = new DateTimeStamp("2018-04-04T10:10:00.587-0100");
    assertEquals(-1, smaller.compareTo(greater));
}
/**
 * Marks a file as unchanged when the feature is active. Files whose status is
 * not SAME are logged as an error and left untouched.
 *
 * @param file input file to flag
 */
public void markAsUnchanged(DefaultInputFile file) {
    if (isFeatureActive()) {
        if (file.status() != InputFile.Status.SAME) {
            LOG.error("File '{}' was marked as unchanged but its status is {}", file.getProjectRelativePath(), file.status());
        } else {
            LOG.debug("File '{}' marked as unchanged", file.getProjectRelativePath());
            file.setMarkedAsUnchanged(true);
        }
    }
}
/** Without the enabling property, marking a file must be a no-op and log nothing. */
@Test
public void not_active_if_property_not_defined() {
    UnchangedFilesHandler handler = new UnchangedFilesHandler(new MapSettings().asConfig(), defaultBranchConfig, executingSensorContext);
    handler.markAsUnchanged(file);
    verifyNoInteractions(file);
    assertThat(logTester.logs()).isEmpty();
}
/**
 * Runs every applicable sharding auditor against the query. Auditors whose
 * strategy allows hint-based disabling are skipped when the query's hint lists
 * them (matched case-insensitively via lower-casing).
 */
@Override
public void audit(final QueryContext queryContext, final RuleMetaData globalRuleMetaData, final ShardingSphereDatabase database, final ShardingRule rule) {
    Collection<ShardingAuditStrategyConfiguration> auditStrategies = getShardingAuditStrategies(queryContext.getSqlStatementContext(), rule);
    if (auditStrategies.isEmpty()) {
        return;
    }
    Collection<String> disableAuditNames = queryContext.getHintValueContext().getDisableAuditNames();
    for (ShardingAuditStrategyConfiguration auditStrategy : auditStrategies) {
        for (String auditorName : auditStrategy.getAuditorNames()) {
            if (!auditStrategy.isAllowHintDisable() || !disableAuditNames.contains(auditorName.toLowerCase())) {
                rule.getAuditors().get(auditorName).check(queryContext.getSqlStatementContext(), queryContext.getParameters(), globalRuleMetaData, database);
            }
        }
    }
}
/** A configured auditor must be invoked exactly once with the query's context. */
@Test
void assertCheckSuccess() {
    RuleMetaData globalRuleMetaData = mock(RuleMetaData.class);
    new ShardingSQLAuditor().audit(new QueryContext(sqlStatementContext, "", Collections.emptyList(), hintValueContext, mockConnectionContext(), mock(ShardingSphereMetaData.class)),
            globalRuleMetaData, databases.get("foo_db"), rule);
    verify(rule.getAuditors().get("auditor_1")).check(sqlStatementContext, Collections.emptyList(), globalRuleMetaData, databases.get("foo_db"));
}
/**
 * Builds the history "done" file name for the given job index info, applying
 * the default job-name length limit.
 */
public static String getDoneFileName(JobIndexInfo indexInfo) throws IOException {
    return getDoneFileName(indexInfo, JHAdminConfig.DEFAULT_MR_HS_JOBNAME_LIMIT);
}
/** User names containing the delimiter must be percent-encoded in the done file name. */
@Test
public void testUserNamePercentEncoding() throws IOException {
    JobIndexInfo info = new JobIndexInfo();
    JobID oldJobId = JobID.forName(JOB_ID);
    JobId jobId = TypeConverter.toYarn(oldJobId);
    info.setJobId(jobId);
    info.setSubmitTime(Long.parseLong(SUBMIT_TIME));
    info.setUser(USER_NAME_WITH_DELIMITER);
    info.setJobName(JOB_NAME);
    info.setFinishTime(Long.parseLong(FINISH_TIME));
    info.setNumMaps(Integer.parseInt(NUM_MAPS));
    info.setNumReduces(Integer.parseInt(NUM_REDUCES));
    info.setJobStatus(JOB_STATUS);
    info.setQueueName(QUEUE_NAME);
    info.setJobStartTime(Long.parseLong(JOB_START_TIME));

    String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info);
    assertTrue(jobHistoryFile.contains(USER_NAME_WITH_DELIMITER_ESCAPE),
        "User name not encoded correctly into job history file");
}
/**
 * Starts executing the given tasklets, partitioned into cooperative and
 * blocking groups (the cooperative check runs under the job class loader),
 * and submitted to the respective executors. Any submission failure completes
 * the returned future exceptionally.
 *
 * @param tasklets tasklets to run
 * @param cancellationFuture future used to cancel the whole execution
 * @param jobClassLoader class loader active while interacting with tasklets
 * @return future tracking completion of all tasklets
 */
CompletableFuture<Void> beginExecute(
        @Nonnull List<? extends Tasklet> tasklets,
        @Nonnull CompletableFuture<Void> cancellationFuture,
        @Nonnull ClassLoader jobClassLoader
) {
    final ExecutionTracker executionTracker = new ExecutionTracker(tasklets.size(), cancellationFuture);
    try {
        final Map<Boolean, List<Tasklet>> byCooperation =
                tasklets.stream().collect(partitioningBy(
                        tasklet -> doWithClassLoader(jobClassLoader, tasklet::isCooperative)
                ));
        submitCooperativeTasklets(executionTracker, jobClassLoader, byCooperation.get(true));
        submitBlockingTasklets(executionTracker, jobClassLoader, byCooperation.get(false));
    } catch (Throwable t) {
        executionTracker.future.internalCompleteExceptionally(t);
    }
    return executionTracker.future;
}
/** The future returned by beginExecute must reject external completion attempts. */
@Test
public void when_tryCompleteOnReturnedFuture_then_fails() {
    // Given
    final MockTasklet t = new MockTasklet().callsBeforeDone(Integer.MAX_VALUE);
    CompletableFuture<Void> f = tes.beginExecute(singletonList(t), cancellationFuture, classLoader);

    // When - Then
    assertThrows(UnsupportedOperationException.class, () -> f.complete(null));
}
/**
 * Tells whether the event's logger name starts with one of the allow-listed
 * safe logger prefixes.
 *
 * @param event logging event to inspect
 * @return true when the logger is on the safe list
 */
protected boolean isLoggerSafe(ILoggingEvent event) {
    final String loggerName = event.getLoggerName();
    for (final String prefix : SAFE_LOGGERS) {
        if (loggerName.startsWith(prefix)) {
            return true;
        }
    }
    return false;
}
/** A logger outside the safe-prefix list must not be considered safe. */
@Test
void isLoggerSafeShouldReturnFalseWhenLoggerNameDoesNotStartWithSafeLogger() {
    ILoggingEvent event = mock(ILoggingEvent.class);
    when(event.getLoggerName()).thenReturn("com.mycompany.myapp.example.Logger");

    CRLFLogConverter converter = new CRLFLogConverter();
    boolean result = converter.isLoggerSafe(event);

    assertFalse(result);
}
/**
 * Fetches the remote process's system info; empty when the process is not
 * reachable.
 */
@Override
public Optional<ProtobufSystemInfo.SystemInfo> retrieveSystemInfo() {
    return call(SystemInfoActionClient.INSTANCE);
}
/** With the target process down, system info retrieval must return empty. */
@Test
public void retrieveSystemInfo_returns_absent_if_process_is_down() {
    Optional<ProtobufSystemInfo.SystemInfo> info = underTest.retrieveSystemInfo();

    assertThat(info).isEmpty();
}
/**
 * Builds the "System" section of the system info protobuf: server identity,
 * edition, statistics and the authentication/provisioning attributes. List
 * attributes are only written when non-empty.
 */
@Override
public ProtobufSystemInfo.Section toProtobuf() {
    ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder();
    protobuf.setName("System");

    setAttribute(protobuf, "Server ID", server.getId());
    setAttribute(protobuf, "Edition", sonarRuntime.getEdition().getLabel());
    setAttribute(protobuf, NCLOC.getName(), statisticsSupport.getLinesOfCode());
    setAttribute(protobuf, "Container", containerSupport.isRunningInContainer());
    setAttribute(protobuf, "High Availability", true);
    setAttribute(protobuf, "External Users and Groups Provisioning",
        commonSystemInformation.getManagedInstanceProviderName());
    setAttribute(protobuf, "External User Authentication",
        commonSystemInformation.getExternalUserAuthentication());
    addIfNotEmpty(protobuf, "Accepted external identity providers",
        commonSystemInformation.getEnabledIdentityProviders());
    addIfNotEmpty(protobuf, "External identity providers whose users are allowed to sign themselves up",
        commonSystemInformation.getAllowsToSignUpEnabledIdentityProviders());
    setAttribute(protobuf, "Force authentication", commonSystemInformation.getForceAuthentication());
    return protobuf.build();
}
@Test
public void toProtobuf_whenNoAllowsToSignUpEnabledIdentityProviders_shouldWriteNothing() {
    // Given: no identity provider allows self sign-up
    when(commonSystemInformation.getAllowsToSignUpEnabledIdentityProviders()).thenReturn(emptyList());

    // When
    ProtobufSystemInfo.Section section = underTest.toProtobuf();

    // Then: the attribute must be omitted entirely
    assertThatAttributeDoesNotExist(section, "External identity providers whose users are allowed to sign themselves up");
}
/**
 * Removes the index spec registered under the given name, if present.
 *
 * @param name the index spec name to remove
 */
@Override
public void remove(String name) {
    this.indexSpecs.remove(name);
}
@Test
void remove() {
    // Given: a registry holding the primary-key index spec
    var indexSpecs = new DefaultIndexSpecs();
    var primarySpec = primaryKeyIndexSpec(FakeExtension.class);
    indexSpecs.add(primarySpec);
    assertThat(indexSpecs.contains(PrimaryKeySpecUtils.PRIMARY_INDEX_NAME)).isTrue();

    // When / Then: removal by name makes the spec unavailable
    indexSpecs.remove(PrimaryKeySpecUtils.PRIMARY_INDEX_NAME);
    assertThat(indexSpecs.contains(PrimaryKeySpecUtils.PRIMARY_INDEX_NAME)).isFalse();
}
/**
 * Handles uncaught exceptions from YARN threads. Exceptions are only
 * logged; Errors shut the JVM down, and OutOfMemoryError triggers an
 * immediate halt (no shutdown hooks) since post-OOM state is undefined.
 */
@Override
public void uncaughtException(Thread t, Throwable e) {
    if(ShutdownHookManager.get().isShutdownInProgress()) {
        // Shutdown is already underway; just record the failure.
        LOG.error("Thread " + t + " threw an Throwable, but we are shutting " + "down, so ignoring this", e);
    } else if(e instanceof Error) {
        try {
            LOG.error(FATAL, "Thread " + t + " threw an Error. Shutting down now...", e);
        } catch (Throwable err) {
            // A logging failure must not prevent the JVM from exiting.
        }
        if(e instanceof OutOfMemoryError) {
            //After catching an OOM java says it is undefined behavior, so don't
            //even try to clean up or we can get stuck on shutdown.
            try {
                System.err.println("Halting due to Out Of Memory Error...");
            } catch (Throwable err) {
                // Again, a logging failure must not block the halt below.
            }
            ExitUtil.halt(-1);
        } else {
            ExitUtil.terminate(-1);
        }
    } else {
        // Plain exceptions are recoverable at the process level: log only.
        LOG.error("Thread " + t + " threw an Exception.", e);
    }
}
@Test
void testUncaughtExceptionHandlerWithRuntimeException() throws InterruptedException {
    final YarnUncaughtExceptionHandler handlerSpy = spy(exHandler);
    final YarnRuntimeException runtimeException = new YarnRuntimeException(
        "test-yarn-runtime-exception");

    // A thread whose body always throws the runtime exception.
    final Thread throwingThread = new Thread(() -> {
        throw runtimeException;
    });
    throwingThread.setUncaughtExceptionHandler(handlerSpy);
    assertSame(handlerSpy, throwingThread.getUncaughtExceptionHandler());

    throwingThread.start();
    throwingThread.join();

    // The handler must have been invoked with the thread and its exception.
    verify(handlerSpy).uncaughtException(throwingThread, runtimeException);
}
public void updateTopicRouteInfoFromNameServer() { Set<String> topicList = new HashSet<>(); // Consumer { for (Entry<String, MQConsumerInner> entry : this.consumerTable.entrySet()) { MQConsumerInner impl = entry.getValue(); if (impl != null) { Set<SubscriptionData> subList = impl.subscriptions(); if (subList != null) { for (SubscriptionData subData : subList) { topicList.add(subData.getTopic()); } } } } } // Producer { for (Entry<String, MQProducerInner> entry : this.producerTable.entrySet()) { MQProducerInner impl = entry.getValue(); if (impl != null) { Set<String> lst = impl.getPublishTopicList(); topicList.addAll(lst); } } } for (String topic : topicList) { this.updateTopicRouteInfoFromNameServer(topic); } }
@Test
public void testUpdateTopicRouteInfoFromNameServer() throws RemotingException, InterruptedException, MQClientException {
    // Given: a known broker and consumer, plus stubbed route data from the name server
    brokerAddrTable.put(defaultBroker, createBrokerAddrMap());
    consumerTable.put(group, createMQConsumerInner());
    DefaultMQProducer producer = mock(DefaultMQProducer.class);
    when(mQClientAPIImpl.getDefaultTopicRouteInfoFromNameServer(anyLong())).thenReturn(createTopicRouteData());

    // When / Then: the route info is unchanged, so no update is reported
    assertFalse(mqClientInstance.updateTopicRouteInfoFromNameServer(topic, true, producer));
}
/**
 * Registers the given runnable to be executed as a JVM shutdown hook.
 *
 * @param runnable the work to run when the JVM begins shutting down
 */
public static void addShutdownHook(Runnable runnable) {
    Thread hookThread = new Thread(runnable);
    Runtime.getRuntime().addShutdownHook(hookThread);
}
@Test void testAddShutdownHook() { Runnable shutdownHook = () -> { }; ThreadUtils.addShutdownHook(shutdownHook); // It seems no way to check it. }
/**
 * Builds a master -> replicas map from the cluster topology.
 * The node list is traversed twice: once to collect masters, once to
 * attach each replica (matched by master id) to its master.
 */
@Override
public Map<RedisClusterNode, Collection<RedisClusterNode>> clusterGetMasterSlaveMap() {
    Iterable<RedisClusterNode> nodes = clusterGetNodes();

    // First pass: collect all master nodes.
    Set<RedisClusterNode> masters = new HashSet<>();
    for (RedisClusterNode node : nodes) {
        if (node.isMaster()) {
            masters.add(node);
        }
    }

    // Second pass: group replicas under the master whose id they reference.
    Map<RedisClusterNode, Collection<RedisClusterNode>> result = new HashMap<>();
    for (RedisClusterNode node : nodes) {
        for (RedisClusterNode master : masters) {
            if (node.getMasterId() != null && node.getMasterId().equals(master.getId())) {
                result.computeIfAbsent(master, k -> new ArrayList<>()).add(node);
            }
        }
    }
    return result;
}
@Test
public void testClusterGetMasterSlaveMap() {
    Map<RedisClusterNode, Collection<RedisClusterNode>> masterSlaveMap = connection.clusterGetMasterSlaveMap();

    // Three masters, each backed by exactly one replica.
    assertThat(masterSlaveMap).hasSize(3);
    for (Collection<RedisClusterNode> replicas : masterSlaveMap.values()) {
        assertThat(replicas).hasSize(1);
    }
}
@ConstantFunction.List(list = {
        @ConstantFunction(name = "years_add", argTypes = {DATETIME, INT}, returnType = DATETIME, isMonotonic = true),
        @ConstantFunction(name = "years_add", argTypes = {DATE, INT}, returnType = DATE, isMonotonic = true)
})
public static ConstantOperator yearsAdd(ConstantOperator date, ConstantOperator year) {
    // Shift the underlying datetime, then wrap it back in the operator
    // kind (DATE vs DATETIME) that matches the input.
    return date.getType().isDate()
            ? ConstantOperator.createDateOrNull(date.getDatetime().plusYears(year.getInt()))
            : ConstantOperator.createDatetimeOrNull(date.getDatetime().plusYears(year.getInt()));
}
@Test
public void yearsAdd() {
    // 2015-03-23 09:23:55 plus 10 years.
    String shifted = ScalarOperatorFunctions.yearsAdd(O_DT_20150323_092355, O_INT_10).getDatetime().toString();
    assertEquals("2025-03-23T09:23:55", shifted);
}
/**
 * Asserts the subject is at most {@code other}.
 * Delegates to the long overload after widening the int argument.
 */
public final void isAtMost(int other) {
    isAtMost((long) other);
}
@Test
public void isAtMost_int() {
    // Strictly greater than the bound -> a failure is expected.
    expectFailureWhenTestingThat(2L).isAtMost(1);
    // Equal to and below the bound both satisfy isAtMost.
    assertThat(2L).isAtMost(2);
    assertThat(2L).isAtMost(3);
}
/**
 * Builds the Log4J properties used by the Elasticsearch process,
 * honouring console/json output flags and the configured log levels.
 */
public Properties createProperties(Props props, File logDir) {
    Log4JPropertiesBuilder builder = new Log4JPropertiesBuilder(props);

    // Root logger identity: node name (cluster only) + the ES process id.
    RootLoggerConfig rootConfig = newRootLoggerConfigBuilder()
        .setNodeNameField(getNodeNameWhenCluster(props))
        .setProcessId(ProcessId.ELASTICSEARCH)
        .build();
    String pattern = builder.buildLogPattern(rootConfig);

    LogLevelConfig levelConfig = LogLevelConfig.newBuilder(builder.getRootLoggerName())
        .rootLevelFor(ProcessId.ELASTICSEARCH)
        .build();

    return builder.internalLogLevel(Level.ERROR)
        .rootLoggerConfig(rootConfig)
        .logPattern(pattern)
        .enableAllLogsToConsole(isAllLogsToConsoleEnabled(props))
        .jsonOutput(isJsonOutput(props))
        .logDir(logDir)
        .logLevelConfig(levelConfig)
        .build();
}
@Test
public void createProperties_sets_root_logger_to_process_property_over_global_property_if_both_set() throws IOException {
    File logDir = temporaryFolder.newFolder();

    // Both the global level and the ES-specific level are configured...
    Properties esLogProperties = underTest.createProperties(
        newProps(
            "sonar.log.level", "DEBUG",
            "sonar.log.level.es", "TRACE"),
        logDir);

    // ...and the process-specific one must win.
    assertThat(esLogProperties.getProperty("rootLogger.level")).isEqualTo("TRACE");
}
@Subscribe public void onChatMessage(ChatMessage event) { if (event.getType() != ChatMessageType.SPAM && event.getType() != ChatMessageType.GAMEMESSAGE && event.getType() != ChatMessageType.MESBOX) { return; } final var msg = event.getMessage(); if (WOOD_CUT_PATTERN.matcher(msg).matches()) { if (session == null) { session = new WoodcuttingSession(); } session.setLastChopping(); session.incrementLogsCut(); } var matcher = ANIMA_BARK_PATTERN.matcher(msg); if (matcher.matches()) { if (session == null) { session = new WoodcuttingSession(); } session.setLastChopping(); int num = Integer.parseInt(matcher.group(1)); session.incrementBark(num); } if (msg.contains("A bird's nest falls out of the tree")) { if (clueTierSpawned == null || clueTierSpawned.ordinal() >= config.clueNestNotifyTier().ordinal()) { notifier.notify(config.showNestNotification(), "A bird nest has spawned!"); } // Clear the clue tier that has previously spawned clueTierSpawned = null; } if (msg.startsWith("The sapling seems to love")) { int ingredientNum = msg.contains("first") ? 1 : (msg.contains("second") ? 2 : (msg.contains("third") ? 3 : -1)); if (ingredientNum == -1) { log.debug("unable to find ingredient index from message: {}", msg); return; } GameObject ingredientObj = saplingIngredients.stream() .filter(obj -> msg.contains(client.getObjectDefinition(obj.getId()).getName().toLowerCase())) .findAny() .orElse(null); if (ingredientObj == null) { log.debug("unable to find ingredient from message: {}", msg); return; } saplingOrder[ingredientNum - 1] = ingredientObj; } if (msg.equals("There are no open, unpollinated flowers on this bush yet.") || msg.equals("The flowers on this bush have not yet opened enough to harvest pollen.") || msg.equals("<col=06600c>The bush is already fruiting and won't benefit from <col=06600c>any more pollen.</col>")) { if (activeFlowers.contains(lastInteractFlower)) { log.debug("Flowers reset"); activeFlowers.clear(); } } }
@Test
public void testBirdsNest() {
    // Given: nest notifications are enabled
    when(woodcuttingConfig.showNestNotification()).thenReturn(Notification.ON);

    // When: the bird's nest game message arrives
    woodcuttingPlugin.onChatMessage(new ChatMessage(null, ChatMessageType.GAMEMESSAGE, "", BIRDS_NEST_MESSAGE, "", 0));

    // Then
    verify(notifier).notify(Notification.ON, "A bird nest has spawned!");
}
/**
 * Exposes a primitive int array as a fixed-size, read-only boxed List view.
 * The view reflects later writes to the backing array.
 *
 * @param array the backing array; must not be null
 */
public static List<Integer> asIntegerList(@Nonnull int[] array) {
    checkNotNull(array, "null array");
    return new AbstractList<>() {
        @Override
        public Integer get(int idx) {
            return array[idx];
        }

        @Override
        public int size() {
            return array.length;
        }
    };
}
// A null input array must be rejected with a NullPointerException.
@Test(expected = NullPointerException.class)
public void testToIntegerList_whenNull() {
    asIntegerList(null);
}
/**
 * Removes every upstream tracked for the given selector, both healthy
 * and unhealthy, under the shared lock so the two maps stay consistent.
 *
 * @param selectorId the selector whose upstreams should be dropped
 */
public void triggerRemoveAll(final String selectorId) {
    synchronized (lock) {
        healthyUpstream.remove(selectorId);
        unhealthyUpstream.remove(selectorId);
    }
}
@Test
public void testTriggerRemoveAll() {
    final String selectorId = "s1";
    Upstream mockUpstream = mock(Upstream.class);

    // Removing right after adding must leave no healthy upstream behind,
    // and the behaviour must be repeatable — hence two cycles.
    for (int i = 0; i < 2; i++) {
        healthCheckTask.triggerAddOne(selectorId, mockUpstream);
        healthCheckTask.triggerRemoveAll(selectorId);
        assertFalse(healthCheckTask.getHealthyUpstream().containsKey(selectorId));
    }
}
/**
 * Formats a date with the given pattern, returning null for null input.
 * A fresh SimpleDateFormat is created per call because the class is not
 * thread-safe.
 *
 * @param date   the date to format, may be null
 * @param format a SimpleDateFormat pattern
 * @return the formatted string, or null when {@code date} is null
 */
public static String formatDate(Date date, String format) {
    return date == null ? null : new SimpleDateFormat(format).format(date);
}
@Test
public void testFormatDate() {
    Date now = DateUtil.getCurrentDate();
    String formatted = DateUtil.formatDate(now, "yyyy-MM-dd HH:mm:ss");
    // Any non-null date must produce a non-null formatted string.
    Assertions.assertNotNull(formatted);
}
/**
 * Tests whether {@code str} contains {@code substring} starting at
 * {@code index}, comparing characters directly without allocating.
 *
 * @param str       the sequence to search in
 * @param index     the start offset within {@code str}
 * @param substring the candidate to match
 * @return true when the region of {@code str} at {@code index} equals
 *         {@code substring}
 */
public static boolean substringMatch(CharSequence str, int index, CharSequence substring) {
    // The candidate region must fit entirely inside str.
    int len = substring.length();
    if (index + len > str.length()) {
        return false;
    }
    int i = 0;
    while (i < len) {
        if (str.charAt(index + i) != substring.charAt(i)) {
            return false;
        }
        i++;
    }
    return true;
}
@Test
public void testSubstringMatchReturningTrue() {
    // Any sequence trivially matches itself at index 0.
    StringBuffer sequence = new StringBuffer("ZP~>xz1;");
    assertTrue(StringUtil.substringMatch(sequence, 0, sequence));
}
/**
 * Returns {@code true} when the given class name is recognised as either
 * a collection or a map by this utility's sibling checks.
 *
 * @param className fully-qualified class name to test
 */
public static boolean isCollectionOrMap(String className) {
    return isCollection(className) || isMap(className);
}
@Test
public void isCollectionOrMap() {
    // Every known list-like and map-like class name must be recognised,
    // including the Collection interface itself.
    assertThat(listValues).allMatch(ScenarioSimulationSharedUtils::isCollectionOrMap);
    assertThat(mapValues).allMatch(ScenarioSimulationSharedUtils::isCollectionOrMap);
    assertThat(ScenarioSimulationSharedUtils.isCollectionOrMap(Collection.class.getCanonicalName())).isTrue();
}
@Override
public void ignoreAutoTrackFragment(Class<?> fragment) {
    // Intentionally a no-op: this implementation appears to have fragment
    // auto-tracking disabled, so there is nothing to record — TODO confirm
    // against the enabled implementation.
}
@Test
public void ignoreAutoTrackFragment() {
    // After ignoring a fragment class, it must not be auto-tracked for
    // AppViewScreen events.
    mSensorsAPI.ignoreAutoTrackFragment(DialogFragment.class);
    Assert.assertFalse(mSensorsAPI.isFragmentAutoTrackAppViewScreen(DialogFragment.class));
}
/**
 * Overrides Spring's default HiddenHttpMethodFilter with a pass-through
 * implementation: the hidden "_method" form-field translation is skipped
 * and requests continue down the chain unmodified.
 */
@Bean
public HiddenHttpMethodFilter hiddenHttpMethodFilter() {
    return new HiddenHttpMethodFilter() {
        @Override
        @NonNull
        public Mono<Void> filter(@NonNull final ServerWebExchange exchange, @NonNull final WebFilterChain chain) {
            // Delegate straight to the next filter in the chain.
            return chain.filter(exchange);
        }
    };
}
@Test
public void testHiddenHttpMethodFilter() {
    applicationContextRunner.run(context -> {
        // The pass-through filter bean must exist and simply delegate to the chain.
        HiddenHttpMethodFilter filter = context.getBean("hiddenHttpMethodFilter", HiddenHttpMethodFilter.class);
        filter.filter(mock(ServerWebExchange.class), mock(WebFilterChain.class));
        assertNotNull(filter);
    });
}
/**
 * Normalises the raw shownotes into a renderable HTML document:
 * supplies a fallback page when empty, converts plain-text line breaks,
 * cleans inline CSS, injects the webview style sheet and links timecodes.
 *
 * @return the processed HTML as a string
 */
@NonNull
public String processShownotes() {
    String shownotes = rawShownotes;
    // Fall back to a minimal "no shownotes" page when nothing was supplied.
    if (TextUtils.isEmpty(shownotes)) {
        Log.d(TAG, "shownotesProvider contained no shownotes. Returning 'no shownotes' message");
        shownotes = "<html><head></head><body><p id='apNoShownotes'>" + noShownotesLabel + "</p></body></html>";
    }

    // replace ASCII line breaks with HTML ones if shownotes don't contain HTML line breaks already
    if (!LINE_BREAK_REGEX.matcher(shownotes).find() && !shownotes.contains("<p>")) {
        shownotes = shownotes.replace("\n", "<br />");
    }

    Document document = Jsoup.parse(shownotes);
    cleanCss(document);
    // Inject the webview style sheet so rendering matches the app theme.
    document.head().appendElement("style").attr("type", "text/css").text(webviewStyle);
    // Turn recognised timecodes into clickable links.
    addTimecodes(document);
    return document.toString();
}
@Test
public void testProcessShownotesAddTimecodeBrackets() {
    final String timeStr = "10:11";
    final long timeMs = 3600 * 1000 * 10 + 60 * 1000 * 11;

    // Shownotes containing a bracketed timecode...
    ShownotesCleaner cleaner = new ShownotesCleaner(context,
            "<p> Some test text with a timecode [" + timeStr + "] here.</p>", Integer.MAX_VALUE);
    String result = cleaner.processShownotes();

    // ...must end up linked to the corresponding playback position.
    checkLinkCorrect(result, new long[]{timeMs}, new String[]{timeStr});
}
/**
 * Starts the cache. May be called only once per instance.
 *
 * @return this
 * @throws Exception errors creating parent nodes
 */
public TreeCache start() throws Exception {
    // Enforce the single-start contract atomically.
    Preconditions.checkState(treeState.compareAndSet(TreeState.LATENT, TreeState.STARTED), "already started");
    if (createParentNodes) {
        client.createContainers(root.path);
    }
    // Register the connection listener before checking the current state so
    // no connection transition can be missed between the two steps.
    client.getConnectionStateListenable().addListener(connectionStateListener);
    if (client.getZookeeperClient().isConnected()) {
        root.wasCreated();
    }
    return this;
}
@Test
public void testDeleteThenCreateRoot() throws Exception {
    // Seed a parent and the node the cache will watch.
    client.create().forPath("/test");
    client.create().forPath("/test/foo", "one".getBytes());
    cache = newTreeCacheWithListeners(client, "/test/foo");
    cache.start();
    assertEvent(TreeCacheEvent.Type.NODE_ADDED, "/test/foo");
    assertEvent(TreeCacheEvent.Type.INITIALIZED);

    // Deleting and recreating the cache root must produce a REMOVED/ADDED
    // event pair each time, even across repeated cycles.
    client.delete().forPath("/test/foo");
    assertEvent(TreeCacheEvent.Type.NODE_REMOVED, "/test/foo");
    client.create().forPath("/test/foo", "two".getBytes());
    assertEvent(TreeCacheEvent.Type.NODE_ADDED, "/test/foo");
    client.delete().forPath("/test/foo");
    assertEvent(TreeCacheEvent.Type.NODE_REMOVED, "/test/foo");
    client.create().forPath("/test/foo", "two".getBytes());
    assertEvent(TreeCacheEvent.Type.NODE_ADDED, "/test/foo");
    assertNoMoreEvents();
}
/**
 * Tells whether the configuration property with the given key is marked
 * secure in the owning artifact plugin's store settings.
 * The chained lookup is performed once instead of the original three times.
 *
 * @param key the configuration property key
 * @return true when the plugin metadata marks the key as secure
 */
@Override
protected boolean isSecure(String key) {
    ArtifactPluginInfo pluginInfo = metadataStore().getPluginInfo(getPluginId());
    if (pluginInfo == null || pluginInfo.getStoreConfigSettings() == null) {
        return false;
    }
    PluginConfiguration configuration = pluginInfo.getStoreConfigSettings().getConfiguration(key);
    return configuration != null && configuration.isSecure();
}
@Test
public void postConstruct_shouldEncryptSecureConfigurations() {
    // Given: plugin metadata marking "password" as a secure property
    final PluggableInstanceSettings settings = new PluggableInstanceSettings(
            List.of(new PluginConfiguration("password", new Metadata(true, true))));
    store.setPluginInfo(new ArtifactPluginInfo(pluginDescriptor("plugin_id"), settings, null, null, null, null));

    ArtifactStore artifactStore = new ArtifactStore("id", "plugin_id",
            new ConfigurationProperty(new ConfigurationKey("password"), new ConfigurationValue("pass")));

    // When
    artifactStore.encryptSecureConfigurations();

    // Then: the single property is now stored encrypted
    assertThat(artifactStore.size(), is(1));
    assertTrue(artifactStore.first().isSecure());
}