focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Snapshots the enumerator's state for the given Flink checkpoint.
 *
 * <p>The state bundles three independent pieces: the current enumerator position,
 * the split assigner's own state, and a snapshot of the enumeration history.
 *
 * @param checkpointId id of the checkpoint being taken (not embedded in the state itself)
 * @return a new {@code IcebergEnumeratorState} capturing the current position,
 *     assigner state, and enumeration history
 */
@Override public IcebergEnumeratorState snapshotState(long checkpointId) {
  return new IcebergEnumeratorState(
      enumeratorPosition.get(), assigner.state(), enumerationHistory.snapshot());
}
/**
 * Verifies that the continuous enumerator keeps going after planning failures when
 * maxAllowedPlanningFailures is -1 (unlimited): no splits are discovered while the
 * planner is failing, and the split shows up after the first successful planning pass.
 */
@Test public void testPlanningIgnoringErrors() throws Exception {
  int expectedFailures = 3;
  TestingSplitEnumeratorContext<IcebergSourceSplit> enumeratorContext =
      new TestingSplitEnumeratorContext<>(4);
  // -1 means planning failures never abort the enumerator.
  ScanContext scanContext =
      ScanContext.builder()
          .streaming(true)
          .startingStrategy(StreamingStartingStrategy.INCREMENTAL_FROM_EARLIEST_SNAPSHOT)
          .maxPlanningSnapshotCount(1)
          .maxAllowedPlanningFailures(-1)
          .build();
  // Planner is configured to fail the first `expectedFailures` planning attempts.
  ManualContinuousSplitPlanner splitPlanner =
      new ManualContinuousSplitPlanner(scanContext, expectedFailures);
  ContinuousIcebergEnumerator enumerator =
      createEnumerator(enumeratorContext, scanContext, splitPlanner);
  // Make one split available and trigger the periodic discovery
  List<IcebergSourceSplit> splits =
      SplitHelpers.createSplitsFromTransientHadoopTable(temporaryFolder, 1, 1);
  splitPlanner.addSplits(splits);
  Collection<IcebergSourceSplitState> pendingSplits;
  // Can not discover the new split with planning failures
  for (int i = 0; i < expectedFailures; ++i) {
    enumeratorContext.triggerAllActions();
    pendingSplits = enumerator.snapshotState(i).pendingSplits();
    assertThat(pendingSplits).isEmpty();
  }
  // Discovered the new split after a successful scan planning
  enumeratorContext.triggerAllActions();
  pendingSplits = enumerator.snapshotState(expectedFailures + 1).pendingSplits();
  assertThat(pendingSplits).hasSize(1);
  IcebergSourceSplitState pendingSplit = pendingSplits.iterator().next();
  assertThat(pendingSplit.split().splitId()).isEqualTo(splits.get(0).splitId());
  assertThat(pendingSplit.status()).isEqualTo(IcebergSourceSplitStatus.UNASSIGNED);
}
/**
 * Empties this transfer queue by deleting both of its Redis structures
 * (the queue itself and its companion map) in a single delete call.
 */
@Override public void clear() {
    // A throwaway keys facade is sufficient here; delete() is the only call we need.
    new RedissonKeys(commandExecutor).delete(queueName, mapName);
}
/**
 * Verifies that clear() removes every Redis key backing the transfer queue,
 * even after a consumer has performed a (blocking) take on the same queue.
 */
@Test public void testClear() throws ExecutionException, InterruptedException {
    RTransferQueue<String> queue = redisson.getTransferQueue("queue");
    queue.add("1");
    queue.add("4");
    queue.add("2");
    queue.add("5");
    queue.add("3");
    // Take one element from a second handle on a background thread, delayed 1s,
    // so there is consumer-side state in Redis before we clear.
    ScheduledFuture<?> f = Executors.newSingleThreadScheduledExecutor().schedule(() -> {
        RTransferQueue<Integer> queue1 = redisson.getTransferQueue("queue");
        try {
            queue1.take();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }, 1, TimeUnit.SECONDS);
    f.get(); // wait until the take has completed
    queue.clear();
    // All backing keys (queue + map) must be gone shortly after clear().
    Awaitility.waitAtMost(Duration.ofSeconds(1)).untilAsserted(() -> {
        assertThat(redisson.getKeys().count()).isZero();
    });
}
/**
 * KSQL UDF: parses a formatted date string into a {@code DATE} value.
 *
 * <p>Null in either argument yields null. The parsed instant must fall exactly on a
 * day boundary (epoch millis divisible by {@code MILLIS_IN_DAY}); otherwise the
 * pattern evidently captured a time component and parsing is rejected.
 */
@Udf(description = "Converts a string representation of a date in the given format" + " into a DATE value.")
public Date parseDate(
    @UdfParameter(description = "The string representation of a date.") final String formattedDate,
    @UdfParameter(description = "The format pattern should be in the format expected by" + " java.text.SimpleDateFormat.") final String formatPattern) {
  // Null-propagation: SQL semantics, not an error.
  if (formattedDate == null || formatPattern == null) {
    return null;
  }
  try {
    // `formatters` caches a parser per pattern; get() may throw ExecutionException.
    final long epochMillis = formatters.get(formatPattern).parse(formattedDate).getTime();
    if (epochMillis % MILLIS_IN_DAY != 0) {
      throw new KsqlFunctionException("Date format contains time field.");
    }
    return new Date(epochMillis);
  } catch (final ParseException | ExecutionException | RuntimeException e) {
    // Wrap every failure (including the rejection above) with full input context.
    throw new KsqlFunctionException("Failed to parse date '" + formattedDate
        + "' with formatter '" + formatPattern
        + "': " + e.getMessage(), e);
  }
}
/** Month abbreviations should parse case-insensitively ("dec" for "MMM"). */
@Test public void shouldConvertCaseInsensitiveStringToDate() {
  // When:
  final Date result = udf.parseDate("01-dec-2021", "dd-MMM-yyyy");
  // Then: 1638316800000 is 2021-12-01T00:00:00Z — exactly a day boundary.
  assertThat(result.getTime(), is(1638316800000L));
}
/**
 * Marks an existing group member as awaiting a SyncGroup response.
 *
 * @param memberId id of the member to track as pending sync
 * @return {@code true} if the member was newly added to the pending-sync set,
 *     {@code false} if it was already pending
 * @throws IllegalStateException if {@code memberId} is not a member of the group
 */
public boolean addPendingSyncMember(String memberId) {
    // Guard: only known members may be tracked as pending sync. The previous
    // message ("which is already a stable member of the group") contradicted
    // this condition — it fired precisely when the member was NOT in the group.
    if (!hasMember(memberId)) {
        throw new IllegalStateException("Attempt to add pending sync member " + memberId
            + " which is not a member of the group.");
    }
    return pendingSyncMembers.add(memberId);
}
/** A memberId that is not part of the group must be rejected with IllegalStateException. */
@Test public void testCannotAddPendingSyncOfUnknownMember() {
    assertThrows(IllegalStateException.class, () -> group.addPendingSyncMember(memberId));
}
/**
 * Persists this Avro-input step's configuration to the Kettle repository.
 *
 * <p>Attribute names ("dataLocation", "path", "avro_type", ...) are the repository
 * schema for this step — they must match what readRep expects, so do not rename them.
 * Any failure is wrapped in a KettleException carrying the step id.
 */
@Override public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    // Step-level (non-indexed) attributes.
    rep.saveStepAttribute( id_transformation, id_step, "passing_through_fields", passingThruFields );
    rep.saveStepAttribute( id_transformation, id_step, "dataLocation", getDataLocation() );
    rep.saveStepAttribute( id_transformation, id_step, "sourceFormat", getFormat() );
    rep.saveStepAttribute( id_transformation, id_step, "dataLocationType", dataLocationType );
    rep.saveStepAttribute( id_transformation, id_step, "schemaLocation", getSchemaLocation() );
    rep.saveStepAttribute( id_transformation, id_step, "schemaLocationType", schemaLocationType );
    rep.saveStepAttribute( id_transformation, id_step, "isCacheSchemas", isCacheSchemas() );
    rep.saveStepAttribute( id_transformation, id_step, "allowNullForMissingFields", isAllowNullForMissingFields() );
    // Indexed attributes: one group per input field.
    for ( int i = 0; i < inputFields.length; i++ ) {
      AvroInputField field = inputFields[ i ];
      rep.saveStepAttribute( id_transformation, id_step, i, "path", field.getAvroFieldName() );
      rep.saveStepAttribute( id_transformation, id_step, i, "name", field.getPentahoFieldName() );
      rep.saveStepAttribute( id_transformation, id_step, i, "type", field.getTypeDesc() );
      AvroSpec.DataType avroDataType = field.getAvroType();
      // Fall back to a converted type when no explicit (non-NULL) Avro type was set.
      if ( avroDataType != null && !avroDataType.equals( AvroSpec.DataType.NULL ) ) {
        rep.saveStepAttribute( id_transformation, id_step, i, "avro_type", avroDataType.getName() );
      } else {
        rep.saveStepAttribute( id_transformation, id_step, i, "avro_type", AvroTypeConverter.convertToAvroType( field.getTypeDesc() ) );
      }
      // Optional attributes are only written when present.
      if ( field.getStringFormat() != null ) {
        rep.saveStepAttribute( id_transformation, id_step, i, "format", field.getStringFormat() );
      }
      String indexedValues = field.getIndexedValues();
      if ( indexedValues != null && indexedValues.length() > 0 ) {
        rep.saveStepAttribute( id_transformation, id_step, i, "indexed_vals", indexedValues );
      }
    }
    // Indexed attributes: one group per lookup field (distinct attribute names,
    // so the index space can overlap with the input-field loop above).
    for ( int i = 0; i < lookupFields.size(); i++ ) {
      AvroLookupField field = lookupFields.get( i );
      rep.saveStepAttribute( id_transformation, id_step, i, "fieldName", field.getFieldName() );
      rep.saveStepAttribute( id_transformation, id_step, i, "variableName", field.getVariableName() );
      rep.saveStepAttribute( id_transformation, id_step, i, "defaultValue", field.getDefaultValue() );
    }
    // Let the base step persist its own attributes too.
    super.saveRep( rep, metaStore, id_transformation, id_step );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e );
  }
}
/** Smoke test: saveRep consults the step's locations and each field's metadata. */
@Test public void testSaveRep() throws KettleException {
  meta.setInputFields( Arrays.asList( field ) );
  meta.saveRep( rep, metaStore, id_transformation, id_step );
  // Only interaction is verified here, not the persisted values themselves.
  verify( meta ).getDataLocation();
  verify( meta ).getSchemaLocation();
  verify( field ).getAvroFieldName();
  verify( field ).getPentahoFieldName();
  verify( field ).getTypeDesc();
}
/**
 * Maps an InterSystems IRIS column definition to a SeaTunnel {@link Column}.
 *
 * <p>The builder is pre-populated from the raw definition (name, source type, length,
 * scale, nullability, default, comment); the switch then overrides dataType — and,
 * for char/binary/decimal families, columnLength/scale — per IRIS type family.
 *
 * @param typeDefine raw column definition read from IRIS metadata
 * @return the converted SeaTunnel column
 * @throws RuntimeException (via CommonError) for IRIS types with no mapping
 */
@Override public Column convert(BasicTypeDefine typeDefine) {
    Long typeDefineLength = typeDefine.getLength();
    PhysicalColumn.PhysicalColumnBuilder builder =
        PhysicalColumn.builder()
            .name(typeDefine.getName())
            .sourceType(typeDefine.getColumnType())
            .columnLength(typeDefineLength)
            .scale(typeDefine.getScale())
            .nullable(typeDefine.isNullable())
            .defaultValue(typeDefine.getDefaultValue())
            .comment(typeDefine.getComment());
    String irisDataType = typeDefine.getDataType().toUpperCase();
    // Length used for bounded char/binary types; defaults to 1 when absent/non-positive.
    long charOrBinaryLength =
        Objects.nonNull(typeDefineLength) && typeDefineLength > 0 ? typeDefineLength : 1;
    switch (irisDataType) {
        case IRIS_NULL:
            builder.dataType(BasicType.VOID_TYPE);
            break;
        case IRIS_BIT:
            builder.dataType(BasicType.BOOLEAN_TYPE);
            break;
        // Exact numeric family -> DecimalType; falls back to defaults when no precision.
        case IRIS_NUMERIC:
        case IRIS_MONEY:
        case IRIS_SMALLMONEY:
        case IRIS_NUMBER:
        case IRIS_DEC:
        case IRIS_DECIMAL:
            DecimalType decimalType;
            if (typeDefine.getPrecision() != null && typeDefine.getPrecision() > 0) {
                decimalType = new DecimalType( typeDefine.getPrecision().intValue(), typeDefine.getScale());
            } else {
                decimalType = new DecimalType(DEFAULT_PRECISION, DEFAULT_SCALE);
            }
            builder.dataType(decimalType);
            builder.columnLength(Long.valueOf(decimalType.getPrecision()));
            builder.scale(decimalType.getScale());
            break;
        case IRIS_INT:
        case IRIS_INTEGER:
        case IRIS_MEDIUMINT:
            builder.dataType(BasicType.INT_TYPE);
            break;
        case IRIS_ROWVERSION:
        case IRIS_BIGINT:
        case IRIS_SERIAL:
            builder.dataType(BasicType.LONG_TYPE);
            break;
        case IRIS_TINYINT:
            builder.dataType(BasicType.BYTE_TYPE);
            break;
        case IRIS_SMALLINT:
            builder.dataType(BasicType.SHORT_TYPE);
            break;
        case IRIS_FLOAT:
            builder.dataType(BasicType.FLOAT_TYPE);
            break;
        case IRIS_DOUBLE:
        case IRIS_REAL:
        case IRIS_DOUBLE_PRECISION:
            builder.dataType(BasicType.DOUBLE_TYPE);
            break;
        // Bounded character types: string with declared (or default 1) length.
        case IRIS_CHAR:
        case IRIS_CHAR_VARYING:
        case IRIS_CHARACTER_VARYING:
        case IRIS_NATIONAL_CHAR:
        case IRIS_NATIONAL_CHAR_VARYING:
        case IRIS_NATIONAL_CHARACTER:
        case IRIS_NATIONAL_CHARACTER_VARYING:
        case IRIS_NATIONAL_VARCHAR:
        case IRIS_NCHAR:
        case IRIS_SYSNAME:
        case IRIS_VARCHAR2:
        case IRIS_VARCHAR:
        case IRIS_NVARCHAR:
        case IRIS_UNIQUEIDENTIFIER:
        case IRIS_GUID:
        case IRIS_CHARACTER:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(charOrBinaryLength);
            break;
        // Unbounded text types: string capped at Integer.MAX_VALUE.
        case IRIS_NTEXT:
        case IRIS_CLOB:
        case IRIS_LONG_VARCHAR:
        case IRIS_LONG:
        case IRIS_LONGTEXT:
        case IRIS_MEDIUMTEXT:
        case IRIS_TEXT:
        case IRIS_LONGVARCHAR:
            builder.dataType(BasicType.STRING_TYPE);
            builder.columnLength(Long.valueOf(Integer.MAX_VALUE));
            break;
        case IRIS_DATE:
            builder.dataType(LocalTimeType.LOCAL_DATE_TYPE);
            break;
        case IRIS_TIME:
            builder.dataType(LocalTimeType.LOCAL_TIME_TYPE);
            break;
        case IRIS_DATETIME:
        case IRIS_DATETIME2:
        case IRIS_SMALLDATETIME:
        case IRIS_TIMESTAMP:
        case IRIS_TIMESTAMP2:
        case IRIS_POSIXTIME:
            builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE);
            break;
        // Bounded binary types: byte[] with declared (or default 1) length.
        case IRIS_BINARY:
        case IRIS_BINARY_VARYING:
        case IRIS_RAW:
        case IRIS_VARBINARY:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(charOrBinaryLength);
            break;
        // Unbounded binary types: byte[] capped at Integer.MAX_VALUE.
        case IRIS_LONGVARBINARY:
        case IRIS_BLOB:
        case IRIS_IMAGE:
        case IRIS_LONG_BINARY:
        case IRIS_LONG_RAW:
            builder.dataType(PrimitiveByteArrayType.INSTANCE);
            builder.columnLength(Long.valueOf(Integer.MAX_VALUE));
            break;
        default:
            throw CommonError.convertToSeaTunnelTypeError( DatabaseIdentifier.IRIS, irisDataType, typeDefine.getName());
    }
    return builder.build();
}
/** IRIS "double" must map to DOUBLE_TYPE while preserving name and source type. */
@Test public void testConvertDouble() {
    BasicTypeDefine<Object> typeDefine =
        BasicTypeDefine.builder().name("test").columnType("double").dataType("double").build();
    Column column = IrisTypeConverter.INSTANCE.convert(typeDefine);
    Assertions.assertEquals(typeDefine.getName(), column.getName());
    Assertions.assertEquals(BasicType.DOUBLE_TYPE, column.getDataType());
    Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType());
}
/**
 * Reports whether primitive values should be unwrapped.
 *
 * @return {@code true} only when the {@code UNWRAP_PRIMITIVES} property is present and
 *     equals {@code UNWRAP} (case-insensitively); absent properties default to wrapped.
 */
public boolean getUnwrapPrimitives() {
  final String configured = properties.getOrDefault(UNWRAP_PRIMITIVES, WRAP);
  return UNWRAP.equalsIgnoreCase(configured);
}
/** With no properties configured, primitives default to wrapped (false). */
@Test public void shouldGetDefaultUnwrapPrimitives() {
  // Given:
  final ProtobufProperties properties = new ProtobufProperties(ImmutableMap.of());
  // When/Then:
  assertThat(properties.getUnwrapPrimitives(), is(false));
}
/**
 * Keeps only the elements contained in {@code c}, removing everything else
 * through the live iterator (so removals hit the backing store).
 *
 * @param c elements to retain
 * @return {@code true} if at least one element was removed
 */
@Override public boolean retainAll(Collection<?> c) {
    boolean modified = false;
    Iterator<?> it = iterator();
    while (it.hasNext()) {
        Object element = it.next();
        if (c.contains(element)) {
            continue; // element is wanted; keep it
        }
        it.remove();
        modified = true;
    }
    return modified;
}
/** retainAll on 0..199 keeping {1, 2} must report a change and leave exactly 2 elements. */
@Test public void testRetainAll() {
    Set<Integer> set = redisson.getSortedSet("set");
    for (int i = 0; i < 200; i++) {
        set.add(i);
    }
    Assertions.assertTrue(set.retainAll(Arrays.asList(1, 2)));
    Assertions.assertEquals(2, set.size());
}
/**
 * Imports all classes reachable from a single URL.
 *
 * <p>Convenience overload delegating to {@code importUrls} with a one-element list.
 *
 * @param url location to import classes from
 * @return the imported classes
 */
@PublicAPI(usage = ACCESS) public JavaClasses importUrl(URL url) {
    return importUrls(singletonList(url));
}
/**
 * Nested-class static modifiers must be imported correctly: inner (non-static) classes
 * lack STATIC, static nested classes have it, and nested interfaces are implicitly STATIC.
 */
@Test public void handles_static_modifier_of_nested_classes() {
    JavaClasses classes = new ClassFileImporter().importUrl(getClass().getResource("testexamples/nestedimport"));
    // Outer class and a plain inner class are not static.
    assertThat(classes.get(ClassWithNestedClass.class).getModifiers()).as("modifiers of ClassWithNestedClass").doesNotContain(STATIC);
    assertThat(classes.get(ClassWithNestedClass.NestedClass.class).getModifiers()).as("modifiers of ClassWithNestedClass.NestedClass").doesNotContain(STATIC);
    // Explicitly static nested class keeps its modifier.
    assertThat(classes.get(ClassWithNestedClass.StaticNestedClass.class).getModifiers()).as("modifiers of ClassWithNestedClass.StaticNestedClass").contains(STATIC);
    // Nested interfaces are implicitly static per the JLS.
    assertThat(classes.get(ClassWithNestedClass.NestedInterface.class).getModifiers()).as("modifiers of ClassWithNestedClass.NestedInterface").contains(STATIC);
    assertThat(classes.get(ClassWithNestedClass.StaticNestedInterface.class).getModifiers()).as("modifiers of ClassWithNestedClass.StaticNestedInterface").contains(STATIC);
}
/**
 * Resolves the given view ids against {@code rootView} and runs the pop-up animation
 * over the resolved views. An id of 0 is a placeholder: no lookup is performed and
 * the corresponding array slot stays {@code null}, but the slot is still kept
 * (presumably so positions influence the animation stagger — TODO confirm).
 */
public static void popupViewAnimationWithIds(View rootView, @IdRes int... viewIds) {
    final View[] resolvedViews = new View[viewIds.length];
    int slot = 0;
    for (int id : viewIds) {
        if (id != 0) {
            resolvedViews[slot] = rootView.findViewById(id);
        }
        slot++;
    }
    popupViewAnimation(resolvedViews);
}
/**
 * Passing ids {1, 0, 2}: id 0 is skipped (never looked up), yet the two resolved views
 * receive staggered start offsets of 500ms and 900ms respectively.
 */
@Test public void testPopupViewAnimationWithIds() {
    View v1 = Mockito.mock(View.class);
    View v2 = Mockito.mock(View.class);
    Mockito.doReturn(mApplication).when(v1).getContext();
    Mockito.doReturn(mApplication).when(v2).getContext();
    View rootView = Mockito.mock(View.class);
    Mockito.doReturn(v1).when(rootView).findViewById(1);
    Mockito.doReturn(v2).when(rootView).findViewById(2);
    SetupSupport.popupViewAnimationWithIds(rootView, 1, 0, 2);
    // Capture the animations actually started on each resolved view.
    ArgumentCaptor<Animation> animation1Captor = ArgumentCaptor.forClass(Animation.class);
    ArgumentCaptor<Animation> animation2Captor = ArgumentCaptor.forClass(Animation.class);
    Mockito.verify(v1).startAnimation(animation1Captor.capture());
    Mockito.verify(v2).startAnimation(animation2Captor.capture());
    Animation animation1 = animation1Captor.getValue();
    Animation animation2 = animation2Captor.getValue();
    Assert.assertEquals(500, animation1.getStartOffset());
    Assert.assertEquals(900, animation2.getStartOffset());
}
/**
 * Builds the concrete MySQL command packet for a received command type.
 *
 * <p>Unknown command types map to {@code MySQLUnsupportedCommandPacket} rather than
 * throwing, so the protocol handler can answer with a proper error.
 *
 * @param commandPacketType command byte already decoded from the wire
 * @param payload remaining packet payload (consumed by the chosen packet's constructor)
 * @param connectionSession session used to look up server-side prepared statements
 * @return the decoded command packet
 */
public static MySQLCommandPacket newInstance(final MySQLCommandPacketType commandPacketType, final MySQLPacketPayload payload, final ConnectionSession connectionSession) {
    switch (commandPacketType) {
        case COM_QUIT:
            return new MySQLComQuitPacket();
        case COM_INIT_DB:
            return new MySQLComInitDbPacket(payload);
        case COM_FIELD_LIST:
            return new MySQLComFieldListPacket(payload);
        case COM_QUERY:
            return new MySQLComQueryPacket(payload);
        case COM_STMT_PREPARE:
            return new MySQLComStmtPreparePacket(payload);
        case COM_STMT_EXECUTE:
            // Peek (not read) the statement id at the current reader index to fetch the
            // prepared statement; the payload itself is still parsed by the packet.
            MySQLServerPreparedStatement serverPreparedStatement =
                connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(payload.getByteBuf().getIntLE(payload.getByteBuf().readerIndex()));
            return new MySQLComStmtExecutePacket(payload, serverPreparedStatement.getSqlStatementContext().getSqlStatement().getParameterCount());
        case COM_STMT_SEND_LONG_DATA:
            return new MySQLComStmtSendLongDataPacket(payload);
        case COM_STMT_RESET:
            return new MySQLComStmtResetPacket(payload);
        case COM_STMT_CLOSE:
            return new MySQLComStmtClosePacket(payload);
        case COM_SET_OPTION:
            return new MySQLComSetOptionPacket(payload);
        case COM_PING:
            return new MySQLComPingPacket();
        case COM_RESET_CONNECTION:
            return new MySQLComResetConnectionPacket();
        default:
            return new MySQLUnsupportedCommandPacket(commandPacketType);
    }
}
/** COM_QUIT must produce a MySQLComQuitPacket (payload is not consumed for this type). */
@Test void assertNewInstanceWithComQuitPacket() {
    assertThat(MySQLCommandPacketFactory.newInstance(MySQLCommandPacketType.COM_QUIT, payload, connectionSession), instanceOf(MySQLComQuitPacket.class));
}
/**
 * Submits the pipeline to the Dataflow service and returns a handle to the created job.
 *
 * <p>High-level sequence: normalize runner-v2 experiments, run pre-submission
 * optimizations, build and stage the portable pipeline proto, translate to the v1 job
 * spec, populate the Job environment, optionally upload the graph / write a job file or
 * template, then create the job and validate the returned client request id.
 *
 * @throws IllegalArgumentException on contradictory experiments or unserializable options
 * @throws DataflowJobAlreadyExistsException / DataflowJobAlreadyUpdatedException when the
 *     service returns a job created by a different request with the same name
 */
@Override public DataflowPipelineJob run(Pipeline pipeline) {
  // Multi-language pipelines and pipelines that include upgrades should automatically be upgraded
  // to Runner v2.
  if (DataflowRunner.isMultiLanguagePipeline(pipeline) || includesTransformUpgrades(pipeline)) {
    List<String> experiments = firstNonNull(options.getExperiments(), Collections.emptyList());
    if (!experiments.contains("use_runner_v2")) {
      LOG.info(
          "Automatically enabling Dataflow Runner v2 since the pipeline used cross-language"
              + " transforms or pipeline needed a transform upgrade.");
      options.setExperiments(
          ImmutableList.<String>builder().addAll(experiments).add("use_runner_v2").build());
    }
  }
  if (useUnifiedWorker(options)) {
    // Fail fast when v2 is simultaneously forced on and off.
    if (hasExperiment(options, "disable_runner_v2")
        || hasExperiment(options, "disable_runner_v2_until_2023")
        || hasExperiment(options, "disable_prime_runner_v2")) {
      throw new IllegalArgumentException(
          "Runner V2 both disabled and enabled: at least one of ['beam_fn_api', 'use_unified_worker', 'use_runner_v2', 'use_portable_job_submission'] is set and also one of ['disable_runner_v2', 'disable_runner_v2_until_2023', 'disable_prime_runner_v2'] is set.");
    }
    // Ensure the full set of v2 experiments is present.
    List<String> experiments = new ArrayList<>(options.getExperiments()); // non-null if useUnifiedWorker is true
    if (!experiments.contains("use_runner_v2")) {
      experiments.add("use_runner_v2");
    }
    if (!experiments.contains("use_unified_worker")) {
      experiments.add("use_unified_worker");
    }
    if (!experiments.contains("beam_fn_api")) {
      experiments.add("beam_fn_api");
    }
    if (!experiments.contains("use_portable_job_submission")) {
      experiments.add("use_portable_job_submission");
    }
    options.setExperiments(ImmutableList.copyOf(experiments));
  }
  logWarningIfPCollectionViewHasNonDeterministicKeyCoder(pipeline);
  logWarningIfBigqueryDLQUnused(pipeline);
  if (shouldActAsStreaming(pipeline)) {
    options.setStreaming(true);
    if (useUnifiedWorker(options)) {
      // Streaming + unified worker implies Streaming Engine.
      options.setEnableStreamingEngine(true);
      List<String> experiments = new ArrayList<>(options.getExperiments()); // non-null if useUnifiedWorker is true
      if (!experiments.contains("enable_streaming_engine")) {
        experiments.add("enable_streaming_engine");
      }
      if (!experiments.contains("enable_windmill_service")) {
        experiments.add("enable_windmill_service");
      }
    }
  }
  if (!ExperimentalOptions.hasExperiment(options, "disable_projection_pushdown")) {
    ProjectionPushdownOptimizer.optimize(pipeline);
  }
  LOG.info(
      "Executing pipeline on the Dataflow Service, which will have billing implications "
          + "related to Google Compute Engine usage and other Google Cloud Services.");
  DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class);
  String workerHarnessContainerImageURL = DataflowRunner.getContainerImageForJob(dataflowOptions);
  // This incorrectly puns the worker harness container image (which implements v1beta3 API)
  // with the SDK harness image (which implements Fn API).
  //
  // The same Environment is used in different and contradictory ways, depending on whether
  // it is a v1 or v2 job submission.
  RunnerApi.Environment defaultEnvironmentForDataflow =
      Environments.createDockerEnvironment(workerHarnessContainerImageURL);
  // The SdkComponents for portable an non-portable job submission must be kept distinct. Both
  // need the default environment.
  SdkComponents portableComponents = SdkComponents.create();
  portableComponents.registerEnvironment(
      defaultEnvironmentForDataflow
          .toBuilder()
          .addAllDependencies(getDefaultArtifacts())
          .addAllCapabilities(Environments.getJavaCapabilities())
          .build());
  RunnerApi.Pipeline portablePipelineProto =
      PipelineTranslation.toProto(pipeline, portableComponents, false);
  // Note that `stageArtifacts` has to be called before `resolveArtifact` because
  // `resolveArtifact` updates local paths to staged paths in pipeline proto.
  portablePipelineProto = resolveAnyOfEnvironments(portablePipelineProto);
  List<DataflowPackage> packages = stageArtifacts(portablePipelineProto);
  portablePipelineProto = resolveArtifacts(portablePipelineProto);
  portablePipelineProto = applySdkEnvironmentOverrides(portablePipelineProto, options);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
        "Portable pipeline proto:\n{}",
        TextFormat.printer().printToString(portablePipelineProto));
  }
  // Stage the portable pipeline proto, retrieving the staged pipeline path, then update
  // the options on the new job
  // TODO: add an explicit `pipeline` parameter to the submission instead of pipeline options
  LOG.info("Staging portable pipeline proto to {}", options.getStagingLocation());
  byte[] serializedProtoPipeline = portablePipelineProto.toByteArray();
  DataflowPackage stagedPipeline =
      options.getStager().stageToFile(serializedProtoPipeline, PIPELINE_FILE_NAME);
  dataflowOptions.setPipelineUrl(stagedPipeline.getLocation());
  if (useUnifiedWorker(options)) {
    LOG.info("Skipping v1 transform replacements since job will run on v2.");
  } else {
    // Now rewrite things to be as needed for v1 (mutates the pipeline)
    // This way the job submitted is valid for v1 and v2, simultaneously
    replaceV1Transforms(pipeline);
  }
  // Capture the SdkComponents for look up during step translations
  SdkComponents dataflowV1Components = SdkComponents.create();
  dataflowV1Components.registerEnvironment(
      defaultEnvironmentForDataflow
          .toBuilder()
          .addAllDependencies(getDefaultArtifacts())
          .addAllCapabilities(Environments.getJavaCapabilities())
          .build());
  // No need to perform transform upgrading for the Runner v1 proto.
  RunnerApi.Pipeline dataflowV1PipelineProto =
      PipelineTranslation.toProto(pipeline, dataflowV1Components, true, false);
  if (LOG.isDebugEnabled()) {
    LOG.debug(
        "Dataflow v1 pipeline proto:\n{}",
        TextFormat.printer().printToString(dataflowV1PipelineProto));
  }
  // Set a unique client_request_id in the CreateJob request.
  // This is used to ensure idempotence of job creation across retried
  // attempts to create a job. Specifically, if the service returns a job with
  // a different client_request_id, it means the returned one is a different
  // job previously created with the same job name, and that the job creation
  // has been effectively rejected. The SDK should return
  // Error::Already_Exists to user in that case.
  int randomNum = new Random().nextInt(9000) + 1000;
  String requestId =
      DateTimeFormat.forPattern("YYYYMMddHHmmssmmm")
              .withZone(DateTimeZone.UTC)
              .print(DateTimeUtils.currentTimeMillis())
          + "_"
          + randomNum;
  JobSpecification jobSpecification =
      translator.translate(pipeline, dataflowV1PipelineProto, dataflowV1Components, this, packages);
  if (!isNullOrEmpty(dataflowOptions.getDataflowWorkerJar()) && !useUnifiedWorker(options)) {
    List<String> experiments =
        firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList());
    if (!experiments.contains("use_staged_dataflow_worker_jar")) {
      dataflowOptions.setExperiments(
          ImmutableList.<String>builder()
              .addAll(experiments)
              .add("use_staged_dataflow_worker_jar")
              .build());
    }
  }
  Job newJob = jobSpecification.getJob();
  try {
    // Round-trip the options through JSON so the service receives them as a plain map.
    newJob
        .getEnvironment()
        .setSdkPipelineOptions(
            MAPPER.readValue(MAPPER_WITH_MODULES.writeValueAsBytes(options), Map.class));
  } catch (IOException e) {
    throw new IllegalArgumentException(
        "PipelineOptions specified failed to serialize to JSON.", e);
  }
  newJob.setClientRequestId(requestId);
  DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo();
  String version = dataflowRunnerInfo.getVersion();
  checkState(
      !"${pom.version}".equals(version),
      "Unable to submit a job to the Dataflow service with unset version ${pom.version}");
  LOG.info("Dataflow SDK version: {}", version);
  newJob.getEnvironment().setUserAgent((Map) dataflowRunnerInfo.getProperties());
  // The Dataflow Service may write to the temporary directory directly, so
  // must be verified.
  if (!isNullOrEmpty(options.getGcpTempLocation())) {
    newJob
        .getEnvironment()
        .setTempStoragePrefix(
            dataflowOptions.getPathValidator().verifyPath(options.getGcpTempLocation()));
  }
  newJob.getEnvironment().setDataset(options.getTempDatasetId());
  if (options.getWorkerRegion() != null) {
    newJob.getEnvironment().setWorkerRegion(options.getWorkerRegion());
  }
  if (options.getWorkerZone() != null) {
    newJob.getEnvironment().setWorkerZone(options.getWorkerZone());
  }
  if (options.getFlexRSGoal() == DataflowPipelineOptions.FlexResourceSchedulingGoal.COST_OPTIMIZED) {
    newJob.getEnvironment().setFlexResourceSchedulingGoal("FLEXRS_COST_OPTIMIZED");
  } else if (options.getFlexRSGoal() == DataflowPipelineOptions.FlexResourceSchedulingGoal.SPEED_OPTIMIZED) {
    newJob.getEnvironment().setFlexResourceSchedulingGoal("FLEXRS_SPEED_OPTIMIZED");
  }
  // Represent the minCpuPlatform pipeline option as an experiment, if not already present.
  if (!isNullOrEmpty(dataflowOptions.getMinCpuPlatform())) {
    List<String> experiments =
        firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList());
    List<String> minCpuFlags =
        experiments.stream()
            .filter(p -> p.startsWith("min_cpu_platform"))
            .collect(Collectors.toList());
    if (minCpuFlags.isEmpty()) {
      dataflowOptions.setExperiments(
          ImmutableList.<String>builder()
              .addAll(experiments)
              .add("min_cpu_platform=" + dataflowOptions.getMinCpuPlatform())
              .build());
    } else {
      LOG.warn(
          "Flag min_cpu_platform is defined in both top level PipelineOption, "
              + "as well as under experiments. Proceed using {}.",
          minCpuFlags.get(0));
    }
  }
  newJob
      .getEnvironment()
      .setExperiments(
          ImmutableList.copyOf(
              firstNonNull(dataflowOptions.getExperiments(), Collections.emptyList())));
  // Set the Docker container image that executes Dataflow worker harness, residing in Google
  // Container Registry. Translator is guaranteed to create a worker pool prior to this point.
  // For runner_v1, only worker_harness_container is set.
  // For runner_v2, both worker_harness_container and sdk_harness_container are set to the same
  // value.
  String containerImage = getContainerImageForJob(options);
  for (WorkerPool workerPool : newJob.getEnvironment().getWorkerPools()) {
    workerPool.setWorkerHarnessContainerImage(containerImage);
  }
  configureSdkHarnessContainerImages(options, portablePipelineProto, newJob);
  newJob.getEnvironment().setVersion(getEnvironmentVersion(options));
  if (hooks != null) {
    hooks.modifyEnvironmentBeforeSubmission(newJob.getEnvironment());
  }
  // enable upload_graph when the graph is too large
  byte[] jobGraphBytes = DataflowPipelineTranslator.jobToString(newJob).getBytes(UTF_8);
  int jobGraphByteSize = jobGraphBytes.length;
  if (jobGraphByteSize >= CREATE_JOB_REQUEST_LIMIT_BYTES
      && !hasExperiment(options, "upload_graph")
      && !useUnifiedWorker(options)) {
    List<String> experiments = firstNonNull(options.getExperiments(), Collections.emptyList());
    options.setExperiments(
        ImmutableList.<String>builder().addAll(experiments).add("upload_graph").build());
    LOG.info(
        "The job graph size ({} in bytes) is larger than {}. Automatically add "
            + "the upload_graph option to experiments.",
        jobGraphByteSize,
        CREATE_JOB_REQUEST_LIMIT_BYTES);
  }
  if (hasExperiment(options, "upload_graph") && useUnifiedWorker(options)) {
    // upload_graph is meaningless on v2; strip every occurrence.
    ArrayList<String> experiments = new ArrayList<>(options.getExperiments());
    while (experiments.remove("upload_graph")) {}
    options.setExperiments(experiments);
    LOG.warn(
        "The upload_graph experiment was specified, but it does not apply "
            + "to runner v2 jobs. Option has been automatically removed.");
  }
  // Upload the job to GCS and remove the graph object from the API call. The graph
  // will be downloaded from GCS by the service.
  if (hasExperiment(options, "upload_graph")) {
    DataflowPackage stagedGraph =
        options.getStager().stageToFile(jobGraphBytes, DATAFLOW_GRAPH_FILE_NAME);
    newJob.getSteps().clear();
    newJob.setStepsLocation(stagedGraph.getLocation());
  }
  if (!isNullOrEmpty(options.getDataflowJobFile())
      || !isNullOrEmpty(options.getTemplateLocation())) {
    boolean isTemplate = !isNullOrEmpty(options.getTemplateLocation());
    if (isTemplate) {
      checkArgument(
          isNullOrEmpty(options.getDataflowJobFile()),
          "--dataflowJobFile and --templateLocation are mutually exclusive.");
    }
    String fileLocation =
        firstNonNull(options.getTemplateLocation(), options.getDataflowJobFile());
    checkArgument(
        fileLocation.startsWith("/") || fileLocation.startsWith("gs://"),
        "Location must be local or on Cloud Storage, got %s.",
        fileLocation);
    ResourceId fileResource = FileSystems.matchNewResource(fileLocation, false /* isDirectory */);
    String workSpecJson = DataflowPipelineTranslator.jobToString(newJob);
    try (PrintWriter printWriter =
        new PrintWriter(
            new BufferedWriter(
                new OutputStreamWriter(
                    Channels.newOutputStream(FileSystems.create(fileResource, MimeTypes.TEXT)),
                    UTF_8)))) {
      printWriter.print(workSpecJson);
      LOG.info("Printed job specification to {}", fileLocation);
    } catch (IOException ex) {
      // Failing to write is fatal only for templates; a job file is best-effort.
      String error = String.format("Cannot create output file at %s", fileLocation);
      if (isTemplate) {
        throw new RuntimeException(error, ex);
      } else {
        LOG.warn(error, ex);
      }
    }
    if (isTemplate) {
      // Template creation stops here; no job is submitted.
      LOG.info("Template successfully created.");
      return new DataflowTemplateJob();
    }
  }
  String jobIdToUpdate = null;
  if (options.isUpdate()) {
    jobIdToUpdate = getJobIdFromName(options.getJobName());
    newJob.setTransformNameMapping(options.getTransformNameMapping());
    newJob.setReplaceJobId(jobIdToUpdate);
  }
  if (options.getCreateFromSnapshot() != null && !options.getCreateFromSnapshot().isEmpty()) {
    newJob.setTransformNameMapping(options.getTransformNameMapping());
    newJob.setCreatedFromSnapshotId(options.getCreateFromSnapshot());
  }
  Job jobResult;
  try {
    jobResult = dataflowClient.createJob(newJob);
  } catch (GoogleJsonResponseException e) {
    String errorMessages = "Unexpected errors";
    if (e.getDetails() != null) {
      if (jobGraphByteSize >= CREATE_JOB_REQUEST_LIMIT_BYTES) {
        errorMessages =
            "The size of the serialized JSON representation of the pipeline "
                + "exceeds the allowable limit. "
                + "For more information, please see the documentation on job submission:\n"
                + "https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#jobs";
      } else {
        errorMessages = e.getDetails().getMessage();
      }
    }
    throw new RuntimeException("Failed to create a workflow job: " + errorMessages, e);
  } catch (IOException e) {
    throw new RuntimeException("Failed to create a workflow job", e);
  }
  // Use a raw client for post-launch monitoring, as status calls may fail
  // regularly and need not be retried automatically.
  DataflowPipelineJob dataflowPipelineJob =
      new DataflowPipelineJob(
          DataflowClient.create(options),
          jobResult.getId(),
          options,
          jobSpecification != null ? jobSpecification.getStepNames() : Collections.emptyMap(),
          portablePipelineProto);
  // If the service returned client request id, the SDK needs to compare it
  // with the original id generated in the request, if they are not the same
  // (i.e., the returned job is not created by this request), throw
  // DataflowJobAlreadyExistsException or DataflowJobAlreadyUpdatedException
  // depending on whether this is a reload or not.
  if (jobResult.getClientRequestId() != null
      && !jobResult.getClientRequestId().isEmpty()
      && !jobResult.getClientRequestId().equals(requestId)) {
    // If updating a job.
    if (options.isUpdate()) {
      throw new DataflowJobAlreadyUpdatedException(
          dataflowPipelineJob,
          String.format(
              "The job named %s with id: %s has already been updated into job id: %s "
                  + "and cannot be updated again.",
              newJob.getName(), jobIdToUpdate, jobResult.getId()));
    } else {
      throw new DataflowJobAlreadyExistsException(
          dataflowPipelineJob,
          String.format(
              "There is already an active job named %s with id: %s. If you want to submit a"
                  + " second job, try again by setting a different name using --jobName.",
              newJob.getName(), jobResult.getId()));
    }
  }
  LOG.info(
      "To access the Dataflow monitoring console, please navigate to {}",
      MonitoringUtil.getJobMonitoringPageURL(
          options.getProject(), options.getRegion(), jobResult.getId()));
  LOG.info("Submitted job: {}", jobResult.getId());
  LOG.info(
      "To cancel the job using the 'gcloud' tool, run:\n> {}",
      MonitoringUtil.getGcloudCancelCommand(options, jobResult.getId()));
  return dataflowPipelineJob;
}
/**
 * A non-existent gcpTempLocation must make run() fail with an IllegalArgumentException
 * whose cause mentions the bad path.
 */
@Test public void testPathExistsValidation() {
  String[] args =
      new String[] {
        "--runner=DataflowRunner",
        "--region=some-region-1",
        "--tempLocation=gs://does/not/exist",
        "--project=test-project",
        "--credentialFactoryClass=" + NoopCredentialFactory.class.getName(),
      };
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage("gcpTempLocation");
  thrown.expectCause(hasProperty("message", containsString("gs://does/not/exist")));
  Pipeline.create(PipelineOptionsFactory.fromArgs(args).create()).run();
}
public static @CheckForNull String getActionUrl(String itUrl, Action action) { String urlName = action.getUrlName(); if (urlName == null) return null; // Should not be displayed try { if (new URI(urlName).isAbsolute()) { return urlName; } } catch (URISyntaxException x) { Logger.getLogger(Functions.class.getName()).log(Level.WARNING, "Failed to parse URL for {0}: {1}", new Object[] {action, x}); return null; } if (urlName.startsWith("/")) return joinPath(Stapler.getCurrentRequest().getContextPath(), urlName); else // relative URL name return joinPath(Stapler.getCurrentRequest().getContextPath() + '/' + itUrl, urlName); }
// Absolute URIs without an authority component (mailto:, javascript:) must be returned
// unchanged rather than being treated as relative URLs.
@Test
@Issue("JENKINS-7725")
public void testGetActionUrl_absoluteUriWithoutAuthority() {
    String[] uris = {
        "mailto:nobody@example.com",
        "mailto:nobody@example.com?subject=hello",
        "javascript:alert('hello')",
    };
    for (String uri : uris) {
        String result = Functions.getActionUrl(null, createMockAction(uri));
        assertEquals(uri, result);
    }
}
/** Static factory: builds a schema from the given columns, in order. */
public static TableSchema of(Column... columns) {
    // Materialize the varargs as a fixed-size list view before handing it to the
    // AutoValue-generated implementation.
    List<Column> columnList = Arrays.asList(columns);
    return new AutoValue_TableSchema(columnList);
}
// Parsing "Nullable(Enum16(...))" must round-trip the enum mapping (including a negative
// code) and flag the resulting type as nullable.
@Test
public void testParseNullableEnum16() {
    Map<String, Integer> enumValues = ImmutableMap.of(
        "a", -1,
        "b", 0,
        "c", 42);
    assertEquals(
        ColumnType.enum16(enumValues).withNullable(true),
        ColumnType.parse("Nullable(Enum16('a' = -1, 'b' = 0, 'c' = 42))"));
}
/**
 * Creates a new etcd-backed {@link MetadataStore} for the given connection URL.
 * No caching or validation happens at this level.
 */
@Override
public MetadataStore create(String metadataURL, MetadataStoreConfig metadataStoreConfig,
                            boolean enableSessionWatcher) throws MetadataStoreException {
    final EtcdMetadataStore store =
            new EtcdMetadataStore(metadataURL, metadataStoreConfig, enableSessionWatcher);
    return store;
}
// End-to-end check: an etcd-backed MetadataStore created against a TLS-enabled single-node
// etcd cluster via a YAML config file (trust cert, client cert/key) must support a basic
// put/exists round trip.
@Test
public void testTlsInstance() throws Exception {
    @Cleanup
    EtcdCluster etcdCluster = EtcdClusterExtension.builder().withClusterName("test-tls").withNodes(1)
            .withSsl(true).build().cluster();
    etcdCluster.start();

    // Client TLS material; authority must match the server certificate's name.
    EtcdConfig etcdConfig = EtcdConfig.builder().useTls(true)
            .tlsProvider(null)
            .authority("etcd0")
            .tlsTrustCertsFilePath(Resources.getResource("ssl/cert/ca.pem").getPath())
            .tlsKeyFilePath(Resources.getResource("ssl/cert/client-key-pk8.pem").getPath())
            .tlsCertificateFilePath(Resources.getResource("ssl/cert/client.pem").getPath())
            .build();
    Path etcdConfigPath = Files.createTempFile("etcd_config", ".yml");
    new ObjectMapper(new YAMLFactory()).writeValue(etcdConfigPath.toFile(), etcdConfig);

    String metadataURL =
            "etcd:" + etcdCluster.clientEndpoints().stream().map(URI::toString).collect(Collectors.joining(","));

    @Cleanup
    MetadataStore store = MetadataStoreFactory.create(metadataURL,
            MetadataStoreConfig.builder().configFilePath(etcdConfigPath.toString()).build());

    store.put("/test", "value".getBytes(StandardCharsets.UTF_8), Optional.empty()).join();
    assertTrue(store.exists("/test").join());
}
// Adapts the key-less ValueJoiner into the ValueJoinerWithKey-based overload and delegates;
// the record key is ignored by the wrapped joiner.
@Override
public <VO, VR> KStream<K, VR> join(final KStream<K, VO> otherStream,
                                    final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
                                    final JoinWindows windows) {
    return join(otherStream, toValueJoinerWithKey(joiner), windows);
}
// Joining with a global table must reject a null key selector with an NPE carrying the
// expected message.
@Test
public void shouldNotAllowNullMapperOnJoinWithGlobalTable() {
    final NullPointerException exception = assertThrows(
        NullPointerException.class,
        () -> testStream.join(testGlobalTable, null, MockValueJoiner.TOSTRING_JOINER));
    assertThat(exception.getMessage(), equalTo("keySelector can't be null"));
}
// Delegates statement execution to the shared helper using this interpreter's session.
@Override
public InterpreterResult interpret(String st, InterpreterContext context) {
    return helper.interpret(session, st, context);
}
// Exercises CQL result-formatting options (locale, numeric precisions, date/time formats,
// timezone) supplied as local properties, and checks the rendered table against a
// German-locale expected output.
@Test
void should_interpret_select_statement_with_formatting_options() {
    // When
    Map<String, String> props = intrContext.getLocalProperties();
    props.put("outputFormat", "human");
    props.put("locale", "de_DE");
    props.put("floatPrecision", "2");
    props.put("doublePrecision", "4");
    props.put("decimalPrecision", "5");
    props.put("timeFormat", "hh:mm");
    props.put("timestampFormat", "MM/dd/yy HH:mm");
    props.put("dateFormat", "EEEE, d MMMM yy");
    props.put("timezone", "Etc/GMT+2");
    String query = "select date,time,timestamp,dec,double,float,tuple,udt from zeppelin.test_format;";
    final InterpreterResult actual = interpreter.interpret(query, intrContext);
    // Clean the shared context so later tests are unaffected by these properties.
    props.remove("outputFormat");
    props.remove("locale");
    props.remove("floatPrecision");
    props.remove("doublePrecision");
    props.remove("decimalPrecision");
    props.remove("timeFormat");
    props.remove("timestampFormat");
    props.remove("dateFormat");
    props.remove("timezone");
    // Then
    assertNotNull(actual);
    assertEquals(Code.SUCCESS, actual.code());
    String expected = "date\ttime\ttimestamp\tdec\tdouble\tfloat\ttuple\tudt\n"
        + "Dienstag, 29 Januar 19\t04:05\t06/16/20 21:59\t123562352352,12346\t10,0153\t20,03\t"
        + "(1, text, 10)\t{id: 1, t: text, lst: [1, 2, 3]}\n";
    assertEquals(expected, actual.message().get(0).getData());
}
/**
 * Returns the configured value for {@code propertyName}, or {@code defaultValue}
 * when the property is not set.
 */
@PublicAPI(usage = ACCESS)
public String getPropertyOrDefault(String propertyName, String defaultValue) {
    // Properties.getProperty returns null for unset keys (after consulting any defaults chain).
    final String configured = properties.getProperty(propertyName);
    return configured != null ? configured : defaultValue;
}
// An explicitly configured property must win over the supplied default.
@Test
public void returns_property_if_property_is_set() {
    writeProperties("configured.property", "explicitlySet");
    ArchConfiguration configuration = testConfiguration(PROPERTIES_FILE_NAME);

    assertThat(configuration.getPropertyOrDefault("configured.property", "default")).isEqualTo("explicitlySet");
}
/**
 * Returns a cached (or freshly created) metastore client for this thread's cache key with
 * its user count incremented via acquire(). A client found to be closed is evicted,
 * released, and replaced. CACHE_TEARDOWN_LOCK ensures the cache's removal listener cannot
 * close a client between getOrCreate() and acquire().
 */
public IMetaStoreClient get(final HiveConf hiveConf) throws MetaException, IOException, LoginException {
    final HiveClientCacheKey cacheKey = HiveClientCacheKey.fromHiveConf(hiveConf, getThreadId());
    ICacheableMetaStoreClient cacheableHiveMetaStoreClient = null;
    // the hmsc is not shared across threads. So the only way it could get closed while we are doing healthcheck
    // is if removalListener closes it. The synchronization takes care that removalListener won't do it
    synchronized (CACHE_TEARDOWN_LOCK) {
        cacheableHiveMetaStoreClient = getOrCreate(cacheKey);
        cacheableHiveMetaStoreClient.acquire();
    }
    if (!cacheableHiveMetaStoreClient.isOpen()) {
        // Stale client: drop it from the cache, release/close our reference, then build and
        // acquire a replacement under the same teardown lock.
        synchronized (CACHE_TEARDOWN_LOCK) {
            hiveCache.invalidate(cacheKey);
            cacheableHiveMetaStoreClient.close();
            cacheableHiveMetaStoreClient = getOrCreate(cacheKey);
            cacheableHiveMetaStoreClient.acquire();
        }
    }
    return cacheableHiveMetaStoreClient;
}
// The cache must return the same client instance when a HiveConf setting that is not part
// of the cache key changes, and close() on an acquired client must not evict it.
@Test
public void testCacheHit() throws IOException, MetaException, LoginException {
    HiveClientCache cache = new HiveClientCache(1000);
    HiveClientCache.ICacheableMetaStoreClient client =
        (HiveClientCache.ICacheableMetaStoreClient) cache.get(hiveConf);
    assertNotNull(client);
    client.close(); // close shouldn't matter

    // Setting a non important configuration should return the same client only
    hiveConf.setIntVar(HiveConf.ConfVars.DYNAMIC_PARTITION_MAX_PARTS, 10);
    HiveClientCache.ICacheableMetaStoreClient client2 =
        (HiveClientCache.ICacheableMetaStoreClient) cache.get(hiveConf);
    assertNotNull(client2);
    assertSame(client, client2);
    assertEquals(client.getUsers(), client2.getUsers());
    client2.close();
}
/**
 * Returns whether the given type is composite: only STRUCTURED_TYPE and ROW qualify.
 * Distinct types are unwrapped (possibly repeatedly) down to their source type first.
 */
public static boolean isCompositeType(LogicalType logicalType) {
    LogicalType current = logicalType;
    while (current instanceof DistinctType) {
        current = ((DistinctType) current).getSourceType();
    }
    final LogicalTypeRoot root = current.getTypeRoot();
    return root == STRUCTURED_TYPE || root == ROW;
}
// A ROW data type must be reported as composite.
@Test
void testIsCompositeTypeRowType() {
    DataType dataType = ROW(FIELD("f0", INT()), FIELD("f1", STRING()));

    assertThat(LogicalTypeChecks.isCompositeType(dataType.getLogicalType())).isTrue();
}
// Creates a client with the default configuration: a single Redis node on localhost
// at the standard port.
public static RedissonClient create() {
    Config config = new Config();
    config.useSingleServer()
          .setAddress("redis://127.0.0.1:6379");
    return create(config);
}
// Creating a client against an unreachable replicated-servers address must fail with
// RedisConnectionException.
@Test
public void testReplicatedConnectionFail() {
    Assertions.assertThrows(RedisConnectionException.class, () -> {
        Config config = new Config();
        config.useReplicatedServers().addNodeAddress("redis://127.99.0.1:1111");
        Redisson.create(config);

        Thread.sleep(1500);
    });
}
/**
 * Collects the table names referenced by a data source: recurses into subqueries, unions
 * both sides of a join, and otherwise returns the singleton table name. Returns
 * {@code null} for a {@code null} data source (callers rely on that sentinel).
 */
private static Set<String> getTableNames(DataSource dataSource) {
    if (dataSource == null) {
        return null;
    } else if (dataSource.getSubquery() != null) {
        return getTableNames(dataSource.getSubquery());
    } else if (dataSource.isSetJoin()) {
        // BUG FIX: the right side of the join previously added getLeft() twice, so tables
        // reachable only through the right side of a join were silently dropped.
        return ImmutableSet.<String>builder()
            .addAll(getTableNames(dataSource.getJoin().getLeft()))
            .addAll(getTableNames(dataSource.getJoin().getRight()))
            .build();
    }
    return ImmutableSet.of(dataSource.getTableName());
}
// Data-driven check: table-name resolution over parsed Pinot queries must match the
// expected set (or null) for each provider case.
@Test(dataProvider = "queryProvider")
public void testResolveTableNames(String query, Set<String> expectedSet) {
    SqlNodeAndOptions sqlNodeAndOptions = CalciteSqlParser.compileToSqlNodeAndOptions(query);
    Set<String> tableNames =
        RequestUtils.getTableNames(CalciteSqlParser.compileSqlNodeToPinotQuery(sqlNodeAndOptions.getSqlNode()));
    if (expectedSet == null) {
        assertNull(tableNames);
    } else {
        assertEquals(tableNames, expectedSet);
    }
}
/**
 * Ensures the record described by {@code location} fits within the data file. The cached
 * {@code dataFile.length} may lag behind the on-disk size, so when the record end exceeds
 * it we fall back to the (expensive) OS file-length lookup before rejecting the read.
 *
 * @throws IOException if the record extends past the actual on-disk file length
 */
void validateFileLength(final Location location) throws IOException {
    final long recordEnd = location.getOffset() + location.getSize();

    //Check if the end of the record will go past the file length
    if (recordEnd > dataFile.length) {
        /*
         * AMQ-9254 if the read request is outside expected dataFile length,
         * perform expensive OS file length lookup operation to allow read
         * operation if it will succeed
         */
        final long osFileLength = dataFile.getFile().length();
        if(recordEnd > osFileLength) {
            throw new IOException("Invalid location size: " + location + ", size: " + location.getSize());
        } else {
            LOG.warn("DataFile:{} actual length:{} larger than expected:{} for readRecord location:{} size:{}",
                dataFile.file.getName(), osFileLength, dataFile.length, location, location.getSize());
        }
    }
}
// A read whose end (offset + size) stays within the cached data-file length must pass
// validation without throwing.
@Test
public void testValidLocation() throws IOException {
    //Create file of size 1024
    final DataFileAccessor accessor = new DataFileAccessor(mock(Journal.class),
        newTestDataFile(1024));

    //The read check will add the offset and location size and will be 612
    //so should be fine as it's less than the set file size of 1024
    final Location location = new Location(0, 100);
    location.setSize(512);
    accessor.validateFileLength(location);
}
/**
 * Builds the refresh Task backing a materialized view. Task properties carry the MV id,
 * the MV's own properties, and the resolved warehouse name; the definition and post-run
 * analyze statement are derived from the MV itself. Expire time 0 means the task does not
 * expire.
 */
public static Task buildMvTask(MaterializedView materializedView, String dbName) {
    Task task = new Task(getMvTaskName(materializedView.getId()));
    task.setSource(Constants.TaskSource.MV);
    task.setDbName(dbName);
    Map<String, String> taskProperties = Maps.newHashMap();
    taskProperties.put(PartitionBasedMvRefreshProcessor.MV_ID, String.valueOf(materializedView.getId()));
    taskProperties.putAll(materializedView.getProperties());

    // In PropertyAnalyzer.analyzeMVProperties, it removed the warehouse property, because
    // it only keeps session started properties
    Warehouse warehouse = GlobalStateMgr.getCurrentState().getWarehouseMgr()
            .getWarehouse(materializedView.getWarehouseId());
    taskProperties.put(PropertyAnalyzer.PROPERTIES_WAREHOUSE, warehouse.getName());

    task.setProperties(taskProperties);
    task.setDefinition(materializedView.getTaskDefinition());
    task.setPostRun(getAnalyzeMVStmt(materializedView.getName()));
    task.setExpireTime(0L);
    // Record the submitting user when invoked inside a connection context.
    if (ConnectContext.get() != null) {
        task.setCreateUser(ConnectContext.get().getCurrentUserIdentity().getUser());
        task.setUserIdentity(ConnectContext.get().getCurrentUserIdentity());
    }
    handleSpecialTaskProperties(task);
    return task;
}
// buildMvTask must work against the mocked default warehouse and produce the expected
// "insert overwrite" task definition for the MV.
@Test
public void testTaskBuilderForMv() {
    // mock the warehouse of MaterializedView for creating task
    new MockUp<WarehouseManager>() {
        @Mock
        public Warehouse getWarehouse(long warehouseId) {
            return new DefaultWarehouse(WarehouseManager.DEFAULT_WAREHOUSE_ID,
                    WarehouseManager.DEFAULT_WAREHOUSE_NAME);
        }

        @Mock
        public Warehouse getWarehouse(String warehouse) {
            return new DefaultWarehouse(WarehouseManager.DEFAULT_WAREHOUSE_ID,
                    WarehouseManager.DEFAULT_WAREHOUSE_NAME);
        }
    };

    MaterializedView mv = new MaterializedView();
    mv.setName("aa.bb.cc");
    mv.setViewDefineSql("select * from table1");
    mv.setTableProperty(new TableProperty());
    Task task = TaskBuilder.buildMvTask(mv, "test");
    Assert.assertEquals("insert overwrite `aa.bb.cc` select * from table1", task.getDefinition());
}
// Writes the state machine's configuration as a Graphviz dot file (with labels) to the
// given path; CREATE+WRITE creates the file if missing.
@VisibleForTesting
public void generateStateDiagram(Path outputFile) throws IOException {
    try (OutputStream outFile = Files.newOutputStream(outputFile,
        StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
        mStateMachine.configuration().generateDotFileInto(outFile, /* printLabels */ true);
    }
}
// Smoke test: generating the read-handler state diagram into a dot file must not throw.
@Test
public void testGenerateStateDiagram() throws IOException {
    EmbeddedChannel embeddedChannel = new EmbeddedChannel();
    NettyReadHandlerStateMachine<ReadRequest> stateMachine = new NettyReadHandlerStateMachine<>(
        embeddedChannel, ReadRequest.class, (readRequest) -> null);
    stateMachine.generateStateDiagram(new File("output.dot").toPath());
}
// Appends "INSERT INTO <quoted table> (<quoted field list>)" to the builder, using the
// dialect's identifier quoting.
void appendInsertClause(StringBuilder sb) {
    sb.append("INSERT INTO ");
    dialect.quoteIdentifier(sb, jdbcTable.getExternalNameList());
    sb.append(' ');
    appendFieldNames(sb, jdbcTable.dbFieldNames());
}
// The MySQL upsert builder must emit a backtick-quoted INSERT clause with the field list.
@Test
void appendInsertClause() {
    MySQLUpsertQueryBuilder builder = new MySQLUpsertQueryBuilder(jdbcTable, dialect);
    StringBuilder sb = new StringBuilder();
    builder.appendInsertClause(sb);
    String insertClause = sb.toString();
    assertThat(insertClause).isEqualTo("INSERT INTO `table1` (`field1`,`field2`)");
}
// Switches the cluster member-lookup implementation to the requested type.
// NOTE(review): catches Throwable deliberately so any failure is reported as a REST error
// payload instead of propagating to the web layer — confirm this is intended.
@PostMapping(value = "/switch/lookup")
@Secured(resource = Commons.NACOS_CORE_CONTEXT + "/cluster", action = ActionTypes.WRITE, signType = SignType.CONSOLE)
public RestResult<String> switchLookup(@RequestParam(name = "type") String type) {
    try {
        memberManager.switchLookup(type);
        return RestResultUtils.success();
    } catch (Throwable ex) {
        return RestResultUtils.failed(ex.getMessage());
    }
}
// Switching the member lookup type through the controller must report success.
// NOTE(review): relies on the test's memberManager accepting lookup type "test".
@Test
void testSwitchLookup() {
    RestResult<String> result = nacosClusterController.switchLookup("test");
    assertTrue(result.ok());
}
/**
 * Returns whether the given config key (for the given resource type) is LIST-typed and
 * therefore splittable; unknown resource types and unregistered keys are not splittable.
 */
public boolean isSplittable(ConfigResource.Type type, String key) {
    final ConfigDef def = configDefs.get(type);
    if (def != null) {
        final ConfigDef.ConfigKey configKey = def.configKeys().get(key);
        return configKey != null && configKey.type == ConfigDef.Type.LIST;
    }
    return false;
}
// LIST-typed config keys are splittable; non-list keys and unregistered keys are not.
@Test
public void testIsSplittable() {
    assertTrue(SCHEMA.isSplittable(BROKER, "foo.bar"));
    assertFalse(SCHEMA.isSplittable(BROKER, "baz"));
    assertFalse(SCHEMA.isSplittable(BROKER, "foo.baz.quux"));
    assertFalse(SCHEMA.isSplittable(TOPIC, "baz"));
    assertTrue(SCHEMA.isSplittable(TOPIC, "abc"));
}
/**
 * Returns whether the instance runs inside a VPC: true only when its data-center info is
 * AmazonInfo carrying a non-empty vpcId metadata entry.
 */
public static boolean isInVpc(InstanceInfo instanceInfo) {
    final Object dataCenterInfo = instanceInfo.getDataCenterInfo();
    if (!(dataCenterInfo instanceof AmazonInfo)) {
        return false;
    }
    final String vpcId = ((AmazonInfo) dataCenterInfo).get(AmazonInfo.MetaDataKey.vpcId);
    return !isNullOrEmpty(vpcId);
}
// Only instances whose AmazonInfo metadata carries a vpcId are in-VPC; MyOwn data centers
// and Amazon instances without a vpcId are not.
@Test
public void testIsInVpc() {
    InstanceInfo instanceInfo1 = new InstanceInfo.Builder(InstanceInfoGenerator.takeOne())
            .setDataCenterInfo(new DataCenterInfo() {
                @Override
                public Name getName() {
                    return Name.MyOwn;
                }
            })
            .build();
    Assert.assertFalse(EurekaUtils.isInVpc(instanceInfo1));

    InstanceInfo instanceInfo2 = InstanceInfoGenerator.takeOne();
    Assert.assertFalse(EurekaUtils.isInVpc(instanceInfo2));

    InstanceInfo instanceInfo3 = InstanceInfoGenerator.takeOne();
    ((AmazonInfo) instanceInfo3.getDataCenterInfo()).getMetadata()
            .put(AmazonInfo.MetaDataKey.vpcId.getName(), "vpc-123456");
    Assert.assertTrue(EurekaUtils.isInVpc(instanceInfo3));
}
@Benchmark @Threads(16) // Use several threads since we expect contention during bundle processing. public void testLargeBundle(TrivialTransform trivialTransform) throws Exception { Map<String, ? super Coder<WindowedValue<?>>> remoteOutputCoders = trivialTransform.descriptor.getRemoteOutputCoders(); Map<String, RemoteOutputReceiver<?>> outputReceivers = new HashMap<>(); AtomicInteger outputValuesCount = new AtomicInteger(); for (Entry<String, ? super Coder<WindowedValue<?>>> remoteOutputCoder : remoteOutputCoders.entrySet()) { outputReceivers.put( remoteOutputCoder.getKey(), RemoteOutputReceiver.of( (Coder) remoteOutputCoder.getValue(), (FnDataReceiver<? super WindowedValue<?>>) (WindowedValue<?> value) -> outputValuesCount.incrementAndGet())); } try (RemoteBundle bundle = trivialTransform.processor.newBundle(outputReceivers, BundleProgressHandler.ignored())) { for (int i = 0; i < 1_000; i++) { Iterables.getOnlyElement(bundle.getInputReceivers().values()) .accept(valueInGlobalWindow(new byte[0])); } } assertEquals(3_000, outputValuesCount.getAndSet(0)); }
// Runs the large-bundle benchmark body once as a plain test to catch regressions
// without the JMH harness.
@Test
public void testLargeBundle() throws Exception {
    TrivialTransform transform = new TrivialTransform();
    transform.elementsEmbedding = elementsEmbedding;
    new ProcessBundleBenchmark().testLargeBundle(transform);
    transform.tearDown();
}
// Compares the current cluster-wide version against the supplied one.
protected boolean isClusterVersionEqualTo(Version version) {
    Version clusterVersion = getNodeEngine().getClusterService().getClusterVersion();
    return clusterVersion.isEqualTo(version);
}
// The current cluster version must compare equal to itself.
@Test
public void testClusterVersion_isEqualTo_currentVersion() {
    assertTrue(object.isClusterVersionEqualTo(CURRENT_CLUSTER_VERSION));
}
// Fetches the streaming config work item for the given computation from the Dataflow
// service (with retry) and converts it into a ComputationConfig, if present.
@Override
public Optional<ComputationConfig> fetchConfig(String computationId) {
    Preconditions.checkArgument(
        !computationId.isEmpty(),
        "computationId is empty. Cannot fetch computation config without a computationId.");
    return fetchConfigWithRetry(
            () -> dataflowServiceClient.getStreamingConfigWorkItem(computationId))
        .flatMap(StreamingEngineComputationConfigFetcher::createComputationConfig);
}
// A RuntimeException thrown by the Dataflow service must propagate out of fetchConfig
// unchanged (same instance).
@Test
public void testGetComputationConfig_fetchConfigFromDataflowError() throws IOException {
    streamingEngineConfigFetcher =
        createConfigFetcher(/* waitForInitialConfig= */ false, 0, ignored -> {});
    RuntimeException e = new RuntimeException("something bad happened.");
    when(mockDataflowServiceClient.getStreamingConfigWorkItem(anyString())).thenThrow(e);

    Throwable fetchConfigError =
        assertThrows(
            RuntimeException.class, () -> streamingEngineConfigFetcher.fetchConfig("someComputationId"));
    assertThat(fetchConfigError).isSameInstanceAs(e);
}
// Name of the PMML file backing this context.
@Override
public String getFileName() {
    return fileName;
}
// The context must return the file name it was constructed with.
@Test
void getFileName() {
    PMMLRuntimeContextImpl retrieved =
        new PMMLRuntimeContextImpl(new PMMLRequestData(), fileName, memoryCompilerClassLoader);
    assertThat(retrieved.getFileName()).isEqualTo(fileName);
}
/**
 * Returns the singleton broker configuration.
 *
 * @throws IllegalArgumentException if no configuration has been set yet
 */
public static BrokerConfig getBrokerConfig() {
    if (brokerConfig != null) {
        return brokerConfig;
    }
    throw new IllegalArgumentException("brokerConfig Cannot be null !");
}
// Accessing the singleton before a BrokerConfig has been set must throw.
@Test(expected = IllegalArgumentException.class)
public void getBrokerConfig_NullConfiguration_ThrowsException() {
    BrokerConfigSingleton.getBrokerConfig();
}
// Factory for a page window starting at the given record offset.
// NOTE(review): a test relies on null pageSize being accepted here, so no validation is
// performed at this level — any defaulting presumably happens in the constructor.
public static Pagination pageStartingAt(Integer offset, Integer total, Integer pageSize) {
    return new Pagination(offset, total, pageSize);
}
// A null page size must be tolerated by the factory (no exception).
@Test
public void shouldCreatePaginationProvidingNull() {
    try {
        Pagination.pageStartingAt(0, 1000, null);
    } catch (Exception e) {
        fail();
    }
}
/**
 * One duty cycle of the conductor: track cycle time, process timers, poll client commands
 * (unless an async client command is still in flight), drain the internal command queue,
 * track stream positions, run name resolution, and free end-of-life resources.
 *
 * @return the amount of work performed this cycle (0 means the cycle was idle)
 */
public int doWork() {
    final long nowNs = nanoClock.nanoTime();
    trackTime(nowNs);

    int workCount = 0;
    workCount += processTimers(nowNs);

    // Skip polling for new client commands while an async command is outstanding.
    if (!asyncClientCommandInFlight) {
        workCount += clientCommandAdapter.receive();
    }

    workCount += drainCommandQueue();
    workCount += trackStreamPositions(workCount, nowNs);
    workCount += nameResolver.doWork(cachedEpochClock.time());
    workCount += freeEndOfLifeResources(ctx.resourceFreeLimit());

    return workCount;
}
// Drives the conductor with varying cycle durations: the max-cycle-time counter must
// record the longest cycle (1s), and the threshold-exceeded counter must count 3 cycles
// (750ms, 1000ms, 601ms qualify — presumably against a 600ms threshold; confirm in the
// test fixture's configuration).
@Test
void shouldIncrementCounterOnConductorThresholdExceeded() {
    final AtomicCounter maxCycleTime = spySystemCounters.get(CONDUCTOR_MAX_CYCLE_TIME);
    final AtomicCounter thresholdExceeded = spySystemCounters.get(CONDUCTOR_CYCLE_TIME_THRESHOLD_EXCEEDED);

    driverConductor.doWork();
    nanoClock.advance(MILLISECONDS.toNanos(750));
    driverConductor.doWork();
    nanoClock.advance(MILLISECONDS.toNanos(1000));
    driverConductor.doWork();
    nanoClock.advance(MILLISECONDS.toNanos(500));
    driverConductor.doWork();
    nanoClock.advance(MILLISECONDS.toNanos(600));
    driverConductor.doWork();
    nanoClock.advance(MILLISECONDS.toNanos(601));
    driverConductor.doWork();

    assertEquals(SECONDS.toNanos(1), maxCycleTime.get());
    assertEquals(3, thresholdExceeded.get());
}
/**
 * Returns the cell at (rowIndex, columnIndex) as a string: textual JSON nodes yield their
 * raw text, all other node types their JSON serialization.
 */
@Override
public String getString(int rowIndex, int columnIndex) {
    final JsonNode cell = _rowsArray.get(rowIndex).get(columnIndex);
    return cell.isTextual() ? cell.textValue() : cell.toString();
}
// Cell (0, 0) of the canned result table must come back as the raw string "r1c1".
@Test
public void testGetString() {
    // Run the test
    final String result = _resultTableResultSetUnderTest.getString(0, 0);

    // Verify the results
    assertEquals("r1c1", result);
}
// Delegates ordering to the shared comparator so all DateTimeStamp comparisons agree.
@Override
public int compareTo(DateTimeStamp dateTimeStamp) {
    return comparator.compare(this,dateTimeStamp);
}
// A later timestamp must compare greater than an earlier one.
// NOTE(review): asserts the exact value 1 rather than just a positive sign; this relies
// on the underlying comparator returning exactly 1 — confirm before relying on it.
@Test
void testCompareGreaterThan() {
    DateTimeStamp smaller = new DateTimeStamp("2018-04-04T09:10:00.586-0100");
    DateTimeStamp greater = new DateTimeStamp("2018-04-04T10:10:00.587-0100");
    assertEquals(1, greater.compareTo(smaller));
}
/**
 * Offers the element unless the queue has already reached its fixed capacity,
 * in which case the element is rejected and {@code false} is returned.
 */
@Override
public boolean offer(E e) {
    return size() != capacity && super.offer(e);
}
// Once the queue is filled to capacity, offer() must reject further elements.
@Test
public void test() {
    int capacity = 16;
    BoundPriorityQueue<Integer> queue = new BoundPriorityQueue<>(capacity);
    for (int k = 0; k < capacity; k++) {
        queue.add(k);
    }

    boolean result = queue.offer(0);
    assertFalse(result);
}
// Convenience overload: allocate a local write path without a known size requirement.
public Path getLocalPathForWrite(String pathStr, Configuration conf) throws IOException {
    return getLocalPathForWrite(pathStr, SIZE_UNKNOWN, conf);
}
// Requesting more space than any local directory can provide must fail with a
// DiskErrorException naming the path and requested size.
@Test(timeout = 30000)
public void testGetLocalPathForWriteForLessSpace() throws Exception {
    String dir0 = buildBufferDir(ROOT, 0);
    String dir1 = buildBufferDir(ROOT, 1);
    conf.set(CONTEXT, dir0 + "," + dir1);
    LambdaTestUtils.intercept(DiskErrorException.class,
        String.format("Could not find any valid local directory for %s with requested size %s",
            "p1/x", Long.MAX_VALUE - 1),
        "Expect a DiskErrorException.",
        () -> dirAllocator.getLocalPathForWrite("p1/x", Long.MAX_VALUE - 1, conf));
}
/**
 * Serializes the map as one length byte followed by "key=value" strings. A null map is
 * encoded as length 0 (indistinguishable from an empty map on read). '=' is the reserved
 * key/value separator, so neither keys nor values may contain it, and at most 255 entries
 * fit in the single length byte.
 */
public void putMap(Map<String, String> map) {
    if (map == null) {
        putNumber1(0);
        return;
    }
    Utils.checkArgument(map.size() < 256, "Map has to be smaller than 256 elements");
    putNumber1(map.size());
    for (Entry<String, String> entry : map.entrySet()) {
        final String key = entry.getKey();
        final String value = entry.getValue();
        if (key.contains("=")) {
            throw new IllegalArgumentException("Keys cannot contain '=' sign. " + entry);
        }
        if (value.contains("=")) {
            throw new IllegalArgumentException("Values cannot contain '=' sign. " + entry);
        }
        putString(key + "=" + value);
    }
}
// Map keys containing '=' must be rejected, since '=' is the key/value separator.
@Test(expected = IllegalArgumentException.class)
public void testMapIncorrectKey() {
    ZFrame frame = new ZFrame(new byte[(1 + 10)]);
    ZNeedle needle = new ZNeedle(frame);

    Map<String, String> map = new HashMap<>();
    map.put("ke=", "value");
    needle.putMap(map);
}
// Annotates whichever span is currently in scope via the tracer's current customizer.
@Override
public SpanCustomizer annotate(String value) {
    return tracer.currentSpanCustomizer().annotate(value);
}
// annotate() on the customizer must attach the annotation to the span currently in scope.
@Test
void annotate() {
    span.start();
    try (SpanInScope scope = tracing.tracer().withSpanInScope(span)) {
        spanCustomizer.annotate("foo");
    }
    span.flush();

    assertThat(spans.get(0).annotations())
        .extracting(Map.Entry::getValue)
        .containsExactly("foo");
}
@VisibleForTesting // TODO(aksingh737,jzacsh) stop exposing this to unit tests public long importPhotos(Collection<PhotoModel> photos, GPhotosUpload gPhotosUpload) throws Exception { return gPhotosUpload.uploadItemsViaBatching(photos, this::importPhotoBatch); }
// When createPhotos fails with a generic IOException, importPhotos must propagate it
// as an IOException rather than swallowing or wrapping it.
@Test
public void importPhotoCreatePhotosOtherException() throws Exception {
    PhotoModel photoModel = new PhotoModel(
        PHOTO_TITLE, IMG_URI, PHOTO_DESCRIPTION, JPEG_MEDIA_TYPE, "oldPhotoID1", OLD_ALBUM_ID, true);
    Mockito.when(googlePhotosInterface.uploadMediaContent(any(), eq(null)))
        .thenReturn("token1", "token2");

    JobStore jobStore = Mockito.mock(LocalJobStore.class);
    Mockito.when(jobStore.getStream(any(), any()))
        .thenReturn(
            new TemporaryPerJobDataStore.InputStreamWrapper(
                new ByteArrayInputStream("TestingBytes".getBytes())));
    googlePhotosImporter = new GooglePhotosImporter(
        null, jobStore, null, null, googlePhotosInterface, connectionProvider, monitor, 1.0);

    Mockito.when(googlePhotosInterface.createPhotos(any(NewMediaItemUpload.class)))
        .thenThrow(new IOException("Some other exception"));
    GoogleAlbum responseAlbum = new GoogleAlbum();
    Mockito.when(googlePhotosInterface.getAlbum(any())).thenReturn(responseAlbum);

    assertThrows(IOException.class,
        () -> googlePhotosImporter.importPhotos(Lists.newArrayList(photoModel),
            new GPhotosUpload(uuid, executor, Mockito.mock(TokensAndUrlAuthData.class))));
}
// Creates a baggage field by name, bound to the process-wide shared baggage context.
public static BaggageField create(String name) {
    return new BaggageField(name, ExtraBaggageContext.get());
}
// create(name) must bind the field to the shared ExtraBaggageContext, while the
// constructor honours an explicitly supplied context.
@Test
void internalStorage() {
    assertThat(BaggageField.create("foo").context)
        .isSameAs(ExtraBaggageContext.get());

    BaggageContext context = mock(BaggageContext.class);
    assertThat(new BaggageField("context", context).context)
        .isSameAs(context);
}
/**
 * Runs the statistic over the visible graph view, honouring the directedness flag
 * to choose between the directed and undirected view; resets cancellation first.
 */
@Override
public void execute(GraphModel graphModel) {
    isCanceled = false;
    final Graph graph = isDirected
            ? graphModel.getDirectedGraphVisible()
            : graphModel.getUndirectedGraphVisible();
    execute(graph);
}
// Running eigenvector centrality when the result column already exists (with a different
// element type) must not throw.
@Test
public void testColumnReplace() {
    GraphModel graphModel = GraphGenerator.generateNullUndirectedGraph(1);
    graphModel.getNodeTable().addColumn(EigenvectorCentrality.EIGENVECTOR, String.class);

    EigenvectorCentrality ec = new EigenvectorCentrality();
    ec.execute(graphModel);
}
/**
 * Returns the set's elements in a freshly allocated Object array, delegating to the
 * generic toArray(T[]) overload with an exactly-sized array.
 */
@Override
public Object[] toArray() {
    final Object[] result = new Object[size];
    return toArray(result);
}
// toArray(T[]) with a correctly sized array must fill and return the provided array,
// containing every populated element exactly once.
@Test
public void testToGenericArray() {
    final OAHashSet<Integer> set = new OAHashSet<>(8);
    populateSet(set, 10);

    final Integer[] setElementsProvided = new Integer[10];
    final Integer[] setElementsReturned = set.toArray(setElementsProvided);
    assertSame(setElementsProvided, setElementsReturned);

    final BitSet foundElements = new BitSet(10);
    for (Integer foundElement : setElementsProvided) {
        foundElements.set(foundElement);
    }

    for (int i = 0; i < 10; i++) {
        assertTrue(foundElements.get(i));
    }
}
// Instantiates the plugin described by the option (plugin string/class plus optional
// argument), wrapping I/O and URI failures in a CucumberException.
Plugin create(Options.Plugin plugin) {
    try {
        return instantiate(plugin.pluginString(), plugin.pluginClass(), plugin.argument());
    } catch (IOException | URISyntaxException e) {
        throw new CucumberException(e);
    }
}
// Creating a file-writing plugin whose parent "directory" is actually a file produced by a
// previous plugin must fail with the explanatory colliding-paths message.
@Test
void cant_create_plugin_when_parent_directory_is_a_file() throws IOException {
    Path htmlReport = tmp.resolve("target/cucumber/reports");
    PluginOption htmlOption = parse("html:" + htmlReport);
    plugin = fc.create(htmlOption);
    Path jsonReport = tmp.resolve("target/cucumber/reports/cucumber.json");
    PluginOption jsonOption = parse("json:" + jsonReport);
    IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> fc.create(jsonOption));
    assertThat(exception.getMessage(), is(equalTo(
        "Couldn't create parent directories of '" + jsonReport.toFile().getCanonicalPath() + "'.\n" +
                "Make sure the the parent directory '" + jsonReport.getParent().toFile().getCanonicalPath()
                + "' isn't a file.\n" +
                "\n" +
                "Note: This usually happens when plugins write to colliding paths.\n" +
                "For example: 'html:target/cucumber, json:target/cucumber/report.json'\n" +
                "You can fix this by making the paths do no collide.\n" +
                "For example: 'html:target/cucumber/report.html, json:target/cucumber/report.json'\n" +
                "The details are in the stack trace below:"))));
}
/**
 * Assigns standby replicas for each stateful task, preferring clients whose rack-awareness
 * tags differ from the client running the active task; replicas that cannot be placed on
 * differently-tagged clients fall back to the least-loaded clients. Always returns false:
 * standby assignment never requires a follow-up probing rebalance.
 */
@Override
public boolean assign(final Map<ProcessId, ClientState> clients,
                      final Set<TaskId> allTaskIds,
                      final Set<TaskId> statefulTaskIds,
                      final AssignmentConfigs configs) {
    final int numStandbyReplicas = configs.numStandbyReplicas();
    final Set<String> rackAwareAssignmentTags = new HashSet<>(tagsFunction.apply(configs));
    final Map<TaskId, Integer> tasksToRemainingStandbys = computeTasksToRemainingStandbys(
        numStandbyReplicas,
        statefulTaskIds
    );

    // Per-tag-key value sets and per-(key,value) client sets, used to spread replicas
    // across distinct tag values.
    final Map<String, Set<String>> tagKeyToValues = new HashMap<>();
    final Map<TagEntry, Set<ProcessId>> tagEntryToClients = new HashMap<>();

    fillClientsTagStatistics(clients, tagEntryToClients, tagKeyToValues);

    final ConstrainedPrioritySet standbyTaskClientsByTaskLoad = createLeastLoadedPrioritySetConstrainedByAssignedTask(clients);

    final Map<TaskId, ProcessId> pendingStandbyTasksToClientId = new HashMap<>();

    for (final TaskId statefulTaskId : statefulTaskIds) {
        for (final Map.Entry<ProcessId, ClientState> entry : clients.entrySet()) {
            final ProcessId clientId = entry.getKey();
            final ClientState clientState = entry.getValue();

            // Standbys are placed relative to the client that owns the active task.
            if (clientState.activeTasks().contains(statefulTaskId)) {
                assignStandbyTasksToClientsWithDifferentTags(
                    numStandbyReplicas,
                    standbyTaskClientsByTaskLoad,
                    statefulTaskId,
                    clientId,
                    rackAwareAssignmentTags,
                    clients,
                    tasksToRemainingStandbys,
                    tagKeyToValues,
                    tagEntryToClients,
                    pendingStandbyTasksToClientId
                );
            }
        }
    }

    if (!tasksToRemainingStandbys.isEmpty()) {
        // Not enough differently-tagged clients: place the remainder purely by load.
        assignPendingStandbyTasksToLeastLoadedClients(clients,
                                                      numStandbyReplicas,
                                                      standbyTaskClientsByTaskLoad,
                                                      tasksToRemainingStandbys);
    }

    // returning false, because standby task assignment will never require a follow-up probing rebalance.
    return false;
}
// Compares the rack-function-based standby assignor (built via StandbyTaskAssignorFactory
// from RackAwareTaskAssignor process racks) against the explicit client-tag-aware assignor:
// both must yield equivalent rack/zone-spread standby distributions (no standby co-located
// with its active task's rack/zone; 2 standbys per task; 12 total).
@Test
public void shouldDistributeStandbyTasksUsingFunctionAndSupplierTags() {
    final Map<ProcessId, String> racksForProcess = mkMap(
        mkEntry(PID_1, "rack1"), mkEntry(PID_2, "rack2"), mkEntry(PID_3, "rack3"),
        mkEntry(PID_4, "rack1"), mkEntry(PID_5, "rack2"), mkEntry(PID_6, "rack3"),
        mkEntry(PID_7, "rack1"), mkEntry(PID_8, "rack2"), mkEntry(PID_9, "rack3")
    );
    final RackAwareTaskAssignor rackAwareTaskAssignor = mock(RackAwareTaskAssignor.class);
    when(rackAwareTaskAssignor.validClientRack()).thenReturn(true);
    when(rackAwareTaskAssignor.racksForProcess()).thenReturn(racksForProcess);
    final AssignmentConfigs assignmentConfigs = newAssignmentConfigs(2);

    standbyTaskAssignor = StandbyTaskAssignorFactory.create(assignmentConfigs, rackAwareTaskAssignor);
    verify(rackAwareTaskAssignor, times(1)).racksForProcess();

    // Clients PID_1..3 own the active tasks; PID_4..9 are standby candidates.
    final Map<ProcessId, ClientState> clientStates = mkMap(
        mkEntry(PID_1, createClientStateWithCapacity(PID_1, 2, mkMap(), TASK_0_0, TASK_1_0)),
        mkEntry(PID_2, createClientStateWithCapacity(PID_2, 2, mkMap(), TASK_0_1, TASK_1_1)),
        mkEntry(PID_3, createClientStateWithCapacity(PID_3, 2, mkMap(), TASK_0_2, TASK_1_2)),
        mkEntry(PID_4, createClientStateWithCapacity(PID_4, 2, mkMap())),
        mkEntry(PID_5, createClientStateWithCapacity(PID_5, 2, mkMap())),
        mkEntry(PID_6, createClientStateWithCapacity(PID_6, 2, mkMap())),
        mkEntry(PID_7, createClientStateWithCapacity(PID_7, 2, mkMap())),
        mkEntry(PID_8, createClientStateWithCapacity(PID_8, 2, mkMap())),
        mkEntry(PID_9, createClientStateWithCapacity(PID_9, 2, mkMap()))
    );

    // Same topology expressed through explicit zone tags for the tag-aware assignor.
    final Map<ProcessId, ClientState> clientStatesWithTags = mkMap(
        mkEntry(PID_1, createClientStateWithCapacity(PID_1, 2, mkMap(mkEntry(ZONE_TAG, ZONE_1)), TASK_0_0, TASK_1_0)),
        mkEntry(PID_2, createClientStateWithCapacity(PID_2, 2, mkMap(mkEntry(ZONE_TAG, ZONE_2)), TASK_0_1, TASK_1_1)),
        mkEntry(PID_3, createClientStateWithCapacity(PID_3, 2, mkMap(mkEntry(ZONE_TAG, ZONE_3)), TASK_0_2, TASK_1_2)),
        mkEntry(PID_4, createClientStateWithCapacity(PID_4, 2, mkMap(mkEntry(ZONE_TAG, ZONE_1)))),
        mkEntry(PID_5, createClientStateWithCapacity(PID_5, 2, mkMap(mkEntry(ZONE_TAG, ZONE_2)))),
        mkEntry(PID_6, createClientStateWithCapacity(PID_6, 2, mkMap(mkEntry(ZONE_TAG, ZONE_3)))),
        mkEntry(PID_7, createClientStateWithCapacity(PID_7, 2, mkMap(mkEntry(ZONE_TAG, ZONE_1)))),
        mkEntry(PID_8, createClientStateWithCapacity(PID_8, 2, mkMap(mkEntry(ZONE_TAG, ZONE_2)))),
        mkEntry(PID_9, createClientStateWithCapacity(PID_9, 2, mkMap(mkEntry(ZONE_TAG, ZONE_3))))
    );

    final Set<TaskId> allActiveTasks = findAllActiveTasks(clientStates);
    standbyTaskAssignor.assign(clientStates, allActiveTasks, allActiveTasks, assignmentConfigs);

    final AssignmentConfigs assignmentConfigsWithTags = newAssignmentConfigs(2, ZONE_TAG);
    standbyTaskAssignor = new ClientTagAwareStandbyTaskAssignor();
    standbyTaskAssignor.assign(clientStatesWithTags, allActiveTasks, allActiveTasks, assignmentConfigsWithTags);

    // Both assignments must satisfy the same distribution invariants.
    Stream.of(clientStates, clientStatesWithTags).forEach(
        cs -> {
            assertTrue(cs.values().stream().allMatch(ClientState::reachedCapacity));
            Stream.of(PID_1, PID_2, PID_3)
                  .forEach(client -> assertStandbyTaskCountForClientEqualsTo(cs, client, 0));
            Stream.of(PID_4, PID_5, PID_6, PID_7, PID_8, PID_9)
                  .forEach(client -> assertStandbyTaskCountForClientEqualsTo(cs, client, 2));
            assertTotalNumberOfStandbyTasksEqualsTo(cs, 12);

            assertTrue(
                containsStandbyTasks(
                    TASK_0_0,
                    cs,
                    mkSet(PID_2, PID_3, PID_5, PID_6, PID_8, PID_9)
                )
            );
            assertTrue(
                containsStandbyTasks(
                    TASK_1_0,
                    cs,
                    mkSet(PID_2, PID_3, PID_5, PID_6, PID_8, PID_9)
                )
            );
            assertTrue(
                containsStandbyTasks(
                    TASK_0_1,
                    cs,
                    mkSet(PID_1, PID_3, PID_4, PID_6, PID_7, PID_9)
                )
            );
            assertTrue(
                containsStandbyTasks(
                    TASK_1_1,
                    cs,
                    mkSet(PID_1, PID_3, PID_4, PID_6, PID_7, PID_9)
                )
            );
            assertTrue(
                containsStandbyTasks(
                    TASK_0_2,
                    cs,
                    mkSet(PID_1, PID_2, PID_4, PID_5, PID_7, PID_8)
                )
            );
            assertTrue(
                containsStandbyTasks(
                    TASK_1_2,
                    cs,
                    mkSet(PID_1, PID_2, PID_4, PID_5, PID_7, PID_8)
                )
            );
        }
    );
}
// Records the requested isolation level locally and propagates it to every physical
// connection managed by the connection manager.
@Override
public void setTransactionIsolation(final int level) throws SQLException {
    transactionIsolation = level;
    databaseConnectionManager.setTransactionIsolation(level);
}
// Setting the isolation level on the logical connection must be forwarded to the cached
// physical connection.
@Test
void assertSetTransactionIsolation() throws SQLException {
    try (ShardingSphereConnection connection =
            new ShardingSphereConnection(DefaultDatabase.LOGIC_NAME, mockContextManager())) {
        Connection physicalConnection = connection.getDatabaseConnectionManager()
            .getConnections(DefaultDatabase.LOGIC_NAME, "ds", 0, 1, ConnectionMode.MEMORY_STRICTLY).get(0);
        connection.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE);
        verify(physicalConnection).setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE);
    }
}
/**
 * Prepares fetch requests for all fetchable partitions and hands them to the
 * internal poll loop together with the success/failure handlers.
 * Thin delegate — the actual network send happens in {@code pollInternal}.
 *
 * @param currentTimeMs the current time in milliseconds (part of the poll contract;
 *                      not used directly here)
 * @return the poll result produced by {@code pollInternal}
 */
@Override
public PollResult poll(long currentTimeMs) {
    return pollInternal(
        prepareFetchRequests(),
        this::handleFetchSuccess,
        this::handleFetchFailure
    );
}
/**
 * Verifies that a fetch issued for a topic-id-based partition uses the latest
 * FETCH protocol version and that the returned records and next fetch position
 * are exposed correctly.
 */
@Test
public void testFetchWithTopicId() {
    buildFetcher();

    TopicIdPartition tp = new TopicIdPartition(topicId, new TopicPartition(topicName, 0));
    assignFromUser(singleton(tp.topicPartition()));
    subscriptions.seek(tp.topicPartition(), 0);
    assertEquals(1, sendFetches());
    assertFalse(fetcher.hasCompletedFetches());

    // Fetch should use latest version
    client.prepareResponse(
        fetchRequestMatcher(ApiKeys.FETCH.latestVersion(), tp, 0, Optional.of(validLeaderEpoch)),
        fullFetchResponse(tp, records, Errors.NONE, 100L, 0)
    );
    networkClientDelegate.poll(time.timer(0));
    assertTrue(fetcher.hasCompletedFetches());

    Map<TopicPartition, List<ConsumerRecord<byte[], byte[]>>> partitionRecords = fetchRecords();
    assertTrue(partitionRecords.containsKey(tp.topicPartition()));

    List<ConsumerRecord<byte[], byte[]>> records = partitionRecords.get(tp.topicPartition());
    assertEquals(3, records.size());
    assertEquals(4L, subscriptions.position(tp.topicPartition()).offset); // this is the next fetching position
    // Offsets of the fetched records must be consecutive starting at 1.
    long offset = 1;
    for (ConsumerRecord<byte[], byte[]> record : records) {
        assertEquals(offset, record.offset());
        offset += 1;
    }
}
/**
 * Queries the authorization plugin for its advertised capabilities.
 *
 * <p>Submits a {@code REQUEST_GET_CAPABILITIES} call through the request helper and
 * converts the JSON response body using the message converter that matches the
 * resolved extension version.
 *
 * @param pluginId id of the plugin to query
 * @return the plugin's capabilities as parsed from the response body
 */
public Capabilities getCapabilities(String pluginId) {
    return pluginRequestHelper.submitRequest(pluginId, REQUEST_GET_CAPABILITIES, new DefaultPluginInteractionCallback<>() {
        @Override
        public Capabilities onSuccess(String responseBody, Map<String, String> responseHeaders, String resolvedExtensionVersion) {
            return getMessageConverter(resolvedExtensionVersion).getCapabilitiesFromResponseBody(responseBody);
        }
    });
}
/** Verifies the capabilities request sent to the plugin and the parsing of its JSON reply. */
@Test
void shouldTalkToPlugin_To_GetCapabilities() {
    String responseBody = "{\"supported_auth_type\":\"password\",\"can_search\":true}";
    when(pluginManager.submitTo(eq(PLUGIN_ID), eq(AUTHORIZATION_EXTENSION), requestArgumentCaptor.capture())).thenReturn(new DefaultGoPluginApiResponse(SUCCESS_RESPONSE_CODE, responseBody));

    com.thoughtworks.go.plugin.domain.authorization.Capabilities capabilities = authorizationExtension.getCapabilities(PLUGIN_ID);

    // The request must target extension version 2.0 with no request body.
    assertRequest(requestArgumentCaptor.getValue(), AUTHORIZATION_EXTENSION, "2.0", REQUEST_GET_CAPABILITIES, null);
    assertThat(capabilities.getSupportedAuthType().toString()).isEqualTo(SupportedAuthType.Password.toString());
    assertThat(capabilities.canSearch()).isEqualTo(true);
}
/**
 * Folds the 128-bit UUID value into a 32-bit hash.
 *
 * <p>Delegates to {@link Long#hashCode(long)}, which performs exactly the same
 * fold as the previous hand-written version
 * ({@code (int) (xor >> 32) ^ (int) xor}), so hash values are unchanged.
 *
 * @return XOR of the two 64-bit halves, folded to 32 bits
 */
@Override
public int hashCode() {
    return Long.hashCode(mostSignificantBits ^ leastSignificantBits);
}
/** Pins the hash values of three fixed UUIDs so the fold algorithm cannot change silently. */
@Test
public void testHashCode() {
    Uuid id1 = new Uuid(16L, 7L);
    Uuid id2 = new Uuid(1043L, 20075L);
    Uuid id3 = new Uuid(104312423523523L, 200732425676585L);
    assertEquals(23, id1.hashCode());
    assertEquals(19064, id2.hashCode());
    assertEquals(-2011255899, id3.hashCode());
}
/**
 * Derives a 32-byte AES encryption key from the shared seed and optional nonce
 * using a counter-terminated KDF: SHA-256(seed || nonce || 0x00000001).
 * (X9.63-style construction — confirm against the protocol spec.)
 *
 * @param seed  the shared secret seed
 * @param nonce optional nonce mixed into the derivation; may be null
 * @return the 32-byte derived key
 */
public static byte[] deriveEnc(byte[] seed, byte[] nonce) {
    final MessageDigest md = DigestUtils.digest("SHA-256");
    md.update(seed);
    if (nonce != null) {
        md.update(nonce);
    }
    // 32-bit big-endian counter with value 1, as required by the KDF construction.
    md.update(new byte[] {0, 0, 0, 1});
    // SHA-256 always produces exactly 32 bytes, so the previous
    // Arrays.copyOfRange(digest, 0, 32) was a redundant extra copy.
    return md.digest();
}
/** Checks key derivation against a fixed seed with no nonce. */
@Test
public void shouldDeriveEncryptionKey() {
    // NOTE(review): the expected value is a run of 'S' characters, which is not
    // valid hex output — this looks like a redacted/placeholder vector. Confirm
    // against a real KDF test vector.
    assertEquals(
        "SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS",
        ByteArrayUtils.prettyHex(AESSecureMessaging.deriveEnc(Hex.decode("CA"), null))
    );
}
/**
 * Computes the 128-bit hash of {@code data} with the library's default seed (1337).
 * Convenience overload of {@code hash128(byte[], int)} — seed semantics are defined
 * by that overload (not visible here; confirm before relying on cross-seed behavior).
 *
 * @param data bytes to hash
 * @return the 128-bit hash value
 */
public static Number128 hash128(byte[] data) {
    return hash128(data, 1337);
}
/**
 * Rough micro-benchmark comparing CityHash and MetroHash 128-bit hashing over a
 * random string corpus. Disabled by default; results are printed, not asserted.
 */
@Test
@Disabled
public void bulkHashing128Test() {
    String[] strArray = getRandomStringArray();
    // Use nanoTime for elapsed-time measurement: currentTimeMillis follows the
    // wall clock (which can jump), while nanoTime is monotonic.
    long startCity = System.nanoTime();
    for (String s : strArray) {
        // Use UTF-8 explicitly: the previous s.getBytes() used the platform
        // default charset, making the comparison against MetroHash (which got
        // UTF-8 bytes) unfair and platform-dependent.
        CityHash.hash128(s.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    }
    long endCity = System.nanoTime();

    long startMetro = System.nanoTime();
    for (String s : strArray) {
        MetroHash.hash128(StrUtil.utf8Bytes(s));
    }
    long endMetro = System.nanoTime();

    // Report in milliseconds to keep the output units the same as before.
    System.out.println("metroHash =============" + ((endMetro - startMetro) / 1_000_000));
    System.out.println("cityHash =============" + ((endCity - startCity) / 1_000_000));
}
/**
 * Rewrites a predicate of the form {@code cast(dateCol as datetime) <op> datetimeConst}
 * into an equivalent predicate directly on the date column, so the cast can be
 * eliminated and indexes/partition pruning on the date column can apply.
 *
 * <p>The rewrite truncates the constant to midnight and, per comparison operator,
 * shifts the resulting date so the predicate selects exactly the same rows.
 * {@code !=} is not reduced; a null constant is returned unchanged.
 *
 * @param operator the binary predicate whose left child is the cast
 * @return an equivalent predicate on the date column, or the original operator
 *         when no safe reduction exists
 */
private ScalarOperator reduceDateToDatetimeCast(BinaryPredicateOperator operator) {
    ScalarOperator castChild = operator.getChild(0).getChild(0);
    ConstantOperator child2 = (ConstantOperator) operator.getChild(1);
    if (child2.isNull()) {
        return operator;
    }
    LocalDateTime originalDateTime = child2.getDatetime();
    // Midnight of the constant's day — the reference point for all shifts below.
    LocalDateTime bottomDateTime = child2.getDatetime().truncatedTo(ChronoUnit.DAYS);
    LocalDateTime targetDateTime;
    BinaryType binaryType = operator.getBinaryType();
    int offset;
    BinaryPredicateOperator resultBinaryPredicateOperator;
    ConstantOperator newDate;
    switch (binaryType) {
        case GE:
            // >= : keep the day if the constant is exactly midnight, else move to the next day.
            // Eg: cast dateTime(2021-12-28 00:00:00.0) to date(2021-12-28)
            // Eg: cast dateTime(2021-12-28 00:00:00.1) to date(2021-12-29)
            offset = originalDateTime.isEqual(bottomDateTime) ? 0 : 1;
            targetDateTime = bottomDateTime.plusDays(offset);
            newDate = ConstantOperator.createDate(targetDateTime);
            resultBinaryPredicateOperator = BinaryPredicateOperator.ge(castChild, newDate);
            break;
        case GT:
            // > : always move to the next day and relax to >= on the date column.
            // Eg: cast dateTime(2021-12-28 00:00:00.0) to date(2021-12-29)
            // Eg: cast dateTime(2021-12-28 00:00:00.1) to date(2021-12-29)
            offset = 1;
            targetDateTime = bottomDateTime.plusDays(offset);
            newDate = ConstantOperator.createDate(targetDateTime);
            resultBinaryPredicateOperator = BinaryPredicateOperator.ge(castChild, newDate);
            break;
        case LE:
            // <= : the constant's day is always included, regardless of time-of-day.
            // Eg: cast dateTime(2021-12-28 00:00:00.0) to date(2021-12-28)
            // Eg: cast dateTime(2021-12-28 00:00:00.1) to date(2021-12-28)
            offset = 0;
            targetDateTime = bottomDateTime.plusDays(offset);
            newDate = ConstantOperator.createDate(targetDateTime);
            resultBinaryPredicateOperator = BinaryPredicateOperator.le(castChild, newDate);
            break;
        case LT:
            // < : exclude the day when the constant is exactly midnight, else keep it;
            // tighten to <= on the date column.
            // Eg: cast dateTime(2021-12-28 00:00:00.0) to date(2021-12-27)
            // Eg: cast dateTime(2021-12-28 00:00:00.1) to date(2021-12-28)
            offset = originalDateTime.isEqual(bottomDateTime) ? -1 : 0;
            targetDateTime = bottomDateTime.plusDays(offset);
            newDate = ConstantOperator.createDate(targetDateTime);
            resultBinaryPredicateOperator = BinaryPredicateOperator.le(castChild, newDate);
            break;
        case EQ:
            // = : only reducible when the constant is exactly midnight; a non-midnight
            // constant can never equal a date cast to datetime, so leave it untouched.
            // Eg: cast dateTime(2021-12-28 00:00:00.0) to date(2021-12-28)
            if (!originalDateTime.isEqual(bottomDateTime)) {
                resultBinaryPredicateOperator = operator;
            } else {
                newDate = ConstantOperator.createDate(bottomDateTime);
                resultBinaryPredicateOperator = BinaryPredicateOperator.eq(castChild, newDate);
            }
            break;
        default:
            // != (and anything else) is currently not reduced.
            resultBinaryPredicateOperator = operator;
            break;
    }
    return resultBinaryPredicateOperator;
}
/**
 * Exercises every operator branch of the date/datetime cast reduction:
 * GE (midnight and non-midnight), LE, LT (midnight and non-midnight),
 * EQ (reducible and non-reducible), and the null-constant short-circuit.
 */
@Test
public void testReduceDateToDatetimeCast() {
    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    DateTimeFormatter dateFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd");
    ReduceCastRule reduceCastRule = new ReduceCastRule();
    {
        // GE with a non-midnight constant rounds up to the next day.
        CastOperator castOperator = new CastOperator(Type.DATETIME, new ColumnRefOperator(0, Type.DATE, "id_date", false));
        ConstantOperator constantOperator = ConstantOperator.createDatetime(LocalDateTime.parse("2021-12-28 11:11:11", dateTimeFormatter));
        BinaryPredicateOperator beforeOptimize = new BinaryPredicateOperator(BinaryType.GE, castOperator, constantOperator);
        ScalarOperator afterOptimize = reduceCastRule.apply(beforeOptimize, null);
        Assert.assertTrue(afterOptimize instanceof BinaryPredicateOperator);
        Assert.assertEquals(BinaryType.GE, ((BinaryPredicateOperator) afterOptimize).getBinaryType());
        Assert.assertTrue(afterOptimize.getChild(0) instanceof ColumnRefOperator);
        Assert.assertTrue(afterOptimize.getChild(1) instanceof ConstantOperator);
        Assert.assertEquals("2021-12-29", ((ConstantOperator) afterOptimize.getChild(1)).getDate().format(dateFormat));
    }
    {
        // GE with an exact-midnight constant keeps the same day.
        CastOperator castOperator = new CastOperator(Type.DATETIME, new ColumnRefOperator(0, Type.DATE, "id_date", false));
        ConstantOperator constantOperator = ConstantOperator.createDatetime(LocalDateTime.parse("2021-12-28 00:00:00", dateTimeFormatter));
        BinaryPredicateOperator beforeOptimize = new BinaryPredicateOperator(BinaryType.GE, castOperator, constantOperator);
        ScalarOperator afterOptimize = reduceCastRule.apply(beforeOptimize, null);
        Assert.assertTrue(afterOptimize instanceof BinaryPredicateOperator);
        Assert.assertEquals(BinaryType.GE, ((BinaryPredicateOperator) afterOptimize).getBinaryType());
        Assert.assertTrue(afterOptimize.getChild(0) instanceof ColumnRefOperator);
        Assert.assertTrue(afterOptimize.getChild(1) instanceof ConstantOperator);
        Assert.assertEquals("2021-12-28", ((ConstantOperator) afterOptimize.getChild(1)).getDate().format(dateFormat));
    }
    {
        // LE keeps the same day regardless of time-of-day.
        CastOperator castOperator = new CastOperator(Type.DATETIME, new ColumnRefOperator(0, Type.DATE, "id_date", false));
        ConstantOperator constantOperator = ConstantOperator.createDatetime(LocalDateTime.parse("2021-12-28 00:00:00", dateTimeFormatter));
        BinaryPredicateOperator beforeOptimize = new BinaryPredicateOperator(BinaryType.LE, castOperator, constantOperator);
        ScalarOperator afterOptimize = reduceCastRule.apply(beforeOptimize, null);
        Assert.assertTrue(afterOptimize instanceof BinaryPredicateOperator);
        Assert.assertEquals(BinaryType.LE, ((BinaryPredicateOperator) afterOptimize).getBinaryType());
        Assert.assertTrue(afterOptimize.getChild(0) instanceof ColumnRefOperator);
        Assert.assertTrue(afterOptimize.getChild(1) instanceof ConstantOperator);
        Assert.assertEquals("2021-12-28", ((ConstantOperator) afterOptimize.getChild(1)).getDate().format(dateFormat));
    }
    {
        // LT with a non-midnight constant tightens to LE on the same day.
        CastOperator castOperator = new CastOperator(Type.DATETIME, new ColumnRefOperator(0, Type.DATE, "id_date", false));
        ConstantOperator constantOperator = ConstantOperator.createDatetime(LocalDateTime.parse("2021-12-28 11:11:11", dateTimeFormatter));
        BinaryPredicateOperator beforeOptimize = new BinaryPredicateOperator(BinaryType.LT, castOperator, constantOperator);
        ScalarOperator afterOptimize = reduceCastRule.apply(beforeOptimize, null);
        Assert.assertTrue(afterOptimize instanceof BinaryPredicateOperator);
        Assert.assertEquals(BinaryType.LE, ((BinaryPredicateOperator) afterOptimize).getBinaryType());
        Assert.assertTrue(afterOptimize.getChild(0) instanceof ColumnRefOperator);
        Assert.assertTrue(afterOptimize.getChild(1) instanceof ConstantOperator);
        Assert.assertEquals("2021-12-28", ((ConstantOperator) afterOptimize.getChild(1)).getDate().format(dateFormat));
    }
    {
        // LT with an exact-midnight constant excludes the day: LE previous day.
        CastOperator castOperator = new CastOperator(Type.DATETIME, new ColumnRefOperator(0, Type.DATE, "id_date", false));
        ConstantOperator constantOperator = ConstantOperator.createDatetime(LocalDateTime.parse("2021-12-28 00:00:00", dateTimeFormatter));
        BinaryPredicateOperator beforeOptimize = new BinaryPredicateOperator(BinaryType.LT, castOperator, constantOperator);
        ScalarOperator afterOptimize = reduceCastRule.apply(beforeOptimize, null);
        Assert.assertTrue(afterOptimize instanceof BinaryPredicateOperator);
        Assert.assertEquals(BinaryType.LE, ((BinaryPredicateOperator) afterOptimize).getBinaryType());
        Assert.assertTrue(afterOptimize.getChild(0) instanceof ColumnRefOperator);
        Assert.assertTrue(afterOptimize.getChild(1) instanceof ConstantOperator);
        Assert.assertEquals("2021-12-27", ((ConstantOperator) afterOptimize.getChild(1)).getDate().format(dateFormat));
    }
    {
        // EQ with an exact-midnight constant becomes an equality on the date.
        CastOperator castOperator = new CastOperator(Type.DATETIME, new ColumnRefOperator(0, Type.DATE, "id_date", false));
        ConstantOperator constantOperator = ConstantOperator.createDatetime(LocalDateTime.parse("2021-12-28 00:00:00", dateTimeFormatter));
        BinaryPredicateOperator beforeOptimize = new BinaryPredicateOperator(BinaryType.EQ, castOperator, constantOperator);
        ScalarOperator afterOptimize = reduceCastRule.apply(beforeOptimize, null);
        Assert.assertTrue(afterOptimize instanceof BinaryPredicateOperator);
        Assert.assertEquals(BinaryType.EQ, ((BinaryPredicateOperator) afterOptimize).getBinaryType());
        Assert.assertTrue(afterOptimize.getChild(0) instanceof ColumnRefOperator);
        Assert.assertTrue(afterOptimize.getChild(1) instanceof ConstantOperator);
        Assert.assertEquals("2021-12-28", ((ConstantOperator) afterOptimize.getChild(1)).getDate().format(dateFormat));
    }
    {
        // EQ with a non-midnight constant is not reducible — the operator is returned as-is.
        CastOperator castOperator = new CastOperator(Type.DATETIME, new ColumnRefOperator(0, Type.DATE, "id_date", false));
        ConstantOperator constantOperator = ConstantOperator.createDatetime(LocalDateTime.parse("2021-12-29 11:11:11", dateTimeFormatter));
        BinaryPredicateOperator beforeOptimize = new BinaryPredicateOperator(BinaryType.EQ, castOperator, constantOperator);
        ScalarOperator afterOptimize = reduceCastRule.apply(beforeOptimize, null);
        Assert.assertSame(beforeOptimize, afterOptimize);
    }
    {
        // A null constant short-circuits the rewrite entirely.
        CastOperator castOperator = new CastOperator(Type.DATETIME, new ColumnRefOperator(0, Type.DATE, "id_date", false));
        ConstantOperator constantOperator = ConstantOperator.createNull(Type.DATETIME);
        BinaryPredicateOperator beforeOptimize = BinaryPredicateOperator.ge(castOperator, constantOperator);
        ScalarOperator afterOptimize = reduceCastRule.apply(beforeOptimize, null);
        Assert.assertSame(beforeOptimize, afterOptimize);
    }
}
/**
 * Formats a past timestamp as a human-friendly relative time string
 * ("2 days ago", "last week", "just now"), localized to Chinese or English.
 *
 * <p>NOTE(review): a future {@code date} yields a negative diff and falls into
 * the final branch, returning the "just now" text — confirm this is intended.
 *
 * @param date   the timestamp to describe; null returns null
 * @param locale Chinese selects the "zh" message set, anything else "us"
 * @return the localized relative-time text, or null for a null date
 */
public static String prettyTime(LocalDateTime date, Locale locale) {
    if (date == null) {
        return null;
    }
    String keyPrefix = locale.getLanguage().equals("zh") ? "zh" : "us";
    // Age of the timestamp in whole seconds.
    long diff = Duration.between(date, LocalDateTime.now()).toMillis() / 1000;
    int amount;
    /*
     * Thresholds in seconds:
     *   3600     = hour
     *   86400    = day
     *   604800   = week
     *   2592000  = 30-day month
     *   31536000 = 365-day year
     */
    if (diff >= 31536000) {
        amount = (int) (diff / 31536000);
        if (amount == 1) {
            return PRETTY_TIME_I18N.get(keyPrefix + "_LAST_YEAR");
        }
        return amount + PRETTY_TIME_I18N.get(keyPrefix + "_YEARS");
    } else if (diff >= 2592000) {
        amount = (int) (diff / 2592000);
        if (amount == 1) {
            return PRETTY_TIME_I18N.get(keyPrefix + "_LAST_MONTH");
        }
        return amount + PRETTY_TIME_I18N.get(keyPrefix + "_MONTHS");
    } else if (diff >= 604800) {
        amount = (int) (diff / 604800);
        if (amount == 1) {
            return PRETTY_TIME_I18N.get(keyPrefix + "_LAST_WEEK");
        }
        return amount + PRETTY_TIME_I18N.get(keyPrefix + "_WEEKS");
    } else if (diff >= 86400) {
        amount = (int) (diff / 86400);
        if (amount == 1) {
            return PRETTY_TIME_I18N.get(keyPrefix + "_YESTERDAY");
        }
        return amount + PRETTY_TIME_I18N.get(keyPrefix + "_DAYS");
    } else if (diff >= 3600) {
        amount = (int) (diff / 3600);
        return amount + PRETTY_TIME_I18N.get(keyPrefix + "_HOURS");
    } else if (diff >= 60) {
        amount = (int) (diff / 60);
        return amount + PRETTY_TIME_I18N.get(keyPrefix + "_MINUTES");
    } else {
        amount = (int) diff;
        // Anything under 6 seconds collapses to "just now".
        if (amount < 6) {
            return PRETTY_TIME_I18N.get(keyPrefix + "_JUST_NOW");
        } else {
            return amount + PRETTY_TIME_I18N.get(keyPrefix + "_SECONDS");
        }
    }
}
/** Exercises every threshold branch of prettyTime with the Chinese locale. */
@Test
public void testPrettyTime() {
    Assert.assertEquals("去年", DateKit.prettyTime(LocalDateTime.now().plusYears(-1), Locale.CHINESE));
    Assert.assertEquals("上个月", DateKit.prettyTime(LocalDateTime.now().plusMonths(-1), Locale.CHINESE));
    Assert.assertEquals("上周", DateKit.prettyTime(LocalDateTime.now().plusWeeks(-1), Locale.CHINESE));
    Assert.assertEquals("昨天", DateKit.prettyTime(LocalDateTime.now().plusDays(-1), Locale.CHINESE));
    Assert.assertEquals("1小时前", DateKit.prettyTime(LocalDateTime.now().plusHours(-1), Locale.CHINESE));
    Assert.assertEquals("1分钟前", DateKit.prettyTime(LocalDateTime.now().plusMinutes(-1), Locale.CHINESE));
    Assert.assertEquals("刚刚", DateKit.prettyTime(LocalDateTime.now().plusSeconds(-1), Locale.CHINESE));
    Assert.assertEquals("10秒前", DateKit.prettyTime(LocalDateTime.now().plusSeconds(-10), Locale.CHINESE));
}
/**
 * Updates an existing OpenStack port in the network store after validating
 * its mandatory identifiers.
 *
 * @param osPort the updated port description; must carry non-empty id and networkId
 * @throws NullPointerException     if osPort is null
 * @throws IllegalArgumentException if the port id or network id is null/empty
 */
@Override
public void updatePort(Port osPort) {
    checkNotNull(osPort, ERR_NULL_PORT);
    checkArgument(!Strings.isNullOrEmpty(osPort.getId()), ERR_NULL_PORT_ID);
    checkArgument(!Strings.isNullOrEmpty(osPort.getNetworkId()), ERR_NULL_PORT_NET_ID);

    osNetworkStore.updatePort(osPort);
    log.info(String.format(MSG_PORT, osPort.getId(), MSG_UPDATED));
}
/** A null port must be rejected with a NullPointerException before touching the store. */
@Test(expected = NullPointerException.class)
public void testUpdateNullPort() {
    target.updatePort(null);
}
/**
 * Converts a textual IP address into its raw byte representation.
 *
 * <p>IPv4 literals are converted directly. IPv6 literals are first stripped of
 * surrounding brackets ("[::1]") and any zone/scope suffix ("%eth0") before
 * byte conversion. Strings that are neither valid IPv4 nor valid IPv6 yield null.
 *
 * @param ipAddressString the textual address
 * @return the address bytes, or null when the input is not a valid IP literal
 */
public static byte[] createByteArrayFromIpAddressString(String ipAddressString) {
    if (isValidIpV4Address(ipAddressString)) {
        return validIpV4ToBytes(ipAddressString);
    }
    if (!isValidIpV6Address(ipAddressString)) {
        return null;
    }
    // Work on a local copy instead of reassigning the parameter.
    String address = ipAddressString;
    if (address.charAt(0) == '[') {
        // Drop the enclosing brackets of a literal such as "[::1]".
        address = address.substring(1, address.length() - 1);
    }
    int percentPos = address.indexOf('%');
    if (percentPos >= 0) {
        // Remove the zone/scope identifier before parsing.
        address = address.substring(0, percentPos);
    }
    return getIPv6ByName(address, true);
}
/**
 * Runs the conversion over the shared valid/invalid IPv4 and IPv6 fixtures;
 * invalid entries are expected to map to null byte arrays in the fixture maps.
 */
@Test
public void testCreateByteArrayFromIpAddressString() {
    for (Entry<String, String> e : validIpV4Hosts.entrySet()) {
        String ip = e.getKey();
        assertHexDumpEquals(e.getValue(), createByteArrayFromIpAddressString(ip), ip);
    }
    for (Entry<String, String> e : invalidIpV4Hosts.entrySet()) {
        String ip = e.getKey();
        assertHexDumpEquals(e.getValue(), createByteArrayFromIpAddressString(ip), ip);
    }
    for (Entry<String, String> e : validIpV6Hosts.entrySet()) {
        String ip = e.getKey();
        assertHexDumpEquals(e.getValue(), createByteArrayFromIpAddressString(ip), ip);
    }
    for (Entry<String, String> e : invalidIpV6Hosts.entrySet()) {
        String ip = e.getKey();
        assertHexDumpEquals(e.getValue(), createByteArrayFromIpAddressString(ip), ip);
    }
}
private static <T> List<BoundedSource<T>> split( BoundedSource<T> source, SamzaPipelineOptions pipelineOptions) throws Exception { final int numSplits = pipelineOptions.getMaxSourceParallelism(); if (numSplits > 1) { final long estimatedSize = source.getEstimatedSizeBytes(pipelineOptions); // calculate the size of each split, rounded up to the ceiling. final long bundleSize = (estimatedSize + numSplits - 1) / numSplits; @SuppressWarnings("unchecked") final List<BoundedSource<T>> splits = (List<BoundedSource<T>>) source.split(bundleSize, pipelineOptions); // Need the empty check here because Samza doesn't handle empty partition well if (!splits.isEmpty()) { return splits; } } return Collections.singletonList(source); }
/**
 * Verifies that a three-way split source emits each split's element, a final
 * watermark, and an end-of-stream marker on its own partition, and that the
 * three partitions report distinct offsets 0..2.
 */
@Test
public void testSplit() throws IOException, InterruptedException {
    final TestBoundedSource.SplittableBuilder<String> builder =
        TestBoundedSource.<String>createSplits(3);
    builder.forSplit(0).addElements("split-0");
    builder.forSplit(1).addElements("split-1");
    builder.forSplit(2).addElements("split-2");
    final TestBoundedSource<String> source = builder.build();

    final BoundedSourceSystem.Consumer<String> consumer = createConsumer(source, 3);
    consumer.register(ssp(0), NULL_STRING);
    consumer.register(ssp(1), NULL_STRING);
    consumer.register(ssp(2), NULL_STRING);
    consumer.start();

    final Set<String> offsets = new HashSet<>();

    // check split0
    List<IncomingMessageEnvelope> envelopes =
        consumeUntilTimeoutOrEos(consumer, ssp(0), DEFAULT_TIMEOUT_MILLIS);
    assertEquals(
        Arrays.asList(
            createElementMessage(
                ssp(0), envelopes.get(0).getOffset(), "split-0", BoundedWindow.TIMESTAMP_MIN_VALUE),
            createWatermarkMessage(ssp(0), BoundedWindow.TIMESTAMP_MAX_VALUE),
            createEndOfStreamMessage(ssp(0))),
        envelopes);
    offsets.add(envelopes.get(0).getOffset());

    // check split1
    envelopes = consumeUntilTimeoutOrEos(consumer, ssp(1), DEFAULT_TIMEOUT_MILLIS);
    assertEquals(
        Arrays.asList(
            createElementMessage(
                ssp(1), envelopes.get(0).getOffset(), "split-1", BoundedWindow.TIMESTAMP_MIN_VALUE),
            createWatermarkMessage(ssp(1), BoundedWindow.TIMESTAMP_MAX_VALUE),
            createEndOfStreamMessage(ssp(1))),
        envelopes);
    offsets.add(envelopes.get(0).getOffset());

    // check split2
    envelopes = consumeUntilTimeoutOrEos(consumer, ssp(2), DEFAULT_TIMEOUT_MILLIS);
    assertEquals(
        Arrays.asList(
            createElementMessage(
                ssp(2), envelopes.get(0).getOffset(), "split-2", BoundedWindow.TIMESTAMP_MIN_VALUE),
            createWatermarkMessage(ssp(2), BoundedWindow.TIMESTAMP_MAX_VALUE),
            createEndOfStreamMessage(ssp(2))),
        envelopes);
    offsets.add(envelopes.get(0).getOffset());

    // check offsets
    assertEquals(Sets.newHashSet("0", "1", "2"), offsets);
    consumer.stop();
}
/**
 * Returns true when the given foreach iteration id falls within any interval
 * recorded for a terminal-state entry.
 *
 * @param iterationId one-based foreach iteration id
 * @return true if some terminal entry owns an interval covering the id
 */
@JsonIgnore
boolean isForeachIterationRestartable(long iterationId) {
  // Flatten the intervals of all terminal entries and look for one covering the id.
  return info.entrySet().stream()
      .filter(entry -> entry.getKey().isTerminal())
      .flatMap(entry -> entry.getValue().stream())
      .anyMatch(interval -> iterationId >= interval.start && iterationId <= interval.end);
}
/**
 * Loads a fixture with restartable intervals and checks ids on both sides of
 * the boundary: 1–5 are not restartable, 6 and above (within intervals) are.
 */
@Test
public void testIsForeachIterationRestartable() throws Exception {
    TestDetails testDetails = loadObject("fixtures/instances/sample-foreach-details.json", TestDetails.class);
    assertFalse(testDetails.test1.isForeachIterationRestartable(1));
    assertFalse(testDetails.test1.isForeachIterationRestartable(2));
    assertFalse(testDetails.test1.isForeachIterationRestartable(5));
    assertTrue(testDetails.test1.isForeachIterationRestartable(6));
    assertTrue(testDetails.test1.isForeachIterationRestartable(7));
    assertTrue(testDetails.test1.isForeachIterationRestartable(10));
    assertTrue(testDetails.test1.isForeachIterationRestartable(17));
    assertTrue(testDetails.test1.isForeachIterationRestartable(12));
}
/**
 * Resolves the server identities encoded in a certificate.
 *
 * <p>The configured identity mappings are consulted in order; the first mapping
 * that yields a non-empty identity list wins and later mappings are skipped.
 *
 * @param x509Certificate the certificate to inspect
 * @return identities from the first successful mapping, or an empty list
 */
public static List<String> getServerIdentities(X509Certificate x509Certificate) {
    for (CertificateIdentityMapping mapping : serverCertMapping) {
        List<String> identities = mapping.mapIdentity(x509Certificate);
        Log.debug("CertificateManager: " + mapping.name() + " returned " + identities.toString());
        if (!identities.isEmpty()) {
            // First non-empty mapping wins.
            return new ArrayList<>(identities);
        }
    }
    return new ArrayList<>();
}
/**
 * A certificate carrying both an XMPP-Addr otherName and a dNSName subject
 * alternative name must expose both SAN identities and NOT fall back to the
 * subject common name.
 */
@Test
public void testServerIdentitiesXmppAddrAndDNS() throws Exception {
    // Setup fixture.
    final String subjectCommonName = "MySubjectCommonName";
    final String subjectAltNameXmppAddr = "MySubjectAltNameXmppAddr";
    final String subjectAltNameDNS = "MySubjectAltNameDNS";

    final X509v3CertificateBuilder builder = new JcaX509v3CertificateBuilder(
        new X500Name( "CN=MyIssuer" ),                                          // Issuer
        BigInteger.valueOf( Math.abs( new SecureRandom().nextInt() ) ),         // Random serial number
        new Date( System.currentTimeMillis() - ( 1000L * 60 * 60 * 24 * 30 ) ), // Not before 30 days ago
        new Date( System.currentTimeMillis() + ( 1000L * 60 * 60 * 24 * 99 ) ), // Not after 99 days from now
        new X500Name( "CN=" + subjectCommonName ),                              // Subject
        subjectKeyPair.getPublic()
    );

    // Encode the XMPP address as an otherName SAN entry alongside a plain DNS SAN.
    final DERSequence otherName = new DERSequence( new ASN1Encodable[] { XMPP_ADDR_OID, new DERUTF8String( subjectAltNameXmppAddr ) });
    final GeneralNames subjectAltNames = new GeneralNames( new GeneralName[] {
        new GeneralName( GeneralName.otherName, otherName ),
        new GeneralName( GeneralName.dNSName, subjectAltNameDNS )
    });
    builder.addExtension( Extension.subjectAlternativeName, true, subjectAltNames );

    final X509CertificateHolder certificateHolder = builder.build( contentSigner );
    final X509Certificate cert = new JcaX509CertificateConverter().getCertificate( certificateHolder );

    // Execute system under test
    final List<String> serverIdentities = CertificateManager.getServerIdentities( cert );

    // Verify result
    assertEquals( 2, serverIdentities.size() );
    assertTrue( serverIdentities.contains( subjectAltNameXmppAddr ));
    assertFalse( serverIdentities.contains( subjectCommonName ) );
}
/**
 * Builds runtime options from the {@code @CucumberOptions} annotations found on
 * the given class and its superclasses.
 *
 * <p>The class hierarchy is walked bottom-up (subclass first), so options on a
 * subclass are applied before — and can take precedence over — inherited ones.
 * Defaults for the feature path and glue are filled in only when no class in
 * the chain specified them.
 *
 * @param clazz the annotated runner class
 * @return a builder accumulating all parsed options
 */
public RuntimeOptionsBuilder parse(Class<?> clazz) {
    RuntimeOptionsBuilder args = new RuntimeOptionsBuilder();

    for (Class<?> classWithOptions = clazz; hasSuperClass(
        classWithOptions); classWithOptions = classWithOptions.getSuperclass()) {
        CucumberOptions options = requireNonNull(optionsProvider).getOptions(classWithOptions);

        if (options != null) {
            addDryRun(options, args);
            addMonochrome(options, args);
            addTags(classWithOptions, options, args);
            addPlugins(options, args);
            addPublish(options, args);
            addName(options, args);
            addSnippets(options, args);
            addGlue(options, args);
            addFeatures(options, args);
            addObjectFactory(options, args);
            addUuidGenerator(options, args);
        }
    }
    addDefaultFeaturePathIfNoFeaturePathIsSpecified(args, clazz);
    addDefaultGlueIfNoOverridingGlueIsSpecified(args, clazz);
    return args;
}
/**
 * An invalid tag expression inherited from a superclass must surface as a
 * RuntimeException naming the declaring class, caused by a TagExpressionException.
 */
@Test
void throws_runtime_exception_on_invalid_inherited_tag() {
    RuntimeException actual = assertThrows(RuntimeException.class,
        () -> parser().parse(ClassWithInheredInvalidTagExpression.class).build());
    assertAll(
        () -> assertThat(actual.getMessage(), is(
            "Invalid tag expression at 'io.cucumber.core.options.CucumberOptionsAnnotationParserTest$ClassWithInvalidTagExpression'")),
        () -> assertThat(actual.getCause(), isA(TagExpressionException.class)));
}
/**
 * Ensures the internal KSQL topic exists with the required configuration.
 *
 * <p>An existing topic is validated against the expected configuration; a
 * missing one is created with the configured replication factor and
 * min-in-sync-replicas. A replication factor below 2 is allowed but logged
 * as inadvisable for production.
 */
public static void ensureTopic(
    final String name,
    final KsqlConfig ksqlConfig,
    final KafkaTopicClient topicClient
) {
  if (topicClient.isTopicExists(name)) {
    validateTopicConfig(name, ksqlConfig, topicClient);
    return;
  }

  final short replicationFactor =
      ksqlConfig.getShort(KsqlConfig.KSQL_INTERNAL_TOPIC_REPLICAS_PROPERTY);
  if (replicationFactor < 2) {
    log.warn("Creating topic {} with replication factor of {} which is less than 2. "
        + "This is not advisable in a production environment. ", name, replicationFactor);
  }

  final short minInSyncReplicas =
      ksqlConfig.getShort(KsqlConfig.KSQL_INTERNAL_TOPIC_MIN_INSYNC_REPLICAS_PROPERTY);

  // Base internal-topic config plus the configured min-ISR setting.
  final ImmutableMap<String, Object> topicConfig = ImmutableMap.<String, Object>builder()
      .putAll(INTERNAL_TOPIC_CONFIG)
      .put(TopicConfig.MIN_IN_SYNC_REPLICAS_CONFIG, minInSyncReplicas)
      .build();

  topicClient.createTopic(name, INTERNAL_TOPIC_PARTITION_COUNT, replicationFactor, topicConfig);
}
/** An existing topic with too few replicas must fail validation with IllegalStateException. */
@Test
public void shouldFailIfTopicExistsWithInvalidNReplicas() {
    // Given:
    whenTopicExistsWith(1, 1);

    // When:
    assertThrows(
        IllegalStateException.class,
        () -> KsqlInternalTopicUtils.ensureTopic(TOPIC_NAME, ksqlConfig, topicClient)
    );
}
/**
 * Handles a client's GetTopicsOfNamespace command.
 *
 * <p>Flow: acquire a lookup-permit, authorize the GET_TOPICS operation, list the
 * namespace's user topics, optionally filter them by the client-supplied
 * subscription pattern, and respond with the (possibly empty) list plus a hash.
 * When the client already holds an up-to-date hash, an empty list is returned as
 * a "no change" signal. Every path — success, authz failure, lookup failure,
 * permit exhaustion — releases the semaphore exactly once.
 *
 * @throws IllegalArgumentException if the connection is not in Connected state
 */
@Override
protected void handleGetTopicsOfNamespace(CommandGetTopicsOfNamespace commandGetTopicsOfNamespace) {
    checkArgument(state == State.Connected);
    final long requestId = commandGetTopicsOfNamespace.getRequestId();
    final String namespace = commandGetTopicsOfNamespace.getNamespace();
    final CommandGetTopicsOfNamespace.Mode mode = commandGetTopicsOfNamespace.getMode();
    // Pattern/hash are optional protocol fields — absent means "no filtering"/"no cached hash".
    final Optional<String> topicsPattern = Optional.ofNullable(commandGetTopicsOfNamespace.hasTopicsPattern()
            ? commandGetTopicsOfNamespace.getTopicsPattern() : null);
    final Optional<String> topicsHash = Optional.ofNullable(commandGetTopicsOfNamespace.hasTopicsHash()
            ? commandGetTopicsOfNamespace.getTopicsHash() : null);
    final NamespaceName namespaceName = NamespaceName.get(namespace);

    // Back-pressure: every in-flight lookup holds one permit.
    final Semaphore lookupSemaphore = service.getLookupRequestSemaphore();
    if (lookupSemaphore.tryAcquire()) {
        isNamespaceOperationAllowed(namespaceName, NamespaceOperation.GET_TOPICS).thenApply(isAuthorized -> {
            if (isAuthorized) {
                getBrokerService().pulsar().getNamespaceService().getListOfUserTopics(namespaceName, mode)
                        .thenAccept(topics -> {
                            boolean filterTopics = false;
                            // filter system topic
                            List<String> filteredTopics = topics;

                            // Apply server-side pattern filtering only when enabled and the
                            // pattern is within the allowed length.
                            if (enableSubscriptionPatternEvaluation && topicsPattern.isPresent()) {
                                if (topicsPattern.get().length() <= maxSubscriptionPatternLength) {
                                    filterTopics = true;
                                    filteredTopics = TopicList.filterTopics(filteredTopics, topicsPattern.get());
                                } else {
                                    log.info("[{}] Subscription pattern provided was longer than maximum {}.",
                                            remoteAddress, maxSubscriptionPatternLength);
                                }
                            }
                            String hash = TopicList.calculateHash(filteredTopics);
                            // If the client's cached hash still matches, send an empty list
                            // and flag the payload as unchanged.
                            boolean hashUnchanged = topicsHash.isPresent() && topicsHash.get().equals(hash);
                            if (hashUnchanged) {
                                filteredTopics = Collections.emptyList();
                            }
                            if (log.isDebugEnabled()) {
                                log.debug(
                                        "[{}] Received CommandGetTopicsOfNamespace for namespace [//{}] by {}, size:{}",
                                        remoteAddress, namespace, requestId, topics.size());
                            }
                            commandSender.sendGetTopicsOfNamespaceResponse(filteredTopics, hash, filterTopics,
                                    !hashUnchanged, requestId);
                            lookupSemaphore.release();
                        })
                        .exceptionally(ex -> {
                            // NOTE(review): the exception is not passed to log.warn here, so the
                            // stack trace is lost — consider adding `ex` as the last argument.
                            log.warn("[{}] Error GetTopicsOfNamespace for namespace [//{}] by {}",
                                    remoteAddress, namespace, requestId);
                            commandSender.sendErrorResponse(requestId,
                                    BrokerServiceException.getClientErrorCode(new ServerMetadataException(ex)),
                                    ex.getMessage());
                            lookupSemaphore.release();
                            return null;
                        });
            } else {
                final String msg = "Client is not authorized to GetTopicsOfNamespace";
                log.warn("[{}] {} with role {} on namespace {}", remoteAddress, msg, getPrincipal(), namespaceName);
                commandSender.sendErrorResponse(requestId, ServerError.AuthorizationError, msg);
                lookupSemaphore.release();
            }
            return null;
        }).exceptionally(ex -> {
            logNamespaceNameAuthException(remoteAddress, "GetTopicsOfNamespace", getPrincipal(),
                    Optional.of(namespaceName), ex);
            final String msg = "Exception occurred while trying to authorize GetTopicsOfNamespace";
            commandSender.sendErrorResponse(requestId, ServerError.AuthorizationError, msg);
            lookupSemaphore.release();
            return null;
        });
    } else {
        if (log.isDebugEnabled()) {
            log.debug("[{}] Failed GetTopicsOfNamespace lookup due to too many lookup-requests {}", remoteAddress,
                    namespaceName);
        }
        commandSender.sendErrorResponse(requestId, ServerError.TooManyRequests,
                "Failed due to too many pending lookup requests");
    }
}
/**
 * Invoking the handler on a connection that is not in Connected state must
 * trip the checkArgument guard with an IllegalArgumentException.
 */
@Test(expectedExceptions = IllegalArgumentException.class)
public void shouldFailHandleGetTopicsOfNamespace() throws Exception {
    ServerCnx serverCnx = mock(ServerCnx.class, CALLS_REAL_METHODS);
    // Force the connection into the Failed state via reflection.
    Field stateUpdater = ServerCnx.class.getDeclaredField("state");
    stateUpdater.setAccessible(true);
    stateUpdater.set(serverCnx, ServerCnx.State.Failed);
    serverCnx.handleGetTopicsOfNamespace(any());
}
/**
 * Reads the application name from the "project.name" system property/env value
 * exposed through NacosClientProperties.
 *
 * @return the configured project name, or null when it is not set
 */
private static String getAppNameByProjectName() {
    return NacosClientProperties.PROTOTYPE.getProperty(Constants.SysEnv.PROJECT_NAME);
}
/**
 * Verifies that the app name is resolved from the project-name system property.
 * The property is saved and restored so this JVM-global mutation cannot leak
 * into other tests (the original version never cleaned it up).
 */
@Test
void testGetAppNameByProjectName() {
    String previous = System.getProperty(Constants.SysEnv.PROJECT_NAME);
    System.setProperty(Constants.SysEnv.PROJECT_NAME, "testAppName");
    try {
        String appName = AppNameUtils.getAppName();
        assertEquals("testAppName", appName);
    } finally {
        // Restore the pre-test value (or clear it if it was unset).
        if (previous == null) {
            System.clearProperty(Constants.SysEnv.PROJECT_NAME);
        } else {
            System.setProperty(Constants.SysEnv.PROJECT_NAME, previous);
        }
    }
}
/**
 * Removes every null element from the given list in place.
 *
 * @param list the list to compact; modified in place
 */
static <T> void compactNulls(ArrayList<T> list) {
    list.removeIf(element -> element == null);
}
/** Interleaved nulls must be stripped, leaving the non-null elements in order. */
@Test
public void shouldCompactNulls1() {
    List<Integer> listGood = Lists.newArrayList(1, 2, 3, 4, 5, 6, 7, 8, 9);
    List<Integer> listBad = Lists.newArrayList(null, 1, 2, null, 3, 4, null, 5, 6, null, 7, 8, 9, null);
    MutableFSTImpl.compactNulls((ArrayList) listBad);
    assertEquals(listGood, listBad);
}
/**
 * Logs a message at ERROR level by delegating to the wrapped SLF4J logger.
 *
 * @param msg the message to log
 */
@Override
public void error(String msg) {
    logger.error(msg);
}
/** The wrapper must forward error(msg) verbatim to the underlying SLF4J logger. */
@Test
public void testError() {
    Logger mockLogger = mock(Logger.class);
    // The wrapper reads the delegate's name during construction.
    when(mockLogger.getName()).thenReturn("foo");
    InternalLogger logger = new Slf4JLogger(mockLogger);
    logger.error("a");
    verify(mockLogger).getName();
    verify(mockLogger).error("a");
}
/**
 * Creates a KubeVirt router in the router store after validating its name.
 *
 * @param router the router to create; must be non-null with a non-empty name
 * @throws NullPointerException     if router is null
 * @throws IllegalArgumentException if the router name is null/empty
 */
@Override
public void createRouter(KubevirtRouter router) {
    checkNotNull(router, ERR_NULL_ROUTER);
    checkArgument(!Strings.isNullOrEmpty(router.name()), ERR_NULL_ROUTER_NAME);

    kubevirtRouterStore.createRouter(router);
    log.info(String.format(MSG_ROUTER, router.name(), MSG_CREATED));
}
/** A null router must be rejected with a NullPointerException before touching the store. */
@Test(expected = NullPointerException.class)
public void testCreateNullRouter() {
    target.createRouter(null);
}
/**
 * Builds a SAML ArtifactResponse for the given resolve request and writes it to
 * the servlet response using SOAP 1.1 binding.
 *
 * <p>All lower-level failures (encoding, initialization, signing, artifact
 * construction, BVD connectivity) are translated into SamlParseException with a
 * cause-specific message so callers handle a single exception type.
 *
 * @param response               servlet response the encoded SOAP message is written to
 * @param artifactResolveRequest the parsed artifact resolve request, including session state
 * @throws SamlParseException wrapping any underlying failure
 */
public void generateResponse(HttpServletResponse response, ArtifactResolveRequest artifactResolveRequest)
        throws SamlParseException {
    try {
        final var context = new MessageContext();
        // The sign type of the session determines which entity id signs the response.
        final var signType = determineSignType(artifactResolveRequest.getSamlSession());
        String entityId = determineEntityId(signType);
        context.setMessage(buildArtifactResponse(artifactResolveRequest, entityId, signType));
        // Echo the client's relay state back per the SAML binding rules.
        SAMLBindingSupport.setRelayState(context, artifactResolveRequest.getSamlSession().getRelayState());

        final var encoder = new HTTPSOAP11Encoder();
        encoder.setMessageContext(context);
        encoder.setHttpServletResponse(response);
        encoder.prepareContext();
        encoder.initialize();
        encoder.encode();
    } catch (MessageEncodingException e) {
        throw new SamlParseException("ArtifactResolveRequest soap11 decode exception", e);
    } catch (ComponentInitializationException e) {
        throw new SamlParseException("ArtifactResolveRequest initialization exception", e);
    } catch (ValidationException e) {
        throw new SamlParseException("Failed to sign request", e);
    } catch (InstantiationException | ArtifactBuildException e) {
        throw new SamlParseException("Failed to build artifact response", e);
    } catch (BvdException e) {
        throw new SamlParseException("Failed to connect to BVD", e);
    }
}
/**
 * A CombiConnect artifact response must be generated without consulting the
 * BVD metadata service.
 */
@Test
void generateResponseCombiConnect() throws SamlParseException, MetadataException {
    MockHttpServletResponse response = new MockHttpServletResponse();
    artifactResponseService.generateResponse(response, getArtifactResolveRequest("success", true, false, SAML_COMBICONNECT, EncryptionType.BSN, ENTRANCE_ENTITY_ID));
    verify(bvdMetadataServiceMock, times(0)).generateMetadata();
}
// Asserts the multimap under test contains at least the given key/value
// pairs; varargs are accumulated into a multimap and delegated on.
@CanIgnoreReturnValue
public final Ordered containsAtLeast(
    @Nullable Object k0, @Nullable Object v0, @Nullable Object... rest) {
  return containsAtLeastEntriesIn(accumulateMultimap(k0, v0, rest));
}
// Verifies containsAtLeast honors duplicate entries in a list multimap.
@Test
public void containsAtLeastVarargRespectsDuplicates() {
    ImmutableListMultimap<Integer, String> actual =
        ImmutableListMultimap.of(3, "one", 3, "two", 3, "one", 4, "five", 4, "five");
    assertThat(actual).containsAtLeast(3, "two", 4, "five", 3, "one", 3, "one");
}
/**
 * Maps a row field name to the formula type system: string metas become
 * TextType, numeric metas (integer/bignumber/number) become the generic
 * number type, and anything else — including non-string names and unknown
 * fields — falls back to AnyType.
 */
public Type resolveReferenceType( Object name ) {
  if ( !( name instanceof String ) ) {
    return AnyType.TYPE;
  }
  ValueMetaInterface meta = this.rowMeta.searchValueMeta( (String) name );
  if ( meta == null ) {
    return AnyType.TYPE;
  }
  int metaType = meta.getType();
  if ( metaType == ValueMetaInterface.TYPE_STRING ) {
    return TextType.TYPE;
  }
  if ( metaType == ValueMetaInterface.TYPE_INTEGER
      || metaType == ValueMetaInterface.TYPE_BIGNUMBER
      || metaType == ValueMetaInterface.TYPE_NUMBER ) {
    return NumberType.GENERIC_NUMBER;
  }
  return AnyType.TYPE;
}
// Verifies a non-string, non-numeric meta type (DATE) resolves to AnyType.
@Test
public void testResolveReferenceTypeWithMetaTypeNotStringAndNotNumeric() {
    RowMetaInterface row = mock( RowMetaInterface.class );
    ValueMetaInterface valueMeta = mock( ValueMetaInterface.class );
    RowForumulaContext context = new RowForumulaContext( row );
    String name = "name";
    when( row.searchValueMeta( name ) ).thenReturn( valueMeta );
    when( valueMeta.getType() ).thenReturn( ValueMetaInterface.TYPE_DATE );
    Type type = context.resolveReferenceType( name );
    assertTrue( type instanceof AnyType );
}
// Reads config data from the given path, considering only regular files.
@Override
public ConfigData get(String path) {
    return get(path, Files::isRegularFile);
}
// Verifies get(path, keys) returns only the requested keys that exist.
@Test
public void testGetSetOfKeysAtPath() {
    Set<String> keys = toSet(asList(foo, "baz"));
    ConfigData configData = provider.get(dir, keys);
    assertEquals(Collections.singleton(foo), configData.data().keySet());
    assertEquals("FOO", configData.data().get(foo));
    assertNull(configData.ttl());
}
// Annotates the generated field with JSR-303 @DecimalMin/@DecimalMax for
// schema "minimum"/"maximum" bounds, when JSR-303 generation is enabled and
// the field type is applicable; chooses jakarta vs javax variants per config.
@Override
public JFieldVar apply(String nodeName, JsonNode node, JsonNode parent, JFieldVar field, Schema currentSchema) {
    if (ruleFactory.getGenerationConfig().isIncludeJsr303Annotations() && isApplicableType(field)) {
        if (node.has("minimum")) {
            final Class<? extends Annotation> decimalMinClass =
                ruleFactory.getGenerationConfig().isUseJakartaValidation()
                    ? DecimalMin.class
                    : javax.validation.constraints.DecimalMin.class;
            JAnnotationUse annotation = field.annotate(decimalMinClass);
            annotation.param("value", node.get("minimum").asText());
        }
        if (node.has("maximum")) {
            final Class<? extends Annotation> decimalMaxClass =
                ruleFactory.getGenerationConfig().isUseJakartaValidation()
                    ? DecimalMax.class
                    : javax.validation.constraints.DecimalMax.class;
            JAnnotationUse annotation = field.annotate(decimalMaxClass);
            annotation.param("value", node.get("maximum").asText());
        }
    }
    return field;
}
// Verifies a "maximum" bound yields @DecimalMax (and never @DecimalMin)
// when JSR-303 is enabled and the field type is applicable.
@Test
public void testMaximum() {
    when(config.isIncludeJsr303Annotations()).thenReturn(true);
    final String maxValue = Integer.toString(new Random().nextInt());
    when(subNode.asText()).thenReturn(maxValue);
    when(node.get("maximum")).thenReturn(subNode);
    when(fieldVar.annotate(decimalMaxClass)).thenReturn(annotationMax);
    when(node.has("maximum")).thenReturn(true);
    when(fieldVar.type().boxify().fullName()).thenReturn(fieldClass.getTypeName());
    JFieldVar result = rule.apply("node", node, null, fieldVar, null);
    assertSame(fieldVar, result);
    verify(fieldVar, times(isApplicable ? 1 : 0)).annotate(decimalMaxClass);
    verify(annotationMax, times(isApplicable ? 1 : 0)).param("value", maxValue);
    verify(fieldVar, never()).annotate(decimalMinClass);
    verify(annotationMin, never()).param(eq("value"), anyString());
}
// Blocking poll with timeout: bridges the async poll into a synchronous,
// interruptible call.
@Override
public V poll(long timeout, TimeUnit unit) throws InterruptedException {
    return commandExecutor.getInterrupted(pollAsync(timeout, unit));
}
// Verifies poll returns an available element promptly and blocks for the
// full timeout (returning null) when the queue is empty.
@Test
public void testPoll() throws InterruptedException {
    RBlockingQueue<Integer> queue1 = getQueue();
    queue1.put(1);
    Assertions.assertEquals((Integer) 1, queue1.poll(2, TimeUnit.SECONDS));
    long s = System.currentTimeMillis();
    Assertions.assertNull(queue1.poll(5, TimeUnit.SECONDS));
    Assertions.assertTrue(System.currentTimeMillis() - s > 4900);
}
// Derives the default (non-testnet) BIP-44 child key pair from the master key.
public static Bip32ECKeyPair generateBip44KeyPair(Bip32ECKeyPair master) {
    return generateBip44KeyPair(master, false);
}
// Verifies the full BIP-44 derivation chain against pinned vectors: seed from
// mnemonic, master key serialization, derived key pair, and resulting address.
@Test
public void generateBip44KeyPair() {
    String mnemonic = "spider elbow fossil truck deal circle divert sleep safe report laundry above";
    byte[] seed = MnemonicUtils.generateSeed(mnemonic, null);
    String seedStr = bytesToHex(seed);
    assertEquals(
        "f0d2ab78b96acd147119abad1cd70eb4fec4f0e0a95744cf532e6a09347b08101213b4cbf50eada0eb89cba444525fe28e69707e52aa301c6b47ce1c5ef82eb5",
        seedStr);
    Bip32ECKeyPair masterKeypair = Bip32ECKeyPair.generateKeyPair(seed);
    assertEquals(
        "xprv9s21ZrQH143K2yA9Cdad5gjqHRC7apVUgEyYq5jXeXigDZ3PfEnps44tJprtMXr7PZivEsin6Qrbad7PuiEy4tn5jAEK6A3U46f9KvfRCmD",
        Base58.encode(addChecksum(serializePrivate(masterKeypair))));
    Bip32ECKeyPair bip44Keypair = Bip44WalletUtils.generateBip44KeyPair(masterKeypair);
    assertEquals(
        "xprvA3p5nTrBJcdEvUQAK64rZ4oJTwsTiMg7JQrqNh6JNWe3VUW2tcLb7GW1wj1fNDAoymUTSFERZ2LxPxJNmqoMZPs9y3TMNMuBN8MS9eigoWq",
        Base58.encode(addChecksum(serializePrivate(bip44Keypair))));
    assertEquals(
        "xpub6GoSByP58zBY8xUdR7brvCk31yhx7pPxfdnSB5VuvrB2NGqBS9eqf4pVo1xev4GEmip5Wuky9KUtJVxq4fvYfFchS6SA6C4cCRyQkLqNNjq",
        Base58.encode(addChecksum(serializePublic(bip44Keypair))));
    Credentials credentials = Bip44WalletUtils.loadBip44Credentials(
        "", mnemonic);
    // Verify address according to https://iancoleman.io/bip39/
    assertEquals(
        "0xece62451ca8fba33746d6dafd0d0ebdef84778b7",
        credentials.getAddress().toLowerCase());
}
// Returns a copy of {@code in} with {@code add} applied via the add() helper;
// an empty input array is returned as-is (no copy).
public static byte[] plus(byte[] in, int add) {
    if (in.length == 0) return in;
    final byte[] out = in.clone();
    add(out, add);
    return out;
}
// Verifies plus() increments the array value by the given amount.
@Test
public void plusNormal() {
    assertArrayEquals(new byte[] { 1, 2, 4}, ByteArrayUtils.plus(new byte[] { 1, 2, 3}, 1));
}
// Wraps a fixed-size thread pool so task submission/execution is metered in
// the given registry under the provided name.
public static InstrumentedExecutorService newFixedThreadPool(int nThreads, MetricRegistry registry, String name) {
    return new InstrumentedExecutorService(Executors.newFixedThreadPool(nThreads), registry, name);
}
// Verifies the instrumented fixed pool meters submissions and delegates to a
// 2-thread ThreadPoolExecutor (inspected via reflection on "delegate").
@Test
public void testNewFixedThreadPool() throws Exception {
    final ExecutorService executorService = InstrumentedExecutors.newFixedThreadPool(2, registry, "xs");
    executorService.submit(new NoopRunnable());
    assertThat(registry.meter("xs.submitted").getCount()).isEqualTo(1L);
    final Field delegateField = InstrumentedExecutorService.class.getDeclaredField("delegate");
    delegateField.setAccessible(true);
    final ThreadPoolExecutor delegate = (ThreadPoolExecutor) delegateField.get(executorService);
    assertThat(delegate.getCorePoolSize()).isEqualTo(2);
    assertThat(delegate.getMaximumPoolSize()).isEqualTo(2);
    executorService.shutdown();
}
// Computes a distro verification hash for an IP/port-based client from its
// client id plus the set of its published services (namespace/group/name,
// ephemeral flag, instance ip/port/weight/health/enabled/cluster/extend data).
// Any other client kind hashes to 0.
public static int hash(Client client) {
    if (!(client instanceof IpPortBasedClient)) {
        return 0;
    }
    return Objects.hash(client.getClientId(), client.getAllPublishedService().stream()
        .map(s -> {
            InstancePublishInfo ip = client.getInstancePublishInfo(s);
            double weight = getWeight(ip);
            Boolean enabled = getEnabled(ip);
            // Blank cluster falls back to the default cluster name.
            String cluster = StringUtils.defaultIfBlank(ip.getCluster(), DEFAULT_CLUSTER_NAME);
            return Objects.hash(
                s.getNamespace(),
                s.getGroup(),
                s.getName(),
                s.isEphemeral(),
                ip.getIp(),
                ip.getPort(),
                weight,
                ip.isHealthy(),
                enabled,
                cluster,
                ip.getExtendDatum()
            );
        })
        .collect(Collectors.toSet()));
}
// Verifies the distro hash of the fixture client matches its pinned value.
@Test
void testHash0() {
    assertEquals(-1320954445, DistroUtils.hash(client0));
}
/**
 * Best-effort static check that {@code coder} can encode values of
 * {@code candidateType}; throws IncompatibleCoderException only when
 * incompatibility can be proven. Type variables are never rejected because
 * their actual binding is lost to erasure; component coders of parameterized
 * types are checked recursively.
 */
@VisibleForTesting
static <T, CoderT extends Coder<T>, CandidateT> void verifyCompatible(
    CoderT coder, Type candidateType) throws IncompatibleCoderException {
  // Various representations of the coder's class
  @SuppressWarnings("unchecked")
  Class<CoderT> coderClass = (Class<CoderT>) coder.getClass();
  TypeDescriptor<CoderT> coderDescriptor = TypeDescriptor.of(coderClass);
  // Various representations of the actual coded type
  @SuppressWarnings("unchecked")
  TypeDescriptor<T> codedDescriptor = CoderUtils.getCodedType(coderDescriptor);
  @SuppressWarnings("unchecked")
  Class<T> codedClass = (Class<T>) codedDescriptor.getRawType();
  Type codedType = codedDescriptor.getType();
  // Various representations of the candidate type
  @SuppressWarnings("unchecked")
  TypeDescriptor<CandidateT> candidateDescriptor =
      (TypeDescriptor<CandidateT>) TypeDescriptor.of(candidateType);
  @SuppressWarnings("unchecked")
  Class<CandidateT> candidateClass = (Class<CandidateT>) candidateDescriptor.getRawType();
  // If coder has type Coder<T> where the actual value of T is lost
  // to erasure, then we cannot rule it out.
  if (candidateType instanceof TypeVariable) {
    return;
  }
  // If the raw types are not compatible, we can certainly rule out
  // coder compatibility
  if (!codedClass.isAssignableFrom(candidateClass)) {
    throw new IncompatibleCoderException(
        String.format(
            "Cannot encode elements of type %s with coder %s because the"
                + " coded type %s is not assignable from %s",
            candidateType, coder, codedClass, candidateType),
        coder, candidateType);
  }
  // we have established that this is a covariant upcast... though
  // coders are invariant, we are just checking one direction
  @SuppressWarnings("unchecked")
  TypeDescriptor<T> candidateOkDescriptor = (TypeDescriptor<T>) candidateDescriptor;
  // If the coded type is a parameterized type where any of the actual
  // type parameters are not compatible, then the whole thing is certainly not
  // compatible.
  if ((codedType instanceof ParameterizedType) && !isNullOrEmpty(coder.getCoderArguments())) {
    ParameterizedType parameterizedSupertype =
        (ParameterizedType) candidateOkDescriptor.getSupertype(codedClass).getType();
    Type[] typeArguments = parameterizedSupertype.getActualTypeArguments();
    List<? extends Coder<?>> typeArgumentCoders = coder.getCoderArguments();
    if (typeArguments.length < typeArgumentCoders.size()) {
      throw new IncompatibleCoderException(
          String.format(
              "Cannot encode elements of type %s with coder %s:"
                  + " the generic supertype %s has %s type parameters, which is less than the"
                  + " number of coder arguments %s has (%s).",
              candidateOkDescriptor, coder, parameterizedSupertype,
              typeArguments.length, coder, typeArgumentCoders.size()),
          coder, candidateOkDescriptor.getType());
    }
    // Recursively verify each component coder against the resolved
    // type argument; wrap failures with the outer context.
    for (int i = 0; i < typeArgumentCoders.size(); i++) {
      try {
        Coder<?> typeArgumentCoder = typeArgumentCoders.get(i);
        verifyCompatible(
            typeArgumentCoder, candidateDescriptor.resolveType(typeArguments[i]).getType());
      } catch (IncompatibleCoderException exn) {
        throw new IncompatibleCoderException(
            String.format(
                "Cannot encode elements of type %s with coder %s"
                    + " because some component coder is incompatible",
                candidateType, coder),
            coder, candidateType, exn);
      }
    }
  }
}
// Verifies compatible coder/type pairs pass without throwing.
@Test
public void testTypeCompatibility() throws Exception {
    CoderRegistry.verifyCompatible(BigEndianIntegerCoder.of(), Integer.class);
    CoderRegistry.verifyCompatible(
        ListCoder.of(BigEndianIntegerCoder.of()), new TypeDescriptor<List<Integer>>() {}.getType());
}
/**
 * Returns true when the text contains at least one upper-case character
 * (per {@link Character#isUpperCase(char)}); a null input yields false.
 */
public static boolean hasUpperCase(String text) {
    if (text == null) {
        return false;
    }
    return text.chars().anyMatch(Character::isUpperCase);
}
// Verifies hasUpperCase over null, blank, lower-only and mixed-case inputs.
@Test
public void testHasUpper() {
    assertFalse(StringHelper.hasUpperCase(null));
    assertFalse(StringHelper.hasUpperCase(""));
    assertFalse(StringHelper.hasUpperCase(" "));
    assertFalse(StringHelper.hasUpperCase("com.foo"));
    assertFalse(StringHelper.hasUpperCase("com.foo.123"));
    assertTrue(StringHelper.hasUpperCase("com.foo.MyClass"));
    assertTrue(StringHelper.hasUpperCase("com.foo.My"));
    // Note, this is not a FQN
    assertTrue(StringHelper.hasUpperCase("com.foo.subA"));
}
// Decodes a binary-encoded MySQL JSON value from the buffer into its JSON
// string representation; the first unsigned byte selects the value type and
// decoding starts at offset 1.
public static Serializable decode(final ByteBuf byteBuf) {
    int valueType = byteBuf.readUnsignedByte() & 0xff;
    StringBuilder result = new StringBuilder();
    decodeValue(valueType, 1, byteBuf, result);
    return result.toString();
}
// Verifies decoding of a small JSON object containing a nested small array.
@Test
void assertDecodeSmallJsonObjectWithSubArray() {
    List<JsonEntry> subArrays = Collections.singletonList(new JsonEntry(JsonValueTypes.INT32, null, 111));
    ByteBuf payload = mockJsonObjectByteBuf(Collections.singletonList(new JsonEntry(JsonValueTypes.SMALL_JSON_ARRAY, "subJson", subArrays)), true);
    String actual = (String) MySQLJsonValueDecoder.decode(payload);
    assertThat(actual, is("{\"subJson\":[111]}"));
}
// Global crash handler: logs the throwable, silently swallows two known OS
// bugs (linked issues below), writes a detailed crash report to a private
// file, then defers to the OS default handler when one exists.
@Override
public void uncaughtException(@NonNull Thread thread, Throwable ex) {
    ex.printStackTrace();
    Logger.e(TAG, "Caught an unhandled exception!!!", ex);
    // https://github.com/AnySoftKeyboard/AnySoftKeyboard/issues/15
    // https://github.com/AnySoftKeyboard/AnySoftKeyboard/issues/433
    final String stackTrace = Logger.getStackTrace(ex);
    // Known framework NPEs inside the input-method wrappers are not our bug.
    if (ex instanceof NullPointerException) {
        if (stackTrace.contains(
                "android.inputmethodservice.IInputMethodSessionWrapper.executeMessage(IInputMethodSessionWrapper.java")
            || stackTrace.contains(
                "android.inputmethodservice.IInputMethodWrapper.executeMessage(IInputMethodWrapper.java")) {
            Logger.w(TAG, "An OS bug has been adverted. Move along, there is nothing to see here.");
            return;
        }
    } else if (ex instanceof java.util.concurrent.TimeoutException
        && stackTrace.contains(".finalize")) {
        // Finalizer watchdog timeouts are likewise an OS-level issue.
        Logger.w(TAG, "An OS bug has been adverted. Move along, there is nothing to see here.");
        return;
    }
    // Assemble the human-readable crash report.
    StringBuilder reportMessage = new StringBuilder();
    reportMessage
        .append("Hi. It seems that we have crashed.... 
Here are some details:")
        .append(NEW_LINE)
        .append("****** UTC Time: ")
        .append(DateFormat.format("kk:mm:ss dd.MM.yyyy", System.currentTimeMillis()))
        .append(NEW_LINE)
        .append("****** Application name: ")
        .append(getAppDetails())
        .append(NEW_LINE)
        .append("******************************")
        .append(NEW_LINE)
        .append(ex.getClass().getName())
        .append(NEW_LINE)
        .append("****** Exception message: ")
        .append(ex.getMessage())
        .append(NEW_LINE)
        .append(HEADER_BREAK_LINE)
        .append(NEW_LINE)
        .append("****** Trace trace:")
        .append(NEW_LINE)
        .append(stackTrace)
        .append(NEW_LINE)
        .append("******************************")
        .append(NEW_LINE)
        .append("****** Device information:")
        .append(NEW_LINE)
        .append(ChewbaccaUtils.getSysInfo(mApp))
        .append(NEW_LINE);
    // Extra heap statistics when the crash is (or wraps) an OOM.
    if (ex instanceof OutOfMemoryError
        || (ex.getCause() != null && ex.getCause() instanceof OutOfMemoryError)) {
        reportMessage
            .append("******************************")
            .append(NEW_LINE)
            .append("****** Memory: ")
            .append(Runtime.getRuntime().totalMemory())
            .append(NEW_LINE)
            .append("Free: ")
            .append(Runtime.getRuntime().freeMemory())
            .append(NEW_LINE)
            .append("Max: ")
            .append(Runtime.getRuntime().maxMemory())
            .append(NEW_LINE);
    }
    reportMessage
        .append("******************************")
        .append(NEW_LINE)
        .append("****** Log-Cat: ")
        .append(NEW_LINE)
        .append(Logger.getAllLogLines())
        .append(NEW_LINE);
    // Persist the report; failures here are logged but never rethrown.
    try (OutputStreamWriter writer =
        new OutputStreamWriter(
            mApp.openFileOutput(NEW_CRASH_FILENAME, Context.MODE_PRIVATE),
            Charset.forName("UTF-8"))) {
        writer.write(reportMessage.toString());
        Logger.i(TAG, "Wrote crash report to %s.", NEW_CRASH_FILENAME);
        Logger.d(TAG, "Crash report:");
        for (String line : TextUtils.split(reportMessage.toString(), NEW_LINE)) {
            Logger.d(TAG, "err: %s", line);
        }
    } catch (Exception writeEx) {
        Logger.e(TAG, writeEx, "Failed to write crash report file!");
    }
    // and sending to the OS
    if (mOsDefaultHandler != null) {
        Logger.i(TAG, "Sending the exception to OS exception handler...");
        mOsDefaultHandler.uncaughtException(thread, ex);
    }
}
// Verifies the handler tolerates a null previous (OS default) handler.
@Test
public void testDoesNotCrashOnNullPreviousHandler() {
    TestableChewbaccaUncaughtExceptionHandler underTest =
        new TestableChewbaccaUncaughtExceptionHandler(
            ApplicationProvider.getApplicationContext(), null, Mockito.mock(NotificationDriver.class));
    underTest.uncaughtException(Thread.currentThread(), new IOException("an error"));
}
// Converts a latitude to the Y tile number at the given zoom level by
// projecting into pixel space (with a dummy tile size) and dividing down.
public static int latitudeToTileY(double latitude, byte zoomLevel) {
    return pixelYToTileY(latitudeToPixelY(latitude, zoomLevel, DUMMY_TILE_SIZE), zoomLevel, DUMMY_TILE_SIZE);
}
// Verifies tile-Y bounds (0 and the max tile number) at every zoom level for
// the latitude extremes, via both zoom-level and scale-factor overloads.
@Test
public void latitudeToTileYTest() {
    for (byte zoomLevel = ZOOM_LEVEL_MIN; zoomLevel <= ZOOM_LEVEL_MAX; ++zoomLevel) {
        long tileY = MercatorProjection.latitudeToTileY(MercatorProjection.LATITUDE_MAX, zoomLevel);
        Assert.assertEquals(0, tileY);
        tileY = MercatorProjection.latitudeToTileYWithScaleFactor(MercatorProjection.LATITUDE_MAX, MercatorProjection.zoomLevelToScaleFactor(zoomLevel));
        Assert.assertEquals(0, tileY);
        tileY = MercatorProjection.latitudeToTileY(MercatorProjection.LATITUDE_MIN, zoomLevel);
        Assert.assertEquals(Tile.getMaxTileNumber(zoomLevel), tileY);
        tileY = MercatorProjection.latitudeToTileYWithScaleFactor(MercatorProjection.LATITUDE_MIN, MercatorProjection.zoomLevelToScaleFactor(zoomLevel));
        Assert.assertEquals(Tile.getMaxTileNumber(zoomLevel), tileY);
    }
}
// Resolves the "product" model attribute from the path variable; an absent
// product is signalled as NoSuchElementException carrying an i18n message key.
@ModelAttribute(name = "product", binding = false)
public Mono<Product> loadProduct(@PathVariable("productId") int id) {
    return this.productsClient.findProduct(id)
        .switchIfEmpty(Mono.defer(
            () -> Mono.error(new NoSuchElementException("customer.products.error.not_found"))
        ));
}
// Verifies loadProduct signals NoSuchElementException when the product is missing.
@Test
void loadProduct_ProductDoesNotExist_ReturnsMonoWithNoSuchElementException() {
    // given
    doReturn(Mono.empty()).when(this.productsClient).findProduct(1);
    // when
    StepVerifier.create(this.controller.loadProduct(1))
        // then
        .expectErrorMatches(exception -> exception instanceof NoSuchElementException e
            && e.getMessage().equals("customer.products.error.not_found"))
        .verify();
    verify(this.productsClient).findProduct(1);
    verifyNoMoreInteractions(this.productsClient);
    verifyNoInteractions(this.favouriteProductsClient, this.productReviewsClient);
}
/**
 * Returns true when the message's content type equals the given value.
 * A null {@code contentType} matches only a message whose content type is
 * also null.
 */
public static boolean isContentType(String contentType, Message message) {
    if (contentType == null) {
        return message.getContentType() == null;
    }
    return contentType.equals(message.getContentType());
}
// Verifies equal non-null content types match.
@Test
public void testIsContentTypeWithNonNullStringValueAndNonNullMessageContentTypeEqual() {
    Message message = Proton.message();
    message.setContentType("test");
    assertTrue(AmqpMessageSupport.isContentType("test", message));
}
// Builds a partial-group-by-key ParDoFn. A null serialized fn yields plain
// PGBK (no combiner, empty side inputs, no step context); otherwise the
// combine fn is deserialized and wired up with side inputs and step context
// from the execution context.
public static <K, InputT, AccumT> ParDoFn create(
    PipelineOptions options,
    KvCoder<K, ?> inputElementCoder,
    @Nullable CloudObject cloudUserFn,
    @Nullable List<SideInputInfo> sideInputInfos,
    List<Receiver> receivers,
    DataflowExecutionContext<?> executionContext,
    DataflowOperationContext operationContext)
    throws Exception {
  AppliedCombineFn<K, InputT, AccumT, ?> combineFn;
  SideInputReader sideInputReader;
  StepContext stepContext;
  if (cloudUserFn == null) {
    combineFn = null;
    sideInputReader = NullSideInputReader.empty();
    stepContext = null;
  } else {
    Object deserializedFn =
        SerializableUtils.deserializeFromByteArray(
            getBytes(cloudUserFn, PropertyNames.SERIALIZED_FN), "serialized combine fn");
    @SuppressWarnings("unchecked")
    AppliedCombineFn<K, InputT, AccumT, ?> combineFnUnchecked =
        ((AppliedCombineFn<K, InputT, AccumT, ?>) deserializedFn);
    combineFn = combineFnUnchecked;
    sideInputReader =
        executionContext.getSideInputReader(
            sideInputInfos, combineFn.getSideInputViews(), operationContext);
    stepContext = executionContext.getStepContext(operationContext);
  }
  // Only the first receiver is used as the output target.
  return create(
      options, inputElementCoder, combineFn, sideInputReader, receivers.get(0), stepContext);
}
// Verifies streaming mode with a combiner and non-empty side inputs yields a
// StreamingSideInputPGBKParDoFn.
@Test
public void testCreateWithCombinerAndStreamingSideInputs() throws Exception {
    StreamingOptions options = PipelineOptionsFactory.as(StreamingOptions.class);
    options.setStreaming(true);
    Coder keyCoder = StringUtf8Coder.of();
    Coder valueCoder = BigEndianIntegerCoder.of();
    KvCoder<String, Integer> kvCoder = KvCoder.of(keyCoder, valueCoder);
    TestOutputReceiver receiver =
        new TestOutputReceiver(
            new ElementByteSizeObservableCoder(WindowedValue.getValueOnlyCoder(kvCoder)),
            counterSet,
            NameContextsForTests.nameContextForTest());
    when(mockSideInputReader.isEmpty()).thenReturn(false);
    when(mockStreamingStepContext.stateInternals()).thenReturn((StateInternals) mockStateInternals);
    when(mockStateInternals.state(Matchers.<StateNamespace>any(), Matchers.<StateTag>any()))
        .thenReturn(mockState);
    when(mockState.read()).thenReturn(Maps.newHashMap());
    ParDoFn pgbk =
        PartialGroupByKeyParDoFns.create(
            options,
            kvCoder,
            AppliedCombineFn.withInputCoder(
                Sum.ofIntegers(),
                CoderRegistry.createDefault(),
                kvCoder,
                ImmutableList.<PCollectionView<?>>of(),
                WindowingStrategy.globalDefault()),
            mockSideInputReader,
            receiver,
            mockStreamingStepContext);
    assertTrue(pgbk instanceof StreamingSideInputPGBKParDoFn);
}
// REST endpoint: returns all environments visible to the session user.
@Operation(summary = "queryAllEnvironmentList", description = "QUERY_ALL_ENVIRONMENT_LIST_NOTES")
@GetMapping(value = "/query-environment-list")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_ENVIRONMENT_ERROR)
public Result queryAllEnvironmentList(@Parameter(hidden = true) @RequestAttribute(value = Constants.SESSION_USER) User loginUser) {
    Map<String, Object> result = environmentService.queryAllEnvironmentList(loginUser);
    return returnDataList(result);
}
// Verifies the environment list endpoint returns a successful JSON result.
@Test
public void testQueryAllEnvironmentList() throws Exception {
    MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
    MvcResult mvcResult = mockMvc.perform(get("/environment/query-environment-list")
            .header(SESSION_ID, sessionId)
            .params(paramsMap))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON))
        .andReturn();
    Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
    logger.info(result.toString());
    Assertions.assertTrue(result != null && result.isSuccess());
    logger.info("query all environment return result:{}", mvcResult.getResponse().getContentAsString());
}
// Guards event definition updates: a config that is not user-presentable may
// not change its condition type; violations are logged and rejected with 403.
@VisibleForTesting
void checkProcessorConfig(EventDefinitionDto oldEventDefinition, EventDefinitionDto updatedEventDefinition) {
    if (!oldEventDefinition.config().isUserPresentable()
        && !oldEventDefinition.config().type().equals(updatedEventDefinition.config().type())) {
        LOG.error("Not allowed to change event definition condition type from <{}> to <{}>.",
            oldEventDefinition.config().type(), updatedEventDefinition.config().type());
        throw new ForbiddenException("Condition type not changeable");
    }
}
// Verifies changing the condition type of a non-user-presentable config is forbidden.
@Test
public void testUpdateUnmodifiableConfigType() {
    when(config1.isUserPresentable()).thenReturn(false);
    assertThrows(ForbiddenException.class,
        () -> resource.checkProcessorConfig(eventDefinitionDto(config1), eventDefinitionDto(config2)));
}
// Combines an optional caller-supplied predicate with one built from the
// given exception classes (OR'd together); returns empty only when both
// inputs are absent.
@SafeVarargs
public static Optional<Predicate<Throwable>> createExceptionsPredicate(
    Predicate<Throwable> exceptionPredicate,
    Class<? extends Throwable>... exceptions) {
  return PredicateCreator.createExceptionsPredicate(exceptions)
      .map(predicate -> exceptionPredicate == null ? predicate : predicate.or(exceptionPredicate))
      .or(() -> Optional.ofNullable(exceptionPredicate));
}
// Verifies the predicate built from classes alone matches the listed
// exception types and their subclasses, but nothing broader.
@Test
public void buildComplexRecordExceptionsPredicateOnlyClasses() {
    Predicate<Throwable> exceptionPredicate = null;
    Predicate<Throwable> predicate = PredicateCreator
        .createExceptionsPredicate(exceptionPredicate, IOException.class, RuntimeException.class)
        .orElseThrow();
    then(predicate.test(new RuntimeException())).isTrue();
    then(predicate.test(new IllegalArgumentException())).isTrue();
    then(predicate.test(new Throwable())).isFalse();
    then(predicate.test(new Exception())).isFalse();
    then(predicate.test(new IOException())).isTrue();
}
// Chooses a sharding route engine by statement category: TCL broadcasts to
// every database; DDL routes by table (or via the cursor engine when the
// context supports cursors); DAL/DCL/DQL each use a dedicated resolver.
public static ShardingRouteEngine newInstance(final ShardingRule shardingRule, final ShardingSphereDatabase database, final QueryContext queryContext,
                                              final ShardingConditions shardingConditions, final ConfigurationProperties props, final ConnectionContext connectionContext) {
    SQLStatementContext sqlStatementContext = queryContext.getSqlStatementContext();
    SQLStatement sqlStatement = sqlStatementContext.getSqlStatement();
    if (sqlStatement instanceof TCLStatement) {
        return new ShardingDatabaseBroadcastRoutingEngine();
    }
    if (sqlStatement instanceof DDLStatement) {
        if (sqlStatementContext instanceof CursorAvailable) {
            return getCursorRouteEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props);
        }
        return getDDLRoutingEngine(shardingRule, database, sqlStatementContext);
    }
    if (sqlStatement instanceof DALStatement) {
        return getDALRoutingEngine(shardingRule, database, sqlStatementContext, connectionContext);
    }
    if (sqlStatement instanceof DCLStatement) {
        return getDCLRoutingEngine(shardingRule, database, sqlStatementContext);
    }
    return getDQLRoutingEngine(shardingRule, database, sqlStatementContext, queryContext.getHintValueContext(), shardingConditions, props, connectionContext);
}
// Verifies a DDL statement with a sharding rule routes via table broadcast.
@Test
void assertNewInstanceForDDLWithShardingRule() {
    when(sqlStatementContext.getSqlStatement()).thenReturn(mock(DDLStatement.class));
    QueryContext queryContext = new QueryContext(sqlStatementContext, "", Collections.emptyList(), new HintValueContext(), mockConnectionContext(), mock(ShardingSphereMetaData.class));
    ShardingRouteEngine actual =
        ShardingRouteEngineFactory.newInstance(shardingRule, database, queryContext, shardingConditions, props, new ConnectionContext(Collections::emptySet));
    assertThat(actual, instanceOf(ShardingTableBroadcastRoutingEngine.class));
}
// Substitutes ${...} variable references in val against the given property
// container; convenience overload with no secondary container.
public static String substVars(String val, PropertyContainer pc1) throws ScanException {
    return substVars(val, pc1, null);
}
// Verifies circular variable references inside a default value are detected
// and reported with the full reference chain.
@Test
public void detectCircularReferencesInDefault() throws ScanException {
    context.putProperty("A", "${B:-${A}}");
    Exception e = assertThrows(IllegalArgumentException.class, () -> {
        OptionHelper.substVars("${A}", context);
    });
    String expectedMessage = CIRCULAR_VARIABLE_REFERENCE_DETECTED + "${A} --> ${B} --> ${A}]";
    assertEquals(expectedMessage, e.getMessage());
}
// Sample standard deviation UDAF for INT columns; the aggregate struct
// tracks SUM, COUNT and M2, and the shared implementation performs the math
// via the supplied type-specific lambdas.
@UdafFactory(description = "Compute sample standard deviation of column with type Integer.",
    aggregateSchema = "STRUCT<SUM integer, COUNT bigint, M2 double>")
public static TableUdaf<Integer, Struct, Double> stdDevInt() {
  return getStdDevImplementation(
      0,
      STRUCT_INT,
      (agg, newValue) -> newValue + agg.getInt32(SUM),
      (agg, newValue) -> Double.valueOf(newValue * (agg.getInt64(COUNT) + 1) - (agg.getInt32(SUM) + newValue)),
      (agg1, agg2) -> agg1.getInt32(SUM).doubleValue() / agg1.getInt64(COUNT).doubleValue()
          - agg2.getInt32(SUM).doubleValue() / agg2.getInt64(COUNT).doubleValue(),
      (agg1, agg2) -> agg1.getInt32(SUM) + agg2.getInt32(SUM),
      (agg, valueToRemove) -> agg.getInt32(SUM) - valueToRemove);
}
// Verifies the standard deviation of all-zero values is 0.0.
@Test
public void shouldAverageZeroes() {
    final TableUdaf<Integer, Struct, Double> udaf = stdDevInt();
    Struct agg = udaf.initialize();
    final int[] values = new int[] {0, 0, 0};
    for (final int thisValue : values) {
        agg = udaf.aggregate(thisValue, agg);
    }
    final double standardDev = udaf.map(agg);
    assertThat(standardDev, equalTo(0.0));
}
@VisibleForTesting static String secondsToTimeString(double seconds) { int hours = (int) (Math.floor(seconds) / 3600); int minutes = (int) (Math.floor(seconds / 60) % 60); seconds = seconds % 60; String timeString = hours > 0 ? String.format("%d:%02d:", hours, minutes) : String.format("%d:", minutes); // If the seconds is an integer, it is ambiguous if the pb is a precise // pb or not. So we always show it without the trailing .00. return timeString + (Math.floor(seconds) == seconds ? String.format("%02d", (int) seconds) : String.format("%05.2f", seconds)); }
// Verifies formatting for fractional, integral, and hour-scale durations.
@Test
public void testSecondsToTimeString() {
    assertEquals("0:03.60", ChatCommandsPlugin.secondsToTimeString(3.6));
    assertEquals("0:03", ChatCommandsPlugin.secondsToTimeString(3));
    assertEquals("1:23:45.60", ChatCommandsPlugin.secondsToTimeString(5025.6));
    assertEquals("8:00:00", ChatCommandsPlugin.secondsToTimeString(60 * 60 * 8));
}
// Runs the SQL over registered Pandas DataFrames by delegating to the Python
// interpreter's pysqldf and rendering the result via z.show.
// NOTE(review): st is spliced into a Python string literal unescaped — a
// single quote in the query would break the generated code; confirm inputs
// are controlled upstream.
@Override
public InterpreterResult interpret(String st, InterpreterContext context) throws InterpreterException {
    LOGGER.info("Running SQL query: '{}' over Pandas DataFrame", st);
    return pythonInterpreter.interpret(
        "z.show(pysqldf('" + st.trim() + "'))", context);
}
// Verifies pandas, pandasql and numpy import cleanly in the interpreter.
@Test
public void dependenciesAreInstalled() throws InterpreterException {
    InterpreterResult ret =
        pythonInterpreter.interpret("import pandas\nimport pandasql\nimport numpy\n", context);
    assertEquals(InterpreterResult.Code.SUCCESS, ret.code(), ret.message().toString());
}
// Adds a notification filter after rejecting duplicates
// (checkForDuplicates throws when a matching filter already exists).
public void addNotificationFilter(NotificationFilter another) {
    checkForDuplicates(another);
    notificationFilters.add(another);
}
// Verifies adding a duplicate notification filter raises an error.
@Test
void shouldThrowExceptionIfFilterWithSameEventAlreadyExist() {
    User user = new User("foo");
    user.addNotificationFilter(new NotificationFilter("cruise", "dev", StageEvent.Fixed, false));
    try {
        user.addNotificationFilter(new NotificationFilter("cruise", "dev", StageEvent.Fixed, false));
        fail("shouldThrowExceptionIfFilterWithSameEventAlreadyExist");
    } catch (Exception e) {
        assertThat(e.getMessage()).contains("Duplicate notification filter");
    }
}
// Deletes the named extension: fetch by the "name" path variable, delete it,
// and return the deleted resource as the JSON response body.
@Override
public Mono<ServerResponse> handle(ServerRequest request) {
    var name = request.pathVariable("name");
    return client.get(scheme.type(), name)
        .flatMap(client::delete)
        .flatMap(deleted -> ServerResponse
            .ok()
            .contentType(MediaType.APPLICATION_JSON)
            .bodyValue(deleted));
}
// Verifies the delete handler propagates ExtensionNotFoundException and never
// calls update/delete when the extension is missing.
@Test
void shouldReturnErrorWhenExtensionNotFound() {
    var serverRequest = MockServerRequest.builder()
        .pathVariable("name", "my-fake")
        .build();
    when(client.get(FakeExtension.class, "my-fake")).thenReturn(
        Mono.error(
            new ExtensionNotFoundException(fromExtension(FakeExtension.class), "my-fake")));
    var scheme = Scheme.buildFromType(FakeExtension.class);
    var deleteHandler = new ExtensionDeleteHandler(scheme, client);
    var responseMono = deleteHandler.handle(serverRequest);
    StepVerifier.create(responseMono)
        .verifyError(ExtensionNotFoundException.class);
    verify(client, times(1)).get(same(FakeExtension.class), anyString());
    verify(client, times(0)).update(any());
    verify(client, times(0)).delete(any());
}
// Renders the event through the converter chain; an unstarted layout yields
// the empty string.
public String doLayout(ILoggingEvent event) {
    if (!isStarted()) {
        return CoreConstants.EMPTY_STRING;
    }
    return writeLoopOnConverters(event);
}
// Verifies %replace handles newline literals in both pattern and replacement.
@Test
public void replaceNewline() throws ScanException {
    String pattern = "%replace(A\nB){'\n', '\n\t'}";
    String substPattern = OptionHelper.substVars(pattern, null, loggerContext);
    assertEquals(pattern, substPattern);
    pl.setPattern(substPattern);
    pl.start();
    //StatusPrinter.print(lc);
    String val = pl.doLayout(makeLoggingEvent("", null));
    assertEquals("A\n\tB", val);
}
// Deprecated flatTransform overload without an explicit Named; generates a
// processor name and delegates to the Named variant.
@Override
@Deprecated
public <K1, V1> KStream<K1, V1> flatTransform(final org.apache.kafka.streams.kstream.TransformerSupplier<? super K, ? super V, Iterable<KeyValue<K1, V1>>> transformerSupplier,
                                              final String... stateStoreNames) {
    Objects.requireNonNull(transformerSupplier, "transformerSupplier can't be null");
    final String name = builder.newProcessorName(TRANSFORM_NAME);
    return flatTransform(transformerSupplier, Named.as(name), stateStoreNames);
}
// Verifies flatTransform rejects a supplier that returns the same transformer
// instance on every get().
@Test
@SuppressWarnings("deprecation")
public void shouldNotAllowBadTransformerSupplierOnFlatTransformWithStores() {
    final org.apache.kafka.streams.kstream.Transformer<String, String, Iterable<KeyValue<String, String>>> transformer =
        flatTransformerSupplier.get();
    final IllegalArgumentException exception = assertThrows(
        IllegalArgumentException.class,
        () -> testStream.flatTransform(() -> transformer, "storeName")
    );
    assertThat(exception.getMessage(), containsString("#get() must return a new object each time it is called."));
}
// Converts a raw MySQL column value (as a String) into a Java object for the
// given JDBC sqlType. Null — and empty strings for non-text types — map to
// null; zero-dates ("0000-00-00...") map to null; any conversion failure is
// logged and the raw string is returned as a fallback.
public static Object typeConvert(String tableName, String columnName, String value, int sqlType, String mysqlType) {
    if (value == null
        || (value.equals("") && !(isText(mysqlType) || sqlType == Types.CHAR || sqlType == Types.VARCHAR || sqlType == Types.LONGVARCHAR))) {
        return null;
    }
    try {
        Object res;
        switch (sqlType) {
            case Types.INTEGER:
                res = Integer.parseInt(value);
                break;
            case Types.SMALLINT:
                res = Short.parseShort(value);
                break;
            case Types.BIT:
            case Types.TINYINT:
                res = Byte.parseByte(value);
                break;
            case Types.BIGINT:
                // Unsigned bigint can exceed Long.MAX_VALUE, hence BigInteger.
                if (mysqlType.startsWith("bigint") && mysqlType.endsWith("unsigned")) {
                    res = new BigInteger(value);
                } else {
                    res = Long.parseLong(value);
                }
                break;
            // case Types.BIT:
            case Types.BOOLEAN:
                res = !"0".equals(value);
                break;
            case Types.DOUBLE:
            case Types.FLOAT:
                res = Double.parseDouble(value);
                break;
            case Types.REAL:
                res = Float.parseFloat(value);
                break;
            case Types.DECIMAL:
            case Types.NUMERIC:
                res = new BigDecimal(value);
                break;
            case Types.BINARY:
            case Types.VARBINARY:
            case Types.LONGVARBINARY:
            case Types.BLOB:
                // Binary payloads are transported as Latin-1 strings.
                res = value.getBytes("ISO-8859-1");
                break;
            case Types.DATE:
                if (!value.startsWith("0000-00-00")) {
                    java.util.Date date = Util.parseDate(value);
                    if (date != null) {
                        res = new Date(date.getTime());
                    } else {
                        res = null;
                    }
                } else {
                    res = null;
                }
                break;
            case Types.TIME: {
                java.util.Date date = Util.parseDate(value);
                if (date != null) {
                    res = new Time(date.getTime());
                } else {
                    res = null;
                }
                break;
            }
            case Types.TIMESTAMP:
                if (!value.startsWith("0000-00-00")) {
                    java.util.Date date = Util.parseDate(value);
                    if (date != null) {
                        res = new Timestamp(date.getTime());
                    } else {
                        res = null;
                    }
                } else {
                    res = null;
                }
                break;
            case Types.CLOB:
            default:
                // CLOB and any unrecognized type pass through as the string.
                res = value;
                break;
        }
        return res;
    } catch (Exception e) {
        logger.error("table: {} column: {}, failed convert type {} to {}", tableName, columnName, value, sqlType);
        return value;
    }
}
// Verifies a VARBINARY (-4) value converts to its ISO-8859-1 bytes.
@Test
public void typeConvertInputNotNullNotNullNotNullNegativeNotNullOutput3() {
    // Arrange
    final String tableName = "foo";
    final String columnName = "foo";
    final String value = "foo";
    final int sqlType = -4;
    final String mysqlType = "foo";
    // Act
    final Object actual = JdbcTypeUtil.typeConvert(tableName, columnName, value, sqlType, mysqlType);
    // Assert result
    Assert.assertArrayEquals(new byte[] {(byte)102, (byte)111, (byte)111}, ((byte[])actual));
}
// Builds a lambda expression "resultMap -> { ... }" whose body evaluates the
// PredictorTerm; any failure is wrapped in KiePMMLInternalException.
static LambdaExpr getPredictorTermFunction(final PredictorTerm predictorTerm) {
    try {
        LambdaExpr toReturn = new LambdaExpr();
        toReturn.setParameters(NodeList.nodeList(new Parameter(new UnknownType(), "resultMap")));
        final BlockStmt body = getPredictorTermBody(predictorTerm);
        toReturn.setBody(body);
        return toReturn;
    } catch (Exception e) {
        throw new KiePMMLInternalException(String.format("Failed to get PredictorTermFunction for %s", predictorTerm), e);
    }
}
// Verifies the generated lambda matches the expected parsed expression template.
@Test
void getPredictorTermFunction() throws IOException {
    String predictorName = "predictorName";
    double coefficient = 23.12;
    String fieldRef = "fieldRef";
    PredictorTerm predictorTerm = PMMLModelTestUtils.getPredictorTerm(predictorName, coefficient, Collections.singletonList(fieldRef));
    LambdaExpr retrieved = KiePMMLRegressionTableFactory.getPredictorTermFunction(predictorTerm);
    String text = getFileContent(TEST_07_SOURCE);
    Expression expected = JavaParserUtils.parseExpression(String.format(text, fieldRef, coefficient));
    assertThat(JavaParserUtils.equalsNode(expected, retrieved)).isTrue();
}