focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
public static TableUpdateArbitrator create(UpdateContext context) { String location = context.table.getTableLocation(); URI uri = URI.create(location); String scheme = Optional.ofNullable(uri.getScheme()).orElse("").toUpperCase(Locale.ROOT); if (!SUPPORTED_SCHEMA.contains(scheme)) { return null; } Pair key = Pair.create(context.table.getType(), scheme); TableUpdateArbitrator arbitrator = Preconditions.checkNotNull(TRAITS_TABLE.get(key), String.format("table type:%s, schema:%s not supported in update arbitrator's traits", context.table.getType(), uri.getScheme())).get(); arbitrator.table = context.table; arbitrator.partitionLimit = context.partitionLimit; arbitrator.partitionNames = context.partitionNames; return arbitrator; }
@Test public void testHiveOnOss() { String location = "oss://bucket_name/lineorder_part"; HiveTable hiveTable = createHiveTable(location); List<String> partitionNames = Lists.newArrayList(); TableUpdateArbitrator.UpdateContext updateContext = new TableUpdateArbitrator.UpdateContext(hiveTable, -1, partitionNames); TableUpdateArbitrator arbitrator = TableUpdateArbitrator.create(updateContext); Assert.assertTrue(arbitrator instanceof ObjectBasedUpdateArbitrator); }
protected boolean addIfService(Object object) { if (object instanceof Service) { addService((Service) object); return true; } else { return false; } }
@Test (timeout = 10000) public void testAddIfService() { CompositeService testService = new CompositeService("TestService") { Service service; @Override public void serviceInit(Configuration conf) { Integer notAService = new Integer(0); assertFalse("Added an integer as a service", addIfService(notAService)); service = new AbstractService("Service") {}; assertTrue("Unable to add a service", addIfService(service)); } }; testService.init(new Configuration()); assertEquals("Incorrect number of services", 1, testService.getServices().size()); }
@Override public Map<String, Set<Integer>> assignBrokerSetsForUnresolvedBrokers(final ClusterModel clusterModel, final Map<String, Set<Integer>> existingBrokerSetMapping) { Set<Broker> allMappedBrokers = existingBrokerSetMapping.values() .stream() .flatMap(brokerIds -> brokerIds.stream()) .map(brokerId -> clusterModel.broker(brokerId)) .filter(Objects::nonNull) .collect(Collectors.toSet()); Set<Broker> extraBrokersInClusterModel = new HashSet<>(clusterModel.brokers()); extraBrokersInClusterModel.removeAll(allMappedBrokers); int numberOfBrokerSets = existingBrokerSetMapping.size(); List<String> brokerSetIds = new ArrayList<>(existingBrokerSetMapping.keySet()); Collections.sort(brokerSetIds); extraBrokersInClusterModel.stream().forEach(broker -> { String brokerSet = brokerSetIds.get(broker.id() % numberOfBrokerSets); Set<Integer> brokerIdsForBrokerSet = existingBrokerSetMapping.getOrDefault(brokerSet, new HashSet<>()); brokerIdsForBrokerSet.add(broker.id()); existingBrokerSetMapping.put(brokerSet, brokerIdsForBrokerSet); }); return existingBrokerSetMapping; }
@Test public void testBrokerSetAssignment() { // The cluster model has 6 brokers - 0,1,2,3,4,5 Map<Integer, String> brokers = Map.of(0, "", 1, "", 2, "", 3, "", 4, "", 5, ""); Map<String, Set<Integer>> mappedBrokers = new HashMap<>(); mappedBrokers.put("bs1", new HashSet<>(Arrays.asList(0))); mappedBrokers.put("bs2", new HashSet<>(Arrays.asList(1))); Map<String, Set<Integer>> mappedBrokersAfterAssignment = MODULO_BASED_BROKER_SET_ASSIGNMENT_POLICY.assignBrokerSetsForUnresolvedBrokers(brokers, mappedBrokers); assertNotNull(mappedBrokersAfterAssignment); assertEquals(mappedBrokers.get("bs1"), Set.of(0, 2, 4)); assertEquals(mappedBrokers.get("bs2"), Set.of(1, 3, 5)); }
@Override public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) throws SubCommandException { DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook); defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis())); try { defaultMQAdminExt.start(); String brokerAddr = commandLine.getOptionValue('b').trim(); ProducerTableInfo cc = defaultMQAdminExt.getAllProducerInfo(brokerAddr); if (cc != null && cc.getData() != null && !cc.getData().isEmpty()) { for (String group : cc.getData().keySet()) { List<ProducerInfo> list = cc.getData().get(group); if (list == null || list.isEmpty()) { System.out.printf("producer group (%s) instances are empty\n", group); continue; } for (ProducerInfo producer : list) { System.out.printf("producer group (%s) instance : %s\n", group, producer.toString()); } } } } catch (Exception e) { throw new SubCommandException(this.getClass().getSimpleName() + " command failed", e); } finally { defaultMQAdminExt.shutdown(); } }
@Test public void testExecute() throws SubCommandException { ProducerSubCommand cmd = new ProducerSubCommand(); Options options = ServerUtil.buildCommandlineOptions(new Options()); String[] subargs = new String[]{"-b 127.0.0.1:" + brokerMocker.listenPort()}; final CommandLine commandLine = ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs, cmd.buildCommandlineOptions(options), new DefaultParser()); cmd.execute(commandLine, options, null); }
@Override public CompletableFuture<RegistrationResponse> registerTaskExecutor( final TaskExecutorRegistration taskExecutorRegistration, final Time timeout) { CompletableFuture<TaskExecutorGateway> taskExecutorGatewayFuture = getRpcService() .connect( taskExecutorRegistration.getTaskExecutorAddress(), TaskExecutorGateway.class); taskExecutorGatewayFutures.put( taskExecutorRegistration.getResourceId(), taskExecutorGatewayFuture); return taskExecutorGatewayFuture.handleAsync( (TaskExecutorGateway taskExecutorGateway, Throwable throwable) -> { final ResourceID resourceId = taskExecutorRegistration.getResourceId(); if (taskExecutorGatewayFuture == taskExecutorGatewayFutures.get(resourceId)) { taskExecutorGatewayFutures.remove(resourceId); if (throwable != null) { return new RegistrationResponse.Failure(throwable); } else { return registerTaskExecutorInternal( taskExecutorGateway, taskExecutorRegistration); } } else { log.debug( "Ignoring outdated TaskExecutorGateway connection for {}.", resourceId.getStringWithMetadata()); return new RegistrationResponse.Failure( new FlinkException("Decline outdated task executor registration.")); } }, getMainThreadExecutor()); }
@Test void testHeartbeatTimeoutWithTaskExecutor() throws Exception { final ResourceID taskExecutorId = ResourceID.generate(); final CompletableFuture<ResourceID> heartbeatRequestFuture = new CompletableFuture<>(); final CompletableFuture<Exception> disconnectFuture = new CompletableFuture<>(); final CompletableFuture<ResourceID> stopWorkerFuture = new CompletableFuture<>(); final TaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setDisconnectResourceManagerConsumer(disconnectFuture::complete) .setHeartbeatResourceManagerFunction( resourceId -> { heartbeatRequestFuture.complete(resourceId); return FutureUtils.completedVoidFuture(); }) .createTestingTaskExecutorGateway(); rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway); runHeartbeatTimeoutTest( builder -> builder.withStopWorkerConsumer(stopWorkerFuture::complete), resourceManagerGateway -> registerTaskExecutor( resourceManagerGateway, taskExecutorId, taskExecutorGateway.getAddress()), resourceManagerResourceId -> { // might have been completed or not depending whether the timeout was triggered // first final ResourceID optionalHeartbeatRequestOrigin = heartbeatRequestFuture.getNow(null); assertThat(optionalHeartbeatRequestOrigin) .satisfiesAnyOf( resourceID -> assertThat(resourceID) .isEqualTo(resourceManagerResourceId), resourceID -> assertThat(resourceID).isNull()); assertThatFuture(disconnectFuture) .eventuallySucceeds() .isInstanceOf(TimeoutException.class); assertThatFuture(stopWorkerFuture) .eventuallySucceeds() .isEqualTo(taskExecutorId); }); }
@Override public void destroy() { decoder.destroy(); }
@Test public void testNotLeak() { final FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/", Unpooled.copiedBuffer("a=1&=2&b=3", CharsetUtil.US_ASCII)); try { assertThrows(HttpPostRequestDecoder.ErrorDataDecoderException.class, new Executable() { @Override public void execute() { new HttpPostStandardRequestDecoder(request).destroy(); } }); } finally { assertTrue(request.release()); } }
static <T> int[] getNullIndexes(T[] inputs) { int[] nullIndexes = new int[inputs.length]; int idx = 0; for (int i = 0; i < inputs.length; i++) { if (inputs[i] == null) { nullIndexes[idx++] = i; } } return Arrays.copyOf(nullIndexes, idx); }
@Test public void testGetNullIndexes() { byte[][] inputs = new byte[numInputs][]; inputs[0] = new byte[chunkSize]; inputs[1] = new byte[chunkSize]; for (int i = 2; i < 7; i++) { inputs[i] = null; } inputs[7] = new byte[chunkSize]; inputs[8] = new byte[chunkSize]; int[] nullIndexes = CoderUtil.getNullIndexes(inputs); assertEquals(2, nullIndexes[0]); assertEquals(3, nullIndexes[1]); assertEquals(4, nullIndexes[2]); assertEquals(5, nullIndexes[3]); assertEquals(6, nullIndexes[4]); }
public List<ExecutionVertexID> getExecutionVertexIds() { return executionVertexIds; }
@Test void testSerializeExecutionVertexResetEvent() throws IOException { JobVertexID jobVertexId = new JobVertexID(); ExecutionVertexID executionVertexId1 = new ExecutionVertexID(jobVertexId, 0); ExecutionVertexID executionVertexId2 = new ExecutionVertexID(jobVertexId, 1); ExecutionVertexID executionVertexId3 = new ExecutionVertexID(jobVertexId, 2); ExecutionVertexResetEvent event = new ExecutionVertexResetEvent( Arrays.asList(executionVertexId1, executionVertexId2, executionVertexId3)); final GenericJobEventSerializer serializer = new GenericJobEventSerializer(); byte[] binary = serializer.serialize(event); ExecutionVertexResetEvent deserializeEvent = (ExecutionVertexResetEvent) serializer.deserialize(serializer.getVersion(), binary); assertThat(event.getExecutionVertexIds().equals(deserializeEvent.getExecutionVertexIds())) .isTrue(); }
public static Ip4Address valueOf(int value) { byte[] bytes = ByteBuffer.allocate(INET_BYTE_LENGTH).putInt(value).array(); return new Ip4Address(bytes); }
@Test(expected = IllegalArgumentException.class) public void testInvalidValueOfShortArrayIPv4() { Ip4Address ipAddress; byte[] value; value = new byte[] {1, 2, 3}; ipAddress = Ip4Address.valueOf(value); }
@SuppressWarnings("MethodLength") public void onFragment(final DirectBuffer buffer, final int offset, final int length, final Header header) { messageHeaderDecoder.wrap(buffer, offset); final int templateId = messageHeaderDecoder.templateId(); final int schemaId = messageHeaderDecoder.schemaId(); if (schemaId != MessageHeaderDecoder.SCHEMA_ID) { if (listenerExtension != null) { listenerExtension.onExtensionMessage( messageHeaderDecoder.blockLength(), templateId, schemaId, messageHeaderDecoder.version(), buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, length - MessageHeaderDecoder.ENCODED_LENGTH); return; } throw new ClusterException("expected schemaId=" + MessageHeaderDecoder.SCHEMA_ID + ", actual=" + schemaId); } switch (templateId) { case SessionMessageHeaderDecoder.TEMPLATE_ID: { sessionMessageHeaderDecoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, messageHeaderDecoder.blockLength(), messageHeaderDecoder.version()); final long sessionId = sessionMessageHeaderDecoder.clusterSessionId(); if (sessionId == clusterSessionId) { listener.onMessage( sessionId, sessionMessageHeaderDecoder.timestamp(), buffer, offset + SESSION_HEADER_LENGTH, length - SESSION_HEADER_LENGTH, header); } break; } case SessionEventDecoder.TEMPLATE_ID: { sessionEventDecoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, messageHeaderDecoder.blockLength(), messageHeaderDecoder.version()); final long sessionId = sessionEventDecoder.clusterSessionId(); if (sessionId == clusterSessionId) { listener.onSessionEvent( sessionEventDecoder.correlationId(), sessionId, sessionEventDecoder.leadershipTermId(), sessionEventDecoder.leaderMemberId(), sessionEventDecoder.code(), sessionEventDecoder.detail()); } break; } case NewLeaderEventDecoder.TEMPLATE_ID: { newLeaderEventDecoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, messageHeaderDecoder.blockLength(), messageHeaderDecoder.version()); final long sessionId = newLeaderEventDecoder.clusterSessionId(); if 
(sessionId == clusterSessionId) { listener.onNewLeader( sessionId, newLeaderEventDecoder.leadershipTermId(), newLeaderEventDecoder.leaderMemberId(), newLeaderEventDecoder.ingressEndpoints()); } break; } case AdminResponseDecoder.TEMPLATE_ID: { adminResponseDecoder.wrap( buffer, offset + MessageHeaderDecoder.ENCODED_LENGTH, messageHeaderDecoder.blockLength(), messageHeaderDecoder.version()); final long sessionId = adminResponseDecoder.clusterSessionId(); if (sessionId == clusterSessionId) { final long correlationId = adminResponseDecoder.correlationId(); final AdminRequestType requestType = adminResponseDecoder.requestType(); final AdminResponseCode responseCode = adminResponseDecoder.responseCode(); final String message = adminResponseDecoder.message(); final int payloadOffset = adminResponseDecoder.offset() + AdminResponseDecoder.BLOCK_LENGTH + AdminResponseDecoder.messageHeaderLength() + message.length() + AdminResponseDecoder.payloadHeaderLength(); final int payloadLength = adminResponseDecoder.payloadLength(); listener.onAdminResponse( sessionId, correlationId, requestType, responseCode, message, buffer, payloadOffset, payloadLength); } break; } default: break; } }
@Test void onFragmentIsANoOpIfSessionIdDoesNotMatchOnSessionEvent() { final int offset = 8; final long clusterSessionId = 42; final long correlationId = 777; final long leadershipTermId = 6; final int leaderMemberId = 3; final EventCode eventCode = EventCode.REDIRECT; final int version = 18; final String eventDetail = "Event details"; sessionEventEncoder .wrapAndApplyHeader(buffer, offset, messageHeaderEncoder) .clusterSessionId(clusterSessionId) .correlationId(correlationId) .leadershipTermId(leadershipTermId) .leaderMemberId(leaderMemberId) .code(eventCode) .version(version) .detail(eventDetail); final EgressListener egressListener = mock(EgressListener.class); final Header header = new Header(0, 0); final EgressAdapter adapter = new EgressAdapter( egressListener, clusterSessionId + 1, mock(Subscription.class), 3); adapter.onFragment(buffer, offset, sessionEventEncoder.encodedLength(), header); verifyNoInteractions(egressListener); }
@ExecuteOn(TaskExecutors.IO) @Post(uri = "/trigger/{namespace}/{id}", consumes = MediaType.MULTIPART_FORM_DATA) @Operation(tags = {"Executions"}, summary = "Trigger a new execution for a flow") @ApiResponse(responseCode = "409", description = "if the flow is disabled") @SingleResult @Deprecated public Publisher<Execution> trigger( @Parameter(description = "The flow namespace") @PathVariable String namespace, @Parameter(description = "The flow id") @Nullable @PathVariable String id, @Parameter(description = "The inputs") @Nullable @Body MultipartBody inputs, @Parameter(description = "The labels as a list of 'key:value'") @Nullable @QueryValue @Format("MULTI") List<String> labels, @Parameter(description = "If the server will wait the end of the execution") @QueryValue(defaultValue = "false") Boolean wait, @Parameter(description = "The flow revision or latest if null") @QueryValue Optional<Integer> revision ) throws IOException { return this.create(namespace, id, inputs, labels, wait, revision); }
@Test void trigger() { Execution result = triggerInputsFlowExecution(false); assertThat(result.getState().getCurrent(), is(State.Type.CREATED)); assertThat(result.getFlowId(), is("inputs")); assertThat(result.getInputs().get("float"), is(42.42)); assertThat(result.getInputs().get("file").toString(), startsWith("kestra:///io/kestra/tests/inputs/executions/")); assertThat(result.getInputs().get("file").toString(), startsWith("kestra:///io/kestra/tests/inputs/executions/")); assertThat(result.getInputs().containsKey("bool"), is(true)); assertThat(result.getInputs().get("bool"), nullValue()); assertThat(result.getLabels().size(), is(5)); assertThat(result.getLabels().getFirst(), is(new Label("flow-label-1", "flow-label-1"))); assertThat(result.getLabels().get(1), is(new Label("flow-label-2", "flow-label-2"))); assertThat(result.getLabels().get(2), is(new Label("a", "label-1"))); assertThat(result.getLabels().get(3), is(new Label("b", "label-2"))); assertThat(result.getLabels().get(4), is(new Label("url", URL_LABEL_VALUE))); var notFound = assertThrows(HttpClientResponseException.class, () -> client.toBlocking().exchange( HttpRequest .POST("/api/v1/executions/foo/bar", createInputsFlowBody()) .contentType(MediaType.MULTIPART_FORM_DATA_TYPE), HttpResponse.class )); assertThat(notFound.getStatus(), is(HttpStatus.NOT_FOUND)); }
public boolean containServiceMetadata(Service service) { return serviceMetadataMap.containsKey(service); }
@Test void testContainServiceMetadata() { boolean result = namingMetadataManager.containServiceMetadata(service); assertTrue(result); }
@Override public String doLayout(ILoggingEvent event) { StringWriter output = new StringWriter(); try (JsonWriter json = new JsonWriter(output)) { json.beginObject(); if (!"".equals(nodeName)) { json.name("nodename").value(nodeName); } json.name("process").value(processKey); for (Map.Entry<String, String> entry : event.getMDCPropertyMap().entrySet()) { if (entry.getValue() != null && !exclusions.contains(entry.getKey())) { json.name(entry.getKey()).value(entry.getValue()); } } json .name("timestamp").value(DATE_FORMATTER.format(Instant.ofEpochMilli(event.getTimeStamp()))) .name("severity").value(event.getLevel().toString()) .name("logger").value(event.getLoggerName()) .name("message").value(NEWLINE_REGEXP.matcher(event.getFormattedMessage()).replaceAll("\r")); IThrowableProxy tp = event.getThrowableProxy(); if (tp != null) { json.name("stacktrace").beginArray(); int nbOfTabs = 0; while (tp != null) { printFirstLine(json, tp, nbOfTabs); render(json, tp, nbOfTabs); tp = tp.getCause(); nbOfTabs++; } json.endArray(); } json.endObject(); } catch (Exception e) { e.printStackTrace(); throw new IllegalStateException("BUG - fail to create JSON", e); } output.write(System.lineSeparator()); return output.toString(); }
@Test public void test_log_with_message_arguments() { LoggingEvent event = new LoggingEvent("org.foundation.Caller", (Logger) LoggerFactory.getLogger("the.logger"), Level.WARN, "the {}", null, new Object[] {"message"}); String log = underTest.doLayout(event); JsonLog json = new Gson().fromJson(log, JsonLog.class); assertThat(json.process).isEqualTo("web"); assertThat(json.timestamp).isEqualTo(DATE_FORMATTER.format(Instant.ofEpochMilli(event.getTimeStamp()))); assertThat(json.severity).isEqualTo("WARN"); assertThat(json.logger).isEqualTo("the.logger"); assertThat(json.message).isEqualTo("the message"); assertThat(json.stacktrace).isNull(); assertThat(json.fromMdc).isNull(); }
protected Object updateSchemaIn(Object keyOrValue, Schema updatedSchema) { if (keyOrValue instanceof Struct) { Struct origStruct = (Struct) keyOrValue; Struct newStruct = new Struct(updatedSchema); for (Field field : updatedSchema.fields()) { // assume both schemas have exact same fields with same names and schemas ... newStruct.put(field, getFieldValue(origStruct, field)); } return newStruct; } return keyOrValue; }
@Test public void updateSchemaOfNull() { Object updatedValue = xform.updateSchemaIn(null, Schema.INT32_SCHEMA); assertNull(updatedValue); }
@Override public boolean equals(Object obj) { if (obj == null || !(obj instanceof VulnerableSoftware)) { return false; } if (this == obj) { return true; } final VulnerableSoftware rhs = (VulnerableSoftware) obj; return new EqualsBuilder() .appendSuper(super.equals(obj)) .append(versionEndExcluding, rhs.versionEndExcluding) .append(versionEndIncluding, rhs.versionEndIncluding) .append(versionStartExcluding, rhs.versionStartExcluding) .append(versionStartIncluding, rhs.versionStartIncluding) .isEquals(); }
@Test public void testEquals() throws CpeValidationException { VulnerableSoftwareBuilder builder = new VulnerableSoftwareBuilder(); VulnerableSoftware obj = null; VulnerableSoftware instance = builder.part(Part.APPLICATION).vendor("mortbay").product("jetty").version("6.1").build(); assertFalse(instance.equals(obj)); obj = builder.part(Part.APPLICATION).vendor("mortbay").product("jetty").version("6.1.0").build(); instance = builder.part(Part.APPLICATION).vendor("mortbay").product("jetty").version("6.1").build(); assertFalse(instance.equals(obj)); obj = builder.part(Part.APPLICATION).vendor("mortbay").product("jetty").version("6.1.0").build(); instance = builder.part(Part.APPLICATION).vendor("mortbay").product("jetty").version("6.1.0").build(); assertTrue(instance.equals(obj)); }
public static void getSemanticPropsSingleFromString( SingleInputSemanticProperties result, String[] forwarded, String[] nonForwarded, String[] readSet, TypeInformation<?> inType, TypeInformation<?> outType) { getSemanticPropsSingleFromString( result, forwarded, nonForwarded, readSet, inType, outType, false); }
@Test void testNonForwardedNone() { String[] nonForwardedFields = {""}; SingleInputSemanticProperties sp = new SingleInputSemanticProperties(); SemanticPropUtil.getSemanticPropsSingleFromString( sp, null, nonForwardedFields, null, threeIntTupleType, threeIntTupleType); assertThat(sp.getForwardingTargetFields(0, 0)).contains(0); assertThat(sp.getForwardingTargetFields(0, 1)).contains(1); assertThat(sp.getForwardingTargetFields(0, 2)).contains(2); }
public void setExposureConfig(SAExposureConfig exposureConfig) { this.exposureConfig = exposureConfig; }
@Test public void setExposureConfig() { SAExposureData exposureData = new SAExposureData("ExposeEvent"); exposureData.setExposureConfig(new SAExposureConfig(1, 1,false)); Assert.assertNotNull(exposureData.getExposureConfig()); }
@CheckForNull public String get() { // branches will be empty in CE if (branchConfiguration.isPullRequest() || branches.isEmpty()) { return null; } return Optional.ofNullable(getFromProperties()).orElseGet(this::loadWs); }
@Test public void get_returns_null_if_pull_request() { when(branchConfiguration.isPullRequest()).thenReturn(true); assertThat(referenceBranchSupplier.get()).isNull(); verify(branchConfiguration).isPullRequest(); verifyNoInteractions(newCodePeriodLoader); verifyNoMoreInteractions(branchConfiguration); }
public void removeDataByIndex(int index) { scesimData.remove(index); }
@Test public void removeDataByIndex() { final Scenario dataByIndex = model.getDataByIndex(3); model.removeDataByIndex(3); assertThat(model.scesimData).hasSize(SCENARIO_DATA - 1).doesNotContain(dataByIndex); }
public String substring(final int beginIndex) { split(); final int beginChar = splitted.get(beginIndex); return input.substring(beginChar); }
@Test public void testSubstringGrapheme2() { final UnicodeHelper lh = new UnicodeHelper("a", Method.GRAPHEME); assertEquals("a", lh.substring(0, 1)); final UnicodeHelper lh2 = new UnicodeHelper(new String(Character.toChars(0x1f600)), Method.GRAPHEME); assertEquals(new String(Character.toChars(0x1f600)), lh2.substring(0, 1)); final UnicodeHelper lh3 = new UnicodeHelper(UCSTR, Method.GRAPHEME); assertEquals(UCSTR, lh3.substring(0, 1)); final UnicodeHelper lh4 = new UnicodeHelper("a" + UCSTR + "A", Method.GRAPHEME); assertEquals("a", lh4.substring(0, 1)); assertEquals(UCSTR, lh4.substring(1, 2)); assertEquals("A", lh4.substring(2, 3)); assertEquals("a" + UCSTR, lh4.substring(0, 2)); final UnicodeHelper lh5 = new UnicodeHelper("k\u035fh", Method.GRAPHEME); assertEquals("k\u035f", lh5.substring(0, 1)); assertEquals("h", lh5.substring(1, 2)); }
public String getName() { return name; }
@Test public void getServiceByName() { Optional<Service> serviceOptional = Optional.of(newService()); when(serviceRepositoryMock.findByName(anyString())).thenReturn(serviceOptional); Service result = serviceServiceMock.getServiceByName("service"); verify(serviceRepositoryMock, times(1)).findByName(anyString()); assertEquals(serviceOptional.get().getName(), result.getName()); }
@Override public void run() { SecurityManager sm = System.getSecurityManager(); if (!(sm instanceof SelSecurityManager)) { throw new IllegalStateException("Invalid security manager: " + sm); } Thread.currentThread().setContextClassLoader(this.classLoader); ((SelSecurityManager) sm).setAccessControl(this.acc); super.run(); }
@Test(expected = IllegalStateException.class) public void testRunWithoutSecurityManager() { t1.run(); }
public static CommonCertificateVerifier getVerifier(boolean isIndividual) { if (isIndividual) { if (individualVerifier == null) { individualVerifier = loadClass(System.getProperty("INDIVIDUAL_CERT_VERIFIER_CLASS_NAME")); } return individualVerifier; } else { if (corporateVerifier == null) { corporateVerifier = loadClass(System.getProperty("CORPORATE_CERT_VERIFIER_CLASS_NAME")); } return corporateVerifier; } }
@Test public void testGetVerifierReturnsSameInstance() throws Exception { Field singleton = CertificateVerifierHolder.class.getDeclaredField("individualVerifier"); singleton.setAccessible(true); singleton.set(null, null); String verifierClassName = CustomIndividualCertificateVerifier.class.getName(); System.setProperty("INDIVIDUAL_CERT_VERIFIER_CLASS_NAME", verifierClassName); CommonCertificateVerifier verifier = CertificateVerifierHolder.getVerifier(true); CommonCertificateVerifier anotherVerifier = CertificateVerifierHolder.getVerifier(true); assertThat(verifier).isEqualTo(anotherVerifier); assertThat(verifier).isInstanceOf(CustomIndividualCertificateVerifier.class); assertThat(anotherVerifier).isInstanceOf(CustomIndividualCertificateVerifier.class); }
protected static void parseSingleParam(String single, CommandRequest request) { if (single == null || single.length() < 3) { return; } int index = single.indexOf('='); if (index <= 0 || index >= single.length() - 1) { // empty key/val or nothing found return; } String value = StringUtil.trim(single.substring(index + 1)); String key = StringUtil.trim(single.substring(0, index)); try { key = URLDecoder.decode(key, SentinelConfig.charset()); value = URLDecoder.decode(value, SentinelConfig.charset()); } catch (UnsupportedEncodingException e) { } request.addParam(key, value); }
@Test public void parseSingleParam() { CommandRequest request; request = new CommandRequest(); HttpEventTask.parseSingleParam(null, request); assertEquals(0, request.getParameters().size()); request = new CommandRequest(); HttpEventTask.parseSingleParam("", request); assertEquals(0, request.getParameters().size()); request = new CommandRequest(); HttpEventTask.parseSingleParam("a", request); assertEquals(0, request.getParameters().size()); request = new CommandRequest(); HttpEventTask.parseSingleParam("=", request); assertEquals(0, request.getParameters().size()); request = new CommandRequest(); HttpEventTask.parseSingleParam("a=", request); assertEquals(0, request.getParameters().size()); request = new CommandRequest(); HttpEventTask.parseSingleParam("=a", request); assertEquals(0, request.getParameters().size()); request = new CommandRequest(); HttpEventTask.parseSingleParam("test=", request); assertEquals(0, request.getParameters().size()); request = new CommandRequest(); HttpEventTask.parseSingleParam("=test", request); assertEquals(0, request.getParameters().size()); request = new CommandRequest(); HttpEventTask.parseSingleParam("a=1", request); assertEquals(1, request.getParameters().size()); assertEquals("1", request.getParam("a")); request = new CommandRequest(); HttpEventTask.parseSingleParam("a_+=1+", request); assertEquals(1, request.getParameters().size()); assertEquals("1 ", request.getParam("a_ ")); }
@Override public Processor<KIn, VIn, KIn, Change<VIn>> get() { return new KTableSourceProcessor(); }
@Test public void testValueGetter() { final StreamsBuilder builder = new StreamsBuilder(); final String topic1 = "topic1"; @SuppressWarnings("unchecked") final KTableImpl<String, String, String> table1 = (KTableImpl<String, String, String>) builder.table(topic1, stringConsumed, Materialized.as("store")); final Topology topology = builder.build(); final KTableValueGetterSupplier<String, String> getterSupplier1 = table1.valueGetterSupplier(); final InternalTopologyBuilder topologyBuilder = TopologyWrapper.getInternalTopologyBuilder(topology); topologyBuilder.connectProcessorAndStateStores(table1.name, getterSupplier1.storeNames()); try (final TopologyTestDriverWrapper driver = new TopologyTestDriverWrapper(builder.build(), props)) { final TestInputTopic<String, String> inputTopic1 = driver.createInputTopic( topic1, new StringSerializer(), new StringSerializer(), Instant.ofEpochMilli(0L), Duration.ZERO ); final KTableValueGetter<String, String> getter1 = getterSupplier1.get(); getter1.init(driver.setCurrentNodeForProcessorContext(table1.name)); inputTopic1.pipeInput("A", "01", 10L); inputTopic1.pipeInput("B", "01", 20L); inputTopic1.pipeInput("C", "01", 15L); assertEquals(ValueAndTimestamp.make("01", 10L), getter1.get("A")); assertEquals(ValueAndTimestamp.make("01", 20L), getter1.get("B")); assertEquals(ValueAndTimestamp.make("01", 15L), getter1.get("C")); inputTopic1.pipeInput("A", "02", 30L); inputTopic1.pipeInput("B", "02", 5L); assertEquals(ValueAndTimestamp.make("02", 30L), getter1.get("A")); assertEquals(ValueAndTimestamp.make("02", 5L), getter1.get("B")); assertEquals(ValueAndTimestamp.make("01", 15L), getter1.get("C")); inputTopic1.pipeInput("A", "03", 29L); assertEquals(ValueAndTimestamp.make("03", 29L), getter1.get("A")); assertEquals(ValueAndTimestamp.make("02", 5L), getter1.get("B")); assertEquals(ValueAndTimestamp.make("01", 15L), getter1.get("C")); inputTopic1.pipeInput("A", null, 50L); inputTopic1.pipeInput("B", null, 3L); assertNull(getter1.get("A")); 
assertNull(getter1.get("B")); assertEquals(ValueAndTimestamp.make("01", 15L), getter1.get("C")); } }
public static SerializableFunction<BigDecimal, ? extends Number> forSqlType(TypeName typeName) { if (!CONVERTER_MAP.containsKey(typeName)) { throw new UnsupportedOperationException( "Conversion from " + typeName + " to BigDecimal is not supported"); } return CONVERTER_MAP.get(typeName); }
@Test public void testReturnsConverterForNumericTypes() { for (TypeName numericType : TypeName.NUMERIC_TYPES) { SerializableFunction<BigDecimal, ? extends Number> converter = BigDecimalConverter.forSqlType(numericType); assertNotNull(converter); assertNotNull(converter.apply(BigDecimal.TEN)); } }
public int getNumberOfDecorators() { return numberOfDecorators; }
@Test public void decoratorsCounts() { for (int i = 0; i < decoratorsLines.length; i++) { Decorators decorators = new Decorators(decoratorsLines[i]); assertEquals(i, decorators.getNumberOfDecorators()); } }
public UnitOfWorkAspect newAspect() { return new UnitOfWorkAspect(sessionFactories); }
@Test void testNewAspect() { final UnitOfWorkAwareProxyFactory unitOfWorkAwareProxyFactory = new UnitOfWorkAwareProxyFactory("default", sessionFactory); Map<String, SessionFactory> sessionFactories = Collections.singletonMap("default", sessionFactory); UnitOfWorkAspect aspect1 = unitOfWorkAwareProxyFactory.newAspect(sessionFactories); UnitOfWorkAspect aspect2 = unitOfWorkAwareProxyFactory.newAspect(sessionFactories); assertThat(aspect1).isNotSameAs(aspect2); }
@Override public double estimateRowCount(RelMetadataQuery mq) { BeamTableStatistics rowCountStatistics = calciteTable.getStatistic(); if (beamTable.isBounded() == PCollection.IsBounded.BOUNDED) { return rowCountStatistics.getRowCount(); } else { return rowCountStatistics.getRate(); } }
@Test public void unboundedRowCount() { String sql = "SELECT * FROM ORDER_DETAILS_UNBOUNDED"; RelNode root = env.parseQuery(sql); while (!(root instanceof BeamIOSourceRel)) { root = root.getInput(0); } Assert.assertEquals(2d, root.estimateRowCount(RelMetadataQuery.instance()), 0.001); }
/**
 * Searches a sorted int array in [fromIndex, toIndex) for {@code key}, falling back to
 * linear search for small key distances and binary search for small index ranges.
 * NOTE(review): on {@code key == lowVal} this returns {@code low + 1} — presumably the
 * insertion point *after* the match, matching the other search variants; confirm against
 * linearSearch/binarySearch contracts.
 */
public static int interpolationSearch(int[] a, int fromIndex, int toIndex, int key) {
    int low = fromIndex;
    int lowVal = a[low];
    // Key is numerically close to the low end: linear scan is cheapest.
    if (key - lowVal < LINEAR_SEARCH_THRESHOLD_2) {
        return linearSearch(a, low, toIndex, key);
    }
    int high = toIndex - 1;
    int diff = high - low;
    // Small range: plain binary search.
    if (diff <= BINARY_SEARCH_THRESHOLD) {
        return binarySearch(a, low, toIndex, key);
    }
    int highVal = a[high];
    do {
        if (key == lowVal) {
            return low + 1;
        }
        if (key >= highVal) {
            return high + 1;
        }
        // Interpolated guess of the key's position; long arithmetic avoids overflow.
        int mean = (int) (diff * (long) (key - lowVal) / (highVal - lowVal));
        // Probe a small window (diff/16 wide) around the guess instead of a single point.
        int eps = diff >>> 4;
        int lowMid = low + Math.max(0, mean - eps);
        int highMid = low + Math.min(diff, mean + eps);
        assert lowMid <= highMid;
        assert lowMid >= low;
        assert highMid <= high;
        if (a[lowMid] > key) {
            high = lowMid;
            highVal = a[lowMid];
        } else if (a[highMid] <= key) {
            low = highMid;
            lowVal = a[highMid];
        } else {
            // Key is inside the probe window: narrow to it.
            low = lowMid;
            lowVal = a[lowMid];
            high = highMid;
            highVal = a[highMid];
        }
        assert low <= high;
        diff = high - low;
    } while (diff >= BINARY_SEARCH_THRESHOLD);
    return binarySearch(a, low, high + 1, key);
}
// Exercises the interpolation path on a large even-numbers array, including values
// just below, at, and just above an element boundary.
@Test
void require_that_search_is_correct_for_large_arrays() {
    int length = 10000;
    int[] values = new int[length];
    for (int i = 0; i < length; i++) {
        values[i] = 2 * i;
    }
    assertEquals(1, PostingListSearch.interpolationSearch(values, 1, length, 0));
    assertEquals(1227, PostingListSearch.interpolationSearch(values, 1, length, 2452));
    assertEquals(1227, PostingListSearch.interpolationSearch(values, 1, length, 2453));
    assertEquals(1228, PostingListSearch.interpolationSearch(values, 1, length, 2454));
}
/** Delegates to the wrapped file status for the file length. */
@Override
public long getLen() {
    return myFs.getLen();
}
// Round-trips a ViewFileSystem FileStatus through Writable serialization and checks
// length and erasure-coding flags survive.
@Test
public void testFileStatusSerialziation() throws IOException, URISyntaxException {
    String testfilename = "testFileStatusSerialziation";
    TEST_DIR.mkdirs();
    File infile = new File(TEST_DIR, testfilename);
    final byte[] content = "dingos".getBytes();
    try (FileOutputStream fos = new FileOutputStream(infile)) {
        fos.write(content);
    }
    assertEquals((long)content.length, infile.length());
    Configuration conf = new Configuration();
    ConfigUtil.addLink(conf, "/foo/bar/baz", TEST_DIR.toURI());
    try (FileSystem vfs = FileSystem.get(FsConstants.VIEWFS_URI, conf)) {
        assertEquals(ViewFileSystem.class, vfs.getClass());
        Path path = new Path("/foo/bar/baz", testfilename);
        FileStatus stat = vfs.getFileStatus(path);
        assertEquals(content.length, stat.getLen());
        ContractTestUtils.assertNotErasureCoded(vfs, path);
        assertTrue(path + " should have erasure coding unset in " +
            "FileStatus#toString(): " + stat,
            stat.toString().contains("isErasureCoded=false"));
        // check serialization/deserialization
        DataOutputBuffer dob = new DataOutputBuffer();
        stat.write(dob);
        DataInputBuffer dib = new DataInputBuffer();
        dib.reset(dob.getData(), 0, dob.getLength());
        FileStatus deSer = new FileStatus();
        deSer.readFields(dib);
        assertEquals(content.length, deSer.getLen());
        assertFalse(deSer.isErasureCoded());
    }
}
/**
 * Assembles the client-facing configuration from feature switches and the two
 * numeric settings fetched from the shared-service client.
 *
 * @throws SharedServiceClientException if a shared-service config value cannot be read
 */
public ConfigResponse getConfig() throws SharedServiceClientException {
    final ConfigResponse response = new ConfigResponse();
    // Feature switches.
    response.setDigidAppSwitchEnabled(switchService.digidAppSwitchEnabled());
    response.setDigidRdaEnabled(switchService.digidRdaSwitchEnabled());
    response.setRequestStationEnabled(switchService.digidRequestStationEnabled());
    response.setEhaEnabled(switchService.digidEhaEnabled());
    // Numeric settings from the shared service.
    final int letterRequestDelay = sharedServiceClient.getSSConfigInt("snelheid_aanvragen_digid_app");
    response.setLetterRequestDelay(letterRequestDelay);
    final int maxPinChangesPerDay = sharedServiceClient.getSSConfigInt("change_app_pin_maximum_per_day");
    response.setMaxPinChangePerDay(maxPinChangesPerDay);
    return response;
}
// Stubs switches and shared-service values, then checks they are copied into the response.
@Test
void getConfigTest() throws SharedServiceClientException {
    when(sharedServiceClient.getSSConfigInt("change_app_pin_maximum_per_day")).thenReturn(2);
    when(sharedServiceClient.getSSConfigInt("snelheid_aanvragen_digid_app")).thenReturn(1);
    when(switchService.digidRdaSwitchEnabled()).thenReturn(true);
    when(switchService.digidRequestStationEnabled()).thenReturn(false);
    when(switchService.digidEhaEnabled()).thenReturn(true);
    ConfigResponse configResponse = configService.getConfig();
    Assertions.assertEquals(1, configResponse.getLetterRequestDelay());
    Assertions.assertEquals(2, configResponse.getMaxPinChangePerDay());
    Assertions.assertTrue(configResponse.isEhaEnabled());
}
/**
 * Validates that the value is a boolean literal ("true" or "false", case-insensitive).
 *
 * @param value the attribute value to check; must not be null
 * @throws RuntimeException if the value is not a boolean literal
 */
@Override
public void verify(String value) {
    checkNotNull(value);
    final boolean isBooleanLiteral =
            "true".equalsIgnoreCase(value) || "false".equalsIgnoreCase(value);
    if (!isBooleanLiteral) {
        throw new RuntimeException("boolean attribute format is wrong.");
    }
}
// Both boolean literals must pass without throwing.
@Test
public void testVerify_ValidValue_NoExceptionThrown() {
    booleanAttribute.verify("true");
    booleanAttribute.verify("false");
}
@NonNull static String getImageUrl(List<FastDocumentFile> files, Uri folderUri) { // look for special file names for (String iconLocation : PREFERRED_FEED_IMAGE_FILENAMES) { for (FastDocumentFile file : files) { if (iconLocation.equals(file.getName())) { return file.getUri().toString(); } } } // use the first image in the folder if existing for (FastDocumentFile file : files) { String mime = file.getType(); if (mime != null && (mime.startsWith("image/jpeg") || mime.startsWith("image/png"))) { return file.getUri().toString(); } } // use default icon as fallback return Feed.PREFIX_GENERATIVE_COVER + folderUri; }
// A folder without cover files must yield a generated-cover image URL.
@Test
public void testUpdateFeed_FeedIconDefault() {
    callUpdateFeed(LOCAL_FEED_DIR1);
    Feed feedAfter = verifySingleFeedInDatabase();
    assertThat(feedAfter.getImageUrl(), startsWith(Feed.PREFIX_GENERATIVE_COVER));
}
/**
 * Applies rate limiting to the request. Builds the limiter key from the rule id plus
 * an optional resolver-derived suffix, asks Redis whether the request is allowed, and
 * either short-circuits with 429 or continues the chain, releasing the limiter's
 * tokens in a doFinally callback.
 */
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange, final ShenyuPluginChain chain,
                               final SelectorData selector, final RuleData rule) {
    RateLimiterHandle limiterHandle = RateLimiterPluginDataHandler.CACHED_HANDLE.get()
            .obtainHandle(CacheKeyUtils.INST.getKey(rule));
    // Optional per-request key suffix (e.g. per-IP) from the configured key resolver.
    String resolverKey = Optional.ofNullable(limiterHandle.getKeyResolverName())
            .flatMap(name -> Optional.of("-" + RateLimiterKeyResolverFactory.newInstance(name).resolve(exchange)))
            .orElse("");
    return redisRateLimiter.isAllowed(rule.getId() + resolverKey, limiterHandle)
            .flatMap(response -> {
                if (!response.isAllowed()) {
                    // Reject: set 429, notify the metrics consumer if present, emit error body.
                    exchange.getResponse().setStatusCode(HttpStatus.TOO_MANY_REQUESTS);
                    final Consumer<HttpStatusCode> consumer = exchange.getAttribute(Constants.METRICS_RATE_LIMITER);
                    Optional.ofNullable(consumer).ifPresent(c -> c.accept(exchange.getResponse().getStatusCode()));
                    Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.TOO_MANY_REQUESTS);
                    return WebFluxResultUtils.result(exchange, error);
                }
                // Allowed: run the rest of the chain, then invoke the algorithm callback
                // regardless of how the downstream signal terminates.
                return chain.execute(exchange).doFinally(signalType -> {
                    RateLimiterAlgorithm<?> rateLimiterAlgorithm =
                            RateLimiterAlgorithmFactory.newInstance(limiterHandle.getAlgorithmName());
                    rateLimiterAlgorithm.callback(rateLimiterAlgorithm.getScript(), response.getKeys(), null);
                });
            });
}
// When the limiter allows the request, the plugin must complete without error.
@Test
public void doExecuteAllowedTest() {
    doExecutePreInit();
    when(redisRateLimiter.isAllowed(anyString(), any(RateLimiterHandle.class))).thenReturn(
            Mono.just(new RateLimiterResponse(true, 1, null)));
    Mono<Void> result = rateLimiterPlugin.doExecute(exchange, chain, selectorData, ruleData);
    StepVerifier.create(result).expectSubscription().verifyComplete();
}
/** Creates a ConfigModelId for the given tag name, using the default version 1. */
public static ConfigModelId fromName(String tagName) {
    return new ConfigModelId(tagName, new Version(1));
}
// Equality is by name: equal names compare equal (symmetrically), different names do not.
@Test
void test_equality() {
    ConfigModelId a1 = ConfigModelId.fromName("a");
    ConfigModelId a2 = ConfigModelId.fromName("a");
    ConfigModelId b = ConfigModelId.fromName("b");
    assertEquals(a1, a2);
    assertEquals(a2, a1);
    assertNotEquals(a1, b);
    assertNotEquals(a2, b);
    assertNotEquals(b, a1);
    assertNotEquals(b, a2);
    assertEquals(a1, a1);
    assertEquals(a2, a2);
    assertEquals(b, b);
}
/**
 * Builds the Pekko config for the given Flink configuration and optional external
 * address, using a fork-join executor configuration derived from the Flink settings.
 */
public static Config getConfig(
        Configuration configuration, @Nullable HostAndPort externalAddress) {
    return getConfig(
            configuration,
            externalAddress,
            null,
            PekkoUtils.getForkJoinExecutorConfig(
                    ActorSystemBootstrapTools.getForkJoinExecutorConfiguration(configuration)));
}
// With internal SSL enabled but no fingerprints configured, the custom SSL engine
// provider must be selected and the fingerprint list must be empty.
@Test
void getConfigSslEngineProviderWithoutCertFingerprint() {
    final Configuration configuration = new Configuration();
    configuration.set(SecurityOptions.SSL_INTERNAL_ENABLED, true);
    final Config config = PekkoUtils.getConfig(configuration, new HostAndPort("localhost", 31337));
    final Config sslConfig = config.getConfig("pekko.remote.classic.netty.ssl");
    assertThat(sslConfig.getString("ssl-engine-provider"))
        .isEqualTo("org.apache.flink.runtime.rpc.pekko.CustomSSLEngineProvider");
    assertThat(sslConfig.getStringList("security.cert-fingerprints")).isEmpty();
}
/**
 * Checks whether every quorum controller is ready for ZooKeeper metadata migration.
 *
 * @return {@code Optional.empty()} when migration may proceed; otherwise a
 *         human-readable reason why it cannot.
 */
public Optional<String> reasonAllControllersZkMigrationNotReady(
    MetadataVersion metadataVersion,
    Map<Integer, ControllerRegistration> controllers
) {
    if (!metadataVersion.isMigrationSupported()) {
        return Optional.of("The metadata.version too low at " + metadataVersion);
    } else if (!metadataVersion.isControllerRegistrationSupported()) {
        // Versions without controller registration have no per-controller state to check.
        return Optional.empty();
    }
    for (int quorumNodeId : quorumNodeIds) {
        ControllerRegistration registration = controllers.get(quorumNodeId);
        if (registration == null) {
            return Optional.of("No registration found for controller " + quorumNodeId);
        } else if (!registration.zkMigrationReady()) {
            return Optional.of("Controller " + quorumNodeId + " has not enabled " +
                "zookeeper.metadata.migration.enable");
        }
    }
    return Optional.empty();
}
// An empty registration map must report the first unregistered controller.
@Test
public void testZkMigrationNotReadyIfNotAllControllersRegistered() {
    assertEquals(Optional.of("No registration found for controller 0"),
        QUORUM_FEATURES.reasonAllControllersZkMigrationNotReady(
            MetadataVersion.IBP_3_7_IV0, Collections.emptyMap()));
}
public boolean checkReadyToExecuteFast() { KafkaRoutineLoadJob kafkaRoutineLoadJob = (KafkaRoutineLoadJob) job; for (Map.Entry<Integer, Long> entry : partitionIdToOffset.entrySet()) { int partitionId = entry.getKey(); Long consumeOffset = entry.getValue(); Long localLatestOffset = kafkaRoutineLoadJob.getPartitionOffset(partitionId); // If any partition has newer data, the task should be scheduled. if (localLatestOffset != null && localLatestOffset > consumeOffset) { return true; } } return false; }
// Partition 0's latest offset (101) is ahead of the task's consume offset (100),
// so the task must be ready to execute.
@Test
public void testCheckReadyToExecuteFast() {
    KafkaRoutineLoadJob kafkaRoutineLoadJob = new KafkaRoutineLoadJob();
    kafkaRoutineLoadJob.setPartitionOffset(0, 101);
    new MockUp<RoutineLoadMgr>() {
        @Mock
        public RoutineLoadJob getJob(long jobId) {
            return kafkaRoutineLoadJob;
        }
    };
    Map<Integer, Long> offset1 = Maps.newHashMap();
    offset1.put(0, 100L);
    KafkaTaskInfo kafkaTaskInfo = new KafkaTaskInfo(UUID.randomUUID(), kafkaRoutineLoadJob,
        System.currentTimeMillis(), System.currentTimeMillis(), offset1,
        Config.routine_load_task_timeout_second * 1000);
    Assert.assertTrue(kafkaTaskInfo.checkReadyToExecuteFast());
}
/** Returns the compute-engine task types this handler processes (defined by HANDLED_TYPES). */
@Override
public Set<String> getHandledCeTaskTypes() {
    return HANDLED_TYPES;
}
// The handler must handle exactly the AUDIT_PURGE task type.
@Test
public void getHandledCeTaskTypes() {
    Assertions.assertThat(underTest.getHandledCeTaskTypes()).containsExactly(AUDIT_PURGE);
}
/**
 * Builds a menu item for the entry: entries whose "allowed" attribute is explicitly
 * {@code Boolean.FALSE} are skipped; submenu entries get a submenu, others an action item.
 */
@Override
public void visit(Entry entry) {
    final Object allowed = entry.getAttribute("allowed");
    if (Boolean.FALSE.equals(allowed)) {
        // Explicitly disallowed: contribute nothing to the menu.
        return;
    }
    if (containsSubmenu(entry)) {
        addSubmenu(entry);
    } else {
        addActionItem(entry);
    }
}
// A selectable action entry must become a check-box menu item wrapping the original
// action and attached to the menu's popup.
@Test
public void createsMenuItemWithSelectableAction() {
    menuEntry.addChild(actionEntry);
    when(action.isSelectable()).thenReturn(true);
    new EntryAccessor().setComponent(menuEntry, menu);
    menuActionGroupBuilder.visit(actionEntry);
    JAutoCheckBoxMenuItem item = (JAutoCheckBoxMenuItem)new EntryAccessor().getComponent(actionEntry);
    final AccelerateableAction itemAction = (AccelerateableAction) item.getAction();
    assertThat(itemAction.getOriginalAction(), CoreMatchers.<Action> equalTo(action));
    assertThat(item.getParent(), CoreMatchers.equalTo(menu.getPopupMenu()));
}
/**
 * Runs the statistic on the visible graph, choosing the directed or undirected
 * view of the model according to the {@code isDirected} setting.
 */
@Override
public void execute(GraphModel graphModel) {
    final Graph visibleGraph = isDirected
            ? graphModel.getDirectedGraphVisible()
            : graphModel.getUndirectedGraphVisible();
    execute(visibleGraph);
}
// Running GraphDistance must create all four distance-related node columns,
// even on a single-node graph.
@Test
public void testColumnCreation() {
    GraphModel graphModel = GraphGenerator.generateNullUndirectedGraph(1);
    GraphDistance d = new GraphDistance();
    d.execute(graphModel);
    Assert.assertTrue(graphModel.getNodeTable().hasColumn(GraphDistance.BETWEENNESS));
    Assert.assertTrue(graphModel.getNodeTable().hasColumn(GraphDistance.ECCENTRICITY));
    Assert.assertTrue(graphModel.getNodeTable().hasColumn(GraphDistance.CLOSENESS));
    Assert.assertTrue(graphModel.getNodeTable().hasColumn(GraphDistance.HARMONIC_CLOSENESS));
}
/**
 * Fetches values for {@code key} within [timeFrom, timeTo] in reverse order, trying
 * each underlying store in turn and returning the first non-empty iterator. Empty
 * iterators are closed eagerly to avoid leaking store resources.
 *
 * @throws InvalidStateStoreException if an underlying store has been migrated away
 */
@Override
public WindowStoreIterator<V> backwardFetch(final K key,
                                            final Instant timeFrom,
                                            final Instant timeTo) throws IllegalArgumentException {
    Objects.requireNonNull(key, "key can't be null");
    final List<ReadOnlyWindowStore<K, V>> stores = provider.stores(storeName, windowStoreType);
    for (final ReadOnlyWindowStore<K, V> windowStore : stores) {
        try {
            final WindowStoreIterator<V> result = windowStore.backwardFetch(key, timeFrom, timeTo);
            if (!result.hasNext()) {
                result.close();
            } else {
                return result;
            }
        } catch (final InvalidStateStoreException e) {
            // Preserve the original exception as the cause instead of discarding it.
            throw new InvalidStateStoreException(
                "State store is not available anymore and may have been migrated to another instance; " +
                    "please re-discover its location from the state metadata.", e);
        }
    }
    return KeyValueIterators.emptyWindowStoreIterator();
}
// With no backing stores, backwardFetch yields an empty iterator whose next()
// must throw NoSuchElementException.
@Test
public void emptyBackwardIteratorNextShouldThrowNoSuchElementException() {
    final StateStoreProvider storeProvider = mock(StateStoreProvider.class);
    when(storeProvider.stores(anyString(), any())).thenReturn(emptyList());
    final CompositeReadOnlyWindowStore<Object, Object> store = new CompositeReadOnlyWindowStore<>(
        storeProvider,
        QueryableStoreTypes.windowStore(),
        "foo"
    );
    try (final WindowStoreIterator<Object> windowStoreIterator =
             store.backwardFetch("key", ofEpochMilli(1), ofEpochMilli(10))) {
        assertThrows(NoSuchElementException.class, windowStoreIterator::next);
    }
}
/**
 * Server-side copy via the B2 copy-file API. Reports the transfer length to the
 * listener, caches the new file id, and returns the target with fresh attributes.
 * B2 and I/O failures are mapped to background exceptions.
 */
@Override
public Path copy(final Path source, final Path target, final TransferStatus status,
                 final ConnectionCallback callback, final StreamListener listener) throws BackgroundException {
    try {
        final B2FileResponse response = session.getClient().copyFile(fileid.getVersionId(source),
            fileid.getVersionId(containerService.getContainer(target)),
            containerService.getKey(target));
        listener.sent(status.getLength());
        // Remember the freshly created version id for subsequent lookups.
        fileid.cache(target, response.getFileId());
        return target.withAttributes(new B2AttributesFinderFeature(session, fileid).toAttributes(response));
    }
    catch(B2ApiException e) {
        throw new B2ExceptionMappingService(fileid).map("Cannot copy {0}", e, source);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map("Cannot copy {0}", e, source);
    }
}
// Copies a freshly-touched file within the same bucket, checking preflight rejection,
// distinct version ids, and presence of both files before cleanup.
@Test
public void testCopy() throws Exception {
    final B2VersionIdProvider fileid = new B2VersionIdProvider(session);
    final Path container = new Path("test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final String name = new AlphanumericRandomStringService().random();
    final Path test = new B2TouchFeature(session, fileid)
        .touch(new Path(container, name, EnumSet.of(Path.Type.file)), new TransferStatus());
    assertTrue(new B2FindFeature(session, fileid).find(test));
    final B2CopyFeature feature = new B2CopyFeature(session, fileid);
    // Copying onto the container itself is unsupported.
    assertThrows(UnsupportedException.class, () -> feature.preflight(container, test));
    try {
        feature.preflight(container, test);
    }
    catch(UnsupportedException e) {
        assertEquals("Unsupported", e.getMessage());
        assertEquals("Cannot copy test-cyberduck.", e.getDetail(false));
    }
    final Path copy = feature.copy(test,
        new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)),
        new TransferStatus(), new DisabledConnectionCallback(), new DisabledStreamListener());
    assertNotEquals(test.attributes().getVersionId(), copy.attributes().getVersionId());
    assertTrue(new B2FindFeature(session, fileid).find(new Path(container, name, EnumSet.of(Path.Type.file))));
    assertTrue(new B2FindFeature(session, fileid).find(copy));
    new B2DeleteFeature(session, fileid).delete(Arrays.asList(test, copy),
        new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Returns build information. When property overrides are enabled the cached value
 * is bypassed and everything is re-parsed; otherwise the static cache is served.
 */
public static BuildInfo getBuildInfo() {
    if (Overrides.isEnabled()) {
        // never use cache when override is enabled -> we need to re-parse everything
        Overrides overrides = Overrides.fromProperties();
        return getBuildInfoInternalVersion(overrides);
    }
    return BUILD_INFO_CACHE;
}
// The short revision must be a prefix of the full commit id.
@Test
public void testCommitId() {
    BuildInfo buildInfo = BuildInfoProvider.getBuildInfo();
    String revision = buildInfo.getRevision();
    String commitId = buildInfo.getCommitId();
    assertTrue(commitId.startsWith(revision));
}
/** Imports all classes reachable from the given URL (delegates to {@code importUrls}). */
@PublicAPI(usage = ACCESS)
public JavaClasses importUrl(URL url) {
    return importUrls(singletonList(url));
}
// Importing the test-example package must fully resolve BaseClass's members,
// including calls made from its static initializer.
@Test
public void imports_base_class_in_class_hierarchy_correctly() {
    JavaClass baseClass = new ClassFileImporter()
        .importUrl(getClass().getResource("testexamples/classhierarchyimport")).get(BaseClass.class);
    assertThat(baseClass.getConstructors()).as("Constructors of " + BaseClass.class.getSimpleName()).hasSize(2);
    assertThat(baseClass.getFields()).as("Fields of " + BaseClass.class.getSimpleName()).hasSize(1);
    assertThat(baseClass.getMethods()).as("Methods of " + BaseClass.class.getSimpleName()).hasSize(2);
    assertThat(baseClass.getStaticInitializer().get().getMethodCallsFromSelf())
        .as("Calls from %s.<clinit>()", BaseClass.class.getSimpleName()).isNotEmpty();
}
/**
 * Terminates this stream with the given sink: adds a sink operator transformation
 * and wraps the resulting stream so further configuration can be applied.
 */
@Override
public ProcessConfigurable<?> toSink(Sink<V> sink) {
    DataStreamV2SinkTransformation<V, V> sinkTransformation =
            StreamUtils.addSinkOperator(this, sink, getType());
    return StreamUtils.wrapWithConfigureHandle(
            new NonKeyedPartitionStreamImpl<>(environment, sinkTransformation));
}
// toSink must register exactly one sink transformation on the environment.
@Test
void testToSink() throws Exception {
    ExecutionEnvironmentImpl env = StreamTestUtils.getEnv();
    GlobalStreamImpl<Integer> stream =
            new GlobalStreamImpl<>(env, new TestingTransformation<>("t1", Types.INT, 1));
    stream.toSink(DataStreamV2SinkUtils.wrapSink(new DiscardingSink<>()));
    List<Transformation<?>> transformations = env.getTransformations();
    assertThat(transformations)
            .hasSize(1)
            .element(0)
            .isInstanceOf(DataStreamV2SinkTransformation.class);
}
/**
 * Implements move as copy-then-delete. Within the same container the plain copy proxy
 * is used (handles both whole files and large-file manifests); across containers a
 * segment-aware copy is performed instead.
 */
@Override
public Path move(final Path file, final Path renamed, final TransferStatus status,
                 final Delete.Callback callback, final ConnectionCallback connectionCallback) throws BackgroundException {
    if(new DefaultPathPredicate(containerService.getContainer(file)).test(containerService.getContainer(renamed))) {
        // Either copy complete file contents (small file) or copy manifest (large file)
        final Path rename = proxy.copy(file, renamed,
            new TransferStatus().withLength(file.attributes().getSize()),
            connectionCallback, new DisabledStreamListener());
        delete.delete(Collections.singletonMap(file, status), connectionCallback, callback, false);
        return rename;
    }
    else {
        // Cross-container move: copy segments explicitly, then delete the source.
        final Path copy = new SwiftSegmentCopyService(session, regionService).copy(file, renamed,
            new TransferStatus().withLength(file.attributes().getSize()),
            connectionCallback, new DisabledStreamListener());
        delete.delete(Collections.singletonMap(file, status), connectionCallback, callback);
        return copy;
    }
}
// Moving a file within the same container must remove the source and create the target.
@Test
public void testMove() throws Exception {
    final Path container = new Path("test.cyberduck.ch", EnumSet.of(Path.Type.directory, Path.Type.volume));
    container.attributes().setRegion("IAD");
    final Path test = new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
    test.attributes().setRegion("IAD");
    new SwiftTouchFeature(session, new SwiftRegionService(session)).touch(test, new TransferStatus());
    assertTrue(new SwiftFindFeature(session).find(test));
    final Path target = new Path(container, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
    target.attributes().setRegion("IAD");
    new SwiftMoveFeature(session).move(test, target, new TransferStatus(),
        new Delete.DisabledCallback(), new DisabledConnectionCallback());
    assertFalse(new SwiftFindFeature(session).find(test));
    assertTrue(new SwiftFindFeature(session).find(target));
    new SwiftDeleteFeature(session).delete(Collections.<Path>singletonList(target),
        new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Bicubic interpolation at (x1p, x2p). Locates the enclosing grid cell, gathers the
 * four corner values plus estimated first and cross derivatives (central differences
 * in the interior, one-sided at grid edges), and evaluates the bicubic surface.
 * Corner ordering throughout: 0=(j,k), 1=(j+1,k), 2=(j+1,k+1), 3=(j,k+1).
 */
@Override
public double interpolate(double x1p, double x2p) {
    // Cell indices along each axis.
    int j = x1terp.search(x1p);
    int k = x2terp.search(x2p);
    double x1l = x1[j];
    double x1u = x1[j + 1];
    double x2l = x2[k];
    double x2u = x2[k + 1];
    // Corner values.
    y[0] = yv[j][k];
    y[1] = yv[j+1][k];
    y[2] = yv[j+1][k+1];
    y[3] = yv[j][k+1];
    // d/dx1 at each corner: central difference, falling back to one-sided at j=0 / j=m-2.
    y1[0] = (j - 1 < 0) ? (yv[j+1][k] - yv[j][k]) / (x1[j+1] - x1[j]) : (yv[j+1][k] - yv[j-1][k]) / (x1[j+1] - x1[j-1]);
    y1[1] = (j + 2 < m) ? (yv[j+2][k] - yv[j][k]) / (x1[j+2] - x1[j]) : (yv[j+1][k] - yv[j][k]) / (x1[j+1] - x1[j]);
    y1[2] = (j + 2 < m) ? (yv[j+2][k+1] - yv[j][k+1]) / (x1[j+2] - x1[j]) : (yv[j+1][k+1] - yv[j][k+1]) / (x1[j+1] - x1[j]);
    y1[3] = (j - 1 < 0) ? (yv[j+1][k+1] - yv[j][k+1]) / (x1[j+1] - x1[j]) : (yv[j+1][k+1] - yv[j-1][k+1]) / (x1[j+1] - x1[j-1]);
    // d/dx2 at each corner, same edge handling along k.
    y2[0] = (k - 1 < 0) ? (yv[j][k+1] - yv[j][k]) / (x2[k+1] - x2[k]) : (yv[j][k+1] - yv[j][k-1]) / (x2[k+1] - x2[k-1]);
    y2[1] = (k - 1 < 0) ? (yv[j+1][k+1] - yv[j+1][k]) / (x2[k+1] - x2[k]) : (yv[j+1][k+1] - yv[j+1][k-1]) / (x2[k+1] - x2[k-1]);
    y2[2] = (k + 2 < n) ? (yv[j+1][k+2] - yv[j+1][k]) / (x2[k+2] - x2[k]) : (yv[j+1][k+1] - yv[j+1][k]) / (x2[k+1] - x2[k]);
    y2[3] = (k + 2 < n) ? (yv[j][k+2] - yv[j][k]) / (x2[k+2] - x2[k]) : (yv[j][k+1] - yv[j][k]) / (x2[k+1] - x2[k]);
    // Cross derivative d2/dx1dx2 at corner 0, with edge fallbacks in both directions.
    if (k - 1 < 0 && j - 1 < 0)
        y12[0] = (yv[j+1][k+1] - yv[j+1][k] - yv[j][k+1] + yv[j][k]) / ((x1[j+1] - x1[j]) * (x2[k+1] - x2[k]));
    else if (k - 1 < 0)
        y12[0] = (yv[j+1][k+1] - yv[j+1][k] - yv[j-1][k+1] + yv[j-1][k]) / ((x1[j+1] - x1[j-1]) * (x2[k+1] - x2[k]));
    else if (j - 1 < 0)
        y12[0] = (yv[j+1][k+1] - yv[j+1][k-1] - yv[j][k+1] + yv[j][k-1]) / ((x1[j+1] - x1[j]) * (x2[k+1] - x2[k-1]));
    else
        y12[0] = (yv[j+1][k+1] - yv[j+1][k-1] - yv[j-1][k+1] + yv[j-1][k-1]) / ((x1[j+1] - x1[j-1]) * (x2[k+1] - x2[k-1]));
    // Cross derivative at corner 1 (j+1, k).
    if (j + 2 < m) {
        if (k - 1 < 0) {
            y12[1] = (yv[j + 2][k + 1] - yv[j + 2][k] - yv[j][k + 1] + yv[j][k]) / ((x1[j + 2] - x1[j]) * (x2[k + 1] - x2[k]));
        } else {
            y12[1] = (yv[j + 2][k + 1] - yv[j + 2][k - 1] - yv[j][k + 1] + yv[j][k - 1]) / ((x1[j + 2] - x1[j]) * (x2[k + 1] - x2[k - 1]));
        }
    } else {
        if (k - 1 < 0) {
            y12[1] = (yv[j + 1][k + 1] - yv[j + 1][k] - yv[j][k + 1] + yv[j][k]) / ((x1[j + 1] - x1[j]) * (x2[k + 1] - x2[k]));
        } else {
            y12[1] = (yv[j + 1][k + 1] - yv[j + 1][k - 1] - yv[j][k + 1] + yv[j][k - 1]) / ((x1[j + 1] - x1[j]) * (x2[k + 1] - x2[k - 1]));
        }
    }
    // Cross derivative at corner 2 (j+1, k+1).
    if (j + 2 < m && k + 2 < n) {
        y12[2] = (yv[j + 2][k + 2] - yv[j + 2][k] - yv[j][k + 2] + yv[j][k]) / ((x1[j + 2] - x1[j]) * (x2[k + 2] - x2[k]));
    } else if (j + 2 < m) {
        y12[2] = (yv[j + 2][k + 1] - yv[j + 2][k] - yv[j][k + 1] + yv[j][k]) / ((x1[j + 2] - x1[j]) * (x2[k + 1] - x2[k]));
    } else if (k + 2 < n) {
        y12[2] = (yv[j + 1][k + 2] - yv[j + 1][k] - yv[j][k + 2] + yv[j][k]) / ((x1[j + 1] - x1[j]) * (x2[k + 2] - x2[k]));
    } else {
        y12[2] = (yv[j + 1][k + 1] - yv[j + 1][k] - yv[j][k + 1] + yv[j][k]) / ((x1[j + 1] - x1[j]) * (x2[k + 1] - x2[k]));
    }
    // Cross derivative at corner 3 (j, k+1).
    if (k + 2 < n) {
        if (j - 1 < 0) {
            y12[3] = (yv[j + 1][k + 2] - yv[j + 1][k] - yv[j][k + 2] + yv[j][k]) / ((x1[j + 1] - x1[j]) * (x2[k + 2] - x2[k]));
        } else {
            y12[3] = (yv[j + 1][k + 2] - yv[j + 1][k] - yv[j - 1][k + 2] + yv[j - 1][k]) / ((x1[j + 1] - x1[j - 1]) * (x2[k + 2] - x2[k]));
        }
    } else {
        if (j - 1 < 0) {
            y12[3] = (yv[j + 1][k + 1] - yv[j + 1][k] - yv[j][k + 1] + yv[j][k]) / ((x1[j + 1] - x1[j]) * (x2[k + 1] - x2[k]));
        } else {
            y12[3] = (yv[j + 1][k + 1] - yv[j + 1][k] - yv[j - 1][k + 1] + yv[j - 1][k]) / ((x1[j + 1] - x1[j - 1]) * (x2[k + 1] - x2[k]));
        }
    }
    // Evaluate the bicubic patch with the assembled values and derivatives.
    return bcuint(y, y1, y2, y12, x1l, x1u, x2l, x2u, x1p, x2p);
}
// Checks exact reproduction at grid points and known reference values between them.
@Test
public void testInterpolate() {
    System.out.println("interpolate");
    double[] x1 = {1950, 1960, 1970, 1980, 1990};
    double[] x2 = {10, 20, 30};
    double[][] y = {
        {150.697, 199.592, 187.625},
        {179.323, 195.072, 250.287},
        {203.212, 179.092, 322.767},
        {226.505, 153.706, 426.730},
        {249.633, 120.281, 598.243}
    };
    BicubicInterpolation instance = new BicubicInterpolation(x1, x2, y);
    assertEquals(203.212, instance.interpolate(1970, 10), 1E-3);
    assertEquals(179.092, instance.interpolate(1970, 20), 1E-3);
    assertEquals(249.633, instance.interpolate(1990, 10), 1E-3);
    assertEquals(598.243, instance.interpolate(1990, 30), 1E-3);
    assertEquals(178.948375, instance.interpolate(1950, 15), 1E-4);
    assertEquals(146.99987, instance.interpolate(1990, 15), 1E-4);
    assertEquals(508.26462, instance.interpolate(1985, 30), 1E-4);
    assertEquals(175.667289, instance.interpolate(1975, 15), 1E-4);
    assertEquals(167.4893, instance.interpolate(1975, 20), 1E-4);
    assertEquals(252.493726, instance.interpolate(1975, 25), 1E-4);
}
/**
 * Iteratively chains compatible transformations. Each polled transformation is
 * chained with its input where possible; the chained result is re-queued so chains
 * can grow across multiple passes until a fixed point is reached.
 *
 * @return the optimized transformation list, in discovery order
 */
public static List<Transformation<?>> optimize(List<Transformation<?>> transformations) {
    // Map from a transformation to the set of transformations consuming its output.
    final Map<Transformation<?>, Set<Transformation<?>>> outputMap = buildOutputMap(transformations);
    final LinkedHashSet<Transformation<?>> chainedTransformations = new LinkedHashSet<>();
    // Identity set: distinct objects may compare equal but must be visited separately.
    final Set<Transformation<?>> alreadyTransformed = Sets.newIdentityHashSet();
    final Queue<Transformation<?>> toTransformQueue = Queues.newArrayDeque(transformations);
    while (!toTransformQueue.isEmpty()) {
        final Transformation<?> transformation = toTransformQueue.poll();
        if (!alreadyTransformed.contains(transformation)) {
            alreadyTransformed.add(transformation);
            final ChainInfo chainInfo = chainWithInputIfPossible(transformation, outputMap);
            chainedTransformations.add(chainInfo.newTransformation);
            // The transformations that were merged into the chain no longer stand alone.
            chainedTransformations.removeAll(chainInfo.oldTransformations);
            alreadyTransformed.addAll(chainInfo.oldTransformations);
            // Add the chained transformation and its inputs to the to-optimize list
            toTransformQueue.add(chainInfo.newTransformation);
            toTransformQueue.addAll(chainInfo.newTransformation.getInputs());
        }
    }
    return new ArrayList<>(chainedTransformations);
}
// Chains a keyed-process + process pair and verifies the merged transformation keeps
// the head's identity properties (uid, groups, max parallelism, chaining strategy),
// takes the tighter buffer timeout, sums operator-scope memory weights, and unions
// slot-scope use cases; the chained operator must wrap both Python functions.
@Test
void testChainedTransformationPropertiesCorrectlySet() {
    ExternalPythonKeyedProcessOperator<?> keyedProcessOperator =
            createKeyedProcessOperator(
                    "f1", new RowTypeInfo(Types.INT(), Types.INT()), Types.STRING());
    ExternalPythonProcessOperator<?, ?> processOperator =
            createProcessOperator("f2", Types.STRING(), Types.STRING());
    Transformation<?> sourceTransformation = mock(SourceTransformation.class);
    OneInputTransformation<?, ?> keyedProcessTransformation =
            new OneInputTransformation(
                    sourceTransformation,
                    "keyedProcess",
                    keyedProcessOperator,
                    keyedProcessOperator.getProducedType(),
                    2);
    keyedProcessTransformation.setUid("uid");
    keyedProcessTransformation.setSlotSharingGroup("group");
    keyedProcessTransformation.setCoLocationGroupKey("col");
    keyedProcessTransformation.setMaxParallelism(64);
    keyedProcessTransformation.declareManagedMemoryUseCaseAtOperatorScope(
            ManagedMemoryUseCase.OPERATOR, 5);
    keyedProcessTransformation.declareManagedMemoryUseCaseAtSlotScope(
            ManagedMemoryUseCase.PYTHON);
    keyedProcessTransformation.declareManagedMemoryUseCaseAtSlotScope(
            ManagedMemoryUseCase.STATE_BACKEND);
    keyedProcessTransformation.setBufferTimeout(1000L);
    keyedProcessTransformation.setChainingStrategy(ChainingStrategy.HEAD);
    Transformation<?> processTransformation =
            new OneInputTransformation(
                    keyedProcessTransformation,
                    "process",
                    processOperator,
                    processOperator.getProducedType(),
                    2);
    processTransformation.setSlotSharingGroup("group");
    processTransformation.declareManagedMemoryUseCaseAtOperatorScope(
            ManagedMemoryUseCase.OPERATOR, 10);
    processTransformation.declareManagedMemoryUseCaseAtSlotScope(ManagedMemoryUseCase.PYTHON);
    processTransformation.setMaxParallelism(64);
    processTransformation.setBufferTimeout(500L);
    List<Transformation<?>> transformations = new ArrayList<>();
    transformations.add(sourceTransformation);
    transformations.add(keyedProcessTransformation);
    transformations.add(processTransformation);
    List<Transformation<?>> optimized =
            PythonOperatorChainingOptimizer.optimize(transformations);
    assertThat(optimized).hasSize(2);
    OneInputTransformation<?, ?> chainedTransformation =
            (OneInputTransformation<?, ?>) optimized.get(1);
    assertThat(chainedTransformation.getParallelism()).isEqualTo(2);
    assertThat(sourceTransformation.getOutputType())
            .isEqualTo(chainedTransformation.getInputType());
    assertThat(processOperator.getProducedType())
            .isEqualTo(chainedTransformation.getOutputType());
    assertThat(keyedProcessTransformation.getUid()).isEqualTo(chainedTransformation.getUid());
    assertThat(chainedTransformation.getSlotSharingGroup().get().getName()).isEqualTo("group");
    assertThat(chainedTransformation.getCoLocationGroupKey()).isEqualTo("col");
    assertThat(chainedTransformation.getMaxParallelism()).isEqualTo(64);
    assertThat(chainedTransformation.getBufferTimeout()).isEqualTo(500L);
    assertThat(
            (int)
                    chainedTransformation
                            .getManagedMemoryOperatorScopeUseCaseWeights()
                            .getOrDefault(ManagedMemoryUseCase.OPERATOR, 0))
            .isEqualTo(15);
    assertThat(chainedTransformation.getOperatorFactory().getChainingStrategy())
            .isEqualTo(ChainingStrategy.HEAD);
    assertThat(chainedTransformation.getManagedMemorySlotScopeUseCases())
            .contains(ManagedMemoryUseCase.PYTHON);
    assertThat(chainedTransformation.getManagedMemorySlotScopeUseCases())
            .contains(ManagedMemoryUseCase.STATE_BACKEND);
    OneInputStreamOperator<?, ?> chainedOperator = chainedTransformation.getOperator();
    assertThat(chainedOperator).isInstanceOf(ExternalPythonKeyedProcessOperator.class);
    validateChainedPythonFunctions(
            ((ExternalPythonKeyedProcessOperator<?>) chainedOperator).getPythonFunctionInfo(),
            "f2",
            "f1");
}
/**
 * Validates the value with the superclass first, then additionally enforces that the
 * string's length lies within [minLength, maxLength].
 * NOTE(review): assumes a super-validated value is always a String — confirm in the base class.
 */
@Override
public ValidationResult validate(Object value) {
    final ValidationResult result = super.validate(value);
    if (!(result instanceof ValidationResult.ValidationPassed)) {
        // Base validation already failed; propagate unchanged.
        return result;
    }
    final int length = ((String) value).length();
    if (length < minLength || length > maxLength) {
        return new ValidationResult.ValidationFailed(
                "Value is not between " + minLength + " and " + maxLength + " in length!");
    }
    return result;
}
// A null value must fail validation rather than throw.
@Test
public void testValidateNullValue() {
    assertThat(new LimitedStringValidator(1, 1).validate(null))
        .isInstanceOf(ValidationResult.ValidationFailed.class);
}
/** Returns the wildcard's display name, e.g. "?" plus any bound description. */
@Override
@PublicAPI(usage = ACCESS)
public String getName() {
    return WILDCARD_TYPE_NAME + boundsToString();
}
// An upper-bounded wildcard must render as "? extends <bound>".
@Test
public void wildcard_name_upper_bounded() {
    @SuppressWarnings("unused")
    class UpperBounded<T extends List<? extends String>> {
    }
    JavaWildcardType wildcardType = importWildcardTypeOf(UpperBounded.class);
    assertThat(wildcardType.getName()).isEqualTo("? extends java.lang.String");
}
/**
 * Parses the authority component — user info, hostname/port, and any trailing
 * path — from the reader into the host, in that order.
 */
static void parseAuthority(final StringReader reader, final Host host,
                           final Consumer<HostParserException> decorator) throws HostParserException {
    parseUserInfo(reader, host, decorator);
    parseHostname(reader, host, decorator);
    parsePath(reader, host, false, decorator);
}
// "user:password@domain.tld:1337" must split into credentials, hostname and port.
@Test
public void testParseAuthorityUserPasswordDomainPort() throws HostParserException {
    final Host host = new Host(new TestProtocol());
    final String authority = "user:password@domain.tld:1337";
    final HostParser.StringReader reader = new HostParser.StringReader(authority);
    HostParser.parseAuthority(reader, host, null);
    assertEquals("user", host.getCredentials().getUsername());
    assertEquals("password", host.getCredentials().getPassword());
    assertEquals("domain.tld", host.getHostname());
    assertEquals(1337, host.getPort());
}
public static String prepareUrl(@NonNull String url) { url = url.trim(); String lowerCaseUrl = url.toLowerCase(Locale.ROOT); // protocol names are case insensitive if (lowerCaseUrl.startsWith("feed://")) { Log.d(TAG, "Replacing feed:// with http://"); return prepareUrl(url.substring("feed://".length())); } else if (lowerCaseUrl.startsWith("pcast://")) { Log.d(TAG, "Removing pcast://"); return prepareUrl(url.substring("pcast://".length())); } else if (lowerCaseUrl.startsWith("pcast:")) { Log.d(TAG, "Removing pcast:"); return prepareUrl(url.substring("pcast:".length())); } else if (lowerCaseUrl.startsWith("itpc")) { Log.d(TAG, "Replacing itpc:// with http://"); return prepareUrl(url.substring("itpc://".length())); } else if (lowerCaseUrl.startsWith(AP_SUBSCRIBE)) { Log.d(TAG, "Removing antennapod-subscribe://"); return prepareUrl(url.substring(AP_SUBSCRIBE.length())); } else if (lowerCaseUrl.contains(AP_SUBSCRIBE_DEEPLINK)) { Log.d(TAG, "Removing " + AP_SUBSCRIBE_DEEPLINK); String query = Uri.parse(url).getQueryParameter("url"); try { return prepareUrl(URLDecoder.decode(query, "UTF-8")); } catch (UnsupportedEncodingException e) { return prepareUrl(query); } } else if (!(lowerCaseUrl.startsWith("http://") || lowerCaseUrl.startsWith("https://"))) { Log.d(TAG, "Adding http:// at the beginning of the URL"); return "http://" + url; } else { return url; } }
// A bare domain without any protocol must be prefixed with "http://".
@Test
public void testMissingProtocol() {
    final String in = "example.com";
    final String out = UrlChecker.prepareUrl(in);
    assertEquals("http://example.com", out);
}
/**
 * Reports whether this aspect can decorate a method with the given return
 * type, i.e. whether the type is one of the supported RxJava reactive types.
 *
 * @param returnType the method's declared return type
 * @return true if the return type is a supported RxJava type
 */
@SuppressWarnings("unchecked")
@Override
public boolean canHandleReturnType(Class returnType) {
    for (final Class supported : rxSupportedTypes) {
        if (supported.isAssignableFrom(returnType)) {
            return true;
        }
    }
    return false;
}
// Flowable and Single are RxJava reactive types and must be handled.
@Test
public void testCheckTypes() {
    assertThat(rxJava2CircuitBreakerAspectExt.canHandleReturnType(Flowable.class)).isTrue();
    assertThat(rxJava2CircuitBreakerAspectExt.canHandleReturnType(Single.class)).isTrue();
}
/**
 * Writes a message to the ring buffer if sufficient capacity is available.
 *
 * @param msgTypeId type identifier for the message (must be positive)
 * @param srcBuffer buffer holding the message payload
 * @param offset    offset of the payload within {@code srcBuffer}
 * @param length    payload length in bytes
 * @return true if the message was written, false if the buffer is full
 */
public boolean write(final int msgTypeId, final DirectBuffer srcBuffer, final int offset, final int length) {
    checkTypeId(msgTypeId);
    checkMsgLength(length);
    final AtomicBuffer buffer = this.buffer;
    final int recordLength = length + HEADER_LENGTH;
    final int recordIndex = claimCapacity(buffer, recordLength);
    if (INSUFFICIENT_CAPACITY == recordIndex) {
        return false;
    }
    // Publish a negative length first so concurrent readers see the record
    // as "in progress" until the final positive length is stored below.
    buffer.putIntOrdered(lengthOffset(recordIndex), -recordLength);
    MemoryAccess.releaseFence();
    buffer.putBytes(encodedMsgOffset(recordIndex), srcBuffer, offset, length);
    buffer.putInt(typeOffset(recordIndex), msgTypeId);
    // Ordered store of the real length releases the record to consumers.
    buffer.putIntOrdered(lengthOffset(recordIndex), recordLength);
    return true;
}
// When the remaining space at the end of the buffer is too small for the
// record, a padding record must be written at the tail and the message must
// wrap to index 0, with the negative-then-positive length publication order.
@Test
void shouldInsertPaddingRecordPlusMessageOnBufferWrapWithHeadEqualToTail() {
    final int length = 200;
    final int recordLength = length + HEADER_LENGTH;
    final int alignedRecordLength = align(recordLength, ALIGNMENT);
    final long tail = CAPACITY - HEADER_LENGTH;
    final long head = tail;
    when(buffer.getLongVolatile(HEAD_COUNTER_INDEX)).thenReturn(head);
    when(buffer.getLongVolatile(TAIL_COUNTER_INDEX)).thenReturn(tail);
    when(buffer.compareAndSetLong(TAIL_COUNTER_INDEX, tail, tail + alignedRecordLength + ALIGNMENT))
        .thenReturn(TRUE);
    final UnsafeBuffer srcBuffer = new UnsafeBuffer(allocateDirect(1024));
    final int srcIndex = 0;
    assertTrue(ringBuffer.write(MSG_TYPE_ID, srcBuffer, srcIndex, length));
    // Padding record first, then the wrapped message at index 0.
    final InOrder inOrder = inOrder(buffer);
    inOrder.verify(buffer).putIntOrdered(lengthOffset((int)tail), -HEADER_LENGTH);
    inOrder.verify(buffer).putInt(typeOffset((int)tail), PADDING_MSG_TYPE_ID);
    inOrder.verify(buffer).putIntOrdered(lengthOffset((int)tail), HEADER_LENGTH);
    inOrder.verify(buffer).putIntOrdered(lengthOffset(0), -recordLength);
    inOrder.verify(buffer).putBytes(encodedMsgOffset(0), srcBuffer, srcIndex, length);
    inOrder.verify(buffer).putInt(typeOffset(0), MSG_TYPE_ID);
    inOrder.verify(buffer).putIntOrdered(lengthOffset(0), recordLength);
}
/**
 * Returns all known usernames by delegating to the configured provider.
 *
 * @return the usernames reported by the underlying provider
 */
public Collection<String> getUsernames() {
    return provider.getUsernames();
}
// The manager must surface exactly the usernames known to the provider.
@Test
public void getUserNamesWillGetAListOfUserNames() throws Exception{
    final Collection<String> result = userManager.getUsernames();
    assertThat(result.contains(USER_ID), is(true));
    assertThat(result.contains(USER_ID_2), is(true));
    assertThat(result.contains("not exists name"), is(false));
}
/**
 * Sets the buffer size used by the query cache.
 *
 * @param bufferSize the buffer size; must be positive
 * @return this config instance for chaining
 * @throws IllegalArgumentException if {@code bufferSize} is not positive
 */
public QueryCacheConfig setBufferSize(int bufferSize) {
    this.bufferSize = checkPositive("bufferSize", bufferSize);
    return this;
}
// A non-positive buffer size must be rejected.
@Test(expected = IllegalArgumentException.class)
public void testSetBufferSize_throwsException_whenNotPositive() {
    QueryCacheConfig config = new QueryCacheConfig();
    config.setBufferSize(-1);
}
/**
 * Generates a Spanner-compatible database id from {@code baseString}:
 * illegal characters are replaced, a timestamp suffix is appended, trailing
 * underscores are trimmed, and a non-letter first character is swapped for a
 * padding letter so the id satisfies Spanner's naming rules.
 *
 * @param baseString non-empty seed used to derive the id
 * @return a valid database id
 * @throws IllegalArgumentException if baseString is empty, or reduces to
 *     nothing after character cleanup
 */
public static String generateDatabaseId(String baseString) {
    checkArgument(baseString.length() != 0, "baseString cannot be empty!");
    String databaseId = generateResourceId(
        baseString,
        ILLEGAL_DATABASE_CHARS,
        REPLACE_DATABASE_CHAR,
        MAX_DATABASE_ID_LENGTH,
        DATABASE_TIME_FORMAT);
    // Trim trailing underscores so the id does not need backticks when quoted.
    String trimmed = CharMatcher.is('_').trimTrailingFrom(databaseId);
    checkArgument(
        trimmed.length() > 0,
        "Database id is empty after removing illegal characters and trailing underscores");
    // if first char is not a letter, replace with a padding letter, so it doesn't
    // violate spanner's database naming rules
    char padding = generatePadding();
    if (!Character.isLetter(trimmed.charAt(0))) {
        trimmed = padding + trimmed.substring(1);
    }
    return trimmed;
}
// '$' is an illegal database character and must be replaced by '_';
// the generated id carries a date/time/random numeric suffix.
@Test
public void testGenerateDatabaseIdShouldReplaceDollarSignWithUnderscore() {
    String testBaseString = "t$db";
    String actual = generateDatabaseId(testBaseString);
    assertThat(actual).matches("t_db_\\d{8}_\\d{6}_\\d{6}");
}
/**
 * Detects the media type by matching the configured magic pattern (literal
 * bytes with mask, or regex) against a window of the stream starting at any
 * position in [offsetRangeBegin, offsetRangeEnd]. The stream is marked and
 * reset so the caller observes it unconsumed.
 *
 * @param input    stream to inspect; null yields application/octet-stream
 * @param metadata document metadata (unused by this detector)
 * @return the configured type on a match, otherwise application/octet-stream
 * @throws IOException if the stream cannot be read
 */
public MediaType detect(InputStream input, Metadata metadata) throws IOException {
    if (input == null) {
        return MediaType.OCTET_STREAM;
    }
    input.mark(offsetRangeEnd + length);
    try {
        int offset = 0;
        // Skip bytes at the beginning, using skip() or read()
        while (offset < offsetRangeBegin) {
            long n = input.skip(offsetRangeBegin - offset);
            if (n > 0) {
                offset += n;
            } else if (input.read() != -1) {
                // skip() may return 0 even mid-stream; fall back to read().
                offset += 1;
            } else {
                return MediaType.OCTET_STREAM;
            }
        }
        // Fill in the comparison window
        byte[] buffer = new byte[length + (offsetRangeEnd - offsetRangeBegin)];
        int n = input.read(buffer);
        if (n > 0) {
            offset += n;
        }
        // A single read() may return fewer bytes than requested; keep reading
        // until the window is full or EOF.
        while (n != -1 && offset < offsetRangeEnd + length) {
            int bufferOffset = offset - offsetRangeBegin;
            n = input.read(buffer, bufferOffset, buffer.length - bufferOffset);
            // increment offset - in case not all read (see testDetectStreamReadProblems)
            if (n > 0) {
                offset += n;
            }
        }
        if (this.isRegex) {
            int flags = 0;
            if (this.isStringIgnoreCase) {
                flags = Pattern.CASE_INSENSITIVE;
            }
            Pattern p = Pattern.compile(new String(this.pattern, UTF_8), flags);
            ByteBuffer bb = ByteBuffer.wrap(buffer);
            CharBuffer result = ISO_8859_1.decode(bb);
            Matcher m = p.matcher(result);
            boolean match = false;
            // Loop until we've covered the entire offset range
            for (int i = 0; i <= offsetRangeEnd - offsetRangeBegin; i++) {
                m.region(i, length + i);
                match = m.lookingAt(); // match regex from start of region
                if (match) {
                    return type;
                }
            }
        } else {
            // Not enough data to compare the full pattern at any offset.
            if (offset < offsetRangeBegin + length) {
                return MediaType.OCTET_STREAM;
            }
            // Loop until we've covered the entire offset range
            for (int i = 0; i <= offsetRangeEnd - offsetRangeBegin; i++) {
                boolean match = true;
                int masked;
                for (int j = 0; match && j < length; j++) {
                    masked = (buffer[i + j] & mask[j]);
                    if (this.isStringIgnoreCase) {
                        masked = Character.toLowerCase(masked);
                    }
                    match = (masked == pattern[j]);
                }
                if (match) {
                    return type;
                }
            }
        }
        return MediaType.OCTET_STREAM;
    } finally {
        input.reset();
    }
}
// Detection must succeed even when the underlying stream delivers the data
// in small chunks rather than in a single read.
@Test
public void testDetectApplicationEnviHdr() throws Exception {
    InputStream iStream = MagicDetectorTest.class
        .getResourceAsStream("/test-documents/ang20150420t182050_corr_v1e_img.hdr");
    byte[] data = IOUtils.toByteArray(iStream);
    MediaType testMT = new MediaType("application", "envi.hdr");
    Detector detector = new MagicDetector(testMT, data, null, false, 0, 0);
    // Deliberately prevent InputStream.read(...) from reading the entire
    // buffer in one go
    InputStream stream = new RestrictiveInputStream(data);
    assertEquals(testMT, detector.detect(stream, new Metadata()));
}
/**
 * Validates a regex flag string: only the characters 's', 'i', 'x' and 'm'
 * are allowed, each at most once (order is irrelevant).
 *
 * @param flags the flag string to validate; the empty string is valid
 * @throws IllegalArgumentException if any character is unknown or repeated
 */
static void checkFlags(String flags) {
    Set<Character> allowedChars = Set.of('s', 'i', 'x', 'm');
    // Single pass: each character must be allowed and not seen before
    // (the original implementation streamed the string twice).
    Set<Character> seen = new HashSet<>();
    boolean isValidFlag = true;
    for (int i = 0; i < flags.length() && isValidFlag; i++) {
        char c = flags.charAt(i);
        isValidFlag = allowedChars.contains(c) && seen.add(c);
    }
    if (!isValidFlag) {
        throw new IllegalArgumentException("Not a valid flag parameter " + flags);
    }
}
// Each allowed flag (s, i, x, m) alone or combined must pass; unknown
// characters, duplicates, and whitespace must be rejected.
@Test
void checkFlagsTest() {
    assertDoesNotThrow(() -> MatchesFunction.checkFlags("s"));
    assertDoesNotThrow(() -> MatchesFunction.checkFlags("i"));
    assertDoesNotThrow(() -> MatchesFunction.checkFlags("sx"));
    assertDoesNotThrow(() -> MatchesFunction.checkFlags("six"));
    assertDoesNotThrow(() -> MatchesFunction.checkFlags("sixm"));
    assertThrows(IllegalArgumentException.class, () -> MatchesFunction.checkFlags("a"));
    assertThrows(IllegalArgumentException.class, () -> MatchesFunction.checkFlags("sa"));
    assertThrows(IllegalArgumentException.class, () -> MatchesFunction.checkFlags("siU@"));
    assertThrows(IllegalArgumentException.class, () -> MatchesFunction.checkFlags("siUxU"));
    assertThrows(IllegalArgumentException.class, () -> MatchesFunction.checkFlags("ss"));
    assertThrows(IllegalArgumentException.class, () -> MatchesFunction.checkFlags("siiU"));
    assertThrows(IllegalArgumentException.class, () -> MatchesFunction.checkFlags("si U"));
    assertThrows(IllegalArgumentException.class, () -> MatchesFunction.checkFlags("U"));
}
/**
 * Parses a string into a {@link SchemaAndValue}, inferring the schema from
 * the content. Null maps to the null schema/value pair and the empty string
 * is returned as-is with a string schema; all other input is delegated to
 * the value parser.
 *
 * @param value the string to parse; may be null
 * @return the inferred schema and parsed value
 */
public static SchemaAndValue parseString(String value) {
    if (value == null) {
        return NULL_SCHEMA_AND_VALUE;
    }
    if (value.isEmpty()) {
        return new SchemaAndValue(Schema.STRING_SCHEMA, value);
    }
    ValueParser parser = new ValueParser(new Parser(value));
    return parser.parse(false);
}
// A value that merely begins with "true" but has trailing characters must be
// parsed as a plain string, not as a boolean.
@Test
public void shouldParseStringsBeginningWithTrueAsStrings() {
    SchemaAndValue schemaAndValue = Values.parseString("true}");
    assertEquals(Type.STRING, schemaAndValue.schema().type());
    assertEquals("true}", schemaAndValue.value());
}
/**
 * Builds the state provider configuration from the topology config. When a
 * JSON provider config string is supplied it is deserialized (fields are read
 * directly, no getters/setters required); otherwise defaults are used.
 *
 * @param topoConf topology configuration map
 * @return the parsed or default {@code StateConfig}
 * @throws Exception if the JSON provider config cannot be deserialized
 */
StateConfig getStateConfig(Map<String, Object> topoConf) throws Exception {
    if (!topoConf.containsKey(Config.TOPOLOGY_STATE_PROVIDER_CONFIG)) {
        return new StateConfig();
    }
    final String providerConfig = (String) topoConf.get(Config.TOPOLOGY_STATE_PROVIDER_CONFIG);
    final ObjectMapper mapper = new ObjectMapper();
    mapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY);
    return mapper.readValue(providerConfig, StateConfig.class);
}
// A JSON provider config in the topology conf must be deserialized into the
// StateConfig fields, including the nested jedis pool configuration.
@Test
public void testgetConfigWithProviderConfig() throws Exception {
    RedisKeyValueStateProvider provider = new RedisKeyValueStateProvider();
    Map<String, Object> topoConf = new HashMap<>();
    topoConf.put(Config.TOPOLOGY_STATE_PROVIDER_CONFIG,
        "{\"keyClass\":\"String\", \"valueClass\":\"String\"," +
        " \"jedisPoolConfig\":" +
        "{\"host\":\"localhost\", \"port\":1000}}");
    RedisKeyValueStateProvider.StateConfig config = provider.getStateConfig(topoConf);
    //System.out.println(config);
    assertEquals("String", config.keyClass);
    assertEquals("String", config.valueClass);
    assertEquals("localhost", config.jedisPoolConfig.getHost());
    assertEquals(1000, config.jedisPoolConfig.getPort());
}
/**
 * Executes CLUSTER INFO and repackages the returned key/value pairs as
 * {@link Properties} for the Spring Data {@link ClusterInfo} wrapper.
 *
 * @return cluster-wide status information
 */
@Override
public ClusterInfo clusterGetClusterInfo() {
    RFuture<Map<String, String>> future =
            executorService.readAsync((String) null, StringCodec.INSTANCE, RedisCommands.CLUSTER_INFO);
    Properties props = new Properties();
    syncFuture(future).forEach(props::setProperty);
    return new ClusterInfo(props);
}
// A healthy cluster reports all slots assigned/ok and none failed.
@Test
public void testClusterGetClusterInfo() {
    ClusterInfo info = connection.clusterGetClusterInfo();
    assertThat(info.getSlotsFail()).isEqualTo(0);
    assertThat(info.getSlotsOk()).isEqualTo(MasterSlaveConnectionManager.MAX_SLOT);
    assertThat(info.getSlotsAssigned()).isEqualTo(MasterSlaveConnectionManager.MAX_SLOT);
}
/**
 * Builds an API URL by appending the given path segments to {@code baseUrl}.
 * Each segment is percent-encoded; empty segments are dropped; a trailing
 * slash is added to the base URL if missing.
 *
 * @param baseUrl  base URL, with or without trailing slash
 * @param segments path segments to append
 * @return the assembled URL, or null if any segment is null
 */
public static String createApiUrl(String baseUrl, String... segments) {
    var path = new StringBuilder();
    for (var segment : segments) {
        if (segment == null) {
            // A null segment invalidates the whole URL.
            return null;
        }
        if (StringUtils.isNotEmpty(segment)) {
            if (path.length() > 0) {
                path.append('/');
            }
            path.append(UriUtils.encodePathSegment(segment, StandardCharsets.UTF_8));
        }
    }
    var prefix = baseUrl;
    if (prefix.isEmpty() || prefix.charAt(prefix.length() - 1) != '/') {
        prefix += '/';
    }
    return prefix + path;
}
// Segments are joined with '/' and percent-encoded (tab -> %09, '/' -> %2F).
@Test
public void testApiUrl() throws Exception {
    var baseUrl = "http://localhost/";
    assertThat(UrlUtil.createApiUrl(baseUrl, "api", "foo", "b\ta/r"))
        .isEqualTo("http://localhost/api/foo/b%09a%2Fr");
}
/**
 * Starts a new workflow instance for the given workflow id and version.
 * Validates the request against the stored definition, creates an instance
 * with overrides and evaluated params, and launches it via the workflow's
 * configured run strategy.
 *
 * @param workflowId workflow to start
 * @param version    version identifier (e.g. "active" or a numeric version)
 * @param runRequest caller-supplied run policy and overrides
 * @return response describing the created instance and its launch status
 */
public RunResponse start(
    @NotNull String workflowId, @NotNull String version, @NotNull RunRequest runRequest) {
    WorkflowDefinition definition = workflowDao.getWorkflowDefinition(workflowId, version);
    validateRequest(version, definition, runRequest);
    RunProperties runProperties =
        RunProperties.from(
            Checks.notNull(
                definition.getPropertiesSnapshot(),
                "property snapshot cannot be null for workflow: " + workflowId));
    // create and initiate a new instance with overrides and param evaluation
    WorkflowInstance instance =
        workflowHelper.createWorkflowInstance(
            definition.getWorkflow(),
            definition.getInternalId(),
            definition.getMetadata().getWorkflowVersionId(),
            runProperties,
            runRequest);
    RunStrategy runStrategy = definition.getRunStrategyOrDefault();
    // The return code encodes the launch outcome (e.g. started vs. stopped).
    int ret = runStrategyDao.startWithRunStrategy(instance, runStrategy);
    RunResponse response = RunResponse.from(instance, ret);
    LOG.info("Created a workflow instance with response {}", response);
    return response;
}
// A negative return code from the run strategy DAO must surface as a
// STOPPED status in the run response.
@Test
public void testStartStopped() {
    when(runStrategyDao.startWithRunStrategy(any(), any())).thenReturn(-1);
    RunRequest request =
        RunRequest.builder()
            .initiator(new ManualInitiator())
            .currentPolicy(RunPolicy.START_FRESH_NEW_RUN)
            .requestId(UUID.fromString("41f0281e-41a2-468d-b830-56141b2f768b"))
            .build();
    RunResponse response = actionHandler.start("sample-minimal-wf", "active", request);
    verify(workflowDao, times(1)).getWorkflowDefinition("sample-minimal-wf", "active");
    verify(runStrategyDao, times(1)).startWithRunStrategy(any(), any());
    assertEquals(instance.getWorkflowId(), response.getWorkflowId());
    assertEquals(1L, response.getWorkflowVersionId());
    assertEquals(0L, response.getWorkflowInstanceId());
    assertEquals(0L, response.getWorkflowRunId());
    assertEquals("41f0281e-41a2-468d-b830-56141b2f768b", response.getWorkflowUuid());
    assertEquals(RunResponse.Status.STOPPED, response.getStatus());
}
/**
 * Translates a portable {@link RunnerApi.Trigger} proto into its executable
 * {@link TriggerStateMachine} equivalent, recursing into sub-triggers where
 * the trigger is composite.
 *
 * @param trigger the trigger proto to translate
 * @return the corresponding state machine
 * @throws IllegalArgumentException if the trigger is unset or of an unknown type
 */
public static TriggerStateMachine stateMachineForTrigger(RunnerApi.Trigger trigger) {
    switch (trigger.getTriggerCase()) {
        case AFTER_ALL:
            return AfterAllStateMachine.of(
                stateMachinesForTriggers(trigger.getAfterAll().getSubtriggersList()));
        case AFTER_ANY:
            return AfterFirstStateMachine.of(
                stateMachinesForTriggers(trigger.getAfterAny().getSubtriggersList()));
        case AFTER_END_OF_WINDOW:
            return stateMachineForAfterEndOfWindow(trigger.getAfterEndOfWindow());
        case ELEMENT_COUNT:
            return AfterPaneStateMachine.elementCountAtLeast(
                trigger.getElementCount().getElementCount());
        case AFTER_SYNCHRONIZED_PROCESSING_TIME:
            return AfterSynchronizedProcessingTimeStateMachine.ofFirstElement();
        case DEFAULT:
            return DefaultTriggerStateMachine.of();
        case NEVER:
            return NeverStateMachine.ever();
        case ALWAYS:
            // ALWAYS is implemented by the reshuffle trigger state machine.
            return ReshuffleTriggerStateMachine.create();
        case OR_FINALLY:
            return stateMachineForTrigger(trigger.getOrFinally().getMain())
                .orFinally(stateMachineForTrigger(trigger.getOrFinally().getFinally()));
        case REPEAT:
            return RepeatedlyStateMachine.forever(
                stateMachineForTrigger(trigger.getRepeat().getSubtrigger()));
        case AFTER_EACH:
            return AfterEachStateMachine.inOrder(
                stateMachinesForTriggers(trigger.getAfterEach().getSubtriggersList()));
        case AFTER_PROCESSING_TIME:
            return stateMachineForAfterProcessingTime(trigger.getAfterProcessingTime());
        case TRIGGER_NOT_SET:
            throw new IllegalArgumentException(
                String.format("Required field 'trigger' not set on %s", trigger));
        default:
            throw new IllegalArgumentException(String.format("Unknown trigger type %s", trigger));
    }
}
// An AFTER_EACH trigger proto must translate to an in-order AfterEach state
// machine over the translated sub-triggers.
@Test
public void testAfterEachTranslation() {
    RunnerApi.Trigger trigger =
        RunnerApi.Trigger.newBuilder()
            .setAfterEach(
                RunnerApi.Trigger.AfterEach.newBuilder()
                    .addSubtriggers(subtrigger1)
                    .addSubtriggers(subtrigger2))
            .build();
    AfterEachStateMachine machine =
        (AfterEachStateMachine) TriggerStateMachines.stateMachineForTrigger(trigger);
    assertThat(machine, equalTo(AfterEachStateMachine.inOrder(submachine1, submachine2)));
}
/**
 * Computes the set of source field names the record extractor must read,
 * derived from the ingestion config and the schema (including transform
 * function arguments and complex-type expansion). An empty set is the
 * sentinel for "extract every field".
 *
 * @param ingestionConfig table ingestion config; may be null
 * @param schema          the table schema
 * @return field names to extract, or an empty set meaning "all fields"
 */
public static Set<String> getFieldsForRecordExtractor(@Nullable IngestionConfig ingestionConfig, Schema schema) {
    Set<String> fieldsForRecordExtractor = new HashSet<>();
    if (null != ingestionConfig && (null != ingestionConfig.getSchemaConformingTransformerConfig()
        || null != ingestionConfig.getSchemaConformingTransformerV2Config())) {
        // The SchemaConformingTransformer requires that all fields are extracted, indicated by returning an empty set
        // here. Compared to extracting the fields specified below, extracting all fields should be a superset.
        return fieldsForRecordExtractor;
    }
    extractFieldsFromIngestionConfig(ingestionConfig, fieldsForRecordExtractor);
    extractFieldsFromSchema(schema, fieldsForRecordExtractor);
    fieldsForRecordExtractor = getFieldsToReadWithComplexType(fieldsForRecordExtractor, ingestionConfig);
    return fieldsForRecordExtractor;
}
// Covers field extraction from the schema alone (no ingestion config):
// groovy transforms pull in their arguments, Avro map __KEYS/__VALUES pull in
// the source map column, time field specs contribute incoming/outgoing names,
// and inbuilt transform functions contribute their referenced columns.
@Test
public void testExtractFieldsSchema() {
    Schema schema;
    // from groovy function
    schema = new Schema();
    DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec("d1", FieldSpec.DataType.STRING, true);
    dimensionFieldSpec.setTransformFunction("Groovy({function}, argument1, argument2)");
    schema.addField(dimensionFieldSpec);
    List<String> extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(null, schema));
    Assert.assertEquals(extract.size(), 3);
    Assert.assertTrue(extract.containsAll(Arrays.asList("d1", "argument1", "argument2")));
    // groovy function, no arguments
    schema = new Schema();
    dimensionFieldSpec = new DimensionFieldSpec("d1", FieldSpec.DataType.STRING, true);
    dimensionFieldSpec.setTransformFunction("Groovy({function})");
    schema.addField(dimensionFieldSpec);
    extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(null, schema));
    Assert.assertEquals(extract.size(), 1);
    Assert.assertTrue(extract.contains("d1"));
    // Map implementation for Avro - map__KEYS indicates map is source column
    schema = new Schema();
    dimensionFieldSpec = new DimensionFieldSpec("map__KEYS", FieldSpec.DataType.INT, false);
    schema.addField(dimensionFieldSpec);
    extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(null, schema));
    Assert.assertEquals(extract.size(), 2);
    Assert.assertTrue(extract.containsAll(Arrays.asList("map", "map__KEYS")));
    // Map implementation for Avro - map__VALUES indicates map is source column
    schema = new Schema();
    dimensionFieldSpec = new DimensionFieldSpec("map__VALUES", FieldSpec.DataType.LONG, false);
    schema.addField(dimensionFieldSpec);
    extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(null, schema));
    Assert.assertEquals(extract.size(), 2);
    Assert.assertTrue(extract.containsAll(Arrays.asList("map", "map__VALUES")));
    // Time field spec
    // only incoming
    schema = new Schema.SchemaBuilder().addTime(
        new TimeGranularitySpec(FieldSpec.DataType.LONG, TimeUnit.MILLISECONDS, "time"), null).build();
    extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(null, schema));
    Assert.assertEquals(extract.size(), 1);
    Assert.assertTrue(extract.contains("time"));
    // incoming and outgoing different column name
    schema = new Schema.SchemaBuilder().addTime(
        new TimeGranularitySpec(FieldSpec.DataType.LONG, TimeUnit.MILLISECONDS, "in"),
        new TimeGranularitySpec(FieldSpec.DataType.LONG, TimeUnit.MILLISECONDS, "out")).build();
    extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(null, schema));
    Assert.assertEquals(extract.size(), 2);
    Assert.assertTrue(extract.containsAll(Arrays.asList("in", "out")));
    // inbuilt functions
    schema = new Schema();
    dimensionFieldSpec = new DimensionFieldSpec("hoursSinceEpoch", FieldSpec.DataType.LONG, true);
    dimensionFieldSpec.setTransformFunction("toEpochHours(\"timestamp\")");
    schema.addField(dimensionFieldSpec);
    extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(null, schema));
    Assert.assertEquals(extract.size(), 2);
    Assert.assertTrue(extract.containsAll(Arrays.asList("timestamp", "hoursSinceEpoch")));
    // inbuilt functions with literal
    schema = new Schema();
    dimensionFieldSpec = new DimensionFieldSpec("tenMinutesSinceEpoch", FieldSpec.DataType.LONG, true);
    dimensionFieldSpec.setTransformFunction("toEpochMinutesBucket(\"timestamp\", 10)");
    schema.addField(dimensionFieldSpec);
    extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(null, schema));
    Assert.assertEquals(extract.size(), 2);
    Assert.assertTrue(extract.containsAll(Lists.newArrayList("tenMinutesSinceEpoch", "timestamp")));
    // inbuilt functions on DateTimeFieldSpec
    schema = new Schema();
    DateTimeFieldSpec dateTimeFieldSpec = new DateTimeFieldSpec("date", FieldSpec.DataType.STRING,
        "1:DAYS:SIMPLE_DATE_FORMAT:yyyy-MM-dd", "1:DAYS");
    dateTimeFieldSpec.setTransformFunction("toDateTime(\"timestamp\", 'yyyy-MM-dd')");
    schema.addField(dateTimeFieldSpec);
    extract = new ArrayList<>(IngestionUtils.getFieldsForRecordExtractor(null, schema));
    Assert.assertEquals(extract.size(), 2);
    Assert.assertTrue(extract.containsAll(Lists.newArrayList("date", "timestamp")));
}
/**
 * Applies delivery pricing to the calculation result based on the request's
 * delivery type: pick-up or express. No delivery type means no delivery
 * pricing is applied.
 *
 * @param param  the price calculation request
 * @param result the calculation result updated in place
 */
@Override
public void calculate(TradePriceCalculateReqBO param, TradePriceCalculateRespBO result) {
    final Integer deliveryType = param.getDeliveryType();
    if (deliveryType == null) {
        // No delivery selected: nothing to price.
        return;
    }
    if (DeliveryTypeEnum.PICK_UP.getType().equals(deliveryType)) {
        calculateByPickUp(param);
        return;
    }
    if (DeliveryTypeEnum.EXPRESS.getType().equals(deliveryType)) {
        calculateExpress(param, result);
    }
}
// With storewide free express shipping enabled and the order total at the free
// threshold, the delivery price must be 0 for the order and for every item.
@Test
@DisplayName("全场包邮")
public void testCalculate_expressGlobalFree() {
    // Mock the trade config (storewide free shipping).
    when(tradeConfigService.getTradeConfig()).thenReturn(new TradeConfigDO().setDeliveryExpressFreeEnabled(true)
        .setDeliveryExpressFreePrice(2200));
    // Invoke the calculator.
    calculator.calculate(reqBO, resultBO);
    TradePriceCalculateRespBO.Price price = resultBO.getPrice();
    assertThat(price)
        .extracting("totalPrice","discountPrice","couponPrice","pointPrice","deliveryPrice","payPrice")
        .containsExactly(2200, 0, 0, 0, 0, 2200);
    assertThat(resultBO.getItems()).hasSize(3);
    // Assert: SKU1
    assertThat(resultBO.getItems().get(0))
        .extracting("price", "count","discountPrice" ,"couponPrice", "pointPrice","deliveryPrice","payPrice")
        .containsExactly(100, 2, 0, 0, 0, 0, 200);
    // Assert: SKU2
    assertThat(resultBO.getItems().get(1))
        .extracting("price", "count","discountPrice" ,"couponPrice", "pointPrice","deliveryPrice","payPrice")
        .containsExactly(200, 10, 0, 0, 0, 0, 2000);
    // Assert: SKU3 (not selected)
    assertThat(resultBO.getItems().get(2))
        .extracting("price", "count","discountPrice" ,"couponPrice", "pointPrice","deliveryPrice","payPrice")
        .containsExactly(300, 1, 0, 0, 0, 0, 300);
}
/**
 * Reads exactly {@code bytes.length} bytes from the underlying stream into
 * the given array, delegating to the static fill helper.
 *
 * @param bytes destination array, filled completely
 * @throws IOException if the stream ends before the array is filled or a
 *     read fails
 */
@Override
public void readFully(byte[] bytes) throws IOException {
    readFully(stream, bytes, 0, bytes.length);
}
// readFully must fill the whole buffer and advance the stream position by
// exactly the number of bytes read.
@Test
public void testReadFully() throws Exception {
    byte[] buffer = new byte[5];
    MockInputStream stream = new MockInputStream();
    DelegatingSeekableInputStream.readFully(stream, buffer, 0, buffer.length);
    Assert.assertArrayEquals("Byte array contents should match",
        Arrays.copyOfRange(TEST_ARRAY, 0, 5), buffer);
    Assert.assertEquals("Stream position should reflect bytes read", 5, stream.getPos());
}
/**
 * Rewrites an internal guest share URI into its branded public form
 * {@code https://<hostname>/<user>/<share>}, where user and share are the
 * path segments immediately following the "guest" and "share" markers.
 *
 * @param guestUri internal URI containing guest and share segments
 * @param hostname branded hostname to use in the public URL
 * @return the branded share URL
 */
protected static String toBrandedUri(final String guestUri, final String hostname) {
    final String user = segmentAfter(guestUri, "guest");
    final String share = segmentAfter(guestUri, "share");
    return String.format("https://%s/%s/%s", hostname, user, share);
}

/**
 * Extracts the path segment directly following {@code marker} in the URI:
 * the text between the first delimiter after the marker and the next one.
 */
private static String segmentAfter(final String uri, final String marker) {
    return StringUtils.substringBefore(StringUtils.substringAfter(StringUtils.substringAfter(uri, marker), Path.DELIMITER), Path.DELIMITER);
}
// A relative guest share URI must be rewritten to the branded host with the
// user and share path segments preserved.
@Test
public void testUrlFormatting() {
    assertEquals("https://c.gmx.net/%401015156902205593160/YK8VCl2GSGmR_UwjZALpEA", EueShareFeature.toBrandedUri(
        "../../../../guest/%401015156902205593160/share/YK8VCl2GSGmR_UwjZALpEA/resourceAlias/ROOT",
        "c.gmx.net"));
}
/**
 * REST endpoint returning a single timeline entity by type and id.
 * Access violations map to 403, bad arguments to 400, internal failures to
 * 500, and a missing entity to 404.
 *
 * @param req        the HTTP request (used to resolve the caller's identity)
 * @param res        the HTTP response (initialized before use)
 * @param entityType path parameter: the entity type
 * @param entityId   path parameter: the entity id
 * @param fields     optional comma-separated list of fields to return
 * @return the matching timeline entity
 */
@GET
@Path("/{entityType}/{entityId}")
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8
    /* , MediaType.APPLICATION_XML */})
public TimelineEntity getEntity(
    @Context HttpServletRequest req,
    @Context HttpServletResponse res,
    @PathParam("entityType") String entityType,
    @PathParam("entityId") String entityId,
    @QueryParam("fields") String fields) {
    init(res);
    TimelineEntity entity = null;
    try {
        entity = timelineDataManager.getEntity(
            parseStr(entityType),
            parseStr(entityId),
            parseFieldsStr(fields, ","),
            getUser(req));
    } catch (YarnException e) {
        // The user doesn't have the access to override the existing domain.
        LOG.info(e.getMessage(), e);
        throw new ForbiddenException(e);
    } catch (IllegalArgumentException e) {
        throw new BadRequestException(e);
    } catch (Exception e) {
        LOG.error("Error getting entity", e);
        throw new WebApplicationException(e,
            Response.Status.INTERNAL_SERVER_ERROR);
    }
    if (entity == null) {
        throw new NotFoundException("Timeline entity "
            + new EntityIdentifier(parseStr(entityId), parseStr(entityType))
            + " is not found");
    }
    return entity;
}
// Querying entities with a string-valued primary filter must return JSON
// containing the matching entities.
@Test
void testPrimaryFilterString() {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("timeline")
        .path("type_1").queryParam("primaryFilter", "user:username")
        .accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
        response.getType().toString());
    verifyEntities(response.getEntity(TimelineEntities.class));
}
/**
 * Produces a LIME explanation for a columnar input, returning both the
 * explanation and the perturbed sample set used to fit the local sparse
 * model. Text fields are tokenized into per-token binary features; tabular
 * features are kept as-is. Falls back to the plain-example path when there
 * are no text or binarised fields.
 *
 * @param input column name to raw value mapping for the example to explain
 * @return the explanation paired with the sampled surrogate training data
 * @throws IllegalArgumentException if the input cannot be converted to an example
 */
protected Pair<LIMEExplanation, List<Example<Regressor>>> explainWithSamples(Map<String, String> input) {
    Optional<Example<Label>> optExample = generator.generateExample(input,false);
    if (optExample.isPresent()) {
        Example<Label> example = optExample.get();
        if ((textDomain.size() == 0) && (binarisedCDFs.size() == 0)) {
            // Short circuit if there are no text or binarised fields.
            return explainWithSamples(example);
        } else {
            Prediction<Label> prediction = innerModel.predict(example);
            // Build the input example with simplified text features
            ArrayExample<Regressor> labelledExample = new ArrayExample<>(transformOutput(prediction));
            // Add the tabular features
            for (Feature f : example) {
                if (tabularDomain.getID(f.getName()) != -1) {
                    labelledExample.add(f);
                }
            }
            // Extract the tabular features into a SparseVector for later
            SparseVector tabularVector = SparseVector.createSparseVector(labelledExample,tabularDomain,false);
            // Tokenize the text fields, and generate the perturbed text representation
            Map<String, String> exampleTextValues = new HashMap<>();
            Map<String, List<Token>> exampleTextTokens = new HashMap<>();
            for (Map.Entry<String,FieldProcessor> e : textFields.entrySet()) {
                String value = input.get(e.getKey());
                if (value != null) {
                    List<Token> tokens = tokenizerThreadLocal.get().tokenize(value);
                    for (int i = 0; i < tokens.size(); i++) {
                        // One binary feature per token position.
                        labelledExample.add(nameFeature(e.getKey(),tokens.get(i).text,i),1.0);
                    }
                    exampleTextValues.put(e.getKey(),value);
                    exampleTextTokens.put(e.getKey(),tokens);
                }
            }
            // Sample a dataset.
            List<Example<Regressor>> sample = sampleData(tabularVector,exampleTextValues,exampleTextTokens);
            // Generate a sparse model on the sampled data.
            SparseModel<Regressor> model = trainExplainer(labelledExample, sample);
            // Test the sparse model against the predictions of the real model.
            List<Prediction<Regressor>> predictions = new ArrayList<>(model.predict(sample));
            predictions.add(model.predict(labelledExample));
            RegressionEvaluation evaluation = evaluator.evaluate(model,predictions,new SimpleDataSourceProvenance("LIMEColumnar sampled data",regressionFactory));
            return new Pair<>(new LIMEExplanation(model, prediction, evaluation),sample);
        }
    } else {
        throw new IllegalArgumentException("Label not found in input " + input.toString());
    }
}
// End-to-end check of LIME sampling on categorical data: each categorical
// value of columns A and D should appear in roughly a quarter of the samples
// (and be absent from roughly a quarter), while real-valued columns B and C
// are sampled exactly once per example.
@Test
public void testCategorical() throws URISyntaxException {
    Pair<RowProcessor<Label>,Dataset<Label>> pair = generateCategoricalDataset();
    RowProcessor<Label> rp = pair.getA();
    Dataset<Label> dataset = pair.getB();
    XGBoostClassificationTrainer trainer = new XGBoostClassificationTrainer(50);
    Model<Label> model = trainer.train(dataset);
    SparseTrainer<Regressor> sparseTrainer = new CARTJointRegressionTrainer(4,true);
    LIMEColumnar lime = new LIMEColumnar(new SplittableRandom(1),model,sparseTrainer,5000,rp,tokenizer);
    Map<String,String> testExample = new HashMap<>();
    testExample.put("A","Small");
    testExample.put("B","4.0");
    testExample.put("C","4.0");
    testExample.put("D","Red");
    testExample.put("TextField","The full text field has more words in it than other fields.");
    Pair<LIMEExplanation, List<Example<Regressor>>> explanation = lime.explainWithSamples(testExample);
    List<String> activeFeatures = explanation.getA().getActiveFeatures();
    assertNotNull(activeFeatures);
    int[] aSampleCount = new int[3];
    int[] dSampleCount = new int[3];
    int aPresentCounter = 0;
    int dPresentCounter = 0;
    for (Example<Regressor> e : explanation.getB()) {
        boolean aPresent = false;
        boolean dPresent = false;
        int aCounter = 0;
        int bCounter = 0;
        int cCounter = 0;
        int dCounter = 0;
        int textCounter = 0;
        for (Feature f : e) {
            String featureName = f.getName();
            if (featureName.equals("A"+ ColumnarFeature.JOINER+"Small")) {
                aSampleCount[0]++;
                aCounter++;
                aPresent = true;
            } else if (featureName.equals("A"+ ColumnarFeature.JOINER+"Medium")) {
                aSampleCount[1]++;
                aCounter++;
                aPresent = true;
            } else if (featureName.equals("A"+ ColumnarFeature.JOINER+"Large")) {
                aSampleCount[2]++;
                aCounter++;
                aPresent = true;
            } else if (featureName.startsWith("B")) {
                bCounter++;
            } else if (featureName.startsWith("C")) {
                cCounter++;
            } else if (featureName.equals("D"+ ColumnarFeature.JOINER+"Red")) {
                dSampleCount[0]++;
                dCounter++;
                dPresent = true;
            } else if (featureName.equals("D"+ ColumnarFeature.JOINER+"Yellow")) {
                dSampleCount[1]++;
                dCounter++;
                dPresent = true;
            } else if (featureName.equals("D"+ ColumnarFeature.JOINER+"Green")) {
                dSampleCount[2]++;
                dCounter++;
                dPresent = true;
            } else if (featureName.startsWith("TextField")) {
                textCounter++;
            } else {
                fail("Unknown feature with name " + featureName);
            }
        }
        if (!aPresent) {
            aPresentCounter++;
        }
        if (!dPresent) {
            dPresentCounter++;
        }
        // Real features should be sampled correctly
        if (bCounter != 1) {
            fail("Should only sample one B feature");
        }
        if (cCounter != 1) {
            fail("Should only sample one C feature");
        }
        // Categorical features may be sampled multiple times as they are not specified as BINARISED_CATEGORICAL
        if ((aCounter > 3) || (aCounter < 0)) {
            fail("Should sample between 0 and 3 A features");
        }
        if ((dCounter > 3) || (dCounter < 0)) {
            fail("Should sample between 0 and 3 D features");
        }
    }
    // Each value (and absence) of A/D should land in a ~quarter of 5000 samples.
    assertTrue(aSampleCount[0] > 1000);
    assertTrue(aSampleCount[0] < 2500);
    //System.out.println(aSampleCount[0]);
    assertTrue(aSampleCount[1] > 1000);
    assertTrue(aSampleCount[1] < 2500);
    //System.out.println(aSampleCount[1]);
    assertTrue(aSampleCount[2] > 1000);
    assertTrue(aSampleCount[2] < 2500);
    //System.out.println(aSampleCount[2]);
    assertTrue(aPresentCounter > 1000);
    assertTrue(aPresentCounter < 2500);
    //System.out.println(aPresentCounter);
    assertTrue(dSampleCount[0] > 1000);
    assertTrue(dSampleCount[0] < 2500);
    //System.out.println(dSampleCount[0]);
    assertTrue(dSampleCount[1] > 1000);
    assertTrue(dSampleCount[1] < 2500);
    //System.out.println(dSampleCount[1]);
    assertTrue(dSampleCount[2] > 1000);
    assertTrue(dSampleCount[2] < 2500);
    //System.out.println(dSampleCount[2]);
    assertTrue(dPresentCounter > 1000);
    assertTrue(dPresentCounter < 2500);
    //System.out.println(dPresentCounter);
}
/**
 * Writes a single byte at the current position, growing the buffer by a new
 * chunk when the current one is exhausted, and extending the logical size if
 * the write moved past the previous end.
 *
 * @param b the byte to write (low-order 8 bits)
 * @throws IOException if this buffer has already been closed
 */
@Override
public void write(int b) throws IOException {
    checkClosed();
    // Current chunk exhausted: append a fresh one before writing.
    if (currentBufferPointer >= chunkSize) {
        expandBuffer();
    }
    currentBuffer.put((byte) b);
    currentBufferPointer++;
    pointer++;
    // Writing past the previous end extends the logical size.
    size = Math.max(size, pointer);
}
// close() must flip the closed flag; writes beforehand must succeed.
@Test
void testClose() throws IOException {
    RandomAccess randomAccessReadWrite = new RandomAccessReadWriteBuffer();
    randomAccessReadWrite.write(new byte[] { 1, 2, 3, 4 });
    assertFalse(randomAccessReadWrite.isClosed());
    randomAccessReadWrite.close();
    assertTrue(randomAccessReadWrite.isClosed());
}
/**
 * Indicates whether execution should run in dry-run mode (steps skipped).
 *
 * @return true when the dry-run configuration property is set to true;
 *     false when absent or false
 */
@Override
public boolean isDryRun() {
    return configurationParameters
        .getBoolean(EXECUTION_DRY_RUN_PROPERTY_NAME)
        .orElse(false);
}
// The dry-run option must reflect the configuration property's boolean value.
@Test
void isDryRun() {
    ConfigurationParameters dryRun = new MapConfigurationParameters(
        Constants.EXECUTION_DRY_RUN_PROPERTY_NAME, "true");
    assertTrue(new CucumberEngineOptions(dryRun).isDryRun());
    ConfigurationParameters noDryRun = new MapConfigurationParameters(
        Constants.EXECUTION_DRY_RUN_PROPERTY_NAME, "false");
    assertFalse(new CucumberEngineOptions(noDryRun).isDryRun());
}
/**
 * Evaluates the message-count alert condition: counts the stream's messages
 * over the configured relative time window and compares the count against
 * the threshold using the configured comparison (MORE/LESS). On trigger,
 * optionally collects a backlog of recent messages for the alert.
 *
 * @return a positive CheckResult with a description (and backlog) when
 *     triggered, a NegativeCheckResult otherwise
 */
@Override
public CheckResult runCheck() {
    try {
        // Create an absolute range from the relative range to make sure it doesn't change during the two
        // search requests. (count and find messages)
        // This is needed because the RelativeRange computes the range from NOW on every invocation of getFrom() and
        // getTo().
        // See: https://github.com/Graylog2/graylog2-server/issues/2382
        final RelativeRange relativeRange = RelativeRange.create(time * 60);
        final AbsoluteRange range = AbsoluteRange.create(relativeRange.getFrom(), relativeRange.getTo());
        final String filter = buildQueryFilter(stream.getId(), query);
        final CountResult result = searches.count("*", range, filter);
        final long count = result.count();
        LOG.debug("Alert check <{}> result: [{}]", id, count);
        final boolean triggered;
        switch (thresholdType) {
            case MORE:
                triggered = count > threshold;
                break;
            case LESS:
                triggered = count < threshold;
                break;
            default:
                triggered = false;
        }
        if (triggered) {
            final List<MessageSummary> summaries = Lists.newArrayList();
            if (getBacklog() > 0) {
                // Fetch the newest messages in the window as alert context.
                final SearchResult backlogResult = searches.search("*", filter,
                    range, getBacklog(), 0, new Sorting(Message.FIELD_TIMESTAMP, Sorting.Direction.DESC));
                for (ResultMessage resultMessage : backlogResult.getResults()) {
                    final Message msg = resultMessage.getMessage();
                    summaries.add(new MessageSummary(resultMessage.getIndex(), msg));
                }
            }
            final String resultDescription = "Stream had " + count + " messages in the last "
                + time + " minutes with trigger condition "
                + thresholdType.toString().toLowerCase(Locale.ENGLISH)
                + " than " + threshold + " messages. "
                + "(Current grace time: " + grace + " minutes)";
            return new CheckResult(true, this, resultDescription, Tools.nowUTC(), summaries);
        } else {
            return new NegativeCheckResult();
        }
    } catch (InvalidRangeParametersException e) {
        // cannot happen lol
        LOG.error("Invalid timerange.", e);
        return null;
    }
}
// A LESS-threshold condition must trigger when the observed count drops
// strictly below the configured threshold.
@Test
public void testRunCheckLessPositive() throws Exception {
    final MessageCountAlertCondition.ThresholdType type = MessageCountAlertCondition.ThresholdType.LESS;

    final MessageCountAlertCondition messageCountAlertCondition = getConditionWithParameters(type, threshold);

    // One message fewer than the threshold satisfies "less than".
    searchCountShouldReturn(threshold - 1);
    final AlertCondition.CheckResult result = messageCountAlertCondition.runCheck();

    assertTriggered(messageCountAlertCondition, result);
}
// Intentional no-op: the default policy relies on the identity of the user
// launching the Fuse application, so no explicit user/group change is made.
@Override
public void setUserGroupIfNeeded(AlluxioURI uri) {
    // By default, Alluxio Fuse/client sets user/group to the user launches the Fuse application
    // no extra user group setting required
}
// The default policy must be a no-op: applying it to a non-existent URI must
// not create anything, so getStatus still fails with FileDoesNotExistException.
@Test
public void setUserGroupIfNeeded() {
    AlluxioURI uri = new AlluxioURI("/TestSetUserGroupIfNeeded");
    mAuthPolicy.setUserGroupIfNeeded(uri);
    // No need to set user group
    Assert.assertThrows(FileDoesNotExistException.class,
        () -> mFileSystem.getStatus(uri));
}
// Column length is not defined for this binary protocol value; fail fast with
// an explicit unsupported-operation error so misuse is caught immediately.
@Override
public int getColumnLength(final Object value) {
    throw new UnsupportedSQLOperationException("PostgreSQLInt2ArrayBinaryProtocolValue.getColumnLength()");
}
// getColumnLength is unsupported for this protocol value and must throw.
@Test
void assertGetColumnLength() {
    assertThrows(UnsupportedSQLOperationException.class, () -> newInstance().getColumnLength("val"));
}
// Parses a CGM Session Start Time packet: a 7-byte date/time, a 1-byte time
// zone offset, a 1-byte DST offset, optionally followed by a 2-byte CRC
// (total size 9 or 11 bytes).
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
    super.onDataReceived(device, data);

    // Only the two legal packet sizes are accepted.
    if (data.size() != 9 && data.size() != 11) {
        onInvalidDataReceived(device, data);
        return;
    }

    final boolean crcPresent = data.size() == 11;
    if (crcPresent) {
        // CRC covers the first 9 bytes; the expected value is little-endian at offset 9.
        final int actualCrc = CRC16.MCRF4XX(data.getValue(), 0, 9);
        final int expectedCrc = data.getIntValue(Data.FORMAT_UINT16_LE, 9);
        if (actualCrc != expectedCrc) {
            onContinuousGlucoseMonitorSessionStartTimeReceivedWithCrcError(device, data);
            return;
        }
    }

    final Calendar calendar = DateTimeDataCallback.readDateTime(data, 0);
    final Integer timeZoneOffset = TimeZoneDataCallback.readTimeZone(data, 7); // [minutes]
    final DSTOffsetCallback.DSTOffset dstOffset = DSTOffsetDataCallback.readDSTOffset(data, 8);
    if (calendar == null || timeZoneOffset == null || dstOffset == null) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Synthesize a fixed-offset TimeZone from the reported zone and DST offsets.
    final TimeZone timeZone = new TimeZone() {
        @Override
        public int getOffset(final int era, final int year, final int month, final int day, final int dayOfWeek, final int milliseconds) {
            return (timeZoneOffset + dstOffset.offset) * 60000; // convert minutes to milliseconds
        }

        @Override
        public void setRawOffset(final int offsetMillis) {
            throw new UnsupportedOperationException("Can't set raw offset for this TimeZone");
        }

        @Override
        public int getRawOffset() {
            return timeZoneOffset * 60000;
        }

        @Override
        public boolean useDaylightTime() {
            return true;
        }

        @Override
        public boolean inDaylightTime(final Date date) {
            // Use of DST is dependent on the input data only
            return dstOffset.offset > 0;
        }

        @Override
        public int getDSTSavings() {
            return dstOffset.offset * 60000;
        }

        // TODO add TimeZone ID
        // @Override
        // public String getID() {
        //     return super.getID();
        // }
    };
    calendar.setTimeZone(timeZone);

    onContinuousGlucoseMonitorSessionStartTimeReceived(device, calendar, crcPresent);
}
// Decodes a valid 11-byte Session Start Time packet (with CRC) and checks the
// date, zone offset and DST offset land in the resulting Calendar.
@Test
public void onContinuousGlucoseMonitorSessionStartTimeReceived() {
    // 2018-04-24 13:08:24, zone offset 8 (quarter hours), DST 4, then CRC.
    final Data data = new Data(new byte[] {(byte) 0xE2, 0x07, 4, 24, 13, 8, 24, 8, 4, (byte) 0xE0, (byte) 0xC2 });
    //noinspection DataFlowIssue
    callback.onDataReceived(null, data);
    assertTrue(success);
    assertTrue(verified);
    assertEquals(2018, result.get(Calendar.YEAR));
    assertEquals(Calendar.APRIL, result.get(Calendar.MONTH));
    assertEquals(24, result.get(Calendar.DATE));
    assertEquals(13, result.get(Calendar.HOUR_OF_DAY));
    assertEquals(8, result.get(Calendar.MINUTE));
    assertEquals(24, result.get(Calendar.SECOND));
    assertEquals(8 * 60000, result.get(Calendar.ZONE_OFFSET));
    assertEquals(4 * 15 * 60000, result.get(Calendar.DST_OFFSET));
}
/**
 * Routes an incoming frame: unfragmented messages go straight to the delegate,
 * fragments are handed to the reassembly path.
 */
public void onFragment(final DirectBuffer buffer, final int offset, final int length, final Header header)
{
    final byte frameFlags = header.flags();
    final boolean isWholeMessage = (frameFlags & UNFRAGMENTED) == UNFRAGMENTED;

    if (isWholeMessage)
    {
        delegate.onFragment(buffer, offset, length, header);
        return;
    }

    handleFragment(buffer, offset, length, header, frameFlags);
}
// A frame flagged UNFRAGMENTED must be forwarded to the delegate unchanged,
// with the original buffer, offset and length.
@Test
void shouldPassThroughUnfragmentedMessage() {
    headerFlyweight.flags(FrameDescriptor.UNFRAGMENTED);

    final UnsafeBuffer srcBuffer = new UnsafeBuffer(new byte[128]);
    final int offset = 8;
    final int length = 32;

    assembler.onFragment(srcBuffer, offset, length, header);

    verify(delegateFragmentHandler, times(1)).onFragment(srcBuffer, offset, length, header);
}
/**
 * Collapses every whitespace token of a SQL statement into a single space and
 * trims the result, leaving all other tokens verbatim.
 */
public static String squeezeStatement(String sql)
{
    TokenSource tokens = getLexer(sql, ImmutableSet.of());
    StringBuilder squeezed = new StringBuilder();

    for (Token token = tokens.nextToken(); token.getType() != Token.EOF; token = tokens.nextToken()) {
        // Any run of whitespace becomes one space; everything else is copied as-is.
        squeezed.append(token.getType() == SqlBaseLexer.WS ? " " : token.getText());
    }

    return squeezed.toString().trim();
}
// Whitespace runs (including newlines) collapse to single spaces and the
// result is trimmed at both ends.
@Test
public void testSqueezeStatement() {
    String sql = "select * from\n foo\n  order by x ; ";
    assertEquals(squeezeStatement(sql), "select * from foo order by x ;");
}
@Override public MastershipRole getRole(NetworkId networkId, NodeId nodeId, DeviceId deviceId) { Map<DeviceId, NodeId> masterMap = getMasterMap(networkId); Map<DeviceId, List<NodeId>> backups = getBackups(networkId); //just query NodeId current = masterMap.get(deviceId); MastershipRole role; if (current != null && current.equals(nodeId)) { return MastershipRole.MASTER; } if (backups.getOrDefault(deviceId, Collections.emptyList()).contains(nodeId)) { role = MastershipRole.STANDBY; } else { role = MastershipRole.NONE; } return role; }
// Covers all three roles: NONE (no mapping), STANDBY (backup only), MASTER,
// and STANDBY when another node holds mastership.
@Test
public void getRole() {
    //special case, no backup or master
    put(VNID1, VDID1, N1, false, false);
    assertEquals("wrong role", NONE, sms.getRole(VNID1, N1, VDID1));

    //backup exists but we aren't mapped
    put(VNID1, VDID2, N1, false, true);
    assertEquals("wrong role", STANDBY, sms.getRole(VNID1, N1, VDID2));

    //N2 is master
    put(VNID1, VDID3, N2, true, true);
    assertEquals("wrong role", MASTER, sms.getRole(VNID1, N2, VDID3));

    //N2 is master but N1 is only in backups set
    put(VNID1, VDID4, N1, false, true);
    put(VNID1, VDID4, N2, true, false);
    assertEquals("wrong role", STANDBY, sms.getRole(VNID1, N1, VDID4));
}
// Renders the product page: loads the product's reviews into the model and,
// if the product is among the customer's favourites, flips "inFavourite"
// before resolving the view name.
@GetMapping
public Mono<String> getProductPage(@ModelAttribute("product") Mono<Product> productMono, Model model) {
    // Default until the favourites lookup proves otherwise.
    model.addAttribute("inFavourite", false);
    return productMono.flatMap(
        product -> this.productReviewsClient.findProductReviewsByProductId(product.id())
            .collectList()
            .doOnNext(productReviews -> model.addAttribute("reviews", productReviews))
            // An empty favourites Mono simply leaves "inFavourite" false.
            .then(this.favouriteProductsClient.findFavouriteProductByProductId(product.id())
                .doOnNext(favouriteProduct -> model.addAttribute("inFavourite", true)))
            .thenReturn("customer/products/product")
    );
}
// Happy path: the controller resolves the product view, exposes reviews and
// the favourite flag in the model, and only touches the review/favourite clients.
@Test
void getProductPage_ReturnsProductPage() {
    // given
    var model = new ConcurrentModel();
    var productReviews = List.of(
        new ProductReview(UUID.fromString("6a8512d8-cbaa-11ee-b986-376cc5867cf5"), 1, 5, "На пятёрочку"),
        new ProductReview(UUID.fromString("849c3fac-cbaa-11ee-af68-737c6d37214a"), 1, 4, "Могло быть и лучше"));
    doReturn(Flux.fromIterable(productReviews)).when(this.productReviewsClient).findProductReviewsByProductId(1);
    var favouriteProduct = new FavouriteProduct(UUID.fromString("af5f9496-cbaa-11ee-a407-27b46917819e"), 1);
    doReturn(Mono.just(favouriteProduct)).when(this.favouriteProductsClient).findFavouriteProductByProductId(1);

    // when
    StepVerifier.create(this.controller.getProductPage(
            Mono.just(new Product(1, "Товар №1", "Описание товара №1")), model))
        // then
        .expectNext("customer/products/product")
        .verifyComplete();

    assertEquals(productReviews, model.getAttribute("reviews"));
    assertEquals(true, model.getAttribute("inFavourite"));

    verify(this.productReviewsClient).findProductReviewsByProductId(1);
    verify(this.favouriteProductsClient).findFavouriteProductByProductId(1);
    // NOTE(review): verifyNoInteractions(productsClient) below makes the
    // verifyNoMoreInteractions check on productsClient redundant - confirm intent.
    verifyNoMoreInteractions(this.productsClient, this.favouriteProductsClient);
    verifyNoInteractions(this.productsClient);
}
// Exposes the scheduler holding pending/running task runs (used by callers
// that need to inspect queue state).
public TaskRunScheduler getTaskRunScheduler() {
    return taskRunScheduler;
}
// When two runs of the same task are mergeable, only one pending run must
// survive, and it must carry the highest priority of the merged pair.
@Test
public void testTaskRunMergePriorityFirst() {
    TaskRunManager taskRunManager = new TaskRunManager();
    Task task = new Task("test");
    task.setDefinition("select 1");
    long taskId = 1;

    // Low-priority run, merge disabled on its own option.
    TaskRun taskRun1 = TaskRunBuilder
        .newBuilder(task)
        .setExecuteOption(makeExecuteOption(true, false))
        .build();
    long now = System.currentTimeMillis();
    taskRun1.setTaskId(taskId);
    taskRun1.initStatus("1", now);
    taskRun1.getStatus().setPriority(0);

    // High-priority run with the default merge option.
    TaskRun taskRun2 = TaskRunBuilder
        .newBuilder(task)
        .setExecuteOption(DEFAULT_MERGE_OPTION)
        .build();
    taskRun2.setTaskId(taskId);
    taskRun2.initStatus("2", now);
    taskRun2.getStatus().setPriority(10);

    taskRunManager.arrangeTaskRun(taskRun1, false);
    taskRunManager.arrangeTaskRun(taskRun2, false);

    TaskRunScheduler taskRunScheduler = taskRunManager.getTaskRunScheduler();
    List<TaskRun> taskRuns = Lists.newArrayList(taskRunScheduler.getPendingTaskRunsByTaskId(taskId));
    Assert.assertTrue(taskRuns != null);
    // Merged down to a single pending run that kept the higher priority.
    Assert.assertEquals(1, taskRuns.size());
    Assert.assertEquals(10, taskRuns.get(0).getStatus().getPriority());
}
// FEEL starts(point, range): true iff the range's lower bound is closed and
// compares equal to the given point. Null arguments and incomparable types
// are reported as invalid-parameter errors instead of being thrown.
public FEELFnResult<Boolean> invoke(@ParameterName( "point" ) Comparable point, @ParameterName( "range" ) Range range) {
    if ( point == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point", "cannot be null"));
    }
    if ( range == null ) {
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range", "cannot be null"));
    }
    try {
        // An open lower bound never "starts" the range, even on an exact match.
        boolean result = ( range.getLowBoundary() == Range.RangeBoundary.CLOSED && point.compareTo( range.getLowEndPoint() ) == 0 );
        return FEELFnResult.ofResult( result );
    } catch( Exception e ) {
        // points are not comparable
        return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "point", "cannot be compared to range"));
    }
}
// starts() is true only when the point equals a CLOSED lower endpoint;
// open bounds and non-matching endpoints yield false.
@Test
void invokeParamSingleAndRange() {
    FunctionTestUtil.assertResult( startsFunction.invoke( "a",
            new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED )),
            Boolean.TRUE );
    FunctionTestUtil.assertResult( startsFunction.invoke( "f",
            new RangeImpl( Range.RangeBoundary.CLOSED, "a", "f", Range.RangeBoundary.CLOSED )),
            Boolean.FALSE );
    FunctionTestUtil.assertResult( startsFunction.invoke( "a",
            new RangeImpl( Range.RangeBoundary.OPEN, "a", "f", Range.RangeBoundary.CLOSED )),
            Boolean.FALSE );
    FunctionTestUtil.assertResult( startsFunction.invoke( "a",
            new RangeImpl( Range.RangeBoundary.CLOSED, "b", "f", Range.RangeBoundary.CLOSED )),
            Boolean.FALSE );
}
/**
 * Picks {@code count} preferred workers for a file via the consistent hash
 * provider, preserving the hash ring's order. Workers the hash ring returns
 * that are missing from the caller's cluster view are skipped (stale view),
 * so the result may contain fewer than {@code count} entries.
 *
 * @throws ResourceExhaustedException if the cluster has fewer than
 *         {@code count} workers, or the hash ring yields fewer than requested
 */
@Override
public List<BlockWorkerInfo> getPreferredWorkers(WorkerClusterView workerClusterView,
    String fileId, int count) throws ResourceExhaustedException {
    if (workerClusterView.size() < count) {
        throw new ResourceExhaustedException(String.format(
            "Not enough workers in the cluster %d workers in the cluster but %d required",
            workerClusterView.size(), count));
    }
    Set<WorkerIdentity> workerIdentities = workerClusterView.workerIds();
    mHashProvider.refresh(workerIdentities);
    List<WorkerIdentity> workers = mHashProvider.getMultiple(fileId, count);
    if (workers.size() != count) {
        throw new ResourceExhaustedException(String.format(
            "Found %d workers from the hash ring but %d required", workers.size(), count));
    }
    ImmutableList.Builder<BlockWorkerInfo> builder = ImmutableList.builder();
    for (WorkerIdentity worker : workers) {
        Optional<WorkerInfo> optionalWorkerInfo = workerClusterView.getWorkerById(worker);
        final WorkerInfo workerInfo;
        if (optionalWorkerInfo.isPresent()) {
            workerInfo = optionalWorkerInfo.get();
        } else {
            // the worker returned by the policy does not exist in the cluster view
            // supplied by the client.
            // this can happen when the membership changes and some callers fail to update
            // to the latest worker cluster view.
            // in this case, just skip this worker
            LOG.debug("Inconsistency between caller's view of cluster and that of "
                + "the consistent hash policy's: worker {} selected by policy does not exist in "
                + "caller's view {}. Skipping this worker.",
                worker, workerClusterView);
            continue;
        }
        BlockWorkerInfo blockWorkerInfo = new BlockWorkerInfo(
            worker, workerInfo.getAddress(), workerInfo.getCapacityBytes(),
            workerInfo.getUsedBytes(), workerInfo.getState() == WorkerState.LIVE
        );
        builder.add(blockWorkerInfo);
    }
    List<BlockWorkerInfo> infos = builder.build();
    return infos;
}
// With two workers, requesting two must return both (in a stable order);
// requesting more workers than the cluster holds must fail.
@Test
public void getMultipleWorkers() throws Exception {
    WorkerLocationPolicy policy = WorkerLocationPolicy.Factory.create(mConf);
    assertTrue(policy instanceof MultiProbeHashPolicy);
    // Prepare a worker list
    WorkerClusterView workers = new WorkerClusterView(Arrays.asList(
        new WorkerInfo()
            .setIdentity(WorkerIdentityTestUtils.ofLegacyId(1))
            .setAddress(new WorkerNetAddress()
                .setHost("master1").setRpcPort(29998).setDataPort(29999).setWebPort(30000))
            .setCapacityBytes(1024)
            .setUsedBytes(0),
        new WorkerInfo()
            .setIdentity(WorkerIdentityTestUtils.ofLegacyId(2))
            .setAddress(new WorkerNetAddress()
                .setHost("master2").setRpcPort(29998).setDataPort(29999).setWebPort(30000))
            .setCapacityBytes(1024)
            .setUsedBytes(0)));

    List<BlockWorkerInfo> assignedWorkers = policy.getPreferredWorkers(workers, "hdfs://a/b/c", 2);
    assertEquals(2, assignedWorkers.size());
    assertTrue(assignedWorkers.stream().allMatch(w -> contains(workers, w)));
    // The order of the workers should be consistent
    assertEquals(assignedWorkers.get(0).getNetAddress().getHost(), "master1");
    assertEquals(assignedWorkers.get(1).getNetAddress().getHost(), "master2");

    assertThrows(ResourceExhaustedException.class, () -> {
        // Getting 2 out of 1 worker will result in an error
        policy.getPreferredWorkers(
            new WorkerClusterView(Arrays.asList(
                new WorkerInfo()
                    .setIdentity(WorkerIdentityTestUtils.ofLegacyId(1))
                    .setAddress(new WorkerNetAddress()
                        .setHost("master1").setRpcPort(29998).setDataPort(29999).setWebPort(30000))
                    .setCapacityBytes(1024)
                    .setUsedBytes(0))),
            "hdfs://a/b/c", 2);
    });
}
/**
 * Resolves the SELECT statement for a SingleStore read: exactly one of
 * {@code table} and {@code query} must be supplied.
 *
 * @throws IllegalArgumentException when both or neither are given
 */
public static String getSelectQuery(@Nullable String table, @Nullable String query) {
    boolean hasTable = table != null;
    boolean hasQuery = query != null;

    if (hasTable && hasQuery) {
        throw new IllegalArgumentException("withTable() can not be used together with withQuery()");
    }
    if (hasTable) {
        // Table names are escaped to survive reserved words and odd characters.
        return "SELECT * FROM " + SingleStoreUtil.escapeIdentifier(table);
    }
    if (hasQuery) {
        return query;
    }
    throw new IllegalArgumentException("One of withTable() or withQuery() is required");
}
// Supplying both a table and a query is ambiguous and must be rejected.
@Test
public void testGetSelectQueryBothNonNulls() {
    assertThrows(
        "withTable() can not be used together with withQuery()",
        IllegalArgumentException.class,
        () -> SingleStoreUtil.getSelectQuery("table", "SELECT * FROM table"));
}
// Sorts the array in ascending order by delegating to the directional variant.
@Udf
public <T extends Comparable<? super T>> List<T> arraySortDefault(@UdfParameter(
    description = "The array to sort") final List<T> input) {
    return arraySortWithDirection(input, "ASC");
}
// Strings sort lexicographically in ascending order by default.
@Test
public void shouldSortStrings() {
    final List<String> input = Arrays.asList("foo", "food", "bar");
    final List<String> output = udf.arraySortDefault(input);
    assertThat(output, contains("bar", "foo", "food"));
}
// Entry point for order placement; forwards straight to the shipping request.
void placeOrder(Order order) {
    sendShippingRequest(order);
}
// Exercises placeOrder repeatedly while scaling the configured time limits.
// NOTE(review): the *= d updates compound across loop iterations (each pass
// multiplies the already-scaled value, not the original), and the scaled
// locals are never fed back into the Commander built below - confirm this
// is the intended stress pattern.
@Test
void testPlaceOrderShortDuration2() throws Exception {
    long paymentTime = timeLimits.paymentTime();
    long queueTaskTime = timeLimits.queueTaskTime();
    long messageTime = timeLimits.messageTime();
    long employeeTime = timeLimits.employeeTime();
    long queueTime = timeLimits.queueTime();
    for (double d = 0.1; d < 2; d = d + 0.1) {
        paymentTime *= d;
        queueTaskTime *= d;
        messageTime *= d;
        employeeTime *= d;
        queueTime *= d;
        Commander c = buildCommanderObject(false);
        var order = new Order(new User("K", "J"), "pen", 1f);
        for (Order.MessageSent ms : Order.MessageSent.values()) {
            c.placeOrder(order);
            // Every placement must have produced a non-blank order id.
            assertFalse(StringUtils.isBlank(order.id));
        }
    }
}
// Builds and executes a CREATE MAPPING statement for the given table/columns;
// the SqlResult is closed immediately since DDL returns no rows.
public void createMapping(
    String mappingName,
    String tableName,
    List<SqlColumnMetadata> mappingColumns,
    String dataConnectionRef,
    String idColumn
) {
    sqlService.execute(
        createMappingQuery(mappingName, tableName, mappingColumns, dataConnectionRef, idColumn)
    ).close();
}
// Every identifier in the generated CREATE MAPPING must be double-quoted and
// the id column passed through as an option.
@Test
@SuppressWarnings("OperatorWrap")
public void when_createMapping_then_quoteParameters() {
    mappingHelper.createMapping(
        "myMapping",
        "myTable",
        singletonList(new SqlColumnMetadata("id", SqlColumnType.INTEGER, true)),
        "dataConnectionRef",
        "idColumn"
    );

    verify(sqlService).execute(
        "CREATE MAPPING \"myMapping\" "
            + "EXTERNAL NAME \"myTable\" "
            + "( \"id\" INTEGER ) "
            + "DATA CONNECTION \"dataConnectionRef\" "
            + "OPTIONS ("
            + " 'idColumn' = 'idColumn' "
            + ")"
    );
}
/**
 * Returns the simple class name (e.g. {@code "RepositoriesMeta"}); subclasses
 * automatically report their own name.
 */
@Override
public String toString() {
    return getClass().getSimpleName();
}
// toString must report the simple class name.
@Test
public void testToString() throws Exception {
    RepositoriesMeta repositoriesMeta = new RepositoriesMeta();
    assertEquals( "RepositoriesMeta", repositoriesMeta.toString() );
}
// Triggers statistics estimation for the operator held by the expression
// context by dispatching it to this visitor.
public void estimatorStats() {
    expressionContext.getOp().accept(this, expressionContext);
}
// Aggregation row-count estimation: grouping by one column yields its distinct
// count; grouping by two multiplies distinct counts damped by the correlation
// coefficient.
@Test
public void testLogicalAggregationRowCount() throws Exception {
    ColumnRefOperator v1 = columnRefFactory.create("v1", Type.INT, true);
    ColumnRefOperator v2 = columnRefFactory.create("v2", Type.INT, true);

    List<ColumnRefOperator> groupByColumns = Lists.newArrayList(v1);
    Map<ColumnRefOperator, CallOperator> aggCall = new HashMap<>();

    // Child produces 10000 rows; both columns have 50 distinct values.
    Statistics.Builder builder = Statistics.builder();
    builder.setOutputRowCount(10000);
    builder.addColumnStatistics(ImmutableMap.of(v1, new ColumnStatistic(0, 100, 0, 10, 50)));
    builder.addColumnStatistics(ImmutableMap.of(v2, new ColumnStatistic(0, 100, 0, 10, 50)));
    Group childGroup = new Group(0);
    childGroup.setStatistics(builder.build());

    // Single group-by column: estimate equals its distinct count.
    LogicalAggregationOperator aggNode = new LogicalAggregationOperator(AggType.GLOBAL, groupByColumns, aggCall);
    GroupExpression groupExpression = new GroupExpression(aggNode, Lists.newArrayList(childGroup));
    groupExpression.setGroup(new Group(1));
    ExpressionContext expressionContext = new ExpressionContext(groupExpression);
    StatisticsCalculator statisticsCalculator = new StatisticsCalculator(expressionContext,
        columnRefFactory, optimizerContext);
    statisticsCalculator.estimatorStats();
    Assert.assertEquals(50, expressionContext.getStatistics().getOutputRowCount(), 0.001);

    // Two group-by columns: product of distinct counts damped per extra column.
    groupByColumns = Lists.newArrayList(v1, v2);
    aggNode = new LogicalAggregationOperator(AggType.GLOBAL, groupByColumns, aggCall);
    groupExpression = new GroupExpression(aggNode, Lists.newArrayList(childGroup));
    groupExpression.setGroup(new Group(1));
    expressionContext = new ExpressionContext(groupExpression);
    statisticsCalculator = new StatisticsCalculator(expressionContext,
        columnRefFactory, optimizerContext);
    statisticsCalculator.estimatorStats();
    Assert.assertEquals(
        50 * 50 * Math.pow(StatisticsEstimateCoefficient.UNKNOWN_GROUP_BY_CORRELATION_COEFFICIENT, 2),
        expressionContext.getStatistics().getOutputRowCount(), 0.001);
}
/**
 * Z-order encodes the input integers into a byte array by bit interleaving
 * (most significant bits first), preserving data locality. When signed values
 * are allowed, sign bits are placed at the top of the address so negative
 * values order below positive ones.
 */
public byte[] encodeToByteArray(List<Integer> input) {
    checkEncodeInputValidity(input);

    // Find address byte length by rounding up (totalBitLength / 8)
    byte[] address = new byte[(totalBitLength + 7) >> 3];

    if (!positiveIntegersOnly) {
        // Modify sign bits to preserve ordering between positive and negative integers
        int bitIndex = totalBitLength - 1;
        for (int value : input) {
            byte signBit = (value < 0) ? (byte) 0 : 1;
            address[bitIndex >> 3] |= signBit << (bitIndex & 7);
            bitIndex--;
        }
    }

    // Data bits start below the sign-bit region (if any).
    int bitIndex = positiveIntegersOnly ? totalBitLength - 1 : totalBitLength - encodingBits.size() - 1;
    // Interweave input bits into address from the most significant bit to preserve data locality
    for (int bitsProcessed = 0; bitsProcessed < maxBitLength; bitsProcessed++) {
        for (int index = 0; index < input.size(); index++) {
            // Columns with fewer encoding bits skip rounds until their bits start.
            if (bitsProcessed >= encodingBits.get(index)) {
                continue;
            }
            int bitPosition = encodingBits.get(index) - bitsProcessed - 1;
            byte maskedBit = (byte) ((input.get(index) >> bitPosition) & 1);
            address[bitIndex >> 3] |= maskedBit << (bitIndex & 7);
            bitIndex--;
        }
    }

    return address;
}
// Encoding must reject an input list whose size differs from the configured
// bit-position list, with a message naming both sizes.
@Test
public void testZOrderDifferentListSizes() {
    List<Integer> bitPositions = ImmutableList.of(8, 8, 8, 8, 8, 8, 8, 8);
    ZOrder zOrder = new ZOrder(bitPositions, true);
    List<Integer> intColumns = ImmutableList.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
    try {
        zOrder.encodeToByteArray(intColumns);
        fail("Expected test to fail: input list size is greater than bit position list size.");
    } catch (IllegalArgumentException e) {
        String expectedMessage = format(
            "Input list size (%d) does not match encoding bits list size (%d).",
            intColumns.size(), bitPositions.size());
        assertEquals(e.getMessage(), expectedMessage,
            format("Expected exception message '%s' to match '%s'", e.getMessage(), expectedMessage));
    }
}
/**
 * Resolves this property's current value: converts the raw string form, falls
 * back to the default when the value is missing or an empty collection, and
 * replaces out-of-range values with the default (logging a warning).
 */
@SuppressWarnings("unchecked")
public T getValue() {
    final T value = (T) FROM_STRING.get(getConverterClass()).apply(JiveGlobals.getProperty(key), this);
    if (value == null || (Collection.class.isAssignableFrom(value.getClass()) && ((Collection) value).isEmpty())) {
        return defaultValue;
    }
    // Range clamping: values outside [minValue, maxValue] revert to the default.
    if (minValue != null && ((Comparable) minValue).compareTo(value) > 0) {
        LOGGER.warn("Configured value of {} is less than the minimum value of {} for the SystemProperty {} - will use default value of {} instead",
            value, minValue, key, defaultValue);
        return defaultValue;
    }
    if (maxValue != null && ((Comparable) maxValue).compareTo(value) < 0) {
        LOGGER.warn("Configured value of {} is more than the maximum value of {} for the SystemProperty {} - will use default value of {} instead",
            value, maxValue, key, defaultValue);
        return defaultValue;
    }
    return value;
}
// A comma-separated property string must split into a list, preserving order.
@Test
public void willCreateAListOfCommaSeparatedString() {
    final String key = "a csv list property";
    final SystemProperty<List<String>> property = SystemProperty.Builder.ofType(List.class)
        .setKey(key)
        .setDefaultValue(Collections.emptyList())
        .setDynamic(true)
        .buildList(String.class);

    JiveGlobals.setProperty(key, "3,2,1");

    assertThat(property.getValue(), is(Arrays.asList("3", "2", "1")));
}
// Returns the configured device type, or null if none is set.
public Device.Type type() {
    return get(TYPE, null, Device.Type.class);
}
// A type set on the config must be readable back unchanged.
@Test
public void testSetType() {
    SW_BDC.type(OTN);
    assertEquals("Incorrect type", OTN, SW_BDC.type());
}
// Derives Kafka producer Properties from the connector options, then layers
// in key and value serializer settings based on the declared formats/schemas.
static Properties resolveProducerProperties(Map<String, String> options, Object keySchema, Object valueSchema) {
    Properties properties = from(options);
    withSerdeProducerProperties(true, options, keySchema, properties);
    withSerdeProducerProperties(false, options, valueSchema, properties);
    return properties;
}
// Declaring the Avro format must select KafkaAvroSerializer for the matching
// side (key or value) and pass the schema registry URL through.
@Test
public void test_producerProperties_avro_schemaRegistry() {
    // key
    assertThat(resolveProducerProperties(Map.of(
        OPTION_KEY_FORMAT, AVRO_FORMAT,
        "schema.registry.url", "http://localhost:8081"
    ))).containsExactlyInAnyOrderEntriesOf(Map.of(
        KEY_SERIALIZER, KafkaAvroSerializer.class.getCanonicalName(),
        "schema.registry.url", "http://localhost:8081"
    ));

    // value
    assertThat(resolveProducerProperties(Map.of(
        OPTION_KEY_FORMAT, UNKNOWN_FORMAT,
        OPTION_VALUE_FORMAT, AVRO_FORMAT,
        "schema.registry.url", "http://localhost:8081"
    ))).containsExactlyInAnyOrderEntriesOf(Map.of(
        VALUE_SERIALIZER, KafkaAvroSerializer.class.getCanonicalName(),
        "schema.registry.url", "http://localhost:8081"
    ));
}
/**
 * Validates an AWS region string against the expected region pattern.
 *
 * @throws InvalidConfigurationException when the region is null or malformed
 */
static void validateRegion(String region) {
    if (region == null) {
        throw new InvalidConfigurationException("The provided region is null.");
    }

    boolean wellFormed = AWS_REGION_PATTERN.matcher(region).matches();
    if (wellFormed) {
        return;
    }

    throw new InvalidConfigurationException(
        String.format("The provided region %s is not a valid AWS region.", region));
}
// A null region must be rejected with the dedicated null-region message.
@Test
public void validateNullRegion() {
    // given
    String expectedMessage = "The provided region is null.";

    // when
    ThrowingRunnable validateRegion = () -> RegionValidator.validateRegion(null);

    //then
    InvalidConfigurationException thrownEx = assertThrows(InvalidConfigurationException.class, validateRegion);
    assertEquals(expectedMessage, thrownEx.getMessage());
}
/**
 * Inserts or replaces a mapping. An existing entry is moved to the top
 * (most-recently-used position) and its old value returned; a new entry is
 * linked in at the top, after which the eldest entry may be evicted or the
 * table rehashed to a larger capacity.
 *
 * @return the previous value for {@code key}, or {@code null} if absent
 */
@Override
public V put(final K key, final V value) {
    final Entry<K, V>[] table = this.table;
    final int hash = key.hashCode();
    final int index = HashUtil.indexFor(hash, table.length, mask);

    // Replace path: walk the bucket chain looking for an equal key.
    for (Entry<K, V> e = table[index]; e != null; e = e.hashNext) {
        final K entryKey;
        if ((entryKey = e.key) == key || entryKey.equals(key)) {
            moveToTop(e);
            return e.setValue(value);
        }
    }

    // Insert path: link the new entry at the bucket head and at the list top.
    final Entry<K, V> e = new Entry<>(key, value);
    e.hashNext = table[index];
    table[index] = e;
    final Entry<K, V> top = this.top;
    e.next = top;
    if (top != null) {
        top.previous = e;
    } else {
        back = e;
    }
    this.top = e;
    _size += 1;
    // Either evict the eldest (LRU policy) or grow when over capacity.
    if (removeEldestEntry(back)) {
        remove(eldestKey());
    } else if (_size > capacity) {
        rehash(HashUtil.nextCapacity(capacity));
    }
    return null;
}
// put must return null on first insert and the previous value on replacement;
// all entries (negative and positive keys) must stay retrievable.
@Test
public void testPutGet2() {
    final LinkedHashMap<Integer, String> tested = new LinkedHashMap<>();
    for (int i = 0; i < 1000; ++i) {
        tested.put(i - 500, Integer.toString(i));
    }
    Assert.assertEquals(1000, tested.size());
    for (int i = 0; i < 1000; ++i) {
        Assert.assertEquals(Integer.toString(i), tested.get(i - 500));
    }
    // Replacement returns the old value and keeps the size constant.
    for (int i = 0; i < 1000; ++i) {
        Assert.assertEquals(Integer.toString(i), tested.put(i - 500, Integer.toString(i + 1)));
    }
    Assert.assertEquals(1000, tested.size());
    for (int i = 0; i < 1000; ++i) {
        Assert.assertEquals(Integer.toString(i + 1), tested.get(i - 500));
    }
}
/**
 * Looks up this field's value in the given trace context. A null context
 * yields null, permitting unguarded use of {@code CurrentTraceContext.get()}.
 */
@Nullable
public String getValue(@Nullable TraceContext context) {
    return context == null ? null : this.context.getValue(this, context);
}
// A null TraceContext must yield null rather than throwing.
@Test
void getValue_context_null() {
    // permits unguarded use of CurrentTraceContext.get()
    assertThat(REQUEST_ID.getValue((TraceContext) null))
        .isNull();
}
// Blocking wrapper over the async counter map's putIfAbsent.
@Override
public long putIfAbsent(K key, long newValue) {
    return complete(asyncCounterMap.putIfAbsent(key, newValue));
}
// A value inserted via putIfAbsent must participate in subsequent arithmetic.
@Test
public void testPutIfAbsent() {
    atomicCounterMap.putIfAbsent(KEY1, VALUE1);
    Long afterIncrement = atomicCounterMap.addAndGet(KEY1, DELTA1);
    assertThat(afterIncrement, is(VALUE1 + DELTA1));
}