focal_method
stringlengths
13
60.9k
test_case
stringlengths
25
109k
/**
 * Computes the HmacSHA1 signature of {@code data} keyed by {@code key} and
 * returns the Base64-encoded result.
 *
 * @throws Exception wrapping any underlying failure with a descriptive message
 */
public static String sign(String data, String key) throws Exception {
    try {
        byte[] rawData = data.getBytes(UTF8);
        byte[] rawKey = key.getBytes(UTF8);
        byte[] signature = sign(rawData, rawKey, SignUtil.SigningAlgorithm.HmacSHA1);
        return new String(Base64.encodeBase64(signature));
    } catch (Exception ex) {
        // Preserve the original exception as the cause so callers can diagnose it.
        throw new Exception("Unable to calculate a request signature: " + ex.getMessage(), ex);
    }
}
// Passing null data makes data.getBytes(UTF8) throw; verifies sign() surfaces that as a (wrapped) Exception.
@Test void testSignWithException() throws Exception { assertThrows(Exception.class, () -> { SignUtil.sign(null, "b"); }); }
/**
 * @return the offset of the first data page of this column chunk; declared
 *         {@code long} so values beyond {@code Integer.MAX_VALUE} (including
 *         negative test sentinels) survive the round-trip — see testConversionNeg
 */
public abstract long getFirstDataPageOffset();
// A negative offset must force the long-backed metadata subclass and round-trip unchanged.
@Test public void testConversionNeg() { long neg = -1; ColumnChunkMetaData md = newMD(neg); assertTrue(md instanceof LongColumnChunkMetaData); assertEquals(neg, md.getFirstDataPageOffset()); }
/**
 * Maps a git failure to a ValidationBean: old git versions get a dedicated
 * error, otherwise the exception's own message is reported. Any failure during
 * the version check itself falls back to the plain message.
 */
public ValidationBean handleException(Exception e, GitVersion gitVersion) {
    final ValidationBean fallback = ValidationBean.notValid(e.getMessage());
    try {
        if (gitVersion.isMinimumSupportedVersionOrHigher()) {
            return fallback;
        }
        return ValidationBean.notValid(ERR_GIT_OLD_VERSION + gitVersion.getVersion().toString());
    } catch (Exception ex) {
        // Version probing failed; report the original error instead.
        return fallback;
    }
}
// With a supported git version the bean must be invalid and carry the original exception message.
@Test void shouldReturnInvalidBeanWithRootCauseAsRepositoryURLIsNotFoundIfVersionIsAbove19() { ValidationBean validationBean = new GitMaterial("http://0.0.0.0").handleException(new Exception("not found!"), GIT_VERSION_1_9); assertThat(validationBean.isValid()).isFalse(); assertThat(validationBean.getError()).contains("not found!"); }
// Attempts to unblock a ring buffer whose consumer is stalled on an unfinished record.
// Two cases: (1) a negative length at the head marks an aborted claim — convert it in
// place to a padding record; (2) a zero length means the producer crashed mid-claim —
// scan forward in ALIGNMENT steps for the next non-zero length, re-confirm the gap is
// still zeroed (scanBackToConfirmStillZeroed) to avoid racing a live producer, then
// pad over the gap. Returns true iff a padding record was written. NOTE(review):
// correctness depends on the volatile-read / ordered-write sequence — do not reorder.
public boolean unblock() { final AtomicBuffer buffer = this.buffer; final long headPosition = buffer.getLongVolatile(headPositionIndex); final long tailPosition = buffer.getLongVolatile(tailPositionIndex); if (headPosition == tailPosition) { return false; } final int mask = capacity - 1; final int consumerIndex = (int)(headPosition & mask); final int producerIndex = (int)(tailPosition & mask); boolean unblocked = false; int length = buffer.getIntVolatile(consumerIndex); if (length < 0) { buffer.putInt(typeOffset(consumerIndex), PADDING_MSG_TYPE_ID); buffer.putIntOrdered(lengthOffset(consumerIndex), -length); unblocked = true; } else if (0 == length) { // go from (consumerIndex to producerIndex) or (consumerIndex to capacity) final int limit = producerIndex > consumerIndex ? producerIndex : capacity; int i = consumerIndex + ALIGNMENT; do { // read the top int of every long (looking for length aligned to 8=ALIGNMENT) length = buffer.getIntVolatile(i); if (0 != length) { if (scanBackToConfirmStillZeroed(buffer, i, consumerIndex)) { buffer.putInt(typeOffset(consumerIndex), PADDING_MSG_TYPE_ID); buffer.putIntOrdered(lengthOffset(consumerIndex), i - consumerIndex); unblocked = true; } break; } i += ALIGNMENT; } while (i < limit); } return unblocked; }
// Head at 1x, tail at 3x message length with a zeroed gap; the next non-zero length at 2x
// must cause a padding record at the head, written type-before-length (verified via InOrder).
@Test void shouldUnblockGapWithZeros() { final int messageLength = ALIGNMENT * 4; when(buffer.getLongVolatile(HEAD_COUNTER_INDEX)).thenReturn((long)messageLength); when(buffer.getLongVolatile(TAIL_COUNTER_INDEX)).thenReturn((long)messageLength * 3); when(buffer.getIntVolatile(messageLength * 2)).thenReturn(messageLength); assertTrue(ringBuffer.unblock()); final InOrder inOrder = inOrder(buffer); inOrder.verify(buffer).putInt(typeOffset(messageLength), PADDING_MSG_TYPE_ID); inOrder.verify(buffer).putIntOrdered(lengthOffset(messageLength), messageLength); }
/**
 * Removes the interceptor registered under {@code id}.
 * Uses copy-on-write: concurrent readers always see a complete immutable
 * snapshot of the map and list.
 *
 * @return true if an interceptor was removed, false if none was registered
 */
public synchronized boolean deregister(String id) {
    assert !(Thread.currentThread() instanceof PartitionOperationThread);
    if (!id2InterceptorMap.containsKey(id)) {
        return false;
    }
    Map<String, MapInterceptor> byId = new HashMap<>(id2InterceptorMap);
    MapInterceptor removed = byId.remove(id);
    id2InterceptorMap = unmodifiableMap(byId);
    List<MapInterceptor> remaining = new ArrayList<>(interceptors);
    remaining.remove(removed);
    interceptors = unmodifiableList(remaining);
    return true;
}
// Register-then-deregister must leave the registry without the interceptor.
@Test public void testDeregister() { registry.register(interceptor.id, interceptor); registry.deregister(interceptor.id); assertInterceptorRegistryContainsNotInterceptor(); }
// Existence check for Azure blob storage paths. Root always exists; containers are checked
// via the container reference; files/placeholders via a server-side blob lookup. A 404 on a
// placeholder deliberately falls through (no break target change) to the common-prefix
// listing below, where a ListCanceledException from the cancelling listener counts as
// "prefix found". StorageException is mapped to a backend exception; URISyntaxException and
// NotfoundException both report "not found". NOTE(review): the switch relies on case
// fall-through from SC_NOT_FOUND into default for non-placeholder files — intentional.
@Override public boolean find(Path file, final ListProgressListener listener) throws BackgroundException { if(file.isRoot()) { return true; } try { try { final boolean found; if(containerService.isContainer(file)) { final CloudBlobContainer container = session.getClient().getContainerReference(containerService.getContainer(file).getName()); return container.exists(null, null, context); } if(file.isFile() || file.isPlaceholder()) { try { final CloudBlob blob = session.getClient().getContainerReference(containerService.getContainer(file).getName()) .getBlobReferenceFromServer(containerService.getKey(file)); return blob.exists(null, null, context); } catch(StorageException e) { switch(e.getHttpStatusCode()) { case HttpStatus.SC_NOT_FOUND: if(file.isPlaceholder()) { // Ignore failure and look for common prefix break; } default: throw e; } } } if(log.isDebugEnabled()) { log.debug(String.format("Search for common prefix %s", file)); } // Check for common prefix try { new AzureObjectListService(session, context).list(file, new CancellingListProgressListener()); return true; } catch(ListCanceledException l) { // Found common prefix return true; } } catch(StorageException e) { throw new AzureExceptionMappingService().map("Failure to read attributes of {0}", e, file); } catch(URISyntaxException e) { return false; } } catch(NotfoundException e) { return false; } }
// Integration test: a freshly created folder is found as a directory but not as a same-named file; cleans up afterwards.
@Test public void testFindDirectory() throws Exception { final Path container = new Path("cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path folder = new AzureDirectoryFeature(session, null).mkdir( new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory)), new TransferStatus()); assertTrue(new AzureFindFeature(session, null).find(folder)); assertFalse(new AzureFindFeature(session, null).find(new Path(folder.getAbsolute(), EnumSet.of(Path.Type.file)))); new AzureDeleteFeature(session, null).delete(Collections.singletonList(folder), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
@Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if (!(request instanceof HttpServletRequest) || !(response instanceof HttpServletResponse) || monitoringDisabled || !instanceEnabled) { // si ce n'est pas une requête http ou si le monitoring est désactivé, on fait suivre chain.doFilter(request, response); return; } final HttpServletRequest httpRequest = (HttpServletRequest) request; final HttpServletResponse httpResponse = (HttpServletResponse) response; if (httpRequest.getRequestURI().equals(getMonitoringUrl(httpRequest))) { doMonitoring(httpRequest, httpResponse); return; } if (!httpCounter.isDisplayed() || isRequestExcluded((HttpServletRequest) request)) { // si cette url est exclue ou si le counter http est désactivé, on ne monitore pas cette requête http chain.doFilter(request, response); return; } doFilter(chain, httpRequest, httpResponse); }
// EasyMock-based: a session without a country-qualified locale, a forwarded-for header and a
// remote user must all be read exactly as recorded; verify(session) checks the interactions.
@Test public void testDoFilterWithSessionBis() throws ServletException, IOException { final HttpServletRequest request = createNiceMock(HttpServletRequest.class); final HttpSession session = createNiceMock(HttpSession.class); expect(request.getSession(false)).andReturn(session); // Locale sans pays expect(request.getLocale()).andReturn(Locale.FRENCH).anyTimes(); // "X-Forwarded-For" expect(request.getHeader("X-Forwarded-For")).andReturn("somewhere").anyTimes(); // getRemoteUser expect(request.getRemoteUser()).andReturn("me").anyTimes(); replay(session); doFilter(request); verify(session); }
/**
 * Returns true iff every countable resource dimension of {@code smaller} is
 * less than or equal to the corresponding dimension of {@code bigger}.
 * A dimension missing from either resource is logged and treated as fitting.
 */
public static boolean fitsIn(Resource smaller, Resource bigger) {
    int maxLength = ResourceUtils.getNumberOfCountableResourceTypes();
    for (int i = 0; i < maxLength; i++) {
        try {
            ResourceInformation rhsValue = bigger.getResourceInformation(i);
            ResourceInformation lhsValue = smaller.getResourceInformation(i);
            if (lhsValue.getValue() > rhsValue.getValue()) {
                return false;
            }
        } catch (ResourceNotFoundException ye) {
            // Missing type: skip this dimension (redundant trailing `continue` removed).
            LOG.warn("Resource is missing:" + ye.getMessage());
        }
    }
    return true;
}
// Dimension-wise fit checks, including mixed-direction failures and a third dimension.
// NOTE(review): the first and last three-dimension assertions are identical — one is redundant.
@Test @Timeout(1000) void testFitsIn() { assertTrue(fitsIn(createResource(1, 1), createResource(2, 2))); assertTrue(fitsIn(createResource(2, 2), createResource(2, 2))); assertFalse(fitsIn(createResource(2, 2), createResource(1, 1))); assertFalse(fitsIn(createResource(1, 2), createResource(2, 1))); assertFalse(fitsIn(createResource(2, 1), createResource(1, 2))); assertTrue(fitsIn(createResource(1, 1, 1), createResource(2, 2, 2))); assertTrue(fitsIn(createResource(1, 1, 0), createResource(2, 2, 0))); assertTrue(fitsIn(createResource(1, 1, 1), createResource(2, 2, 2))); }
// Runs `callable` up to `attempts` times, sleeping `delay` ms between failed attempts.
// Exceptions listed in `duringActionExceptions` count as a failed attempt (optionally
// transformed into a result by `duringActionExceptionListener`); any other exception is
// rethrown immediately. Success is decided by `checkResult` on (attempt, result). An
// InterruptedException during the sleep re-interrupts the thread and aborts further
// attempts by setting currentAttempt = attempts. Returns the last result (may be null).
// NOTE(review): the listener may change `result` before checkResult runs — order matters.
public @CheckForNull V start() throws Exception { V result = null; int currentAttempt = 0; boolean success = false; while (currentAttempt < attempts && !success) { currentAttempt++; try { if (LOGGER.isLoggable(Level.INFO)) { LOGGER.log(Level.INFO, Messages.Retrier_Attempt(currentAttempt, action)); } result = callable.call(); } catch (Exception e) { if (duringActionExceptions == null || Stream.of(duringActionExceptions).noneMatch(exception -> exception.isAssignableFrom(e.getClass()))) { // if the raised exception is not considered as a controlled exception doing the action, rethrow it LOGGER.log(Level.WARNING, Messages.Retrier_ExceptionThrown(currentAttempt, action), e); throw e; } else { // if the exception is considered as a failed action, notify it to the listener LOGGER.log(Level.INFO, Messages.Retrier_ExceptionFailed(currentAttempt, action), e); if (duringActionExceptionListener != null) { LOGGER.log(Level.INFO, Messages.Retrier_CallingListener(e.getLocalizedMessage(), currentAttempt, action)); result = duringActionExceptionListener.apply(currentAttempt, e); } } } // After the call and the call to the listener, which can change the result, test the result success = checkResult.test(currentAttempt, result); if (!success) { if (currentAttempt < attempts) { LOGGER.log(Level.WARNING, Messages.Retrier_AttemptFailed(currentAttempt, action)); LOGGER.log(Level.FINE, Messages.Retrier_Sleeping(delay, action)); try { Thread.sleep(delay); } catch (InterruptedException ie) { LOGGER.log(Level.FINE, Messages.Retrier_Interruption(action)); Thread.currentThread().interrupt(); // flag this thread as interrupted currentAttempt = attempts; // finish } } else { // Failed to perform the action LOGGER.log(Level.INFO, Messages.Retrier_NoSuccess(action, attempts)); } } else { LOGGER.log(Level.INFO, Messages.Retrier_Success(action, currentAttempt)); } } return result; }
// Every attempt throws a controlled IndexOutOfBoundsException with no listener configured:
// final result must be null and the log must record both the per-attempt failure and the
// final no-success message.
@Test public void failedActionWithExceptionAfterThreeAttemptsWithoutListenerTest() throws Exception { final int ATTEMPTS = 3; final String ACTION = "print"; RingBufferLogHandler handler = new RingBufferLogHandler(20); Logger.getLogger(Retrier.class.getName()).addHandler(handler); // Set the required params Retrier<Boolean> r = new Retrier.Builder<>( // action to perform (Callable<Boolean>) () -> { throw new IndexOutOfBoundsException("Exception allowed considered as failure"); }, // check the result and return true (boolean primitive type) if success (currentAttempt, result) -> result != null && result, //name of the action ACTION ) // Set the optional parameters .withAttempts(ATTEMPTS) .withDelay(100) .withDuringActionExceptions(new Class[]{IndexOutOfBoundsException.class}) // Construct the object .build(); // Begin the process without catching the allowed exceptions Boolean finalResult = r.start(); Assert.assertNull(finalResult); String textNoSuccess = Messages.Retrier_NoSuccess(ACTION, ATTEMPTS); assertTrue(String.format("The log should contain '%s'", textNoSuccess), handler.getView().stream().anyMatch(m -> m.getMessage().contains(textNoSuccess))); String testException = Messages.Retrier_ExceptionFailed(ATTEMPTS, ACTION); assertTrue(String.format("The log should contain '%s'", testException), handler.getView().stream().anyMatch(m -> m.getMessage().startsWith(testException))); }
/**
 * @return the shared, pre-built dispatcher metadata singleton for this handler
 */
public static NotificationDispatcherMetadata newMetadata() { return METADATA; }
// The metadata singleton must expose the quality-gate-change dispatcher key.
@Test public void verify_qgChange_notification_dispatcher_key() { NotificationDispatcherMetadata metadata = QGChangeNotificationHandler.newMetadata(); assertThat(metadata.getDispatcherKey()).isEqualTo(QG_CHANGE_DISPATCHER_KEY); }
/**
 * Builds the router function for tag pages: matches both "/{slug}" and its
 * paginated form "/{slug}/page/{page}" under the given prefix, HTML only.
 */
@Override
public RouterFunction<ServerResponse> create(String prefix) {
    final String slugPath = PathUtils.combinePath(prefix, "/{slug}");
    final String pagedPath = PathUtils.combinePath(prefix, "/{slug}/page/{page:\\d+}");
    return RouterFunctions.route(
        GET(slugPath).or(GET(pagedPath)).and(accept(MediaType.TEXT_HTML)),
        handlerFunction());
}
// An empty tag listing yields 404; once a matching tag exists, both the slug route and its
// paginated variant return 200.
@Test void create() { when(client.listBy(eq(Tag.class), any(), any(PageRequest.class))) .thenReturn(Mono.just(ListResult.emptyResult())); WebTestClient webTestClient = getWebTestClient(tagPostRouteFactory.create("/new-tags")); webTestClient.get() .uri("/new-tags/tag-slug-1") .exchange() .expectStatus().isNotFound(); Tag tag = new Tag(); tag.setMetadata(new Metadata()); tag.getMetadata().setName("fake-tag-name"); tag.setSpec(new Tag.TagSpec()); tag.getSpec().setSlug("tag-slug-2"); when(client.listBy(eq(Tag.class), any(), any(PageRequest.class))) .thenReturn(Mono.just(new ListResult<>(List.of(tag)))); when(tagFinder.getByName(eq(tag.getMetadata().getName()))) .thenReturn(Mono.just(TagVo.from(tag))); webTestClient.get() .uri("/new-tags/tag-slug-2") .exchange() .expectStatus().isOk(); webTestClient.get() .uri("/new-tags/tag-slug-2/page/1") .exchange() .expectStatus().isOk(); }
/**
 * Derives a SimpleDateFormat from the layout of the configured log4j2 appender.
 * Extracts every "{...}" token from the layout's "format" content and hands the
 * tokens to processMatches; falls back to "yyyy/MM/dd HH:mm:ss" when the
 * appender is absent or carries no tokens.
 */
protected static SimpleDateFormat getLog4j2Appender() {
    Optional<Appender> appender = configuration.getAppenders().values().stream()
        .filter(a -> a.getName().equalsIgnoreCase(log4J2Appender))
        .findFirst();
    if (appender.isPresent()) {
        String layoutFormat = appender.get().getLayout().getContentFormat().get("format");
        Matcher matcher = Pattern.compile("(\\{(.*?)})").matcher(layoutFormat);
        ArrayList<String> tokens = new ArrayList<>();
        while (matcher.find()) {
            // group 2 is the text inside the braces
            tokens.add(matcher.group(2));
        }
        if (!tokens.isEmpty()) {
            return processMatches(tokens);
        }
    }
    return new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
}
// An appender whose layout carries an empty {} TimeZone token must still yield the expected pattern.
@Test public void testGetLog4j2UsingAppender9() { // Testing adding empty {} for TimeZone KettleLogLayout.log4J2Appender = "pdi-execution-appender-test-9"; Assert.assertEquals( "MMM/dd,yyyy HH:mm:ss", KettleLogLayout.getLog4j2Appender().toPattern() ); }
/**
 * Terminates {@code line} with ";\n", left-padded with {@code indent} spaces.
 * A non-positive indent adds no padding.
 */
protected String completeLine(String line, int indent) {
    final String padding = indent > 0 ? " ".repeat(indent) : "";
    return padding + line + ";\n";
}
// Positive indent pads with that many spaces; zero indent adds none; both append ";\n".
@Test public void testCompleteLine() { MessageCodeGen messageCodeGen = new MessageCodeGen(); assertEquals(messageCodeGen.completeLine("add indentation", 3), "   add indentation;\n"); assertEquals(messageCodeGen.completeLine("add indentation", 0), "add indentation;\n"); }
/**
 * Queries the sentinel for the replicas of {@code master} and converts the raw
 * per-replica property maps into RedisServer instances.
 */
@Override
public Collection<RedisServer> slaves(NamedNode master) {
    final List<Map<String, String>> rawReplicas =
        connection.sync(StringCodec.INSTANCE, RedisCommands.SENTINEL_SLAVES, master.getName());
    return toRedisServersList(rawReplicas);
}
// Integration test: the first master reported by the sentinel is expected to have two replicas.
@Test public void testSlaves() { Collection<RedisServer> masters = connection.masters(); Collection<RedisServer> slaves = connection.slaves(masters.iterator().next()); assertThat(slaves).hasSize(2); }
@Override public ParSeqBasedCompletionStage<T> whenComplete(BiConsumer<? super T, ? super Throwable> action) { return nextStageByComposingTask(_task.transform("whenComplete", prevTaskResult -> { if (prevTaskResult.isFailed()) { try { action.accept(null, prevTaskResult.getError()); } catch (Throwable e) { // no ops } return Failure.of(prevTaskResult.getError()); } else { try { action.accept(prevTaskResult.get(), prevTaskResult.getError()); } catch (Throwable e) { return Failure.of(e); } return Success.of(prevTaskResult.get()); } })); }
// On a successful stage the consumer must receive (value, null) exactly once.
@Test public void testWhenComplete() throws Exception { BiConsumer<String, Throwable> biConsumer = mock(BiConsumer.class); CompletionStage<String> stage = createTestStage(TESTVALUE1).whenComplete(biConsumer); finish(stage); verify(biConsumer).accept(TESTVALUE1, null); }
/**
 * Parses a SEPARATOR-delimited string into a sorted set of paths.
 * Blank entries are dropped; a null input is rejected.
 *
 * @throws ParameterException if {@code value} is null
 */
@Override
public SortedSet<Path> convertFrom(String value) {
    if (value == null) {
        throw new ParameterException("Path list must not be null.");
    }
    final SortedSet<Path> paths = sortedPathSupplier().get();
    for (String token : value.split(SEPARATOR)) {
        final String trimmed = StringUtils.trimToNull(token);
        if (trimmed != null) {
            paths.add(Paths.get(trimmed));
        }
    }
    return paths;
}
// An empty string splits to a single blank token which is trimmed away, yielding an empty set.
@Test public void testConvertFromEmpty() { assertEquals(new TreeSet<>(), converter.convertFrom("")); }
/**
 * Sets {@code key} to {@code value}, attributing the change to the RUNTIME source.
 * Convenience delegate to {@link #set(PropertyKey, Object, Source)}.
 */
public void set(PropertyKey key, Object value) { set(key, value, Source.RUNTIME); }
// Assigning a boolean to a double-typed property key must be rejected with IllegalArgumentException.
@Test public void getMalformedDoubleThrowsException() { mThrown.expect(IllegalArgumentException.class); mConfiguration.set(PropertyKey.USER_CLIENT_CACHE_EVICTOR_LFU_LOGBASE, true); }
/**
 * @return the cached available physical memory size, refreshing the cached
 *         system info first if it is stale
 */
@Override public long getAvailablePhysicalMemorySize() { refreshIfNeeded(); return memAvailable; }
// Robustness: a null or empty (unterminated) system-info string must not crash the refresh path.
@Test(timeout = 10000) public void errorInGetSystemInfo() { SysInfoWindowsMock tester = new SysInfoWindowsMock(); // info str derived from windows shell command is null tester.setSysinfoString(null); // call a method to refresh values tester.getAvailablePhysicalMemorySize(); // info str derived from windows shell command with no \r\n termination tester.setSysinfoString(""); // call a method to refresh values tester.getAvailablePhysicalMemorySize(); }
/**
 * Decodes a MySQL binlog TIMESTAMP2 value: a 4-byte seconds field, plus
 * fractional seconds when the column meta declares any precision. A zero
 * seconds value maps to the dedicated all-zero datetime sentinel.
 */
@Override
public Serializable read(final MySQLBinlogColumnDef columnDef, final MySQLPacketPayload payload) {
    final int seconds = payload.getByteBuf().readInt();
    if (0 == seconds) {
        return MySQLTimeValueUtils.DATETIME_OF_ZERO;
    }
    final Timestamp result = new Timestamp(seconds * 1000L);
    if (columnDef.getColumnMeta() > 0) {
        result.setNanos(new MySQLFractionalSeconds(columnDef.getColumnMeta(), payload).getNanos());
    } else {
        result.setNanos(0);
    }
    return result;
}
// Reads a timestamp with 1-digit fractional precision. NOTE(review): the variable named
// currentMilliseconds actually holds currentTimeMillis % 100 (last two digits), matching the
// expected value's `* 10L` scaling — confirm against MySQLFractionalSeconds semantics.
@Test void assertReadWithFraction() { columnDef.setColumnMeta(1); long currentTimeMillis = 1678795614082L; int currentSeconds = Long.valueOf(currentTimeMillis / 1000L).intValue(); int currentMilliseconds = Long.valueOf(currentTimeMillis % 100L).intValue(); when(payload.readInt1()).thenReturn(currentMilliseconds); when(byteBuf.readInt()).thenReturn(currentSeconds); assertThat("currentTimeMillis:" + currentTimeMillis, new MySQLTimestamp2BinlogProtocolValue().read(columnDef, payload), is(new Timestamp(currentSeconds * 1000L + currentMilliseconds * 10L))); }
private ContentType getContentType(Exchange exchange) throws ParseException { String contentTypeStr = ExchangeHelper.getContentType(exchange); if (contentTypeStr == null) { contentTypeStr = DEFAULT_CONTENT_TYPE; } ContentType contentType = new ContentType(contentTypeStr); String contentEncoding = ExchangeHelper.getContentEncoding(exchange); // add a charset parameter for text subtypes if (contentEncoding != null && contentType.match("text/*")) { contentType.setParameter("charset", MimeUtility.mimeCharset(contentEncoding)); } return contentType; }
// Round-trips a message with a text attachment and custom attachment headers; body, content
// type/encoding, attachment payload and headers must all survive unchanged.
@Test public void roundtripWithTextAttachments() throws IOException { String attContentType = "text/plain"; String attText = "Attachment Text"; String attFileName = "Attachment File Name"; in.setBody("Body text"); in.setHeader(Exchange.CONTENT_TYPE, "text/plain;charset=iso8859-1;other-parameter=true"); in.setHeader(Exchange.CONTENT_ENCODING, "UTF8"); Map<String, String> headers = new HashMap<>(); headers.put("Content-Description", "Sample Attachment Data"); headers.put("X-AdditionalData", "additional data"); addAttachment(attContentType, attText, attFileName, headers); Exchange result = template.send("direct:roundtrip", exchange); AttachmentMessage out = result.getMessage(AttachmentMessage.class); assertEquals("Body text", out.getBody(String.class)); assertTrue(out.getHeader(Exchange.CONTENT_TYPE, String.class).startsWith("text/plain")); assertEquals("UTF8", out.getHeader(Exchange.CONTENT_ENCODING)); assertTrue(out.hasAttachments()); assertEquals(1, out.getAttachmentNames().size()); assertTrue(out.getAttachmentNames().contains(attFileName)); Attachment att = out.getAttachmentObject(attFileName); DataHandler dh = att.getDataHandler(); assertNotNull(dh); assertEquals(attContentType, dh.getContentType()); InputStream is = dh.getInputStream(); ByteArrayOutputStream os = new ByteArrayOutputStream(); IOHelper.copyAndCloseInput(is, os); assertEquals(attText, new String(os.toByteArray())); assertEquals("Sample Attachment Data", att.getHeader("content-description")); assertEquals("additional data", att.getHeader("X-AdditionalData")); }
/**
 * Marks a NewPartition row for deletion by writing a SHOULD_DELETE cell for
 * every continuation token's partition, then applying the mutation with a hard
 * timeout.
 */
public void markNewPartitionForDeletion(NewPartition newPartition) {
    final ByteString rowKey = convertPartitionToNewPartitionRowKey(newPartition.getPartition());
    final RowMutation mutation = RowMutation.create(tableId, rowKey);
    newPartition.getChangeStreamContinuationTokens().forEach(token ->
        mutation.setCell(
            MetadataTableAdminDao.CF_SHOULD_DELETE,
            ByteStringRange.serializeToByteString(token.getPartition()),
            1));
    mutateRowWithHardTimeout(mutation);
}
// A written new partition is visible to readNewPartitions until marked for deletion, after which it is hidden.
@Test public void testMarkNewPartitionForDeletion() throws InvalidProtocolBufferException { ByteStringRange childPartition = ByteStringRange.create("A", "C"); ChangeStreamContinuationToken token = ChangeStreamContinuationToken.create(ByteStringRange.create("B", "C"), "BC"); NewPartition newPartition = new NewPartition(childPartition, Collections.singletonList(token), Instant.now()); metadataTableDao.writeNewPartition(newPartition); List<NewPartition> newPartitions = metadataTableDao.readNewPartitions(); assertEquals(1, newPartitions.size()); assertEquals(childPartition, newPartitions.get(0).getPartition()); // Once it's marked for deletion, it is not visible to readNewPartitions. metadataTableDao.markNewPartitionForDeletion(newPartitions.get(0)); assertEquals(0, metadataTableDao.readNewPartitions().size()); }
@Udf(description = "Returns the sine of an INT value")
public Double sin(
    @UdfParameter(
        value = "value",
        description = "The value in radians to get the sine of."
    )
    final Integer value
) {
    // Delegate to the DOUBLE overload, propagating null.
    if (value == null) {
        return sin((Double) null);
    }
    return sin(value.doubleValue());
}
// Sine of values below -2π across the double, int and long overloads, compared within 1e-15.
@Test public void shouldHandleLessThanNegative2Pi() { assertThat(udf.sin(-9.1), closeTo(-0.3190983623493521, 0.000000000000001)); assertThat(udf.sin(-6.3), closeTo(-0.016813900484349713, 0.000000000000001)); assertThat(udf.sin(-7), closeTo(-0.6569865987187891, 0.000000000000001)); assertThat(udf.sin(-7L), closeTo(-0.6569865987187891, 0.000000000000001)); }
@Override public boolean checkIndexExists( Database database, String schemaName, String tableName, String[] idxFields ) throws KettleDatabaseException { String tablename = database.getDatabaseMeta().getQuotedSchemaTableCombination( schemaName, tableName ); boolean[] exists = new boolean[ idxFields.length]; for ( int i = 0; i < exists.length; i++ ) { exists[i] = false; } try { // // Get the info from the data dictionary... // String sql = "SELECT * FROM USER_IND_COLUMNS WHERE TABLE_NAME = '" + tableName + "'"; ResultSet res = null; try { res = database.openQuery( sql ); if ( res != null ) { Object[] row = database.getRow( res ); while ( row != null ) { String column = database.getReturnRowMeta().getString( row, "COLUMN_NAME", "" ); int idx = Const.indexOfString( column, idxFields ); if ( idx >= 0 ) { exists[idx] = true; } row = database.getRow( res ); } } else { return false; } } finally { if ( res != null ) { database.closeQuery( res ); } } // See if all the fields are indexed... boolean all = true; for ( int i = 0; i < exists.length && all; i++ ) { if ( !exists[i] ) { all = false; } } return all; } catch ( Exception e ) { throw new KettleDatabaseException( "Unable to determine if indexes exists on table [" + tablename + "]", e ); } }
// Mocks two dictionary rows (ROW1COL2, ROW2COL2); all-present field sets pass, any missing
// field fails. NOTE(review): rowCnt is shared mutable state across the three calls — the
// Answer returns null after the second row, so later calls see an exhausted result set.
@Test public void testCheckIndexExists() throws Exception { String expectedSQL = "SELECT * FROM USER_IND_COLUMNS WHERE TABLE_NAME = 'FOO'"; Database db = Mockito.mock( Database.class ); RowMetaInterface rm = Mockito.mock( RowMetaInterface.class ); ResultSet rs = Mockito.mock( ResultSet.class ); DatabaseMeta dm = Mockito.mock( DatabaseMeta.class ); Mockito.when( dm.getQuotedSchemaTableCombination( "", "FOO" ) ).thenReturn( "FOO" ); Mockito.when( rs.next() ).thenReturn( rowCnt < 2 ); Mockito.when( db.openQuery( expectedSQL ) ).thenReturn( rs ); Mockito.when( db.getReturnRowMeta() ).thenReturn( rm ); Mockito.when( rm.getString( row1, "COLUMN_NAME", "" ) ).thenReturn( "ROW1COL2" ); Mockito.when( rm.getString( row2, "COLUMN_NAME", "" ) ).thenReturn( "ROW2COL2" ); Mockito.when( db.getRow( rs ) ).thenAnswer( new Answer<Object[]>() { @Override public Object[] answer( InvocationOnMock invocation ) throws Throwable { rowCnt++; if ( rowCnt == 1 ) { return row1; } else if ( rowCnt == 2 ) { return row2; } else { return null; } } } ); Mockito.when( db.getDatabaseMeta() ).thenReturn( dm ); assertTrue( nativeMeta.checkIndexExists( db, "", "FOO", new String[] { "ROW1COL2", "ROW2COL2" } ) ); assertFalse( nativeMeta.checkIndexExists( db, "", "FOO", new String[] { "ROW2COL2", "NOTTHERE" } ) ); assertFalse( nativeMeta.checkIndexExists( db, "", "FOO", new String[] { "NOTTHERE", "ROW1COL2" } ) ); }
/**
 * Looks up the PostgreSQL type OID for a column type name.
 *
 * @throws IllegalArgumentException if the type name is unknown
 */
public static int getTypeOid(final String columnTypeName) {
    final Integer oid = COLUMN_TYPE_NAME_OID_MAP.get(columnTypeName);
    Preconditions.checkArgument(oid != null, "Cannot find PostgreSQL type oid for columnTypeName '%s'", columnTypeName);
    return oid;
}
// An unknown type name must be rejected with IllegalArgumentException.
@Test void assertGetTypeOidFailed() { assertThrows(IllegalArgumentException.class, () -> PostgreSQLArrayColumnType.getTypeOid("not_exist_type")); }
/**
 * Lists {@code directory}, delegating to the chunked overload with the chunk
 * size taken from the "eue.listing.chunksize" host preference.
 */
@Override public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException { return this.list(directory, listener, new HostPreferences(session.getHost()).getInteger("eue.listing.chunksize")); }
// Listing a randomly-named, nonexistent directory must raise NotfoundException.
@Test(expected = NotfoundException.class) public void testNotFound() throws Exception { final EueResourceIdProvider fileid = new EueResourceIdProvider(session); new EueListService(session, fileid).list(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(directory)), new DisabledListProgressListener()); }
/**
 * Finishes the span for a received RPC client response.
 * Thin delegate to {@code handleFinish}; null handling (and its
 * "response == null" message) is the delegate's responsibility.
 */
public void handleReceive(RpcClientResponse response, Span span) { handleFinish(response, span); }
// A null response must fail fast with an NPE whose message names the argument.
@Test void handleReceive_responseRequired() { brave.Span span = mock(brave.Span.class); assertThatThrownBy(() -> handler.handleReceive(null, span)) .isInstanceOf(NullPointerException.class) .hasMessage("response == null"); }
// Populates the HttpClientContext with BASIC credentials and a pre-warmed auth cache.
// Configuration fields are read under the read lock (password decrypted after variable
// substitution); port defaults to 80 when unparseable. When a proxy is configured and the
// host is "localhost", the host is rewritten to "127.0.0.1" — presumably to bypass the
// proxy for loopback traffic; confirm before changing. The scheme (HTTP/HTTPS) follows
// isSslMode(). NOTE(review): restyling deferred — the lock scope and the host rewrite
// ordering are easy to break.
private void addCredentials( HttpClientContext context ) { String userName; String password; String host; int port; String proxyHost; lock.readLock().lock(); try { host = environmentSubstitute( hostname ); port = Const.toInt( environmentSubstitute( this.port ), 80 ); userName = environmentSubstitute( username ); password = Encr.decryptPasswordOptionallyEncrypted( environmentSubstitute( this.password ) ); proxyHost = environmentSubstitute( proxyHostname ); } finally { lock.readLock().unlock(); } CredentialsProvider provider = new BasicCredentialsProvider(); UsernamePasswordCredentials credentials = new UsernamePasswordCredentials( userName, password ); if ( !Utils.isEmpty( proxyHost ) && host.equals( "localhost" ) ) { host = "127.0.0.1"; } provider.setCredentials( new AuthScope( host, port ), credentials ); context.setCredentialsProvider( provider ); // Generate BASIC scheme object and add it to the local auth cache HttpHost target = new HttpHost( host, port, isSslMode() ? HTTPS : HTTP ); AuthCache authCache = new BasicAuthCache(); BasicScheme basicAuth = new BasicScheme(); authCache.put( target, basicAuth ); context.setAuthCache( authCache ); }
// The auth context must expose the configured credentials, and reflect later username/password changes.
@Test public void testAddCredentials() throws IOException, ClassNotFoundException { String testUser = "test_username"; slaveServer.setUsername( testUser ); String testPassword = "test_password"; slaveServer.setPassword( testPassword ); String host = "somehost"; slaveServer.setHostname( host ); int port = 1000; slaveServer.setPort( "" + port ); HttpClientContext auth = slaveServer.getAuthContext(); Credentials cred = auth.getCredentialsProvider().getCredentials( new AuthScope( host, port ) ); assertEquals( testUser, cred.getUserPrincipal().getName() ); assertEquals( testPassword, cred.getPassword() ); String user2 = "user2"; slaveServer.setUsername( user2 ); slaveServer.setPassword( "pass2" ); auth = slaveServer.getAuthContext(); cred = auth.getCredentialsProvider().getCredentials( new AuthScope( host, port ) ); assertEquals( user2, cred.getUserPrincipal().getName() ); }
/**
 * Asserts the subject map is empty, expressed as "contains exactly nothing".
 * Delegates to {@code containsExactlyEntriesIn} with an empty map.
 */
@CanIgnoreReturnValue public final Ordered containsExactly() { return containsExactlyEntriesIn(ImmutableMap.of()); }
// Failure message structure when one key has a wrong value and another key is missing entirely.
@Test public void containsExactlyMissingKeyAndWrongValue() { ImmutableMap<String, Integer> actual = ImmutableMap.of("jan", 1, "march", 3); expectFailureWhenTestingThat(actual).containsExactly("jan", 1, "march", 33, "feb", 2); assertFailureKeys( "keys with wrong values", "for key", "expected value", "but got value", "missing keys", "for key", "expected value", "---", "expected", "but was"); assertFailureValueIndexed("for key", 0, "march"); assertFailureValueIndexed("expected value", 0, "33"); assertFailureValue("but got value", "3"); assertFailureValueIndexed("for key", 1, "feb"); assertFailureValueIndexed("expected value", 1, "2"); }
/**
 * Adds a handler at the head of the pipeline and returns {@code this} so the
 * covariant return type keeps the fluent API on ClientPoolHandler.
 */
@Override public ClientPoolHandler addFirst(String name, ChannelHandler handler) { super.addFirst(name, handler); return this; }
// A freshly constructed handler is empty; addFirst must make it non-empty.
@Test public void testAddFirst() { ClientPoolHandler handler = new ClientPoolHandler(); Assert.assertTrue(handler.isEmpty()); handler.addFirst(test, new TestHandler()); Assert.assertFalse(handler.isEmpty()); }
/**
 * Resolves the client-auth setting, honoring the deprecated boolean
 * SSL_CLIENT_AUTH_CONFIG only when the replacement string config was not
 * supplied. Emits a deprecation warning in either case.
 */
public ClientAuth getClientAuth() {
    String clientAuth = getString(SSL_CLIENT_AUTHENTICATION_CONFIG);
    if (originals().containsKey(SSL_CLIENT_AUTH_CONFIG)) {
        if (originals().containsKey(SSL_CLIENT_AUTHENTICATION_CONFIG)) {
            // Both supplied: the new configuration wins.
            log.warn(
                "The {} configuration is deprecated. Since a value has been supplied for the {} "
                    + "configuration, that will be used instead",
                SSL_CLIENT_AUTH_CONFIG,
                SSL_CLIENT_AUTHENTICATION_CONFIG
            );
        } else {
            log.warn(
                "The configuration {} is deprecated and should be replaced with {}",
                SSL_CLIENT_AUTH_CONFIG,
                SSL_CLIENT_AUTHENTICATION_CONFIG
            );
            clientAuth = getBoolean(SSL_CLIENT_AUTH_CONFIG)
                ? SSL_CLIENT_AUTHENTICATION_REQUIRED
                : SSL_CLIENT_AUTHENTICATION_NONE;
        }
    }
    return getClientAuth(clientAuth);
}
// With only the deprecated boolean key set to true, the resolved mode must be REQUIRED.
@Test public void shouldUseClientAuthIfNoClientAuthenticationProvided() { // Given: final KsqlRestConfig config = new KsqlRestConfig(ImmutableMap.<String, Object>builder() .put(KsqlRestConfig.SSL_CLIENT_AUTH_CONFIG, true) .build()); // When: final ClientAuth clientAuth = config.getClientAuth(); // Then: assertThat(clientAuth, is(ClientAuth.REQUIRED)); }
/**
 * Builds the registry node path for one version of a database rule item:
 * {@code <rule-node>/<key>/versions/<version>}.
 */
public static String getDatabaseRuleVersionNode(final String databaseName, final String ruleName, final String key, final String version) {
    return getDatabaseRuleNode(databaseName, ruleName) + "/" + key + "/" + VERSIONS + "/" + version;
}
// Path composition: /metadata/<db>/rules/<rule>/<key>/versions/<version>.
@Test void assertGetDatabaseRuleVersionNode() { assertThat(DatabaseRuleMetaDataNode.getDatabaseRuleVersionNode("foo_db", "foo_rule", "foo_tables", "1"), is("/metadata/foo_db/rules/foo_rule/foo_tables/versions/1")); }
/**
 * Asserts the multiset under test contains {@code element} exactly
 * {@code expectedCount} times.
 *
 * @throws IllegalArgumentException if {@code expectedCount} is negative
 */
public final void hasCount(@Nullable Object element, int expectedCount) {
    checkArgument(expectedCount >= 0, "expectedCount(%s) must be >= 0", expectedCount);
    check("count(%s)", element)
        .that(checkNotNull(actual).count(element))
        .isEqualTo(expectedCount);
}
// A wrong expected count fails with a message naming the multiset.count(element) expression.
@Test public void hasCountFail() { ImmutableMultiset<String> multiset = ImmutableMultiset.of("kurt", "kurt", "kluever"); expectFailureWhenTestingThat(multiset).hasCount("kurt", 3); assertFailureValue("value of", "multiset.count(kurt)"); }
/**
 * Verifies this version is at least {@code requiredMajor.requiredMinor}.
 *
 * @throws UnsupportedOperationException naming both versions when the
 *         configured version is too old
 */
public void requireAtLeast(final int requiredMajor, final int requiredMinor) {
    if (this.compareTo(new Version(requiredMajor, requiredMinor)) >= 0) {
        return;
    }
    throw new UnsupportedOperationException(
        "This operation requires API version at least " + requiredMajor + "." + requiredMinor
            + ", currently configured for " + major + "." + minor);
}
// Equal and lower requirements must pass silently for both configured versions.
@Test public void shouldObserveApiLimits() { V34_0.requireAtLeast(34, 0); V34_0.requireAtLeast(33, 9); V35_0.requireAtLeast(34, 0); }
/**
 * Routes a reservation update to the home sub-cluster that owns the reservation.
 *
 * <p>Flow: validate the request payload and reservation-id format, look up the home
 * sub-cluster for the reservation, forward the update through that sub-cluster's
 * interceptor, and audit-log/record metrics on every success and failure path.
 *
 * @param resContext update request carrying the reservation id and definition
 * @param hsr originating servlet request (cloned before forwarding)
 * @return the sub-cluster's response on success
 * @throws YarnRuntimeException if the forwarded call yields no response
 */
@Override
public Response updateReservation(ReservationUpdateRequestInfo resContext,
    HttpServletRequest hsr) throws AuthorizationException, IOException, InterruptedException {

  // parameter verification: all three pieces of the request must be present.
  if (resContext == null || resContext.getReservationId() == null
      || resContext.getReservationDefinition() == null) {
    routerMetrics.incrUpdateReservationFailedRetrieved();
    String errMsg = "Missing updateReservation resContext or reservationId " +
        "or reservation definition.";
    RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_RESERVATION, UNKNOWN,
        TARGET_WEB_SERVICE, errMsg);
    return Response.status(Status.BAD_REQUEST).entity(errMsg).build();
  }

  // get reservationId
  String reservationId = resContext.getReservationId();

  // Check that the reservationId format is accurate; rethrow after auditing.
  try {
    RouterServerUtil.validateReservationId(reservationId);
  } catch (IllegalArgumentException e) {
    routerMetrics.incrUpdateReservationFailedRetrieved();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_RESERVATION, UNKNOWN,
        TARGET_WEB_SERVICE, e.getLocalizedMessage());
    throw e;
  }

  try {
    // Resolve the sub-cluster that homes this reservation and forward the update there.
    SubClusterInfo subClusterInfo = getHomeSubClusterInfoByReservationId(reservationId);
    DefaultRequestInterceptorREST interceptor = getOrCreateInterceptorForSubCluster(
        subClusterInfo.getSubClusterId(), subClusterInfo.getRMWebServiceAddress());
    HttpServletRequest hsrCopy = clone(hsr);
    Response response = interceptor.updateReservation(resContext, hsrCopy);
    if (response != null) {
      RouterAuditLogger.logSuccess(getUser().getShortUserName(), UPDATE_RESERVATION,
          TARGET_WEB_SERVICE);
      return response;
    }
    // A null response falls through to the terminal failure below.
  } catch (Exception e) {
    routerMetrics.incrUpdateReservationFailedRetrieved();
    RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_RESERVATION, UNKNOWN,
        TARGET_WEB_SERVICE, e.getLocalizedMessage());
    RouterServerUtil.logAndThrowRunTimeException("updateReservation Failed.", e);
  }

  // throw an exception: reached only when the interceptor returned null.
  routerMetrics.incrUpdateReservationFailedRetrieved();
  RouterAuditLogger.logFailure(getUser().getShortUserName(), UPDATE_RESERVATION, UNKNOWN,
      TARGET_WEB_SERVICE, "updateReservation Failed, reservationId = " + reservationId);
  throw new YarnRuntimeException("updateReservation Failed, reservationId = " + reservationId);
}
/**
 * Submits a reservation, updates it to 6 containers of 2048 MB / 2 vCores, then lists it
 * back and verifies the stored definition reflects the update.
 *
 * <p>Fix: removed {@code Assert.assertNotNull(entity instanceof ReservationListInfo)} —
 * an {@code instanceof} expression autoboxes to a non-null Boolean, so that assertion
 * could never fail; the following {@code assertTrue} is the real check.
 */
@Test
public void testUpdateReservation() throws Exception {
  // submit reservation
  ReservationId reservationId = ReservationId.newInstance(Time.now(), 3);
  Response response = submitReservation(reservationId);
  Assert.assertNotNull(response);
  Assert.assertEquals(Status.ACCEPTED.getStatusCode(), response.getStatus());

  // update reservation
  ReservationSubmissionRequest resSubRequest =
      getReservationSubmissionRequest(reservationId, 6, 2048, 2);
  ReservationDefinition reservationDefinition = resSubRequest.getReservationDefinition();
  ReservationDefinitionInfo reservationDefinitionInfo =
      new ReservationDefinitionInfo(reservationDefinition);
  ReservationUpdateRequestInfo updateRequestInfo = new ReservationUpdateRequestInfo();
  updateRequestInfo.setReservationId(reservationId.toString());
  updateRequestInfo.setReservationDefinition(reservationDefinitionInfo);
  Response updateReservationResp = interceptor.updateReservation(updateRequestInfo, null);
  Assert.assertNotNull(updateReservationResp);
  Assert.assertEquals(Status.OK.getStatusCode(), updateReservationResp.getStatus());

  // list the reservation back and inspect the stored definition
  String applyReservationId = reservationId.toString();
  Response reservationResponse = interceptor.listReservation(
      QUEUE_DEDICATED_FULL, applyReservationId, -1, -1, false, null);
  Assert.assertNotNull(reservationResponse);

  Object entity = reservationResponse.getEntity();
  Assert.assertNotNull(entity);
  Assert.assertTrue(entity instanceof ReservationListInfo);

  ReservationListInfo listInfo = (ReservationListInfo) entity;
  List<ReservationInfo> reservationInfos = listInfo.getReservations();
  Assert.assertNotNull(reservationInfos);
  Assert.assertEquals(1, reservationInfos.size());

  ReservationInfo reservationInfo = reservationInfos.get(0);
  Assert.assertNotNull(reservationInfo);
  Assert.assertEquals(reservationInfo.getReservationId(), applyReservationId);

  ReservationDefinitionInfo resDefinitionInfo = reservationInfo.getReservationDefinition();
  Assert.assertNotNull(resDefinitionInfo);

  ReservationRequestsInfo reservationRequestsInfo = resDefinitionInfo.getReservationRequests();
  Assert.assertNotNull(reservationRequestsInfo);

  ArrayList<ReservationRequestInfo> reservationRequestInfoList =
      reservationRequestsInfo.getReservationRequest();
  Assert.assertNotNull(reservationRequestInfoList);
  Assert.assertEquals(1, reservationRequestInfoList.size());

  ReservationRequestInfo reservationRequestInfo = reservationRequestInfoList.get(0);
  Assert.assertNotNull(reservationRequestInfo);
  // The update raised the container count to 6.
  Assert.assertEquals(6, reservationRequestInfo.getNumContainers());

  ResourceInfo resourceInfo = reservationRequestInfo.getCapability();
  Assert.assertNotNull(resourceInfo);

  int vCore = resourceInfo.getvCores();
  long memory = resourceInfo.getMemorySize();
  Assert.assertEquals(2, vCore);
  Assert.assertEquals(2048, memory);
}
/**
 * Returns the message with any {} placeholders substituted, computing and caching
 * the result on first call.
 */
public String getFormattedMessage() {
    if (formattedMessage == null) {
        // Only format when arguments were supplied; otherwise the raw message is final.
        formattedMessage = (argumentArray == null)
                ? message
                : MessageFormatter.arrayFormat(message, argumentArray).getMessage();
    }
    return formattedMessage;
}
@Test
public void testNoFormattingWithoutArgs() {
    // With a null argument array the message must be returned verbatim,
    // and the cached field must stay null until getFormattedMessage() is called.
    String message = "testNoFormatting";
    Throwable throwable = null;
    Object[] argArray = null;
    LoggingEvent event = new LoggingEvent("", logger, Level.INFO, message, throwable, argArray);
    assertNull(event.formattedMessage);
    assertEquals(message, event.getFormattedMessage());
}
public static URI getURIFromRequestUrl(String source) { // try without encoding the URI try { return new URI(source); } catch (URISyntaxException ignore) { System.out.println("Source is not encoded, encoding now"); } try { URL url = new URL(source); return new URI(url.getProtocol(), url.getUserInfo(), url.getHost(), url.getPort(), url.getPath(), url.getQuery(), url.getRef()); } catch (MalformedURLException | URISyntaxException e) { throw new IllegalArgumentException(e); } }
@Test
public void testGetURIFromRequestUrlShouldNotEncode() {
    // An already-encoded URL must round-trip unchanged (no double encoding of %20).
    final String testUrl = "http://example.com/this%20is%20encoded";
    assertEquals(testUrl, UriUtil.getURIFromRequestUrl(testUrl).toString());
}
@SuppressWarnings("unchecked") public static <K, V> Map<K, List<V>> aggByKeyToList(String key, List<?> list) { Map<K, List<V>> map = new HashMap<>(); if (CollectionUtils.isEmpty(list)) {// 防止外面传入空list return map; } try { Class<?> clazz = list.get(0).getClass(); Field field = deepFindField(clazz, key); if (field == null) { throw new IllegalArgumentException("Could not find the key"); } field.setAccessible(true); for (Object o : list) { K k = (K) field.get(o); map.computeIfAbsent(k, k1 -> new ArrayList<>()); map.get(k).add((V) o); } } catch (Exception e) { throw new BeanUtilsException(e); } return map; }
@Test(expected = BeanUtilsException.class)
public void testAggByKeyToListNotEmptyThrowsEx() {
    // A non-empty list with a field name that does not exist on the element class
    // must surface as a BeanUtilsException (wrapping the IllegalArgumentException).
    someAnotherList.add(new KeyClass());
    assertNotNull(BeanUtils.aggByKeyToList("wrongKey", someAnotherList));
}
/**
 * Processes one stream attempt for a Bigtable change-stream partition (RCSP).
 *
 * <p>Phases, in order: (1) acquire/verify the partition lock on the metadata table,
 * (2) handle a pending CloseStream (end-of-time OK, unexpected terminal, or
 * split/merge into child partitions), (3) persist the watermark and stream change
 * records, delegating each record to {@code changeStreamAction}.
 *
 * @return {@code stop()} when the partition is done (lock lost, end time reached,
 *         or split/merged away); otherwise a continuation from the record loop or
 *         {@code resume()} when the stream ends normally.
 */
public ProcessContinuation run(
    PartitionRecord partitionRecord,
    RestrictionTracker<StreamProgress, StreamProgress> tracker,
    OutputReceiver<KV<ByteString, ChangeStreamRecord>> receiver,
    ManualWatermarkEstimator<Instant> watermarkEstimator)
    throws IOException {
  BytesThroughputEstimator<KV<ByteString, ChangeStreamRecord>> throughputEstimator =
      new BytesThroughputEstimator<>(sizeEstimator, Instant.now());
  // Lock the partition
  if (tracker.currentRestriction().isEmpty()) {
    boolean lockedPartition = metadataTableDao.lockAndRecordPartition(partitionRecord);
    // Clean up NewPartition on the first run regardless of locking result. If locking fails it
    // means this partition is being streamed, then cleaning up NewPartitions avoids lingering
    // NewPartitions.
    for (NewPartition newPartition : partitionRecord.getParentPartitions()) {
      metadataTableDao.deleteNewPartition(newPartition);
    }
    if (!lockedPartition) {
      LOG.info(
          "RCSP {} : Could not acquire lock with uid: {}, because this is a "
              + "duplicate and another worker is working on this partition already.",
          formatByteStringRange(partitionRecord.getPartition()),
          partitionRecord.getUuid());
      StreamProgress streamProgress = new StreamProgress();
      streamProgress.setFailToLock(true);
      metrics.decPartitionStreamCount();
      tracker.tryClaim(streamProgress);
      return ProcessContinuation.stop();
    }
  } else if (tracker.currentRestriction().getCloseStream() == null
      && !metadataTableDao.doHoldLock(
          partitionRecord.getPartition(), partitionRecord.getUuid())) {
    // We only verify the lock if we are not holding CloseStream because if this is a retry of
    // CloseStream we might have already cleaned up the lock in a previous attempt.
    // Failed correctness check on this worker holds the lock on this partition. This shouldn't
    // fail because there's a restriction tracker which means this worker has already acquired the
    // lock and once it has acquired the lock it shouldn't fail the lock check.
    LOG.warn(
        "RCSP {} : Subsequent run that doesn't hold the lock {}. This is not unexpected and "
            + "should probably be reviewed.",
        formatByteStringRange(partitionRecord.getPartition()),
        partitionRecord.getUuid());
    StreamProgress streamProgress = new StreamProgress();
    streamProgress.setFailToLock(true);
    metrics.decPartitionStreamCount();
    tracker.tryClaim(streamProgress);
    return ProcessContinuation.stop();
  }

  // Process CloseStream if it exists
  CloseStream closeStream = tracker.currentRestriction().getCloseStream();
  if (closeStream != null) {
    LOG.debug("RCSP: Processing CloseStream");
    metrics.decPartitionStreamCount();
    if (closeStream.getStatus().getCode() == Status.Code.OK) {
      // We need to update watermark here. We're terminating this stream because we have reached
      // endTime. Instant.now is greater or equal to endTime. The goal here is
      // DNP will need to know this stream has passed the endTime so DNP can eventually terminate.
      Instant terminatingWatermark = Instant.ofEpochMilli(Long.MAX_VALUE);
      Instant endTime = partitionRecord.getEndTime();
      if (endTime != null) {
        terminatingWatermark = endTime;
      }
      watermarkEstimator.setWatermark(terminatingWatermark);
      metadataTableDao.updateWatermark(
          partitionRecord.getPartition(), watermarkEstimator.currentWatermark(), null);
      LOG.info(
          "RCSP {}: Reached end time, terminating...",
          formatByteStringRange(partitionRecord.getPartition()));
      return ProcessContinuation.stop();
    }
    if (closeStream.getStatus().getCode() != Status.Code.OUT_OF_RANGE) {
      LOG.error(
          "RCSP {}: Reached unexpected terminal state: {}",
          formatByteStringRange(partitionRecord.getPartition()),
          closeStream.getStatus());
      return ProcessContinuation.stop();
    }
    // Release the lock only if the uuid matches. In normal operation this doesn't change
    // anything. However, it's possible for this RCSP to crash while processing CloseStream but
    // after the side effects of writing the new partitions to the metadata table. New partitions
    // can be created while this RCSP restarts from the previous checkpoint and processes the
    // CloseStream again. In certain race scenarios the child partitions may merge back to this
    // partition, but as a new RCSP. The new partition (same as this partition) would write the
    // exact same content to the metadata table but with a different uuid. We don't want to
    // accidentally delete the StreamPartition because it now belongs to the new RCSP.
    // If the uuid is the same (meaning this race scenario did not take place) we release the lock
    // and mark the StreamPartition to be deleted, so we can delete it after we have written the
    // NewPartitions.
    metadataTableDao.releaseStreamPartitionLockForDeletion(
        partitionRecord.getPartition(), partitionRecord.getUuid());
    // The partitions in the continuation tokens must cover the same key space as this partition.
    // If there's only 1 token, then the token's partition is equals to this partition.
    // If there are more than 1 tokens, then the tokens form a continuous row range equals to this
    // partition.
    List<ByteStringRange> childPartitions = new ArrayList<>();
    List<ByteStringRange> tokenPartitions = new ArrayList<>();
    // Check if NewPartitions field exists, if not we default to using just the
    // ChangeStreamContinuationTokens.
    boolean useNewPartitionsField =
        closeStream.getNewPartitions().size()
            == closeStream.getChangeStreamContinuationTokens().size();
    for (int i = 0; i < closeStream.getChangeStreamContinuationTokens().size(); i++) {
      ByteStringRange childPartition;
      if (useNewPartitionsField) {
        childPartition = closeStream.getNewPartitions().get(i);
      } else {
        childPartition = closeStream.getChangeStreamContinuationTokens().get(i).getPartition();
      }
      childPartitions.add(childPartition);
      ChangeStreamContinuationToken token =
          getTokenWithCorrectPartition(
              partitionRecord.getPartition(),
              closeStream.getChangeStreamContinuationTokens().get(i));
      tokenPartitions.add(token.getPartition());
      metadataTableDao.writeNewPartition(
          new NewPartition(
              childPartition, Collections.singletonList(token), watermarkEstimator.getState()));
    }
    LOG.info(
        "RCSP {}: Split/Merge into {}",
        formatByteStringRange(partitionRecord.getPartition()),
        partitionsToString(childPartitions));
    if (!coverSameKeySpace(tokenPartitions, partitionRecord.getPartition())) {
      LOG.warn(
          "RCSP {}: CloseStream has tokens {} that don't cover the entire keyspace",
          formatByteStringRange(partitionRecord.getPartition()),
          partitionsToString(tokenPartitions));
    }
    // Perform the real cleanup. This step is no op if the race mentioned above occurs (splits and
    // merges results back to this partition again) because when we register the "new" partition,
    // we unset the deletion bit.
    metadataTableDao.deleteStreamPartitionRow(partitionRecord.getPartition());
    return ProcessContinuation.stop();
  }

  // Update the metadata table with the watermark
  metadataTableDao.updateWatermark(
      partitionRecord.getPartition(),
      watermarkEstimator.getState(),
      tracker.currentRestriction().getCurrentToken());

  // Start to stream the partition.
  ServerStream<ChangeStreamRecord> stream = null;
  try {
    stream =
        changeStreamDao.readChangeStreamPartition(
            partitionRecord,
            tracker.currentRestriction(),
            partitionRecord.getEndTime(),
            heartbeatDuration);
    for (ChangeStreamRecord record : stream) {
      Optional<ProcessContinuation> result =
          changeStreamAction.run(
              partitionRecord, record, tracker, receiver, watermarkEstimator, throughputEstimator);
      // changeStreamAction will usually return Optional.empty() except for when a checkpoint
      // (either runner or pipeline initiated) is required.
      if (result.isPresent()) {
        return result.get();
      }
    }
  } catch (Exception e) {
    // NOTE(review): this catch only rethrows and is redundant; the finally block alone
    // would cancel the stream. Left in place to avoid touching control flow.
    throw e;
  } finally {
    if (stream != null) {
      stream.cancel();
    }
  }
  return ProcessContinuation.resume();
}
/** An OK-status CloseStream must terminate the RCSP without locking, claiming, or writes. */
@Test
public void testCloseStreamTerminateOKStatus() throws IOException {
  // Force lock fail because CloseStream should not depend on locking
  when(metadataTableDao.doHoldLock(partition, uuid)).thenReturn(false);
  CloseStream mockCloseStream = Mockito.mock(CloseStream.class);
  Status statusProto = Status.newBuilder().setCode(0).build();
  Mockito.when(mockCloseStream.getStatus())
      .thenReturn(com.google.cloud.bigtable.common.Status.fromProto(statusProto));
  when(restriction.getCloseStream()).thenReturn(mockCloseStream);
  when(restriction.isEmpty()).thenReturn(false);
  final DoFn.ProcessContinuation result =
      action.run(partitionRecord, tracker, receiver, watermarkEstimator);
  assertEquals(DoFn.ProcessContinuation.stop(), result);
  // Should terminate before reaching processing stream partition responses.
  verify(changeStreamAction, never()).run(any(), any(), any(), any(), any(), any());
  // Should not try claim any restriction when processing CloseStream
  verify(tracker, (never())).tryClaim(any());
  // Should decrement the metric on termination.
  verify(metrics).decPartitionStreamCount();
  // Should not try to write any new partition to the metadata table.
  verify(metadataTableDao, never()).writeNewPartition(any());
  verify(metadataTableDao, never()).releaseStreamPartitionLockForDeletion(any(), any());
  verify(metadataTableDao, never()).deleteStreamPartitionRow(any());
}
/**
 * Verifies connectivity to the TFS server by fetching the latest commit; any failure
 * (including an empty history) is wrapped into a RuntimeException carrying the
 * connection parameters.
 */
@Override
public final void checkConnection() {
    LOGGER.info("[TFS] Checking Connection: Server {}, Domain {}, User {}, Project Path {}", url, domain, userName, projectPath);
    try {
        // An empty history is treated as a connection/configuration failure too;
        // the IllegalStateException is caught below and wrapped like any other cause.
        if (latestInHistory().isEmpty()) {
            throw new IllegalStateException("There might be no commits on the project path, project path might be invalid or user may have insufficient permissions.");
        }
    } catch (Exception e) {
        throw new RuntimeException(
                String.format("Failed while checking connection using Url: %s, Project Path: %s, Username: %s, Domain: %s, Root Cause: %s",
                        url, projectPath, userName, domain, e.getMessage()),
                e);
    }
}
@Test
public void testCheckConnection() throws Exception {
    // Smoke test: passes as long as checkConnection() does not throw for the fixture command.
    tfsCommand.checkConnection();
}
/**
 * Finds a recycled physical partition by id by scanning the sub-partitions of every
 * recycled partition.
 *
 * <p>Fix: replaced the stream-collect-to-list-then-iterate pattern with plain nested
 * loops — no intermediate list, same traversal and result.
 *
 * @param physicalPartitionId id of the physical (sub) partition to find
 * @return the matching sub-partition, or {@code null} if none is recycled under that id
 */
public PhysicalPartition getPhysicalPartition(long physicalPartitionId) {
    for (RecyclePartitionInfo partitionInfo : idToPartition.values()) {
        for (PhysicalPartition subPartition : partitionInfo.getPartition().getSubPartitions()) {
            if (subPartition.getId() == physicalPartitionId) {
                return subPartition;
            }
        }
    }
    return null;
}
/** Recycles two range partitions and looks them up again from the recycle bin. */
@Test
public void testGetPhysicalPartition() throws Exception {
    CatalogRecycleBin bin = new CatalogRecycleBin();
    List<Column> columns = Lists.newArrayList(new Column("k1", ScalarType.createVarcharType(10)));
    Range<PartitionKey> range =
            Range.range(PartitionKey.createPartitionKey(Lists.newArrayList(new PartitionValue("1")), columns),
                    BoundType.CLOSED,
                    PartitionKey.createPartitionKey(Lists.newArrayList(new PartitionValue("3")), columns),
                    BoundType.CLOSED);
    DataProperty dataProperty = new DataProperty(TStorageMedium.HDD);
    Partition partition = new Partition(1L, "pt", new MaterializedIndex(), null);
    bin.recyclePartition(new RecycleRangePartitionInfo(11L, 22L, partition, range,
            dataProperty, (short) 1, false, null));

    Partition partition2 = new Partition(2L, "pt", new MaterializedIndex(), null);
    bin.recyclePartition(new RecycleRangePartitionInfo(11L, 22L, partition2, range,
            dataProperty, (short) 1, false, null));

    PhysicalPartition recycledPart = bin.getPhysicalPartition(1L);
    Assert.assertNotNull(recycledPart);
    // NOTE(review): the second lookup uses getPartition rather than getPhysicalPartition —
    // presumably intentional to cover both accessors; confirm.
    recycledPart = bin.getPartition(2L);
    Assert.assertEquals(2L, recycledPart.getId());
}
/**
 * Advances to the next row. Returns false (and clears the current value) once the
 * result set is closed or the underlying iterator is exhausted.
 */
@Override
public boolean next() {
    boolean hasMore = !closed && values.hasNext();
    // Clear the cursor when exhausted/closed so stale values are never exposed.
    currentValue = hasMore ? values.next() : null;
    return hasMore;
}
@Test
void assertNext() {
    // The fixture exposes exactly two rows: two successful advances, then exhaustion.
    assertTrue(actualResultSet.next());
    assertTrue(actualResultSet.next());
    assertFalse(actualResultSet.next());
}
/**
 * Returns a copy of {@code in} with {@code add} added in place by the sibling
 * {@code add(byte[], int)} helper (per the accompanying test, the carry propagates
 * toward lower indices, i.e. big-endian — confirm against {@code add}).
 *
 * <p>Note: an empty input returns the same (empty) array instance, not a copy.
 *
 * @param in source array; never modified when non-empty
 * @param add value to add
 * @return a new array holding the sum, or {@code in} itself when empty
 */
public static byte[] plus(byte[] in, int add) {
    if (in.length == 0) return in;
    final byte[] out = in.clone();
    add(out, add);
    return out;
}
@Test
public void plusShouldCarryMoreThanOne() {
    // {0, 0, 0xFE} + 4 = {0, 1, 0x02}: the addition overflows the last byte and the
    // carry propagates into the middle byte.
    assertArrayEquals(new byte[] { 0, 1, 2}, ByteArrayUtils.plus(new byte[] { 0, 0, -2}, 4));
}
/**
 * Creates a {@link MessageDigest} for the given algorithm name. Names of the form
 * "&lt;digest&gt;with&lt;cipher&gt;" (matched by WITH_PATTERN) are reduced to their
 * digest component first.
 *
 * @throws CryptoException if the (reduced) algorithm name is unknown
 */
public static MessageDigest digest(String algorithm) {
    String digestAlgorithm = algorithm;
    final Matcher matcher = WITH_PATTERN.matcher(algorithm);
    if (matcher.matches()) {
        // Strip the signature suffix, keeping only the digest portion.
        digestAlgorithm = matcher.group(1);
    }
    try {
        return MessageDigest.getInstance(digestAlgorithm);
    } catch (NoSuchAlgorithmException e) {
        throw new CryptoException("Invalid algorithm", e);
    }
}
@Test
public void digestUsingObjectIdentifier() {
    // 2.16.840.1.101.3.4.2.1 is the OID form of SHA-256; digesting empty input must
    // yield the well-known empty-message digest.
    // NOTE(review): the expected value below looks like a scrubbed placeholder
    // (a run of 'S' characters is not valid lowercase hex) — confirm the real digest.
    assertEquals("SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS",
            Hex.toHexString(DigestUtils.digest("2.16.840.1.101.3.4.2.1").digest(new byte[0]))
    );
}
/**
 * Emits the XML {@code <near-cache name="...">} element for one near-cache config,
 * including its nested eviction and preloader sub-elements.
 *
 * @param gen XML generator to write into
 * @param name near-cache (map) name, written as the element's name attribute
 * @param nearCache configuration to serialize
 */
private static void nearCache(XmlGenerator gen, String name, NearCacheConfig nearCache) {
    EvictionConfig eviction = nearCache.getEvictionConfig();
    NearCachePreloaderConfig preloader = nearCache.getPreloaderConfig();
    gen.open("near-cache", "name", name)
            .node("in-memory-format", nearCache.getInMemoryFormat())
            .node("serialize-keys", nearCache.isSerializeKeys())
            .node("invalidate-on-change", nearCache.isInvalidateOnChange())
            .node("time-to-live-seconds", nearCache.getTimeToLiveSeconds())
            .node("max-idle-seconds", nearCache.getMaxIdleSeconds())
            .node("local-update-policy", nearCache.getLocalUpdatePolicy())
            // Eviction is a single element carrying its settings as attributes; the
            // comparator may be given by class name or instance (class name wins).
            .node("eviction", null,
                    "size", eviction.getSize(),
                    "max-size-policy", eviction.getMaxSizePolicy(),
                    "eviction-policy", eviction.getEvictionPolicy(),
                    "comparator-class-name", classNameOrImplClass(
                            eviction.getComparatorClassName(), eviction.getComparator()))
            .node("preloader", null,
                    "enabled", preloader.isEnabled(),
                    "directory", preloader.getDirectory(),
                    "store-initial-delay-seconds", preloader.getStoreInitialDelaySeconds(),
                    "store-interval-seconds", preloader.getStoreIntervalSeconds());
    //close near-cache
    gen.close();
}
/** Round-trips a fully-populated near-cache config through the XML generator. */
@Test
public void nearCache() {
    NearCacheConfig expected = createNearCacheConfig(randomString())
            .setPreloaderConfig(
                    new NearCachePreloaderConfig()
                            .setEnabled(true)
                            .setDirectory(randomString())
                            .setStoreInitialDelaySeconds(randomInt())
                            .setStoreIntervalSeconds(randomInt())
            )
            .setEvictionConfig(
                    new EvictionConfig()
                            .setEvictionPolicy(LFU)
                            .setMaxSizePolicy(USED_NATIVE_MEMORY_SIZE)
                            .setComparatorClassName(randomString())
                            .setSize(randomInt())
            );
    clientConfig.addNearCacheConfig(expected);

    // Generate XML from clientConfig, parse it back, and compare the maps.
    Map<String, NearCacheConfig> actual = newConfigViaGenerator().getNearCacheConfigMap();
    assertMap(clientConfig.getNearCacheConfigMap(), actual);
}
public static int count(DataIterator it) { @SuppressWarnings("unused") // used in while loop int count = 0; for(DataElement element = it.next(); element !=null; element = it.next()) { count++; } return count; }
@Test
public void testCountByPredicateAtPath() throws Exception
{
  SimpleTestData data = IteratorTestData.createSimpleTestData();

  // Pre-order traversal filtered down to elements whose path matches PATH_TO_ID;
  // the fixture contains three such elements.
  int count = Builder.create(data.getDataElement(), IterationOrder.PRE_ORDER)
      .filterBy(Predicates.pathMatchesPathSpec(IteratorTestData.PATH_TO_ID))
      .count();

  assertEquals(3, count);
}
/** Returns the number of fields in this record, delegating to the backing list. */
@Override
public int size() {
  return contents.size();
}
/**
 * Copies every field of a fixture record into a fresh record via the typed
 * setByXxx/getByXxx accessors and checks the copy equals the original.
 */
@Test
public void testGetSetByType1() throws HCatException {
  HCatRecord inpRec = getHCatRecords()[0];
  HCatRecord newRec = new DefaultHCatRecord(inpRec.size());
  // Schema covering one column of each primitive type exercised below.
  HCatSchema hsch = HCatSchemaUtils.getHCatSchema(
      "a:tinyint,b:smallint,c:int,d:bigint,e:float,f:double,g:boolean,h:string,i:binary,j:string");

  newRec.setByte("a", hsch, inpRec.getByte("a", hsch));
  newRec.setShort("b", hsch, inpRec.getShort("b", hsch));
  newRec.setInteger("c", hsch, inpRec.getInteger("c", hsch));
  newRec.setLong("d", hsch, inpRec.getLong("d", hsch));
  newRec.setFloat("e", hsch, inpRec.getFloat("e", hsch));
  newRec.setDouble("f", hsch, inpRec.getDouble("f", hsch));
  newRec.setBoolean("g", hsch, inpRec.getBoolean("g", hsch));
  newRec.setString("h", hsch, inpRec.getString("h", hsch));
  newRec.setByteArray("i", hsch, inpRec.getByteArray("i", hsch));
  newRec.setString("j", hsch, inpRec.getString("j", hsch));

  Assert.assertTrue(HCatDataCheckUtil.recordsEqual(newRec, inpRec));
}
/**
 * Creates a folder in Backblaze B2. A top-level path becomes a bucket (using the
 * requested region as bucket type, or the account default); any deeper path becomes a
 * zero-byte placeholder object created via the touch feature.
 *
 * @param folder folder to create
 * @param status transfer status; its region selects the bucket type for containers
 * @return the created path with volume/placeholder type flags and fresh attributes
 * @throws BackgroundException wrapping B2 API or I/O failures
 */
@Override
public Path mkdir(final Path folder, final TransferStatus status) throws BackgroundException {
    try {
        if(containerService.isContainer(folder)) {
            // Bucket creation: fall back to the account's default bucket type when
            // the caller did not request a region.
            final B2BucketResponse response = session.getClient().createBucket(containerService.getContainer(folder).getName(),
                    null == status.getRegion() ? BucketType.valueOf(new B2BucketTypeFeature(session, fileid).getDefault().getIdentifier()) : BucketType.valueOf(status.getRegion()));
            final EnumSet<Path.Type> type = EnumSet.copyOf(folder.getType());
            type.add(Path.Type.volume);
            return folder.withType(type).withAttributes(new B2AttributesFinderFeature(session, fileid).toAttributes(response));
        }
        else {
            // Non-bucket folder: materialize as a placeholder object with a checksum
            // computed over empty content.
            final EnumSet<Path.Type> type = EnumSet.copyOf(folder.getType());
            type.add(Path.Type.placeholder);
            return new B2TouchFeature(session, fileid).touch(folder.withType(type), status
                    .withMime(MimeTypeService.DEFAULT_CONTENT_TYPE)
                    .withChecksum(writer.checksum(folder, status).compute(new NullInputStream(0L), status)));
        }
    }
    catch(B2ApiException e) {
        throw new B2ExceptionMappingService(fileid).map("Cannot create folder {0}", e, folder);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map("Cannot create folder {0}", e, folder);
    }
}
/**
 * Creates a placeholder directory in an existing bucket, verifies it is findable with
 * both B2-specific and generic find features, then deletes it (twice: hidden marker,
 * then the directory itself).
 */
@Test
public void testCreatePlaceholder() throws Exception {
    final Path bucket = new Path("/test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final B2VersionIdProvider fileid = new B2VersionIdProvider(session);
    final String filename = new AlphanumericRandomStringService().random();
    final Path directory = new B2DirectoryFeature(session, fileid, new B2WriteFeature(session, fileid)).mkdir(new Path(bucket, filename, EnumSet.of(Path.Type.directory)), new TransferStatus());
    assertTrue(directory.getType().contains(Path.Type.placeholder));
    assertTrue(new B2FindFeature(session, fileid).find(directory));
    assertTrue(new DefaultFindFeature(session).find(directory));
    assertNotEquals(PathAttributes.EMPTY, new B2AttributesFinderFeature(session, fileid).find(directory));
    assertEquals(directory.attributes().getVersionId(), new B2VersionIdProvider(session).getVersionId(new Path(bucket, filename, EnumSet.of(Path.Type.directory))));
    // Mark as hidden
    new B2DeleteFeature(session, fileid).delete(Collections.singletonList(new Path(directory).withAttributes(PathAttributes.EMPTY)), new DisabledLoginCallback(), new Delete.DisabledCallback());
    // .bzEmpty is deleted not hidden
    assertFalse(new B2FindFeature(session, fileid).find(directory));
    assertFalse(new DefaultFindFeature(session).find(directory));
    new B2DeleteFeature(session, fileid).delete(Collections.singletonList(directory), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
/**
 * Looks up the server configuration for an issuer, preferring the static service
 * and falling back to the dynamic one when the static lookup yields nothing.
 */
@Override
public ServerConfiguration getServerConfiguration(String issuer) {
    ServerConfiguration staticConfig = staticServerService.getServerConfiguration(issuer);
    return staticConfig != null
            ? staticConfig
            : dynamicServerService.getServerConfiguration(issuer);
}
@Test
public void getServerConfiguration_useDynamic() {
    // Static service misses (returns null) -> hybrid must consult and return the dynamic result.
    Mockito.when(mockStaticService.getServerConfiguration(issuer)).thenReturn(null);
    Mockito.when(mockDynamicService.getServerConfiguration(issuer)).thenReturn(mockServerConfig);

    ServerConfiguration result = hybridService.getServerConfiguration(issuer);

    // Both services must have been queried, in fallback order.
    Mockito.verify(mockStaticService).getServerConfiguration(issuer);
    Mockito.verify(mockDynamicService).getServerConfiguration(issuer);

    assertEquals(mockServerConfig, result);
}
/**
 * Fuzzes every GET parameter of the request with {@code payload}, using path-aware
 * fuzzing (FUZZING_PATHS) and no extra parameter name.
 *
 * @param request request whose query parameters are fuzzed
 * @param payload payload to inject
 * @return fuzzed request variants
 */
public static ImmutableList<HttpRequest> fuzzGetParametersExpectingPathValues(
    HttpRequest request, String payload) {
  return fuzzGetParameters(
      request, payload, Optional.empty(), ImmutableSet.of(FuzzingModifier.FUZZING_PATHS));
}
@Test
public void
    fuzzGetParametersExpectingPathValues_whenGetParameterValueHasPathPrefixAndFileExtension_prefixesPayloadAndAppendsFileExtension() {
  HttpRequest requestWithPathPrefixAndFileExtension =
      HttpRequest.get("https://google.com?key=resources/value.jpg").withEmptyHeaders().build();
  // Expected variant keeps the "resources/" prefix and truncates the ".jpg" suffix with
  // a %00 (NUL) so servers that append the extension still resolve the payload path.
  HttpRequest requestWithFuzzedGetParameterWithPathPrefixAndFileExtension =
      HttpRequest.get("https://google.com?key=resources/<payload>%00.jpg")
          .withEmptyHeaders()
          .build();

  assertThat(
          FuzzingUtils.fuzzGetParametersExpectingPathValues(
              requestWithPathPrefixAndFileExtension, "<payload>"))
      .contains(requestWithFuzzedGetParameterWithPathPrefixAndFileExtension);
}
/**
 * Builds the ordered, duplicate-free (by reference) list of class loaders to try when
 * loading services: the given loader, the thread context loader, Hazelcast's core
 * loader, and — when present on the classpath — the Hazelcast client's loader.
 *
 * @param classLoader explicitly requested loader; may be null
 * @return loaders in lookup priority order
 */
static List<ClassLoader> selectClassLoaders(ClassLoader classLoader) {
    // list prevents reordering!
    List<ClassLoader> classLoaders = new ArrayList<>();

    if (classLoader != null) {
        classLoaders.add(classLoader);
    }

    // check if TCCL is same as given classLoader
    ClassLoader tccl = Thread.currentThread().getContextClassLoader();
    if (tccl != null && tccl != classLoader) {
        classLoaders.add(tccl);
    }

    // Hazelcast core classLoader
    ClassLoader coreClassLoader = ServiceLoader.class.getClassLoader();
    if (coreClassLoader != classLoader && coreClassLoader != tccl) {
        classLoaders.add(coreClassLoader);
    }

    // Hazelcast client classLoader
    try {
        Class<?> hzClientClass = Class.forName("com.hazelcast.client.HazelcastClient");
        ClassLoader clientClassLoader = hzClientClass.getClassLoader();
        if (clientClassLoader != classLoader && clientClassLoader != tccl && clientClassLoader != coreClassLoader) {
            classLoaders.add(clientClassLoader);
        }
    } catch (ClassNotFoundException ignore) {
        // ignore since we may not have the HazelcastClient in the classpath
        ignore(ignore);
    }

    return classLoaders;
}
@Test
public void selectingSimpleSingleClassLoader() {
    // With no explicit loader, the TCCL and core loader coincide in this environment,
    // so exactly one loader is selected.
    List<ClassLoader> classLoaders = ServiceLoader.selectClassLoaders(null);
    assertEquals(1, classLoaders.size());
}
/**
 * Returns the job's current resource requirements, obtained synchronously from the
 * scheduler and wrapped in an already-completed future.
 */
@Override
public CompletableFuture<JobResourceRequirements> requestJobResourceRequirements() {
    return CompletableFuture.completedFuture(schedulerNG.requestJobResourceRequirements());
}
/** The JobMaster gateway must surface exactly the requirements supplied by the scheduler. */
@Test
public void testResourceRequirementsAreRequestedFromTheScheduler() throws Exception {
    final JobResourceRequirements jobResourceRequirements = JobResourceRequirements.empty();
    // Stub scheduler that returns the canned requirements.
    final TestingSchedulerNG scheduler =
            TestingSchedulerNG.newBuilder()
                    .setRequestJobResourceRequirementsSupplier(() -> jobResourceRequirements)
                    .build();
    try (final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withSlotPoolServiceSchedulerFactory(
                            DefaultSlotPoolServiceSchedulerFactory.create(
                                    TestingSlotPoolServiceBuilder.newBuilder(),
                                    new TestingSchedulerNGFactory(scheduler)))
                    .createJobMaster()) {
        jobMaster.start();
        final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class);
        assertThatFuture(jobMasterGateway.requestJobResourceRequirements())
                .eventuallySucceeds()
                .isEqualTo(jobResourceRequirements);
    }
}
/**
 * Parses a YAML pipeline definition file into a {@link PipelineDef}, merging in the
 * given global configuration via the JsonNode-based overload.
 */
@Override
public PipelineDef parse(Path pipelineDefPath, Configuration globalPipelineConfig)
        throws Exception {
    return parse(mapper.readTree(pipelineDefPath.toFile()), globalPipelineConfig);
}
@Test
void testParsingNecessaryOnlyDefinition() throws Exception {
    // NOTE(review): the test name says "necessary only" but the resource and expected
    // fixture are the "with-optional" variant — confirm which was intended.
    URL resource = Resources.getResource("definitions/pipeline-definition-with-optional.yaml");
    YamlPipelineDefinitionParser parser = new YamlPipelineDefinitionParser();
    PipelineDef pipelineDef = parser.parse(Paths.get(resource.toURI()), new Configuration());
    assertThat(pipelineDef).isEqualTo(defWithOptional);
}
/**
 * REST endpoint: deletes an identity group by id, returning 204 on success and 404
 * when the group does not exist (raised by the lookup). An optional interceptor is
 * consulted before the deletion.
 */
@ApiOperation(value = "Delete a group", tags = { "Groups" }, code = 204)
@ApiResponses(value = {
        @ApiResponse(code = 204, message = "Indicates the group was found and has been deleted. Response-body is intentionally empty."),
        @ApiResponse(code = 404, message = "Indicates the requested group does not exist.")
})
@DeleteMapping("/identity/groups/{groupId}")
@ResponseStatus(HttpStatus.NO_CONTENT)
public void deleteGroup(@ApiParam(name = "groupId") @PathVariable String groupId) {
    // Throws (-> 404) when the group cannot be found.
    Group group = getGroupFromRequest(groupId);
    if (restApiInterceptor != null) {
        restApiInterceptor.deleteGroup(group);
    }
    identityService.deleteGroup(group.getId());
}
// Integration test: PUT a group update whose "name" and "type" are explicit JSON nulls.
// Expects HTTP 200 with null fields echoed back, and verifies the stored group's name/type
// are actually cleared. The finally block cleans up the test group regardless of outcome.
@Test public void testUpdateGroupNullFields() throws Exception { try { Group testGroup = identityService.newGroup("testgroup"); testGroup.setName("Test group"); testGroup.setType("Test type"); identityService.saveGroup(testGroup); ObjectNode requestNode = objectMapper.createObjectNode(); requestNode.set("name", null); requestNode.set("type", null); HttpPut httpPut = new HttpPut(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP, "testgroup")); httpPut.setEntity(new StringEntity(requestNode.toString())); CloseableHttpResponse response = executeRequest(httpPut, HttpStatus.SC_OK); JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent()); closeResponse(response); assertThat(responseNode).isNotNull(); assertThatJson(responseNode) .when(Option.IGNORING_EXTRA_FIELDS) .isEqualTo("{" + " id: 'testgroup'," + " name: null," + " type: null," + " url: '" + SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP, testGroup.getId()) + "'" + "}"); Group createdGroup = identityService.createGroupQuery().groupId("testgroup").singleResult(); assertThat(createdGroup).isNotNull(); assertThat(createdGroup.getName()).isNull(); assertThat(createdGroup.getType()).isNull(); } finally { try { identityService.deleteGroup("testgroup"); } catch (Throwable ignore) { // Ignore, since the group may not have been created in the test // or already deleted } } }
synchronized Map<String, SchemaTransformProvider> getAllProviders() { if (this.providersCached) { return schemaTransformProvidersCache; } try { for (SchemaTransformProvider schemaTransformProvider : ServiceLoader.load(SchemaTransformProvider.class)) { if (schemaTransformProvidersCache.containsKey(schemaTransformProvider.identifier())) { throw new IllegalArgumentException( "Found multiple SchemaTransformProvider implementations with the same identifier " + schemaTransformProvider.identifier()); } if (supportedIdentifiers == null || supportedIdentifiers.contains(schemaTransformProvider.identifier())) { if (schemaTransformProvider.identifier().equals("beam:transform:managed:v1")) { // Prevent recursively adding the 'ManagedSchemaTransformProvider'. continue; } schemaTransformProvidersCache.put( schemaTransformProvider.identifier(), schemaTransformProvider); } } this.providersCached = true; return schemaTransformProvidersCache; } catch (Exception e) { throw new RuntimeException(e.getMessage()); } }
// Restricts discovery to a single supported identifier and verifies that provider
// is found by getAllProviders().
@Test public void testDiscoverTestProvider() { ManagedSchemaTransformProvider provider = new ManagedSchemaTransformProvider(Arrays.asList(TestSchemaTransformProvider.IDENTIFIER)); assertTrue(provider.getAllProviders().containsKey(TestSchemaTransformProvider.IDENTIFIER)); }
/**
 * Sets the capacity of the event journal.
 *
 * @param capacity number of events the journal can hold; must be at least 1
 * @return this config instance, for method chaining
 * @throws IllegalArgumentException presumably thrown by checkPositive when capacity < 1 — confirm against the util's contract
 */
public EventJournalConfig setCapacity(int capacity) { checkPositive(capacity, "capacity can't be smaller than 1"); this.capacity = capacity; return this; }
// The read-only wrapper of the config must reject mutation attempts.
@Test(expected = UnsupportedOperationException.class) public void testReadOnlyClass_setCapacity_throwsException() { getReadOnlyConfig().setCapacity(27); }
/*
 * CLI entry point: uploads a local directory tree as namespace files.
 *
 * Flow:
 *  1. Normalize `to` so it always starts and ends with "/".
 *  2. If --delete was given, first issue a DELETE for the target path on the server.
 *  3. Walk `from`, keeping only regular files not excluded by the .kestraignore rules.
 *  4. POST each file as multipart form data to the namespace files endpoint, preserving
 *     the path relative to `from`, logging a success message per file.
 *
 * Returns 0 on success; on an HTTP error the shared handler prints diagnostics and 1 is
 * returned. Both the Files.walk stream and the HTTP client are closed via
 * try-with-resources.
 */
@Override public Integer call() throws Exception { super.call(); to = to.startsWith("/") ? to : "/" + to; to = to.endsWith("/") ? to : to + "/"; try (var files = Files.walk(from); DefaultHttpClient client = client()) { if (delete) { client.toBlocking().exchange(this.requestOptions(HttpRequest.DELETE(apiUri("/namespaces/") + namespace + "/files?path=" + to, null))); } KestraIgnore kestraIgnore = new KestraIgnore(from); List<Path> paths = files .filter(Files::isRegularFile) .filter(path -> !kestraIgnore.isIgnoredFile(path.toString(), true)) .toList(); paths.forEach(path -> { MultipartBody body = MultipartBody.builder() .addPart("fileContent", path.toFile()) .build(); String relativizedPath = from.relativize(path).toString(); String destination = to + relativizedPath; client.toBlocking().exchange( this.requestOptions( HttpRequest.POST( apiUri("/namespaces/") + namespace + "/files?path=" + destination, body ).contentType(MediaType.MULTIPART_FORM_DATA) ) ); stdOut("Successfully uploaded {0} to {1}", path.toString(), destination); }); } catch (HttpClientResponseException e) { AbstractValidateCommand.handleHttpException(e, "namespace"); return 1; } return 0; }
// Runs the CLI command against an embedded server with a directory that has no
// .kestraignore, capturing stdout and asserting every file was reported as uploaded.
@Test void runWithoutIgnore() throws URISyntaxException { URL directory = NamespaceFilesUpdateCommandTest.class.getClassLoader().getResource("namespacefiles/noignore/"); ByteArrayOutputStream out = new ByteArrayOutputStream(); System.setOut(new PrintStream(out)); try (ApplicationContext ctx = ApplicationContext.run(Environment.CLI, Environment.TEST)) { EmbeddedServer embeddedServer = ctx.getBean(EmbeddedServer.class); embeddedServer.start(); String[] args = { "--server", embeddedServer.getURL().toString(), "--user", "myuser:pass:word", "--delete", "io.kestra.cli", directory.getPath() }; PicocliRunner.call(NamespaceFilesUpdateCommand.class, ctx, args); assertTransferMessage(out, "2", null); assertTransferMessage(out, "1", null); assertTransferMessage(out, "flows/flow.yml", null); out.reset(); } }
public boolean initWithCommittedOffsetsIfNeeded(Timer timer) { final Set<TopicPartition> initializingPartitions = subscriptions.initializingPartitions(); final Map<TopicPartition, OffsetAndMetadata> offsets = fetchCommittedOffsets(initializingPartitions, timer); // "offsets" will be null if the offset fetch requests did not receive responses within the given timeout if (offsets == null) return false; refreshCommittedOffsets(offsets, this.metadata, this.subscriptions); return true; }
// Simulates a GROUP_AUTHORIZATION_FAILED response to the offset fetch and expects
// initWithCommittedOffsetsIfNeeded to surface it as a GroupAuthorizationException
// carrying the consumer group id.
@Test public void testRefreshOffsetsGroupNotAuthorized() { client.prepareResponse(groupCoordinatorResponse(node, Errors.NONE)); coordinator.ensureCoordinatorReady(time.timer(Long.MAX_VALUE)); subscriptions.assignFromUser(singleton(t1p)); client.prepareResponse(offsetFetchResponse(Errors.GROUP_AUTHORIZATION_FAILED, Collections.emptyMap())); try { coordinator.initWithCommittedOffsetsIfNeeded(time.timer(Long.MAX_VALUE)); fail("Expected group authorization error"); } catch (GroupAuthorizationException e) { assertEquals(groupId, e.groupId()); } }
public Lease acquire() throws Exception {
    // Block indefinitely (timeout -1) until a lock node is obtained, then wrap it as a Lease.
    String lockPath = internals.attemptLock(-1, null, null);
    return makeLease(lockPath);
}
// Acquires all LEASES leases of a semaphore and checks getParticipantNodes() reports one
// node per held lease; cleanup closes every lease and verifies the client shuts down cleanly.
@Test public void testGetParticipantNodes() throws Exception { final int LEASES = 3; Timing timing = new Timing(); CuratorFramework client = CuratorFrameworkFactory.newClient( server.getConnectString(), timing.session(), timing.connection(), new RetryOneTime(1)); List<Lease> leases = Lists.newArrayList(); client.start(); try { InterProcessSemaphoreV2 semaphore = new InterProcessSemaphoreV2(client, "/test", LEASES); for (int i = 0; i < LEASES; ++i) { leases.add(semaphore.acquire()); } assertEquals(semaphore.getParticipantNodes().size(), LEASES); } finally { for (Lease l : leases) { CloseableUtils.closeQuietly(l); } TestCleanState.closeAndTestClean(client); } }
/**
 * Registers the mnemonic name for every known client-script (cs2) opcode.
 *
 * <p>This is a pure lookup-table population: each {@code add(OPCODE, "name")} call maps an
 * opcode constant to the string used when assembling/disassembling scripts. The table covers
 * core VM instructions, component (cc_/if_) operations, world map, clan, trading post,
 * arithmetic/string helpers, chat, camera, and database opcodes. Order of registration is
 * not semantically meaningful from this code alone.
 */
public void init() { add(ICONST, "iconst"); add(GET_VARP, "get_varp"); add(SET_VARP, "set_varp"); add(SCONST, "sconst"); add(JUMP, "jump"); add(IF_ICMPNE, "if_icmpne"); add(IF_ICMPEQ, "if_icmpeq"); add(IF_ICMPLT, "if_icmplt"); add(IF_ICMPGT, "if_icmpgt"); add(RETURN, "return"); add(GET_VARBIT, "get_varbit"); add(SET_VARBIT, "set_varbit"); add(IF_ICMPLE, "if_icmple"); add(IF_ICMPGE, "if_icmpge"); add(ILOAD, "iload"); add(ISTORE, "istore"); add(SLOAD, "sload"); add(SSTORE, "sstore"); add(JOIN_STRING, "join_string"); add(POP_INT, "pop_int"); add(POP_STRING, "pop_string"); add(INVOKE, "invoke"); add(GET_VARC_INT, "get_varc_int"); add(SET_VARC_INT, "set_varc_int"); add(DEFINE_ARRAY, "define_array"); add(GET_ARRAY_INT, "get_array_int"); add(SET_ARRAY_INT, "set_array_int"); add(GET_VARC_STRING_OLD, "get_varc_string_old"); add(SET_VARC_STRING_OLD, "set_varc_string_old"); add(GET_VARC_STRING, "get_varc_string"); add(SET_VARC_STRING, "set_varc_string"); add(SWITCH, "switch"); add(GET_VARCLANSETTING, "get_varclansetting"); add(GET_VARCLAN, "get_varclan"); add(CC_CREATE, "cc_create"); add(CC_DELETE, "cc_delete"); add(CC_DELETEALL, "cc_deleteall"); add(CC_FIND, "cc_find"); add(IF_FIND, "if_find"); add(CC_SETPOSITION, "cc_setposition"); add(CC_SETSIZE, "cc_setsize"); add(CC_SETHIDE, "cc_sethide"); add(CC_SETNOCLICKTHROUGH, "cc_setnoclickthrough"); add(CC_SETNOSCROLLTHROUGH, "cc_setnoscrollthrough"); add(CC_SETSCROLLPOS, "cc_setscrollpos"); add(CC_SETCOLOUR, "cc_setcolour"); add(CC_SETFILL, "cc_setfill"); add(CC_SETTRANS, "cc_settrans"); add(CC_SETLINEWID, "cc_setlinewid"); add(CC_SETGRAPHIC, "cc_setgraphic"); add(CC_SET2DANGLE, "cc_set2dangle"); add(CC_SETTILING, "cc_settiling"); add(CC_SETMODEL, "cc_setmodel"); add(CC_SETMODELANGLE, "cc_setmodelangle"); add(CC_SETMODELANIM, "cc_setmodelanim"); add(CC_SETMODELORTHOG, "cc_setmodelorthog"); add(CC_SETTEXT, "cc_settext"); add(CC_SETTEXTFONT, "cc_settextfont"); add(CC_SETTEXTALIGN, "cc_settextalign"); add(CC_SETTEXTSHADOW, 
"cc_settextshadow"); add(CC_SETOUTLINE, "cc_setoutline"); add(CC_SETGRAPHICSHADOW, "cc_setgraphicshadow"); add(CC_SETVFLIP, "cc_setvflip"); add(CC_SETHFLIP, "cc_sethflip"); add(CC_SETSCROLLSIZE, "cc_setscrollsize"); add(CC_RESUME_PAUSEBUTTON, "cc_resume_pausebutton"); add(CC_SETFILLCOLOUR, "cc_setfillcolour"); add(CC_SETLINEDIRECTION, "cc_setlinedirection"); add(CC_SETMODELTRANSPARENT, "cc_setmodeltransparent"); add(CC_SETOBJECT, "cc_setobject"); add(CC_SETNPCHEAD, "cc_setnpchead"); add(CC_SETPLAYERHEAD_SELF, "cc_setplayerhead_self"); add(CC_SETOBJECT_NONUM, "cc_setobject_nonum"); add(CC_SETOBJECT_ALWAYS_NUM, "cc_setobject_always_num"); add(CC_SETOP, "cc_setop"); add(CC_SETDRAGGABLE, "cc_setdraggable"); add(CC_SETDRAGGABLEBEHAVIOR, "cc_setdraggablebehavior"); add(CC_SETDRAGDEADZONE, "cc_setdragdeadzone"); add(CC_SETDRAGDEADTIME, "cc_setdragdeadtime"); add(CC_SETOPBASE, "cc_setopbase"); add(CC_SETTARGETVERB, "cc_settargetverb"); add(CC_CLEAROPS, "cc_clearops"); add(CC_SETOPKEY, "cc_setopkey"); add(CC_SETOPTKEY, "cc_setoptkey"); add(CC_SETOPKEYRATE, "cc_setopkeyrate"); add(CC_SETOPTKEYRATE, "cc_setoptkeyrate"); add(CC_SETOPKEYIGNOREHELD, "cc_setopkeyignoreheld"); add(CC_SETOPTKEYIGNOREHELD, "cc_setoptkeyignoreheld"); add(CC_SETONCLICK, "cc_setonclick"); add(CC_SETONHOLD, "cc_setonhold"); add(CC_SETONRELEASE, "cc_setonrelease"); add(CC_SETONMOUSEOVER, "cc_setonmouseover"); add(CC_SETONMOUSELEAVE, "cc_setonmouseleave"); add(CC_SETONDRAG, "cc_setondrag"); add(CC_SETONTARGETLEAVE, "cc_setontargetleave"); add(CC_SETONVARTRANSMIT, "cc_setonvartransmit"); add(CC_SETONTIMER, "cc_setontimer"); add(CC_SETONOP, "cc_setonop"); add(CC_SETONDRAGCOMPLETE, "cc_setondragcomplete"); add(CC_SETONCLICKREPEAT, "cc_setonclickrepeat"); add(CC_SETONMOUSEREPEAT, "cc_setonmouserepeat"); add(CC_SETONINVTRANSMIT, "cc_setoninvtransmit"); add(CC_SETONSTATTRANSMIT, "cc_setonstattransmit"); add(CC_SETONTARGETENTER, "cc_setontargetenter"); add(CC_SETONSCROLLWHEEL, "cc_setonscrollwheel"); 
add(CC_SETONCHATTRANSMIT, "cc_setonchattransmit"); add(CC_SETONKEY, "cc_setonkey"); add(CC_SETONFRIENDTRANSMIT, "cc_setonfriendtransmit"); add(CC_SETONCLANTRANSMIT, "cc_setonclantransmit"); add(CC_SETONMISCTRANSMIT, "cc_setonmisctransmit"); add(CC_SETONDIALOGABORT, "cc_setondialogabort"); add(CC_SETONSUBCHANGE, "cc_setonsubchange"); add(CC_SETONSTOCKTRANSMIT, "cc_setonstocktransmit"); add(CC_SETONRESIZE, "cc_setonresize"); add(CC_SETONCLANSETTINGSTRANSMIT, "cc_setonclansettingstransmit"); add(CC_SETONCLANCHANNELTRANSMIT, "cc_setonclanchanneltransmit"); add(CC_GETX, "cc_getx"); add(CC_GETY, "cc_gety"); add(CC_GETWIDTH, "cc_getwidth"); add(CC_GETHEIGHT, "cc_getheight"); add(CC_GETHIDE, "cc_gethide"); add(CC_GETLAYER, "cc_getlayer"); add(CC_GETSCROLLX, "cc_getscrollx"); add(CC_GETSCROLLY, "cc_getscrolly"); add(CC_GETTEXT, "cc_gettext"); add(CC_GETSCROLLWIDTH, "cc_getscrollwidth"); add(CC_GETSCROLLHEIGHT, "cc_getscrollheight"); add(CC_GETMODELZOOM, "cc_getmodelzoom"); add(CC_GETMODELANGLE_X, "cc_getmodelangle_x"); add(CC_GETMODELANGLE_Z, "cc_getmodelangle_z"); add(CC_GETMODELANGLE_Y, "cc_getmodelangle_y"); add(CC_GETTRANS, "cc_gettrans"); add(CC_GETCOLOUR, "cc_getcolour"); add(CC_GETFILLCOLOUR, "cc_getfillcolour"); add(CC_GETMODELTRANSPARENT, "cc_getmodeltransparent"); add(CC_GETINVOBJECT, "cc_getinvobject"); add(CC_GETINVCOUNT, "cc_getinvcount"); add(CC_GETID, "cc_getid"); add(CC_GETTARGETMASK, "cc_gettargetmask"); add(CC_GETOP, "cc_getop"); add(CC_GETOPBASE, "cc_getopbase"); add(CC_CALLONRESIZE, "cc_callonresize"); add(CC_TRIGGEROP, "cc_triggerop"); add(IF_SETPOSITION, "if_setposition"); add(IF_SETSIZE, "if_setsize"); add(IF_SETHIDE, "if_sethide"); add(IF_SETNOCLICKTHROUGH, "if_setnoclickthrough"); add(IF_SETNOSCROLLTHROUGH, "if_setnoscrollthrough"); add(IF_SETSCROLLPOS, "if_setscrollpos"); add(IF_SETCOLOUR, "if_setcolour"); add(IF_SETFILL, "if_setfill"); add(IF_SETTRANS, "if_settrans"); add(IF_SETLINEWID, "if_setlinewid"); add(IF_SETGRAPHIC, "if_setgraphic"); 
add(IF_SET2DANGLE, "if_set2dangle"); add(IF_SETTILING, "if_settiling"); add(IF_SETMODEL, "if_setmodel"); add(IF_SETMODELANGLE, "if_setmodelangle"); add(IF_SETMODELANIM, "if_setmodelanim"); add(IF_SETMODELORTHOG, "if_setmodelorthog"); add(IF_SETTEXT, "if_settext"); add(IF_SETTEXTFONT, "if_settextfont"); add(IF_SETTEXTALIGN, "if_settextalign"); add(IF_SETTEXTSHADOW, "if_settextshadow"); add(IF_SETOUTLINE, "if_setoutline"); add(IF_SETGRAPHICSHADOW, "if_setgraphicshadow"); add(IF_SETVFLIP, "if_setvflip"); add(IF_SETHFLIP, "if_sethflip"); add(IF_SETSCROLLSIZE, "if_setscrollsize"); add(IF_RESUME_PAUSEBUTTON, "if_resume_pausebutton"); add(IF_SETFILLCOLOUR, "if_setfillcolour"); add(IF_SETLINEDIRECTION, "if_setlinedirection"); add(IF_SETMODELTRANSPARENT, "if_setmodeltransparent"); add(IF_SETOBJECT, "if_setobject"); add(IF_SETNPCHEAD, "if_setnpchead"); add(IF_SETPLAYERHEAD_SELF, "if_setplayerhead_self"); add(IF_SETOBJECT_NONUM, "if_setobject_nonum"); add(IF_SETOBJECT_ALWAYS_NUM, "if_setobject_always_num"); add(IF_SETOP, "if_setop"); add(IF_SETDRAGGABLE, "if_setdraggable"); add(IF_SETDRAGGABLEBEHAVIOR, "if_setdraggablebehavior"); add(IF_SETDRAGDEADZONE, "if_setdragdeadzone"); add(IF_SETDRAGDEADTIME, "if_setdragdeadtime"); add(IF_SETOPBASE, "if_setopbase"); add(IF_SETTARGETVERB, "if_settargetverb"); add(IF_CLEAROPS, "if_clearops"); add(IF_SETOPKEY, "if_setopkey"); add(IF_SETOPTKEY, "if_setoptkey"); add(IF_SETOPKEYRATE, "if_setopkeyrate"); add(IF_SETOPTKEYRATE, "if_setoptkeyrate"); add(IF_SETOPKEYIGNOREHELD, "if_setopkeyignoreheld"); add(IF_SETOPTKEYIGNOREHELD, "if_setoptkeyignoreheld"); add(IF_SETONCLICK, "if_setonclick"); add(IF_SETONHOLD, "if_setonhold"); add(IF_SETONRELEASE, "if_setonrelease"); add(IF_SETONMOUSEOVER, "if_setonmouseover"); add(IF_SETONMOUSELEAVE, "if_setonmouseleave"); add(IF_SETONDRAG, "if_setondrag"); add(IF_SETONTARGETLEAVE, "if_setontargetleave"); add(IF_SETONVARTRANSMIT, "if_setonvartransmit"); add(IF_SETONTIMER, "if_setontimer"); add(IF_SETONOP, 
"if_setonop"); add(IF_SETONDRAGCOMPLETE, "if_setondragcomplete"); add(IF_SETONCLICKREPEAT, "if_setonclickrepeat"); add(IF_SETONMOUSEREPEAT, "if_setonmouserepeat"); add(IF_SETONINVTRANSMIT, "if_setoninvtransmit"); add(IF_SETONSTATTRANSMIT, "if_setonstattransmit"); add(IF_SETONTARGETENTER, "if_setontargetenter"); add(IF_SETONSCROLLWHEEL, "if_setonscrollwheel"); add(IF_SETONCHATTRANSMIT, "if_setonchattransmit"); add(IF_SETONKEY, "if_setonkey"); add(IF_SETONFRIENDTRANSMIT, "if_setonfriendtransmit"); add(IF_SETONCLANTRANSMIT, "if_setonclantransmit"); add(IF_SETONMISCTRANSMIT, "if_setonmisctransmit"); add(IF_SETONDIALOGABORT, "if_setondialogabort"); add(IF_SETONSUBCHANGE, "if_setonsubchange"); add(IF_SETONSTOCKTRANSMIT, "if_setonstocktransmit"); add(IF_SETONRESIZE, "if_setonresize"); add(IF_SETONCLANSETTINGSTRANSMIT, "if_setonclansettingstransmit"); add(IF_SETONCLANCHANNELTRANSMIT, "if_setonclanchanneltransmit"); add(IF_GETX, "if_getx"); add(IF_GETY, "if_gety"); add(IF_GETWIDTH, "if_getwidth"); add(IF_GETHEIGHT, "if_getheight"); add(IF_GETHIDE, "if_gethide"); add(IF_GETLAYER, "if_getlayer"); add(IF_GETSCROLLX, "if_getscrollx"); add(IF_GETSCROLLY, "if_getscrolly"); add(IF_GETTEXT, "if_gettext"); add(IF_GETSCROLLWIDTH, "if_getscrollwidth"); add(IF_GETSCROLLHEIGHT, "if_getscrollheight"); add(IF_GETMODELZOOM, "if_getmodelzoom"); add(IF_GETMODELANGLE_X, "if_getmodelangle_x"); add(IF_GETMODELANGLE_Z, "if_getmodelangle_z"); add(IF_GETMODELANGLE_Y, "if_getmodelangle_y"); add(IF_GETTRANS, "if_gettrans"); add(IF_GETCOLOUR, "if_getcolour"); add(IF_GETFILLCOLOUR, "if_getfillcolour"); add(IF_GETMODELTRANSPARENT, "if_getmodeltransparent"); add(IF_GETINVOBJECT, "if_getinvobject"); add(IF_GETINVCOUNT, "if_getinvcount"); add(IF_HASSUB, "if_hassub"); add(IF_GETTOP, "if_gettop"); add(IF_GETTARGETMASK, "if_gettargetmask"); add(IF_GETOP, "if_getop"); add(IF_GETOPBASE, "if_getopbase"); add(IF_CALLONRESIZE, "if_callonresize"); add(IF_TRIGGEROP, "if_triggerop"); add(MES, "mes"); add(ANIM, 
"anim"); add(IF_CLOSE, "if_close"); add(RESUME_COUNTDIALOG, "resume_countdialog"); add(RESUME_NAMEDIALOG, "resume_namedialog"); add(RESUME_STRINGDIALOG, "resume_stringdialog"); add(OPPLAYER, "opplayer"); add(IF_DRAGPICKUP, "if_dragpickup"); add(CC_DRAGPICKUP, "cc_dragpickup"); add(MOUSECAM, "mousecam"); add(GETREMOVEROOFS, "getremoveroofs"); add(SETREMOVEROOFS, "setremoveroofs"); add(OPENURL, "openurl"); add(RESUME_OBJDIALOG, "resume_objdialog"); add(BUG_REPORT, "bug_report"); add(SETSHIFTCLICKDROP, "setshiftclickdrop"); add(SETSHOWMOUSEOVERTEXT, "setshowmouseovertext"); add(RENDERSELF, "renderself"); add(SETSHOWMOUSECROSS, "setshowmousecross"); add(SETSHOWLOADINGMESSAGES, "setshowloadingmessages"); add(SETTAPTODROP, "settaptodrop"); add(GETTAPTODROP, "gettaptodrop"); add(GETCANVASSIZE, "getcanvassize"); add(MOBILE_SETFPS, "mobile_setfps"); add(MOBILE_OPENSTORE, "mobile_openstore"); add(MOBILE_OPENSTORECATEGORY, "mobile_openstorecategory"); add(SETHIDEUSERNAME, "sethideusername"); add(GETHIDEUSERNAME, "gethideusername"); add(SETREMEMBERUSERNAME, "setrememberusername"); add(GETREMEMBERUSERNAME, "getrememberusername"); add(SHOW_IOS_REVIEW, "show_ios_review"); add(SOUND_SYNTH, "sound_synth"); add(SOUND_SONG, "sound_song"); add(SOUND_JINGLE, "sound_jingle"); add(CLIENTCLOCK, "clientclock"); add(INV_GETOBJ, "inv_getobj"); add(INV_GETNUM, "inv_getnum"); add(INV_TOTAL, "inv_total"); add(INV_SIZE, "inv_size"); add(STAT, "stat"); add(STAT_BASE, "stat_base"); add(STAT_XP, "stat_xp"); add(COORD, "coord"); add(COORDX, "coordx"); add(COORDZ, "coordz"); add(COORDY, "coordy"); add(MAP_MEMBERS, "map_members"); add(INVOTHER_GETOBJ, "invother_getobj"); add(INVOTHER_GETNUM, "invother_getnum"); add(INVOTHER_TOTAL, "invother_total"); add(STAFFMODLEVEL, "staffmodlevel"); add(REBOOTTIMER, "reboottimer"); add(MAP_WORLD, "map_world"); add(RUNENERGY_VISIBLE, "runenergy_visible"); add(RUNWEIGHT_VISIBLE, "runweight_visible"); add(PLAYERMOD, "playermod"); add(WORLDFLAGS, "worldflags"); 
add(MOVECOORD, "movecoord"); add(ENUM_STRING, "enum_string"); add(ENUM, "enum"); add(ENUM_GETOUTPUTCOUNT, "enum_getoutputcount"); add(FRIEND_COUNT, "friend_count"); add(FRIEND_GETNAME, "friend_getname"); add(FRIEND_GETWORLD, "friend_getworld"); add(FRIEND_GETRANK, "friend_getrank"); add(FRIEND_SETRANK, "friend_setrank"); add(FRIEND_ADD, "friend_add"); add(FRIEND_DEL, "friend_del"); add(IGNORE_ADD, "ignore_add"); add(IGNORE_DEL, "ignore_del"); add(FRIEND_TEST, "friend_test"); add(CLAN_GETCHATDISPLAYNAME, "clan_getchatdisplayname"); add(CLAN_GETCHATCOUNT, "clan_getchatcount"); add(CLAN_GETCHATUSERNAME, "clan_getchatusername"); add(CLAN_GETCHATUSERWORLD, "clan_getchatuserworld"); add(CLAN_GETCHATUSERRANK, "clan_getchatuserrank"); add(CLAN_GETCHATMINKICK, "clan_getchatminkick"); add(CLAN_KICKUSER, "clan_kickuser"); add(CLAN_GETCHATRANK, "clan_getchatrank"); add(CLAN_JOINCHAT, "clan_joinchat"); add(CLAN_LEAVECHAT, "clan_leavechat"); add(IGNORE_COUNT, "ignore_count"); add(IGNORE_GETNAME, "ignore_getname"); add(IGNORE_TEST, "ignore_test"); add(CLAN_ISSELF, "clan_isself"); add(CLAN_GETCHATOWNERNAME, "clan_getchatownername"); add(CLAN_ISFRIEND, "clan_isfriend"); add(CLAN_ISIGNORE, "clan_isignore"); add(ACTIVECLANSETTINGS_FIND_LISTENED, "activeclansettings_find_listened"); add(ACTIVECLANSETTINGS_FIND_AFFINED, "activeclansettings_find_affined"); add(ACTIVECLANSETTINGS_GETCLANNAME, "activeclansettings_getclanname"); add(ACTIVECLANSETTINGS_GETALLOWUNAFFINED, "activeclansettings_getallowunaffined"); add(ACTIVECLANSETTINGS_GETRANKTALK, "activeclansettings_getranktalk"); add(ACTIVECLANSETTINGS_GETRANKKICK, "activeclansettings_getrankkick"); add(ACTIVECLANSETTINGS_GETRANKLOOTSHARE, "activeclansettings_getranklootshare"); add(ACTIVECLANSETTINGS_GETCOINSHARE, "activeclansettings_getcoinshare"); add(ACTIVECLANSETTINGS_GETAFFINEDCOUNT, "activeclansettings_getaffinedcount"); add(ACTIVECLANSETTINGS_GETAFFINEDDISPLAYNAME, "activeclansettings_getaffineddisplayname"); 
add(ACTIVECLANSETTINGS_GETAFFINEDRANK, "activeclansettings_getaffinedrank"); add(ACTIVECLANSETTINGS_GETBANNEDCOUNT, "activeclansettings_getbannedcount"); add(ACTIVECLANSETTINGS_GETBANNEDDISPLAYNAME, "activeclansettings_getbanneddisplayname"); add(ACTIVECLANSETTINGS_GETAFFINEDEXTRAINFO, "activeclansettings_getaffinedextrainfo"); add(ACTIVECLANSETTINGS_GETCURRENTOWNER_SLOT, "activeclansettings_getcurrentowner_slot"); add(ACTIVECLANSETTINGS_GETREPLACEMENTOWNER_SLOT, "activeclansettings_getreplacementowner_slot"); add(ACTIVECLANSETTINGS_GETAFFINEDSLOT, "activeclansettings_getaffinedslot"); add(ACTIVECLANSETTINGS_GETSORTEDAFFINEDSLOT, "activeclansettings_getsortedaffinedslot"); add(AFFINEDCLANSETTINGS_ADDBANNED_FROMCHANNEL, "affinedclansettings_addbanned_fromchannel"); add(ACTIVECLANSETTINGS_GETAFFINEDJOINRUNEDAY, "activeclansettings_getaffinedjoinruneday"); add(AFFINEDCLANSETTINGS_SETMUTED_FROMCHANNEL, "affinedclansettings_setmuted_fromchannel"); add(ACTIVECLANSETTINGS_GETAFFINEDMUTED, "activeclansettings_getaffinedmuted"); add(ACTIVECLANCHANNEL_FIND_LISTENED, "activeclanchannel_find_listened"); add(ACTIVECLANCHANNEL_FIND_AFFINED, "activeclanchannel_find_affined"); add(ACTIVECLANCHANNEL_GETCLANNAME, "activeclanchannel_getclanname"); add(ACTIVECLANCHANNEL_GETRANKKICK, "activeclanchannel_getrankkick"); add(ACTIVECLANCHANNEL_GETRANKTALK, "activeclanchannel_getranktalk"); add(ACTIVECLANCHANNEL_GETUSERCOUNT, "activeclanchannel_getusercount"); add(ACTIVECLANCHANNEL_GETUSERDISPLAYNAME, "activeclanchannel_getuserdisplayname"); add(ACTIVECLANCHANNEL_GETUSERRANK, "activeclanchannel_getuserrank"); add(ACTIVECLANCHANNEL_GETUSERWORLD, "activeclanchannel_getuserworld"); add(ACTIVECLANCHANNEL_KICKUSER, "activeclanchannel_kickuser"); add(ACTIVECLANCHANNEL_GETUSERSLOT, "activeclanchannel_getuserslot"); add(ACTIVECLANCHANNEL_GETSORTEDUSERSLOT, "activeclanchannel_getsorteduserslot"); add(CLANPROFILE_FIND, "clanprofile_find"); add(STOCKMARKET_GETOFFERTYPE, "stockmarket_getoffertype"); 
add(STOCKMARKET_GETOFFERITEM, "stockmarket_getofferitem"); add(STOCKMARKET_GETOFFERPRICE, "stockmarket_getofferprice"); add(STOCKMARKET_GETOFFERCOUNT, "stockmarket_getoffercount"); add(STOCKMARKET_GETOFFERCOMPLETEDCOUNT, "stockmarket_getoffercompletedcount"); add(STOCKMARKET_GETOFFERCOMPLETEDGOLD, "stockmarket_getoffercompletedgold"); add(STOCKMARKET_ISOFFEREMPTY, "stockmarket_isofferempty"); add(STOCKMARKET_ISOFFERSTABLE, "stockmarket_isofferstable"); add(STOCKMARKET_ISOFFERFINISHED, "stockmarket_isofferfinished"); add(STOCKMARKET_ISOFFERADDING, "stockmarket_isofferadding"); add(TRADINGPOST_SORTBY_NAME, "tradingpost_sortby_name"); add(TRADINGPOST_SORTBY_PRICE, "tradingpost_sortby_price"); add(TRADINGPOST_SORTFILTERBY_WORLD, "tradingpost_sortfilterby_world"); add(TRADINGPOST_SORTBY_AGE, "tradingpost_sortby_age"); add(TRADINGPOST_SORTBY_COUNT, "tradingpost_sortby_count"); add(TRADINGPOST_GETTOTALOFFERS, "tradingpost_gettotaloffers"); add(TRADINGPOST_GETOFFERWORLD, "tradingpost_getofferworld"); add(TRADINGPOST_GETOFFERNAME, "tradingpost_getoffername"); add(TRADINGPOST_GETOFFERPREVIOUSNAME, "tradingpost_getofferpreviousname"); add(TRADINGPOST_GETOFFERAGE, "tradingpost_getofferage"); add(TRADINGPOST_GETOFFERCOUNT, "tradingpost_getoffercount"); add(TRADINGPOST_GETOFFERPRICE, "tradingpost_getofferprice"); add(TRADINGPOST_GETOFFERITEM, "tradingpost_getofferitem"); add(ADD, "add"); add(SUB, "sub"); add(MULTIPLY, "multiply"); add(DIV, "div"); add(RANDOM, "random"); add(RANDOMINC, "randominc"); add(INTERPOLATE, "interpolate"); add(ADDPERCENT, "addpercent"); add(SETBIT, "setbit"); add(CLEARBIT, "clearbit"); add(TESTBIT, "testbit"); add(MOD, "mod"); add(POW, "pow"); add(INVPOW, "invpow"); add(AND, "and"); add(OR, "or"); add(SCALE, "scale"); add(BITCOUNT, "bitcount"); add(TOGGLEBIT, "togglebit"); add(SETBIT_RANGE, "setbit_range"); add(CLEARBIT_RANGE, "clearbit_range"); add(GETBIT_RANGE, "getbit_range"); add(APPEND_NUM, "append_num"); add(APPEND, "append"); add(APPEND_SIGNNUM, 
"append_signnum"); add(LOWERCASE, "lowercase"); add(FROMDATE, "fromdate"); add(TEXT_GENDER, "text_gender"); add(TOSTRING, "tostring"); add(COMPARE, "compare"); add(PARAHEIGHT, "paraheight"); add(PARAWIDTH, "parawidth"); add(TEXT_SWITCH, "text_switch"); add(ESCAPE, "escape"); add(APPEND_CHAR, "append_char"); add(CHAR_ISPRINTABLE, "char_isprintable"); add(CHAR_ISALPHANUMERIC, "char_isalphanumeric"); add(CHAR_ISALPHA, "char_isalpha"); add(CHAR_ISNUMERIC, "char_isnumeric"); add(STRING_LENGTH, "string_length"); add(SUBSTRING, "substring"); add(REMOVETAGS, "removetags"); add(STRING_INDEXOF_CHAR, "string_indexof_char"); add(STRING_INDEXOF_STRING, "string_indexof_string"); add(OC_NAME, "oc_name"); add(OC_OP, "oc_op"); add(OC_IOP, "oc_iop"); add(OC_COST, "oc_cost"); add(OC_STACKABLE, "oc_stackable"); add(OC_CERT, "oc_cert"); add(OC_UNCERT, "oc_uncert"); add(OC_MEMBERS, "oc_members"); add(OC_PLACEHOLDER, "oc_placeholder"); add(OC_UNPLACEHOLDER, "oc_unplaceholder"); add(OC_FIND, "oc_find"); add(OC_FINDNEXT, "oc_findnext"); add(OC_FINDRESET, "oc_findreset"); add(CHAT_GETFILTER_PUBLIC, "chat_getfilter_public"); add(CHAT_SETFILTER, "chat_setfilter"); add(CHAT_SENDABUSEREPORT, "chat_sendabusereport"); add(CHAT_GETHISTORY_BYTYPEANDLINE, "chat_gethistory_bytypeandline"); add(CHAT_GETHISTORY_BYUID, "chat_gethistory_byuid"); add(CHAT_GETFILTER_PRIVATE, "chat_getfilter_private"); add(CHAT_SENDPUBLIC, "chat_sendpublic"); add(CHAT_SENDPRIVATE, "chat_sendprivate"); add(CHAT_SENDCLAN, "chat_sendclan"); add(CHAT_PLAYERNAME, "chat_playername"); add(CHAT_GETFILTER_TRADE, "chat_getfilter_trade"); add(CHAT_GETHISTORYLENGTH, "chat_gethistorylength"); add(CHAT_GETNEXTUID, "chat_getnextuid"); add(CHAT_GETPREVUID, "chat_getprevuid"); add(DOCHEAT, "docheat"); add(CHAT_SETMESSAGEFILTER, "chat_setmessagefilter"); add(CHAT_GETMESSAGEFILTER, "chat_getmessagefilter"); add(WRITECONSOLE, "writeconsole"); add(CHAT_GETHISTORYEX_BYTYPEANDLINE, "chat_gethistoryex_bytypeandline"); add(CHAT_GETHISTORYEX_BYUID, 
"chat_gethistoryex_byuid"); add(GETWINDOWMODE, "getwindowmode"); add(SETWINDOWMODE, "setwindowmode"); add(GETDEFAULTWINDOWMODE, "getdefaultwindowmode"); add(SETDEFAULTWINDOWMODE, "setdefaultwindowmode"); add(CAM_FORCEANGLE, "cam_forceangle"); add(CAM_GETANGLE_XA, "cam_getangle_xa"); add(CAM_GETANGLE_YA, "cam_getangle_ya"); add(CAM_SETFOLLOWHEIGHT, "cam_setfollowheight"); add(CAM_GETFOLLOWHEIGHT, "cam_getfollowheight"); add(LOGOUT, "logout"); add(VIEWPORT_SETFOV, "viewport_setfov"); add(VIEWPORT_SETZOOM, "viewport_setzoom"); add(VIEWPORT_CLAMPFOV, "viewport_clampfov"); add(VIEWPORT_GETEFFECTIVESIZE, "viewport_geteffectivesize"); add(VIEWPORT_GETZOOM, "viewport_getzoom"); add(VIEWPORT_GETFOV, "viewport_getfov"); add(WORLDLIST_FETCH, "worldlist_fetch"); add(WORLDLIST_START, "worldlist_start"); add(WORLDLIST_NEXT, "worldlist_next"); add(WORLDLIST_SPECIFIC, "worldlist_specific"); add(WORLDLIST_SORT, "worldlist_sort"); add(SETFOLLOWEROPSLOWPRIORITY, "setfolloweropslowpriority"); add(NC_PARAM, "nc_param"); add(LC_PARAM, "lc_param"); add(OC_PARAM, "oc_param"); add(STRUCT_PARAM, "struct_param"); add(ON_MOBILE, "on_mobile"); add(CLIENTTYPE, "clienttype"); add(MOBILE_KEYBOARDHIDE, "mobile_keyboardhide"); add(MOBILE_BATTERYLEVEL, "mobile_batterylevel"); add(MOBILE_BATTERYCHARGING, "mobile_batterycharging"); add(MOBILE_WIFIAVAILABLE, "mobile_wifiavailable"); add(WORLDMAP_GETMAPNAME, "worldmap_getmapname"); add(WORLDMAP_SETMAP, "worldmap_setmap"); add(WORLDMAP_GETZOOM, "worldmap_getzoom"); add(WORLDMAP_SETZOOM, "worldmap_setzoom"); add(WORLDMAP_ISLOADED, "worldmap_isloaded"); add(WORLDMAP_JUMPTODISPLAYCOORD, "worldmap_jumptodisplaycoord"); add(WORLDMAP_JUMPTODISPLAYCOORD_INSTANT, "worldmap_jumptodisplaycoord_instant"); add(WORLDMAP_JUMPTOSOURCECOORD, "worldmap_jumptosourcecoord"); add(WORLDMAP_JUMPTOSOURCECOORD_INSTANT, "worldmap_jumptosourcecoord_instant"); add(WORLDMAP_GETDISPLAYPOSITION, "worldmap_getdisplayposition"); add(WORLDMAP_GETCONFIGORIGIN, 
"worldmap_getconfigorigin"); add(WORLDMAP_GETCONFIGSIZE, "worldmap_getconfigsize"); add(WORLDMAP_GETCONFIGBOUNDS, "worldmap_getconfigbounds"); add(WORLDMAP_GETCONFIGZOOM, "worldmap_getconfigzoom"); add(WORLDMAP_GETCURRENTMAP, "worldmap_getcurrentmap"); add(WORLDMAP_GETDISPLAYCOORD, "worldmap_getdisplaycoord"); add(WORLDMAP_COORDINMAP, "worldmap_coordinmap"); add(WORLDMAP_GETSIZE, "worldmap_getsize"); add(WORLDMAP_PERPETUALFLASH, "worldmap_perpetualflash"); add(WORLDMAP_FLASHELEMENT, "worldmap_flashelement"); add(WORLDMAP_FLASHELEMENTCATEGORY, "worldmap_flashelementcategory"); add(WORLDMAP_STOPCURRENTFLASHES, "worldmap_stopcurrentflashes"); add(WORLDMAP_DISABLEELEMENTS, "worldmap_disableelements"); add(WORLDMAP_DISABLEELEMENT, "worldmap_disableelement"); add(WORLDMAP_DISABLEELEMENTCATEGORY, "worldmap_disableelementcategory"); add(WORLDMAP_GETDISABLEELEMENTS, "worldmap_getdisableelements"); add(WORLDMAP_GETDISABLEELEMENT, "worldmap_getdisableelement"); add(WORLDMAP_GETDISABLEELEMENTCATEGORY, "worldmap_getdisableelementcategory"); add(WORLDMAP_LISTELEMENT_START, "worldmap_listelement_start"); add(WORLDMAP_LISTELEMENT_NEXT, "worldmap_listelement_next"); add(MEC_TEXT, "mec_text"); add(MEC_TEXTSIZE, "mec_textsize"); add(MEC_CATEGORY, "mec_category"); add(MEC_SPRITE, "mec_sprite"); add(WORLDMAP_ELEMENT, "worldmap_element"); add(WORLDMAP_ELEMENTCOORD, "worldmap_elementcoord"); add(DB_FIND_WITH_COUNT, "db_find_with_count"); add(DB_FINDNEXT, "db_findnext"); add(DB_GETFIELD, "db_getfield"); add(DB_GETFIELDCOUNT, "db_getfieldcount"); add(DB_FINDALL_WITH_COUNT, "db_findall_with_count"); add(DB_GETROWTABLE, "db_getrowtable"); add(DB_GETROW, "db_getrow"); add(DB_FIND_FILTER_WITH_COUNT, "db_find_filter_with_count"); add(DB_FIND, "db_find"); add(DB_FINDALL, "db_findall"); add(DB_FIND_FILTER, "db_find_filter"); }
// Smoke test: init() must register the whole opcode table without throwing
// (e.g. no duplicate registrations if add() enforces uniqueness — confirm add()'s contract).
@Test public void testInit() { new Instructions().init(); }
/**
 * Saves the given basic-auth configuration, delegating to the two-argument overload
 * with a {@code null} first argument (its meaning is not visible here — confirm against
 * the overload's signature).
 *
 * @param basicAuthConfiguration the configuration to persist
 */
public void save(BasicAuthConfiguration basicAuthConfiguration) { save(null, basicAuthConfiguration); }
// Saving with a non-email username must be rejected with a descriptive
// IllegalArgumentException and leave the stored configuration untouched; a valid email
// then saves successfully and triggers the OSS auth event API call.
@Test void secure() throws TimeoutException { IllegalArgumentException illegalArgumentException = Assertions.assertThrows( IllegalArgumentException.class, () -> basicAuthService.save(basicAuthConfiguration.withUsernamePassword("not-an-email", "password")) ); assertThat(illegalArgumentException.getMessage(), is("Invalid username for Basic Authentication. Please provide a valid email address.")); assertConfigurationMatchesApplicationYaml(); basicAuthService.save(basicAuthConfiguration.withUsernamePassword("some@email.com", "password")); awaitOssAuthEventApiCall("some@email.com"); }
@Override
public String getFileId(final DriveItem.Metadata metadata) {
    final String delimiter = String.valueOf(Path.DELIMITER);
    final ItemReference parent = metadata.getParentReference();
    final DriveItem.Metadata remote = metadata.getRemoteItem();
    if(remote == null) {
        // Plain item in the user's own drive: <driveId>/<itemId>.
        return String.join(delimiter, parent.getDriveId(), metadata.getId());
    }
    final ItemReference remoteParent = remote.getParentReference();
    if(parent == null) {
        // Shared item with no local parent reference: identified by its remote pair only.
        return String.join(delimiter, remoteParent.getDriveId(), remote.getId());
    }
    // Shared item linked into the local drive: local pair followed by the remote pair.
    return String.join(delimiter, parent.getDriveId(), metadata.getId(), remoteParent.getDriveId(), remote.getId());
}
// Parses a recorded API response for an item in the user's own drive and checks the
// composed file id contains the local drive/item pair followed by the remote pair.
@Test public void testRealConsumerFileIdResponseOwnDrive() throws Exception { final DriveItem.Metadata metadata; try (final InputStream test = getClass().getResourceAsStream("/RealConsumerFileIdResponseOwnDrive.json")) { final InputStreamReader reader = new InputStreamReader(test); metadata = DriveItem.parseJson(session.getClient(), (JsonObject) Json.parse(reader)); } assertEquals("A/A!0/B/B!2", session.getFileId(metadata)); }
@Override public List<T> pollN(int n) { if (n >= size) { // if we need to remove all elements then do fast polling return pollAll(); } List<T> retList = new ArrayList<T>(n); while (n-- > 0 && head != null) { T curr = head.element; this.removeElem(curr); retList.add(curr); } shrinkIfNecessary(); return retList; }
/** Verifies pollN returns elements in insertion order, both partially and past the end. */
@Test
public void testPollNMulti() {
    LOG.info("Test pollN multi");

    // use addAll
    set.addAll(list);

    // poll existing elements
    List<Integer> l = set.pollN(10);
    assertEquals(10, l.size());

    for (int i = 0; i < 10; i++) {
        assertEquals(list.get(i), l.get(i));
    }

    // poll more elements than present
    l = set.pollN(1000);
    assertEquals(NUM - 10, l.size());

    // check the order
    for (int i = 10; i < NUM; i++) {
        assertEquals(list.get(i), l.get(i - 10));
    }
    // set is empty
    assertTrue(set.isEmpty());
    assertEquals(0, set.size());

    LOG.info("Test pollN multi - DONE");
}
/**
 * Returns the computer producing the line hashes to persist for the given component.
 * Reuses cached hashes whenever they remain valid for the current significant-code
 * information; otherwise generates fresh hashes from the significant-code ranges.
 */
@Override
public LineHashesComputer getLineHashesComputerToPersist(Component component) {
    boolean cacheHit = cache.contains(component);

    // check if line hashes are cached and if we can use it
    if (cacheHit && !dbLineHashesVersion.hasLineHashesWithoutSignificantCode(component)) {
        return new CachedLineHashesComputer(cache.get(component));
    }

    Optional<LineRange[]> significantCodePerLine = significantCodeRepository.getRangesPerLine(component);
    // No significant-code ranges in the report: the cached hashes are still usable.
    if (cacheHit && !significantCodePerLine.isPresent()) {
        return new CachedLineHashesComputer(cache.get(component));
    }

    // Generate the line hashes taking into account significant code ranges
    return createLineHashesProcessor(component.getFileAttributes().getLines(), significantCodePerLine);
}
/** When DB hashes lack significant code but the report provides ranges, fresh hashes are generated. */
@Test
public void should_generate_to_persist_if_needed() {
    List<String> lineHashes = Lists.newArrayList("line1", "line2", "line3");
    LineRange[] lineRanges = {new LineRange(0, 1), null, new LineRange(1, 5)};

    sourceLinesHashCache.computeIfAbsent(file, c -> lineHashes);

    // DB has line hashes without significant code and significant code is available in the report, so we need to generate new line hashes
    when(dbLineHashVersion.hasLineHashesWithoutSignificantCode(file)).thenReturn(true);
    when(significantCodeRepository.getRangesPerLine(file)).thenReturn(Optional.of(lineRanges));

    LineHashesComputer hashesComputer = underTest.getLineHashesComputerToPersist(file);

    assertThat(hashesComputer).isInstanceOf(SignificantCodeLineHashesComputer.class);
}
/**
 * Configures the deserializer from client configs. Fails fast when the instance was
 * already initialized via the non-default constructor, since config-based setup would
 * silently conflict with the constructor-supplied state.
 */
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
    final boolean alreadyInitialized = listClass != null || inner != null;
    if (alreadyInitialized) {
        log.error("Could not configure ListDeserializer as some parameters were already set -- listClass: {}, inner: {}", listClass, inner);
        throw new ConfigException("List deserializer was already initialized using a non-default constructor");
    }
    configureListClass(configs, isKey);
    configureInnerSerde(configs, isKey);
}
/** Missing list-class config must produce a ConfigException with a descriptive message. */
@Test
public void testListKeyDeserializerNoArgConstructorsShouldThrowConfigExceptionDueMissingTypeClassProp() {
    props.put(CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_INNER_CLASS, Serdes.StringSerde.class);
    final ConfigException exception = assertThrows(
        ConfigException.class,
        () -> listDeserializer.configure(props, true)
    );
    assertEquals("Not able to determine the list class because " +
        "it was neither passed via the constructor nor set in the config.", exception.getMessage());
}
/**
 * (Re)schedules the given health check, first cancelling any existing schedule for it.
 * Healthy dependencies are polled at the normal check interval; unhealthy ones at the
 * (typically shorter) downtime interval.
 */
public void schedule(final ScheduledHealthCheck check, final boolean healthy) {
    unschedule(check.getName());
    final Duration interval = healthy
            ? check.getSchedule().getCheckInterval()
            : check.getSchedule().getDowntimeInterval();
    schedule(check, interval, interval);
}
/** A healthy, not-yet-scheduled check is submitted with the regular check interval. */
@Test
void shouldScheduleCheckForNotAlreadyScheduledHealthyDependency() {
    final String name = "test";
    final Schedule schedule = new Schedule();
    final ScheduledHealthCheck check = mock(ScheduledHealthCheck.class);
    when(check.getName()).thenReturn(name);
    when(check.getSchedule()).thenReturn(schedule);
    when(executor.scheduleWithFixedDelay(check, schedule.getCheckInterval().toMilliseconds(),
        schedule.getCheckInterval().toMilliseconds(), TimeUnit.MILLISECONDS))
        .thenReturn(mock(ScheduledFuture.class));

    scheduler.schedule(check, true);

    verify(executor).scheduleWithFixedDelay(check, schedule.getCheckInterval().toMilliseconds(),
        schedule.getCheckInterval().toMilliseconds(), TimeUnit.MILLISECONDS);
}
@VisibleForTesting V1StatefulSet createStatefulSet() { final String jobName = createJobName(instanceConfig.getFunctionDetails(), this.jobName); final V1StatefulSet statefulSet = new V1StatefulSet(); // setup stateful set metadata final V1ObjectMeta objectMeta = new V1ObjectMeta(); objectMeta.name(jobName); objectMeta.setLabels(getLabels(instanceConfig.getFunctionDetails())); // we don't technically need to set this, but it is useful for testing objectMeta.setNamespace(jobNamespace); statefulSet.metadata(objectMeta); // create the stateful set spec final V1StatefulSetSpec statefulSetSpec = new V1StatefulSetSpec(); statefulSetSpec.serviceName(jobName); statefulSetSpec.setReplicas(instanceConfig.getFunctionDetails().getParallelism()); // Parallel pod management tells the StatefulSet controller to launch or terminate // all Pods in parallel, and not to wait for Pods to become Running and Ready or completely // terminated prior to launching or terminating another Pod. statefulSetSpec.setPodManagementPolicy("Parallel"); // add selector match labels // so the we know which pods to manage final V1LabelSelector selector = new V1LabelSelector(); selector.matchLabels(getLabels(instanceConfig.getFunctionDetails())); statefulSetSpec.selector(selector); // create a pod template final V1PodTemplateSpec podTemplateSpec = new V1PodTemplateSpec(); // set up pod meta final V1ObjectMeta templateMetaData = new V1ObjectMeta().labels(getLabels(instanceConfig.getFunctionDetails())); templateMetaData.annotations(getPrometheusAnnotations()); podTemplateSpec.setMetadata(templateMetaData); final List<String> command = getExecutorCommand(); podTemplateSpec.spec(getPodSpec(command, instanceConfig.getFunctionDetails().hasResources() ? 
instanceConfig.getFunctionDetails().getResources() : null)); statefulSetSpec.setTemplate(podTemplateSpec); statefulSet.spec(statefulSetSpec); // let the customizer run but ensure it doesn't change the name so we can find it again final V1StatefulSet overridden = manifestCustomizer .map((customizer) -> customizer.customizeStatefulSet(instanceConfig.getFunctionDetails(), statefulSet)) .orElse(statefulSet); overridden.getMetadata().name(jobName); return statefulSet; }
/** The generated container command must carry auth-plugin, auth-params and TLS flags for downloads. */
@Test
public void testCustomKubernetesDownloadCommandsWithAuthAndCustomTLSWithoutAuthSpec() throws Exception {
    InstanceConfig config = createJavaInstanceConfig(FunctionDetails.Runtime.JAVA, false);
    config.setFunctionDetails(createFunctionDetails(FunctionDetails.Runtime.JAVA, false));

    factory = createKubernetesRuntimeFactory(null, 10, 1.0, 1.0, Optional.empty(), null, wconfig -> {
        wconfig.setAuthenticationEnabled(true);
    }, AuthenticationConfig.builder()
        .clientAuthenticationPlugin("com.MyAuth")
        .clientAuthenticationParameters("{\"authParam1\": \"authParamValue1\"}")
        .useTls(true) // set to verify it is ignored because pulsar admin does not consider this setting
        .tlsHostnameVerificationEnable(true)
        .tlsTrustCertsFilePath("/my/ca.pem")
        .build());

    KubernetesRuntime container = factory.createContainer(config, userJarFile, userJarFile, null, null, 30l);
    V1StatefulSet spec = container.createStatefulSet();
    String expectedDownloadCommand = "pulsar-admin --admin-url " + pulsarAdminUrl
        + " --auth-plugin com.MyAuth --auth-params {\"authParam1\": \"authParamValue1\"}"
        + " --tls-enable-hostname-verification"
        + " --tls-trust-cert-path /my/ca.pem"
        + " functions download "
        + "--tenant " + TEST_TENANT
        + " --namespace " + TEST_NAMESPACE
        + " --name " + TEST_NAME
        + " --destination-file " + pulsarRootDir + "/" + userJarFile;
    String containerCommand = spec.getSpec().getTemplate().getSpec().getContainers().get(0).getCommand().get(2);
    assertTrue(containerCommand.contains(expectedDownloadCommand), "Found:" + containerCommand);
}
/**
 * Sets the codec name used by this protocol for wire (de)serialization.
 *
 * @param codec codec implementation name
 * @return this builder, for fluent chaining
 */
public ProtocolBuilder codec(String codec) {
    this.codec = codec;
    return getThis();
}
/** Builder round-trip: the codec name set on the builder appears in the built config. */
@Test
void codec() {
    ProtocolBuilder builder = new ProtocolBuilder();
    builder.codec("mockcodec");
    Assertions.assertEquals("mockcodec", builder.build().getCodec());
}
/**
 * Entry point for the CLI: creates the web service client, delegates to the actual
 * command, and always releases client resources on the way out.
 */
@Override
public int run(String[] args) throws Exception {
    try {
        webServiceClient = WebServiceClient.getWebServiceClient().createClient();
        return runCommand(args);
    } finally {
        // Release resources regardless of whether the command succeeded.
        if (yarnClient != null) {
            yarnClient.close();
        }
        if (webServiceClient != null) {
            webServiceClient.destroy();
        }
    }
}
/** -list_nodes must print every node that produced aggregated logs for the application. */
@Test (timeout = 15000)
public void testListNodeInfo() throws Exception {
    String remoteLogRootDir = "target/logs/";
    conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
    conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, remoteLogRootDir);
    conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
    conf.set(YarnConfiguration.YARN_ADMIN_ACL, "admin");

    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1);

    List<ContainerId> containerIds = new ArrayList<ContainerId>();
    ContainerId containerId1 = ContainerId.newContainerId(appAttemptId, 1);
    ContainerId containerId2 = ContainerId.newContainerId(appAttemptId, 2);
    containerIds.add(containerId1);
    containerIds.add(containerId2);

    List<NodeId> nodeIds = new ArrayList<NodeId>();
    NodeId nodeId1 = NodeId.newInstance("localhost1", 1234);
    NodeId nodeId2 = NodeId.newInstance("localhost2", 2345);
    nodeIds.add(nodeId1);
    nodeIds.add(nodeId2);

    String rootLogDir = "target/LocalLogs";
    FileSystem fs = FileSystem.get(conf);

    createContainerLogs(conf, remoteLogRootDir, rootLogDir, fs, appId, containerIds, nodeIds);

    YarnClient mockYarnClient = createMockYarnClient(YarnApplicationState.FINISHED,
        UserGroupInformation.getCurrentUser().getShortUserName());
    LogsCLI cli = new LogsCLIForTest(mockYarnClient);
    cli.setConf(conf);

    cli.run(new String[] { "-applicationId", appId.toString(), "-list_nodes" });
    assertTrue(sysOutStream.toString().contains(
        LogAggregationUtils.getNodeString(nodeId1)));
    assertTrue(sysOutStream.toString().contains(
        LogAggregationUtils.getNodeString(nodeId2)));
    sysOutStream.reset();

    // Clean up the log directories created for this test.
    fs.delete(new Path(remoteLogRootDir), true);
    fs.delete(new Path(rootLogDir), true);
}
/**
 * Handles the ETH_GAS_PRICE operation: queries the node for the current gas price
 * and, when the response carries no error, stores the price on the message body.
 */
@InvokeOnHeader(Web3jConstants.ETH_GAS_PRICE)
void ethGasPrice(Message message) throws IOException {
    final Request<?, EthGasPrice> request = web3j.ethGasPrice();
    setRequestId(message, request);
    final EthGasPrice response = request.send();
    // checkForError records any error on the message; only set the body on success.
    if (!checkForError(message, response)) {
        message.setBody(response.getGasPrice());
    }
}
/** A successful gas-price response must be propagated to the exchange body. */
@Test
public void ethGasPriceTest() throws Exception {
    EthGasPrice response = Mockito.mock(EthGasPrice.class);
    Mockito.when(mockWeb3j.ethGasPrice()).thenReturn(request);
    Mockito.when(request.send()).thenReturn(response);
    Mockito.when(response.getGasPrice()).thenReturn(BigInteger.ONE);

    Exchange exchange = createExchangeWithBodyAndHeader(null, OPERATION, Web3jConstants.ETH_GAS_PRICE);
    template.send(exchange);
    BigInteger body = exchange.getIn().getBody(BigInteger.class);
    assertEquals(BigInteger.ONE, body);
}
@VisibleForTesting static boolean isBrokenPipe(IOException original) { Throwable exception = original; while (exception != null) { String message = exception.getMessage(); if (message != null && message.toLowerCase(Locale.US).contains("broken pipe")) { return true; } exception = exception.getCause(); if (exception == original) { // just in case if there's a circular chain return false; } } return false; }
/** "broken pipe" must be detected case-insensitively across several exception types. */
@Test
public void testIsBrokenPipe_brokenPipe() {
    Assert.assertTrue(RegistryEndpointCaller.isBrokenPipe(new IOException("cool broken pipe !")));
    Assert.assertTrue(RegistryEndpointCaller.isBrokenPipe(new SocketException("BROKEN PIPE")));
    Assert.assertTrue(RegistryEndpointCaller.isBrokenPipe(new SSLException("calm BrOkEn PiPe")));
}
/**
 * Returns n! as a double. Exact for small n; for n &gt; 170 the result exceeds the
 * double range and becomes {@code Double.POSITIVE_INFINITY}.
 *
 * @param n non-negative integer
 * @return the factorial of n
 * @throws IllegalArgumentException if n is negative
 */
public static double factorial(int n) {
    if (n < 0) {
        throw new IllegalArgumentException("n has to be non-negative.");
    }

    double result = 1.0;
    for (int i = n; i > 1; i--) {
        result *= i;
    }
    return result;
}
/** Spot-checks small factorial values. */
@Test
public void testFactorial() {
    System.out.println("factorial");
    assertEquals(1.0, MathEx.factorial(0), 1E-7);
    assertEquals(1.0, MathEx.factorial(1), 1E-7);
    assertEquals(2.0, MathEx.factorial(2), 1E-7);
    assertEquals(6.0, MathEx.factorial(3), 1E-7);
    assertEquals(24.0, MathEx.factorial(4), 1E-7);
}
/**
 * Returns the known Sdk for the given API level, or a fresh {@code UnknownSdk}
 * placeholder when the level is not registered.
 */
public Sdk getSdk(int apiLevel) {
    final Sdk known = knownSdks.get(apiLevel);
    if (known != null) {
        return known;
    }
    return new UnknownSdk(apiLevel);
}
/** Repeated lookups must return the same instance and hit the provider only once. */
@Test
public void shouldCacheSdks() throws Exception {
    assertThat(sdkCollection.getSdk(1234)).isSameInstanceAs(fakeSdk1234);
    assertThat(sdkCollection.getSdk(1234)).isSameInstanceAs(fakeSdk1234);
    verify(mockSdkProvider, times(1)).getSdks();
}
/**
 * REST endpoint returning metadata of the latest version of an extension.
 * Queries each configured registry in order and returns the first hit; responds
 * 404 with an error body when no registry knows the extension.
 */
@GetMapping(
    path = "/api/{namespace}/{extension}",
    produces = MediaType.APPLICATION_JSON_VALUE
)
@CrossOrigin
@Operation(summary = "Provides metadata of the latest version of an extension")
@ApiResponses({
    @ApiResponse(
        responseCode = "200",
        description = "The extension metadata are returned in JSON format"
    ),
    @ApiResponse(
        responseCode = "404",
        description = "The specified extension could not be found",
        content = @Content()
    ),
    @ApiResponse(
        responseCode = "429",
        description = "A client has sent too many requests in a given amount of time",
        content = @Content(),
        headers = {
            @Header(
                name = "X-Rate-Limit-Retry-After-Seconds",
                description = "Number of seconds to wait after receiving a 429 response",
                schema = @Schema(type = "integer", format = "int32")
            ),
            @Header(
                name = "X-Rate-Limit-Remaining",
                description = "Remaining number of requests left",
                schema = @Schema(type = "integer", format = "int32")
            )
        }
    )
})
public ResponseEntity<ExtensionJson> getExtension(
    @PathVariable @Parameter(description = "Extension namespace", example = "redhat") String namespace,
    @PathVariable @Parameter(description = "Extension name", example = "java") String extension
) {
    // First registry that resolves the extension wins.
    for (var registry : getRegistries()) {
        try {
            return ResponseEntity.ok()
                .cacheControl(CacheControl.noCache().cachePublic())
                .body(registry.getExtension(namespace, extension, null));
        } catch (NotFoundException exc) {
            // Try the next registry
        }
    }
    var json = ExtensionJson.error("Extension not found: " + NamingUtil.toExtensionId(namespace, extension));
    return new ResponseEntity<>(json, HttpStatus.NOT_FOUND);
}
/** Deleting a review that was never submitted must return 400 with an explanatory error. */
@Test
public void testDeleteNonExistingReview() throws Exception {
    var user = mockUserData();
    var extVersion = mockExtension();
    var extension = extVersion.getExtension();
    Mockito.when(repositories.findExtension("bar", "foo"))
        .thenReturn(extension);
    Mockito.when(repositories.findActiveReviews(extension, user))
        .thenReturn(Streamable.empty());

    mockMvc.perform(post("/api/{namespace}/{extension}/review/delete", "foo", "bar")
            .with(user("test_user"))
            .with(csrf().asHeader()))
        .andExpect(status().isBadRequest())
        .andExpect(content().json(errorJson("You have not submitted any review yet.")));
}
/**
 * Validates the given {@link PipelineOptions} and constructs a {@link DataflowRunner}.
 * Performs, in order: required-option checks (appName, region), worker settings
 * validation, GCS path validation (temp/staging/profiles), files-to-stage resolution,
 * job-name normalization, project-id validation, debug/JFR/streaming option checks,
 * and user-agent construction. Throws {@link IllegalArgumentException} on any
 * invalid or missing option.
 */
public static DataflowRunner fromOptions(PipelineOptions options) {
    DataflowPipelineOptions dataflowOptions =
        PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options);
    ArrayList<String> missing = new ArrayList<>();

    if (dataflowOptions.getAppName() == null) {
      missing.add("appName");
    }

    if (Strings.isNullOrEmpty(dataflowOptions.getRegion())
        && isServiceEndpoint(dataflowOptions.getDataflowEndpoint())) {
      missing.add("region");
    }
    if (missing.size() > 0) {
      throw new IllegalArgumentException(
          "Missing required pipeline options: " + Joiner.on(',').join(missing));
    }

    validateWorkerSettings(
        PipelineOptionsValidator.validate(DataflowPipelineWorkerPoolOptions.class, options));

    PathValidator validator = dataflowOptions.getPathValidator();
    String gcpTempLocation;
    try {
      gcpTempLocation = dataflowOptions.getGcpTempLocation();
    } catch (Exception e) {
      throw new IllegalArgumentException(
          "DataflowRunner requires gcpTempLocation, "
              + "but failed to retrieve a value from PipelineOptions",
          e);
    }
    validator.validateOutputFilePrefixSupported(gcpTempLocation);

    String stagingLocation;
    try {
      stagingLocation = dataflowOptions.getStagingLocation();
    } catch (Exception e) {
      throw new IllegalArgumentException(
          "DataflowRunner requires stagingLocation, "
              + "but failed to retrieve a value from PipelineOptions",
          e);
    }
    validator.validateOutputFilePrefixSupported(stagingLocation);

    if (!isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) {
      validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs());
    }

    if (dataflowOptions.getFilesToStage() != null) {
      // The user specifically requested these files, so fail now if they do not exist.
      // (automatically detected classpath elements are permitted to not exist, so later
      // staging will not fail on nonexistent files)
      dataflowOptions.getFilesToStage().stream()
          .forEach(
              stagedFileSpec -> {
                File localFile;
                if (stagedFileSpec.contains("=")) {
                  String[] components = stagedFileSpec.split("=", 2);
                  localFile = new File(components[1]);
                } else {
                  localFile = new File(stagedFileSpec);
                }
                if (!localFile.exists()) {
                  // should be FileNotFoundException, but for build-time backwards compatibility
                  // cannot add checked exception
                  throw new RuntimeException(
                      String.format("Non-existent files specified in filesToStage: %s", localFile));
                }
              });
    } else {
      dataflowOptions.setFilesToStage(
          detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader(), options));
      if (dataflowOptions.getFilesToStage().isEmpty()) {
        throw new IllegalArgumentException("No files to stage has been found.");
      } else {
        LOG.info(
            "PipelineOptions.filesToStage was not specified. "
                + "Defaulting to files from the classpath: will stage {} files. "
                + "Enable logging at DEBUG level to see which files will be staged.",
            dataflowOptions.getFilesToStage().size());
        LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage());
      }
    }

    // Verify jobName according to service requirements, truncating converting to lowercase if
    // necessary.
    String jobName = dataflowOptions.getJobName().toLowerCase();
    checkArgument(
        jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"),
        "JobName invalid; the name must consist of only the characters "
            + "[-a-z0-9], starting with a letter and ending with a letter "
            + "or number");
    if (!jobName.equals(dataflowOptions.getJobName())) {
      LOG.info(
          "PipelineOptions.jobName did not match the service requirements. "
              + "Using {} instead of {}.",
          jobName,
          dataflowOptions.getJobName());
    }
    dataflowOptions.setJobName(jobName);

    // Verify project
    String project = dataflowOptions.getProject();
    if (project.matches("[0-9]*")) {
      throw new IllegalArgumentException(
          "Project ID '"
              + project
              + "' invalid. Please make sure you specified the Project ID, not project number.");
    } else if (!project.matches(PROJECT_ID_REGEXP)) {
      throw new IllegalArgumentException(
          "Project ID '"
              + project
              + "' invalid. Please make sure you specified the Project ID, not project"
              + " description.");
    }

    DataflowPipelineDebugOptions debugOptions =
        dataflowOptions.as(DataflowPipelineDebugOptions.class);
    // Verify the number of worker threads is a valid value
    if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) {
      throw new IllegalArgumentException(
          "Number of worker harness threads '"
              + debugOptions.getNumberOfWorkerHarnessThreads()
              + "' invalid. Please make sure the value is non-negative.");
    }

    // Verify that if recordJfrOnGcThrashing is set, the pipeline is at least on java 11
    if (dataflowOptions.getRecordJfrOnGcThrashing()
        && Environments.getJavaVersion() == Environments.JavaVersion.java8) {
      throw new IllegalArgumentException(
          "recordJfrOnGcThrashing is only supported on java 9 and up.");
    }

    if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) {
      dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT);
    }

    // Adding the Java version to the SDK name for user's and support convenience.
    String agentJavaVer = "(JRE 8 environment)";
    if (Environments.getJavaVersion() != Environments.JavaVersion.java8) {
      agentJavaVer =
          String.format("(JRE %s environment)", Environments.getJavaVersion().specification());
    }

    DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo();
    String userAgentName = dataflowRunnerInfo.getName();
    Preconditions.checkArgument(
        !userAgentName.equals(""), "Dataflow runner's `name` property cannot be empty.");
    String userAgentVersion = dataflowRunnerInfo.getVersion();
    Preconditions.checkArgument(
        !userAgentVersion.equals(""), "Dataflow runner's `version` property cannot be empty.");
    String userAgent =
        String.format("%s/%s%s", userAgentName, userAgentVersion, agentJavaVer).replace(" ", "_");
    dataflowOptions.setUserAgent(userAgent);

    return new DataflowRunner(dataflowOptions);
}
/** Supplying gcpTempLocation alone (no tempLocation) must be sufficient to construct the runner. */
@Test
public void testGcpTempAndNoTempLocationSucceeds() throws Exception {
    DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
    options.setRunner(DataflowRunner.class);
    options.setGcpCredential(new TestCredential());
    options.setProject("foo-project");
    options.setRegion(REGION_ID);
    options.setGcpTempLocation(VALID_TEMP_BUCKET);
    options.setGcsUtil(mockGcsUtil);

    DataflowRunner.fromOptions(options);
}
/**
 * Publishes the given rule-engine message to the target topic partition and bumps
 * the outbound message counter.
 */
@Override
public void pushMsgToRuleEngine(TopicPartitionInfo tpi, UUID msgId, ToRuleEngineMsg msg, TbQueueCallback callback) {
    log.trace("PUSHING msg: {} to:{}", msg, tpi);
    final TbProtoQueueMsg<ToRuleEngineMsg> queueMsg = new TbProtoQueueMsg<>(msgId, msg);
    producerProvider.getRuleEngineMsgProducer().send(tpi, queueMsg, callback);
    toRuleEngineMsgs.incrementAndGet();
}
/** With a real tenant id, the message must be routed through the rule-engine producer service. */
@Test
public void testPushMsgToRuleEngineWithTenantIdIsNotNullUuidUseQueueFromMsgIsTrue() {
    TbQueueProducer<TbProtoQueueMsg<TransportProtos.ToRuleEngineMsg>> tbREQueueProducer = mock(TbQueueProducer.class);
    TbQueueCallback callback = mock(TbQueueCallback.class);

    TenantId tenantId = TenantId.fromUUID(UUID.fromString("3c8bd350-1239-4a3b-b9c3-4dd76f8e20f1"));
    DeviceId deviceId = new DeviceId(UUID.fromString("adbb9d41-3367-40fd-9e74-7dd7cc5d30cf"));
    DeviceProfile deviceProfile = new DeviceProfile(new DeviceProfileId(UUID.fromString("552f5d6d-0b2b-43e1-a7d2-a51cb2a96927")));
    TbMsg requestMsg = TbMsg.newMsg(DataConstants.HP_QUEUE_NAME, TbMsgType.REST_API_REQUEST, deviceId, TbMsgMetaData.EMPTY, TbMsg.EMPTY_JSON_OBJECT);

    when(deviceProfileCache.get(any(TenantId.class), any(DeviceId.class))).thenReturn(deviceProfile);
    when(producerProvider.getRuleEngineMsgProducer()).thenReturn(tbREQueueProducer);

    clusterService.pushMsgToRuleEngine(tenantId, deviceId, requestMsg, true, callback);

    verify(producerProvider).getRuleEngineMsgProducer();
    verify(ruleEngineProducerService).sendToRuleEngine(tbREQueueProducer, tenantId, requestMsg, callback);
}
/**
 * Maps each displayed column's field name to its index in the log-table field list.
 * Columns whose value-meta name has no matching log-table field are omitted.
 *
 * Fix: replaced the raw-typed {@code new HashMap()} with the diamond operator so the
 * map is properly parameterized and the unchecked-assignment warning goes away.
 *
 * @param model the history log tab holding the display columns and log-table fields
 * @return field name -> index into {@code model.logTableFields}
 */
@VisibleForTesting
Map<String, Integer> getColumnMappings( JobHistoryDelegate.JobHistoryLogTab model ) {
    Map<String, Integer> map = new HashMap<>();
    for ( ColumnInfo ci : model.logDisplayTableView.getColumns() ) {
        for ( int i = 0; i < model.logTableFields.size(); i++ ) {
            if ( ci.getValueMeta().getName().equals( model.logTableFields.get( i ).getFieldName() ) ) {
                map.put( model.logTableFields.get( i ).getFieldName(), i );
                break;
            }
        }
    }
    return map;
}
/** Column names present in both views map to their field indices; missing ones are skipped. */
@Test
public void getColumnMappings() {
    TableView view = mock( TableView.class );
    doReturn( getColumnInfo() ).when( view ).getColumns();

    JobHistoryDelegate.JobHistoryLogTab model = mock( JobHistoryDelegate.JobHistoryLogTab.class );
    setInternalState( model, "logDisplayTableView", view );
    setInternalState( model, "logTableFields", getLogTableFields() );

    JobHistoryDelegate history = new JobHistoryDelegate( mock( Spoon.class ), mock( JobGraph.class ) );
    Map<String, Integer> map = history.getColumnMappings( model );

    assertEquals( 0, (int) map.get( "COLUMN_1" ) );
    assertEquals( 1, (int) map.get( "COLUMN_2" ) );
    assertEquals( 2, (int) map.get( "COLUMN_3" ) );
    assertEquals( 4, (int) map.get( "COLUMN_5" ) );
    assertEquals( 5, (int) map.get( "COLUMN_6" ) );
}
/**
 * FEEL string() function: formats the given value as its FEEL string representation,
 * passing {@code null} through unchanged.
 */
public FEELFnResult<String> invoke(@ParameterName("from") Object val) {
    return val == null
            ? FEELFnResult.ofResult( null )
            : FEELFnResult.ofResult( TypeUtil.formatValue(val, false) );
}
/** Period.ZERO must format as the canonical FEEL duration string "P0M". */
@Test
void invokePeriodZero() {
    FunctionTestUtil.assertResult(stringFunction.invoke(Period.ZERO), "P0M");
}
/**
 * Returns whether the given coordinate lies inside this bounding box,
 * boundaries included.
 */
public boolean contains(double latitude, double longitude) {
    return latitude >= this.minLatitude && latitude <= this.maxLatitude
            && longitude >= this.minLongitude && longitude <= this.maxLongitude;
}
/** Corner points are inside the box; swapped/out-of-range coordinates are not. */
@Test
public void containsLatLongTest() {
    BoundingBox boundingBox = new BoundingBox(MIN_LATITUDE, MIN_LONGITUDE, MAX_LATITUDE, MAX_LONGITUDE);
    LatLong latLong1 = new LatLong(MIN_LATITUDE, MIN_LONGITUDE);
    LatLong latLong2 = new LatLong(MAX_LATITUDE, MAX_LONGITUDE);
    LatLong latLong3 = new LatLong(MIN_LONGITUDE, MIN_LONGITUDE);
    LatLong latLong4 = new LatLong(MAX_LATITUDE, MAX_LATITUDE);

    Assert.assertTrue(boundingBox.contains(latLong1));
    Assert.assertTrue(boundingBox.contains(latLong2));
    Assert.assertFalse(boundingBox.contains(latLong3));
    Assert.assertFalse(boundingBox.contains(latLong4));
}
/**
 * Returns the union of two maps. Entries from {@code map2} win on duplicate keys
 * (it is applied last). Returns {@code null} only when both inputs are {@code null};
 * a single {@code null} input is treated as an empty map.
 */
@Udf
public <T> Map<String, T> union(
    @UdfParameter(description = "first map to union") final Map<String, T> map1,
    @UdfParameter(description = "second map to union") final Map<String, T> map2) {

  if (map1 == null && map2 == null) {
    return null;
  }

  final Map<String, T> output = new HashMap<>();
  if (map1 != null) {
    output.putAll(map1);
  }
  if (map2 != null) {
    output.putAll(map2);
  }
  return output;
}
/** Two empty (non-null) inputs must yield an empty map, not null. */
@Test
public void shouldReturnEmptyMapFromEmptyMaps() {
    final Map<String, BigDecimal> input1 = Maps.newHashMap();
    final Map<String, BigDecimal> input2 = Maps.newHashMap();
    assertThat(udf.union(input1, input2), equalTo(Collections.EMPTY_MAP));
}
/**
 * Returns a {@code Combine.BinaryCombineDoubleFn} that sums its {@code double} inputs.
 */
public static Combine.BinaryCombineDoubleFn ofDoubles() {
    return new SumDoubleFn();
}
/** Summing 1..4 as doubles yields 10.0. */
@Test
public void testSumDoubleFn() {
    testCombineFn(Sum.ofDoubles(), Lists.newArrayList(1.0, 2.0, 3.0, 4.0), 10.0);
}
/**
 * Creates a share (download) link for the given file via the Nextcloud OCS sharing API.
 * Builds a public link for {@code Sharee.world}, otherwise a user share for the given
 * sharee, and prompts for an optional passphrase protecting the share.
 */
@Override
public DescriptiveUrl toDownloadUrl(final Path file, final Sharee sharee, final Object options, final PasswordCallback callback) throws BackgroundException {
    final Host bookmark = session.getHost();
    // Share type and target depend on whether this is a public link or a user share.
    final StringBuilder request = new StringBuilder(String.format("https://%s%s/apps/files_sharing/api/v1/shares?path=%s&shareType=%d&shareWith=%s",
            bookmark.getHostname(),
            new NextcloudHomeFeature(bookmark).find(NextcloudHomeFeature.Context.ocs).getAbsolute(),
            URIEncoder.encode(PathRelativizer.relativize(NextcloudHomeFeature.Context.files.home(bookmark).find().getAbsolute(), file.getAbsolute())),
            Sharee.world.equals(sharee) ? SHARE_TYPE_PUBLIC_LINK : SHARE_TYPE_USER,
            Sharee.world.equals(sharee) ? StringUtils.EMPTY : sharee.getIdentifier()
    ));
    // Optional passphrase protecting the share; anonymous prompt, never stored in the keychain.
    final Credentials password = callback.prompt(bookmark,
            LocaleFactory.localizedString("Passphrase", "Cryptomator"),
            MessageFormat.format(LocaleFactory.localizedString("Create a passphrase required to access {0}", "Credentials"), file.getName()),
            new LoginOptions().anonymous(true).keychain(false).icon(bookmark.getProtocol().disk()));
    if(password.isPasswordAuthentication()) {
        request.append(String.format("&password=%s", URIEncoder.encode(password.getPassword())));
    }
    final HttpPost resource = new HttpPost(request.toString());
    resource.setHeader("OCS-APIRequest", "true");
    resource.setHeader(HttpHeaders.ACCEPT, ContentType.APPLICATION_XML.getMimeType());
    try {
        return session.getClient().execute(resource, new OcsDownloadShareResponseHandler());
    }
    catch(HttpResponseException e) {
        throw new DefaultHttpResponseExceptionMappingService().map(e);
    }
    catch(IOException e) {
        throw new DefaultIOExceptionMappingService().map(e);
    }
}
/** Sharing a nonexistent file must surface a NotfoundException. */
@Test
public void testToDownloadNotfound() throws Exception {
    final Path home = new NextcloudHomeFeature(session.getHost()).find();
    final Path file = new Path(home, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
    assertThrows(NotfoundException.class,
        () -> new NextcloudShareFeature(session).toDownloadUrl(file, Share.Sharee.world, null, new DisabledPasswordCallback()));
}
/**
 * Deserializes the given source text into an instance of {@code cls}.
 *
 * @param input raw source to deserialize
 * @param cls   target type
 * @return the parsed instance
 */
public <T> T parse(String input, Class<T> cls) {
    return readFlow(input, cls, type(cls));
}
/** Legacy input declarations must still parse into typed flow inputs. */
@Test
void inputsOld() {
    Flow flow = this.parse("flows/tests/inputs-old.yaml");

    assertThat(flow.getInputs().size(), is(1));
    assertThat(flow.getInputs().getFirst().getId(), is("myInput"));
    assertThat(flow.getInputs().getFirst().getType(), is(Type.STRING));
}
/**
 * Encodes a log header at {@code offset}, timestamping with the system nano clock.
 *
 * @param encodingBuffer destination buffer
 * @param offset         position at which the header starts
 * @param captureLength  number of bytes actually captured
 * @param length         length of the original message
 * @return number of bytes written for the header
 */
static int encodeLogHeader(
    final MutableDirectBuffer encodingBuffer,
    final int offset,
    final int captureLength,
    final int length) {
    return internalEncodeLogHeader(encodingBuffer, offset, captureLength, length, SystemNanoClock.INSTANCE);
}
/** Header layout: capture length, then length, then the clock's nano timestamp. */
@Test
void encodeLogHeader() {
    final int offset = 12;
    final int encodedLength =
        internalEncodeLogHeader(buffer, offset, 100, Integer.MAX_VALUE, () -> 555_000L);

    assertEquals(LOG_HEADER_LENGTH, encodedLength);
    assertEquals(100, buffer.getInt(offset, LITTLE_ENDIAN));
    assertEquals(Integer.MAX_VALUE, buffer.getInt(offset + SIZE_OF_INT, LITTLE_ENDIAN));
    assertEquals(555_000L, buffer.getLong(offset + SIZE_OF_INT * 2, LITTLE_ENDIAN));
}
/**
 * Constructs worker harness options from JVM system properties. Pipeline options are
 * read either inline ({@code sdk_pipeline_options}, deprecated) or from a file
 * ({@code sdk_pipeline_options_file}); worker/job identity properties are then
 * overlaid, and any impersonation setting is stripped since it must not be used
 * on the worker.
 *
 * @param harnessOptionsClass concrete options interface to materialize
 * @return the populated options instance
 * @throws IOException if the options file cannot be read
 */
public static <T extends DataflowWorkerHarnessOptions> T createFromSystemProperties(
    Class<T> harnessOptionsClass) throws IOException {
    ObjectMapper objectMapper = new ObjectMapper();
    T options;
    if (System.getProperties().containsKey("sdk_pipeline_options")) {
        // TODO: remove this method of getting pipeline options, once migration is complete.
        String serializedOptions = System.getProperty("sdk_pipeline_options");
        LOG.info("Worker harness starting with: {}", serializedOptions);
        options =
            objectMapper.readValue(serializedOptions, PipelineOptions.class).as(harnessOptionsClass);
    } else if (System.getProperties().containsKey("sdk_pipeline_options_file")) {
        String filePath = System.getProperty("sdk_pipeline_options_file");
        LOG.info("Loading pipeline options from " + filePath);
        String serializedOptions =
            new String(Files.readAllBytes(Paths.get(filePath)), StandardCharsets.UTF_8);
        LOG.info("Worker harness starting with: " + serializedOptions);
        options =
            objectMapper.readValue(serializedOptions, PipelineOptions.class).as(harnessOptionsClass);
    } else {
        LOG.info("Using empty PipelineOptions, as none were provided.");
        options = PipelineOptionsFactory.as(harnessOptionsClass);
    }

    // These values will not be known at job submission time and must be provided.
    if (System.getProperties().containsKey("worker_id")) {
        options.setWorkerId(System.getProperty("worker_id"));
    }
    if (System.getProperties().containsKey("job_id")) {
        options.setJobId(System.getProperty("job_id"));
    }
    if (System.getProperties().containsKey("worker_pool")) {
        options.setWorkerPool(System.getProperty("worker_pool"));
    }

    // Remove impersonate information from workers
    // More details:
    // https://cloud.google.com/dataflow/docs/reference/pipeline-options#security_and_networking
    if (options.getImpersonateServiceAccount() != null) {
        LOG.info(
            "Remove the impersonateServiceAccount pipeline option ({}) when starting the Worker harness.",
            options.getImpersonateServiceAccount());
        options.setImpersonateServiceAccount(null);
    }

    return options;
}
/** System-property values (worker id, job id, serialized options) must flow into the options. */
@Test
public void testCreationFromSystemProperties() throws Exception {
    System.getProperties()
        .putAll(
            ImmutableMap.<String, String>builder()
                .put("worker_id", "test_worker_id")
                .put("job_id", "test_job_id")
                // Set a non-default value for testing
                .put("sdk_pipeline_options", "{\"options\":{\"numWorkers\":999}}")
                .build());

    @SuppressWarnings("deprecation") // testing deprecated functionality
    DataflowWorkerHarnessOptions options =
        WorkerPipelineOptionsFactory.createFromSystemProperties(DataflowWorkerHarnessOptions.class);
    assertEquals("test_worker_id", options.getWorkerId());
    assertEquals("test_job_id", options.getJobId());
    assertEquals(999, options.getNumWorkers());
}
/**
 * Wraps the given container in an immutable singleton list, or returns {@code null}
 * when the container itself is {@code null}.
 */
public static List<Container> listOrNull(Container container) {
    if (container == null) {
        return null;
    }
    return List.of(container);
}
/** null maps to null; a single container maps to a one-element list. */
@Test
public void testListOrNull() {
    assertThat(ContainerUtils.listOrNull(null), is(nullValue()));
    assertThat(ContainerUtils.listOrNull(new ContainerBuilder().withName("my-container").build()).size(), is(1));
}
/**
 * Returns a new iterator over this result's entries; each call creates a fresh
 * {@code It} instance.
 */
@Override
public Iterator<QueryableEntry> iterator() {
    return new It();
}
/** An empty result produces an exhausted iterator. */
@Test
public void testIterator_empty() {
    assertThat(result.iterator().hasNext()).isFalse();
}
/**
 * Copies the current elements, in iteration order, into a freshly allocated
 * {@code Object[]} of exactly {@link #size()} length.
 */
@Override
public Object[] toArray() {
    Object[] snapshot = new Object[size()];
    Iterator<T> elements = iterator();
    for (int slot = 0; elements.hasNext(); slot++) {
        snapshot[slot] = elements.next();
    }
    return snapshot;
}
@Test
public void testToArray() {
    SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
    TimelineHashSet<String> values = new TimelineHashSet<>(snapshotRegistry, 1);
    values.add("z");

    // Untyped overload snapshots into a new Object[]; the typed overload pads a
    // too-large array with null and allocates a fresh one when given a too-small array.
    assertArrayEquals(new String[] {"z"}, values.toArray());
    assertArrayEquals(new String[] {"z", null}, values.toArray(new String[2]));
    assertArrayEquals(new String[] {"z"}, values.toArray(new String[0]));
}
/**
 * Records one successful moveApplicationAcrossQueues call: the duration is added to
 * both the success accumulator and the latency metric (which feeds the running average).
 *
 * @param duration call latency in milliseconds — presumably; TODO confirm unit against callers
 */
public void succeededMoveApplicationAcrossQueuesRetrieved(long duration) { totalSucceededMoveApplicationAcrossQueuesRetrieved.add(duration); moveApplicationAcrossQueuesLatency.add(duration); }
@Test
public void testSucceededMoveApplicationAcrossQueuesRetrieved() {
    final long successesBefore = metrics.getNumSucceededMoveApplicationAcrossQueuesRetrieved();

    // First call: count goes up by one, latency average equals the single sample.
    goodSubCluster.moveApplicationAcrossQueuesRetrieved(150);
    Assert.assertEquals(successesBefore + 1,
        metrics.getNumSucceededMoveApplicationAcrossQueuesRetrieved());
    Assert.assertEquals(150,
        metrics.getLatencySucceededMoveApplicationAcrossQueuesRetrieved(), ASSERT_DOUBLE_DELTA);

    // Second call: count increments again and the average becomes (150 + 300) / 2 = 225.
    goodSubCluster.moveApplicationAcrossQueuesRetrieved(300);
    Assert.assertEquals(successesBefore + 2,
        metrics.getNumSucceededMoveApplicationAcrossQueuesRetrieved());
    Assert.assertEquals(225,
        metrics.getLatencySucceededMoveApplicationAcrossQueuesRetrieved(), ASSERT_DOUBLE_DELTA);
}
static Serde<List<?>> createSerde(final PersistenceSchema schema) { final List<SimpleColumn> columns = schema.columns(); if (columns.isEmpty()) { // No columns: return new KsqlVoidSerde<>(); } if (columns.size() != 1) { throw new KsqlException("The '" + FormatFactory.KAFKA.name() + "' format only supports a single field. Got: " + columns); } final SimpleColumn singleColumn = columns.get(0); final Class<?> javaType = SchemaConverters.sqlToJavaConverter() .toJavaType(singleColumn.type()); return createSerde(singleColumn, javaType); }
@Test public void shouldThroIfMultipleFields() { // Given: final PersistenceSchema schema = getPersistenceSchema(LogicalSchema.builder() .keyColumn(SystemColumns.ROWKEY_NAME, SqlTypes.STRING) .valueColumn(ColumnName.of("f0"), SqlTypes.INTEGER) .valueColumn(ColumnName.of("f1"), SqlTypes.BIGINT) .build()); // When: final Exception e = assertThrows( KsqlException.class, () -> KafkaSerdeFactory.createSerde(schema) ); // Then: assertThat(e.getMessage(), containsString( "The 'KAFKA' format only supports a single field. Got: [`f0` INTEGER, `f1` BIGINT]")); }
/**
 * Checks that no ERROR-level entries have been recorded.
 *
 * @return {@code true} when the ERROR-level entry count is zero
 */
public static boolean checkNoError() {
    // Return the comparison directly instead of the if/else true/false anti-pattern.
    return findLevel(Level.ERROR) == 0;
}
@Test
void testCheckNoError() {
    Log entry = mock(Log.class);
    DubboAppender.logList.add(entry);

    // One ERROR-level entry present -> check must fail.
    when(entry.getLogLevel()).thenReturn(Level.ERROR);
    assertThat(LogUtil.checkNoError(), is(false));

    // Re-stub the same entry to INFO -> no ERROR entries remain, check passes.
    when(entry.getLogLevel()).thenReturn(Level.INFO);
    assertThat(LogUtil.checkNoError(), is(true));
}
/**
 * Runs the containerization build, writes the requested digest/id/metadata output files,
 * and translates every known failure mode into a {@link BuildStepsExecutionException}
 * carrying a user-facing suggestion.
 *
 * @return the built container
 * @throws BuildStepsExecutionException on any build failure, wrapping the root cause
 * @throws IOException if writing an output file fails
 * @throws CacheDirectoryCreationException if the build cache directory cannot be created
 */
public JibContainer runBuild()
    throws BuildStepsExecutionException, IOException, CacheDirectoryCreationException {
  try {
    logger.accept(LogEvent.lifecycle(""));
    logger.accept(LogEvent.lifecycle(startupMessage));
    JibContainer jibContainer = jibContainerBuilder.containerize(containerizer);
    logger.accept(LogEvent.lifecycle(""));
    logger.accept(LogEvent.lifecycle(successMessage));

    // when an image is built, write out the digest and id
    if (imageDigestOutputPath != null) {
      String imageDigest = jibContainer.getDigest().toString();
      Files.write(imageDigestOutputPath, imageDigest.getBytes(StandardCharsets.UTF_8));
    }
    if (imageIdOutputPath != null) {
      String imageId = jibContainer.getImageId().toString();
      Files.write(imageIdOutputPath, imageId.getBytes(StandardCharsets.UTF_8));
    }
    if (imageJsonOutputPath != null) {
      ImageMetadataOutput metadataOutput = ImageMetadataOutput.fromJibContainer(jibContainer);
      String imageJson = metadataOutput.toJson();
      Files.write(imageJsonOutputPath, imageJson.getBytes(StandardCharsets.UTF_8));
    }
    return jibContainer;

  } catch (HttpHostConnectException ex) {
    // Failed to connect to registry.
    throw new BuildStepsExecutionException(helpfulSuggestions.forHttpHostConnect(), ex);

  } catch (RegistryUnauthorizedException ex) {
    // Delegates to a helper that always throws; see the unreachable throw below.
    handleRegistryUnauthorizedException(ex, helpfulSuggestions);

  } catch (RegistryCredentialsNotSentException ex) {
    throw new BuildStepsExecutionException(helpfulSuggestions.forCredentialsNotSent(), ex);

  } catch (RegistryAuthenticationFailedException ex) {
    if (ex.getCause() instanceof ResponseException) {
      // HTTP-level auth failure: re-wrap as an unauthorized exception for uniform handling.
      handleRegistryUnauthorizedException(
          new RegistryUnauthorizedException(
              ex.getServerUrl(), ex.getImageName(), (ResponseException) ex.getCause()),
          helpfulSuggestions);
    } else {
      // Unknown cause
      throw new BuildStepsExecutionException(helpfulSuggestions.none(), ex);
    }

  } catch (UnknownHostException ex) {
    throw new BuildStepsExecutionException(helpfulSuggestions.forUnknownHost(), ex);

  } catch (InsecureRegistryException ex) {
    throw new BuildStepsExecutionException(helpfulSuggestions.forInsecureRegistry(), ex);

  } catch (RegistryException ex) {
    String message = Verify.verifyNotNull(ex.getMessage()); // keep null-away happy
    throw new BuildStepsExecutionException(message, ex);

  } catch (ExecutionException ex) {
    // Unwrap the executor-level wrapper and surface the real cause.
    String message = ex.getCause().getMessage();
    throw new BuildStepsExecutionException(
        message == null ? "(null exception message)" : message, ex.getCause());

  } catch (InterruptedException ex) {
    // Restore the interrupt flag before converting to a build failure.
    Thread.currentThread().interrupt();
    throw new BuildStepsExecutionException(helpfulSuggestions.none(), ex);
  }
  // Only reachable via the handleRegistryUnauthorizedException paths, which always throw.
  throw new IllegalStateException("unreachable");
}
@Test
public void testBuildImage_other()
    throws InterruptedException, IOException, CacheDirectoryCreationException, RegistryException,
        ExecutionException {
  // Force containerization to fail with a generic RegistryException.
  Mockito.doThrow(new RegistryException("messagePrefix"))
      .when(mockJibContainerBuilder)
      .containerize(mockContainerizer);

  try {
    testJibBuildRunner.runBuild();
    Assert.fail();
  } catch (BuildStepsExecutionException ex) {
    // The registry exception's message must be passed through unchanged.
    Assert.assertEquals(TEST_HELPFUL_SUGGESTIONS.none(), ex.getMessage());
  }
}
/**
 * An execution vertex's input is consumable only when every one of its consumed
 * partition groups is consumable; per-group results are memoized in the supplied cache.
 */
@Override
public boolean isInputConsumable(
        SchedulingExecutionVertex executionVertex,
        Set<ExecutionVertexID> verticesToDeploy,
        Map<ConsumedPartitionGroup, Boolean> consumableStatusCache) {
    for (ConsumedPartitionGroup partitionGroup :
            executionVertex.getConsumedPartitionGroups()) {
        final boolean groupConsumable =
                consumableStatusCache.computeIfAbsent(
                        partitionGroup, this::isConsumableBasedOnFinishedProducers);
        if (!groupConsumable) {
            return false;
        }
    }
    return true;
}
@Test
void testNotFinishedBlockingInput() {
    final TestingSchedulingTopology topology = new TestingSchedulingTopology();

    // Two producers feed two consumers all-to-all over BLOCKING edges whose result
    // partitions are still CREATED, i.e. no producer has finished.
    final List<TestingSchedulingExecutionVertex> upstream =
            topology.addExecutionVertices().withParallelism(2).finish();
    final List<TestingSchedulingExecutionVertex> downstream =
            topology.addExecutionVertices().withParallelism(2).finish();

    topology.connectAllToAll(upstream, downstream)
            .withResultPartitionState(ResultPartitionState.CREATED)
            .withResultPartitionType(ResultPartitionType.BLOCKING)
            .finish();

    PartialFinishedInputConsumableDecider decider =
            createPartialFinishedInputConsumableDecider();

    // With zero finished producers, neither consumer's input may be consumable.
    assertThat(
            decider.isInputConsumable(
                    downstream.get(0), Collections.emptySet(), new HashMap<>()))
            .isFalse();
    assertThat(
            decider.isInputConsumable(
                    downstream.get(1), Collections.emptySet(), new HashMap<>()))
            .isFalse();
}
/**
 * Aggregates this grouped table into a {@link KTable}, delegating to the named variant
 * with an empty name so both overloads share a single code path.
 *
 * @param initializer  supplies the initial aggregate value
 * @param adder        applied when a record is added to the group
 * @param subtractor   applied when a record is removed from the group
 * @param materialized state-store configuration for the result table
 */
@Override public <VAgg> KTable<K, VAgg> aggregate(final Initializer<VAgg> initializer, final Aggregator<? super K, ? super V, VAgg> adder, final Aggregator<? super K, ? super V, VAgg> subtractor, final Materialized<K, VAgg, KeyValueStore<Bytes, byte[]>> materialized) { return aggregate(initializer, adder, subtractor, NamedInternal.empty(), materialized); }
// A null Materialized argument must be rejected with NullPointerException.
// The raw (Materialized) cast is needed to select the materialized overload
// with a null literal, hence the unchecked suppression.
@SuppressWarnings("unchecked") @Test public void shouldThrowNullPointerOnAggregateWhenMaterializedIsNull() { assertThrows(NullPointerException.class, () -> groupedTable.aggregate( MockInitializer.STRING_INIT, MockAggregator.TOSTRING_ADDER, MockAggregator.TOSTRING_REMOVER, (Materialized) null)); }
/**
 * Decodes as many SPDY frames as possible from {@code buffer}, invoking the delegate
 * callback for each decoded frame, header-block chunk, or protocol error.
 *
 * <p>This is a resumable state machine: when the buffer does not yet hold enough bytes
 * for the current state, the method returns and resumes from the same state on the next
 * call. Malformed input transitions to FRAME_ERROR, which discards all further bytes.
 */
public void decode(ByteBuf buffer) {
    boolean last;
    int statusCode;

    while (true) {
        switch(state) {
            case READ_COMMON_HEADER:
                // Need the full 8-byte common header before anything can be decoded.
                if (buffer.readableBytes() < SPDY_HEADER_SIZE) {
                    return;
                }
                int frameOffset = buffer.readerIndex();
                int flagsOffset = frameOffset + SPDY_HEADER_FLAGS_OFFSET;
                int lengthOffset = frameOffset + SPDY_HEADER_LENGTH_OFFSET;
                buffer.skipBytes(SPDY_HEADER_SIZE);

                // High bit of the first byte distinguishes control from data frames.
                boolean control = (buffer.getByte(frameOffset) & 0x80) != 0;

                int version;
                int type;
                if (control) {
                    // Decode control frame common header
                    version = getUnsignedShort(buffer, frameOffset) & 0x7FFF;
                    type = getUnsignedShort(buffer, frameOffset + SPDY_HEADER_TYPE_OFFSET);
                    streamId = 0; // Default to session Stream-ID
                } else {
                    // Decode data frame common header
                    version = spdyVersion; // Default to expected version
                    type = SPDY_DATA_FRAME;
                    streamId = getUnsignedInt(buffer, frameOffset);
                }

                flags = buffer.getByte(flagsOffset);
                length = getUnsignedMedium(buffer, lengthOffset);

                // Check version first then validity
                if (version != spdyVersion) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SPDY Version");
                } else if (!isValidFrameHeader(streamId, type, flags, length)) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid Frame Error");
                } else {
                    state = getNextState(type, length);
                }
                break;

            case READ_DATA_FRAME:
                if (length == 0) {
                    // Empty data frame: emit immediately with an empty buffer.
                    state = State.READ_COMMON_HEADER;
                    delegate.readDataFrame(streamId, hasFlag(flags, SPDY_DATA_FLAG_FIN), Unpooled.buffer(0));
                    break;
                }

                // Generate data frames that do not exceed maxChunkSize
                int dataLength = Math.min(maxChunkSize, length);

                // Wait until entire frame is readable
                if (buffer.readableBytes() < dataLength) {
                    return;
                }

                ByteBuf data = buffer.alloc().buffer(dataLength);
                data.writeBytes(buffer, dataLength);
                length -= dataLength;

                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                }

                // FIN applies only to the final chunk of the frame.
                last = length == 0 && hasFlag(flags, SPDY_DATA_FLAG_FIN);

                delegate.readDataFrame(streamId, last, data);
                break;

            case READ_SYN_STREAM_FRAME:
                // SYN_STREAM has a fixed 10-byte body before the header block.
                if (buffer.readableBytes() < 10) {
                    return;
                }

                int offset = buffer.readerIndex();
                streamId = getUnsignedInt(buffer, offset);
                int associatedToStreamId = getUnsignedInt(buffer, offset + 4);
                // Priority is the top 3 bits of the 9th byte.
                byte priority = (byte) (buffer.getByte(offset + 8) >> 5 & 0x07);
                last = hasFlag(flags, SPDY_FLAG_FIN);
                boolean unidirectional = hasFlag(flags, SPDY_FLAG_UNIDIRECTIONAL);
                buffer.skipBytes(10);
                length -= 10;

                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SYN_STREAM Frame");
                } else {
                    // Remaining `length` bytes are the compressed header block.
                    state = State.READ_HEADER_BLOCK;
                    delegate.readSynStreamFrame(streamId, associatedToStreamId, priority, last, unidirectional);
                }
                break;

            case READ_SYN_REPLY_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }

                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                last = hasFlag(flags, SPDY_FLAG_FIN);

                buffer.skipBytes(4);
                length -= 4;

                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SYN_REPLY Frame");
                } else {
                    state = State.READ_HEADER_BLOCK;
                    delegate.readSynReplyFrame(streamId, last);
                }
                break;

            case READ_RST_STREAM_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }

                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);

                // Both the stream id and the status code must be non-zero.
                if (streamId == 0 || statusCode == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid RST_STREAM Frame");
                } else {
                    state = State.READ_COMMON_HEADER;
                    delegate.readRstStreamFrame(streamId, statusCode);
                }
                break;

            case READ_SETTINGS_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }

                boolean clear = hasFlag(flags, SPDY_SETTINGS_CLEAR);

                numSettings = getUnsignedInt(buffer, buffer.readerIndex());
                buffer.skipBytes(4);
                length -= 4;

                // Validate frame length against number of entries. Each ID/Value entry is 8 bytes.
                if ((length & 0x07) != 0 || length >> 3 != numSettings) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid SETTINGS Frame");
                } else {
                    state = State.READ_SETTING;
                    delegate.readSettingsFrame(clear);
                }
                break;

            case READ_SETTING:
                if (numSettings == 0) {
                    // All entries consumed: signal the end of the SETTINGS frame.
                    state = State.READ_COMMON_HEADER;
                    delegate.readSettingsEnd();
                    break;
                }

                if (buffer.readableBytes() < 8) {
                    return;
                }

                byte settingsFlags = buffer.getByte(buffer.readerIndex());
                int id = getUnsignedMedium(buffer, buffer.readerIndex() + 1);
                int value = getSignedInt(buffer, buffer.readerIndex() + 4);
                boolean persistValue = hasFlag(settingsFlags, SPDY_SETTINGS_PERSIST_VALUE);
                boolean persisted = hasFlag(settingsFlags, SPDY_SETTINGS_PERSISTED);
                buffer.skipBytes(8);

                --numSettings;

                delegate.readSetting(id, value, persistValue, persisted);
                break;

            case READ_PING_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }

                int pingId = getSignedInt(buffer, buffer.readerIndex());
                buffer.skipBytes(4);

                state = State.READ_COMMON_HEADER;
                delegate.readPingFrame(pingId);
                break;

            case READ_GOAWAY_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }

                int lastGoodStreamId = getUnsignedInt(buffer, buffer.readerIndex());
                statusCode = getSignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);

                state = State.READ_COMMON_HEADER;
                delegate.readGoAwayFrame(lastGoodStreamId, statusCode);
                break;

            case READ_HEADERS_FRAME:
                if (buffer.readableBytes() < 4) {
                    return;
                }

                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                last = hasFlag(flags, SPDY_FLAG_FIN);

                buffer.skipBytes(4);
                length -= 4;

                if (streamId == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid HEADERS Frame");
                } else {
                    state = State.READ_HEADER_BLOCK;
                    delegate.readHeadersFrame(streamId, last);
                }
                break;

            case READ_WINDOW_UPDATE_FRAME:
                if (buffer.readableBytes() < 8) {
                    return;
                }

                streamId = getUnsignedInt(buffer, buffer.readerIndex());
                int deltaWindowSize = getUnsignedInt(buffer, buffer.readerIndex() + 4);
                buffer.skipBytes(8);

                // A zero window delta is a protocol violation.
                if (deltaWindowSize == 0) {
                    state = State.FRAME_ERROR;
                    delegate.readFrameError("Invalid WINDOW_UPDATE Frame");
                } else {
                    state = State.READ_COMMON_HEADER;
                    delegate.readWindowUpdateFrame(streamId, deltaWindowSize);
                }
                break;

            case READ_HEADER_BLOCK:
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                    delegate.readHeaderBlockEnd();
                    break;
                }

                if (!buffer.isReadable()) {
                    return;
                }

                // Forward whatever compressed header bytes are available, up to `length`.
                int compressedBytes = Math.min(buffer.readableBytes(), length);
                ByteBuf headerBlock = buffer.alloc().buffer(compressedBytes);
                headerBlock.writeBytes(buffer, compressedBytes);
                length -= compressedBytes;

                delegate.readHeaderBlock(headerBlock);
                break;

            case DISCARD_FRAME:
                // Skip the remainder of a frame whose contents are being ignored.
                int numBytes = Math.min(buffer.readableBytes(), length);
                buffer.skipBytes(numBytes);
                length -= numBytes;
                if (length == 0) {
                    state = State.READ_COMMON_HEADER;
                    break;
                }
                return;

            case FRAME_ERROR:
                // Once in error, drop everything that arrives.
                buffer.skipBytes(buffer.readableBytes());
                return;

            default:
                throw new Error("Shouldn't reach here.");
        }
    }
}
@Test
public void testSpdySettingsFrame() throws Exception {
    short type = 4;
    byte flags = 0;
    int numSettings = 2;
    int length = 8 * numSettings + 4;
    byte idFlags = 0;
    int id = RANDOM.nextInt() & 0x00FFFFFF;
    int value = RANDOM.nextInt();

    ByteBuf buf = Unpooled.buffer(SPDY_HEADER_SIZE + length);
    encodeControlFrameHeader(buf, type, flags, length);
    buf.writeInt(numSettings);
    for (int i = 0; i < numSettings; i++) {
        buf.writeByte(idFlags);
        buf.writeMedium(id);
        buf.writeInt(value);
    }

    decoder.decode(buf);
    verify(delegate).readSettingsFrame(false);
    verify(delegate, times(numSettings)).readSetting(id, value, false, false);
    // Fix: the previous code invoked delegate.readSettingsEnd() directly on the mock
    // before decoding, recording a bogus interaction and never checking the decoder's
    // own callback. The decoder emits readSettingsEnd() once all entries are consumed,
    // so verify it as an interaction produced by decode().
    verify(delegate).readSettingsEnd();
    assertFalse(buf.isReadable());
    buf.release();
}
/**
 * Instantiates the plugin described by the given option, wrapping any
 * instantiation failure in a {@link CucumberException}.
 */
Plugin create(Options.Plugin plugin) {
    try {
        return instantiate(plugin.pluginString(), plugin.pluginClass(), plugin.argument());
    } catch (URISyntaxException | IOException cause) {
        throw new CucumberException(cause);
    }
}
@Test
void instantiates_junit_plugin_with_file_arg() {
    // A "junit:<path>" plugin spec must produce a JUnitFormatter writing to that path.
    String pluginSpec = "junit:" + tmp.resolve("cucumber.xml");
    PluginOption option = parse(pluginSpec);

    plugin = fc.create(option);

    assertThat(plugin.getClass(), is(equalTo(JUnitFormatter.class)));
}
/**
 * Writes a DATA frame on the given stream by handing the payload to the flow controller.
 *
 * <p>If the stream does not exist or is not in a state that allows outbound DATA
 * (OPEN or HALF_CLOSED_REMOTE), the payload buffer is released and the promise is
 * failed instead of throwing.
 *
 * @return the (unvoided) promise, completed by the flow controller or failed here
 */
@Override public ChannelFuture writeData(final ChannelHandlerContext ctx, final int streamId, ByteBuf data, int padding, final boolean endOfStream, ChannelPromise promise) {
    // Unvoid so failures can be attached even if the caller passed a void promise.
    promise = promise.unvoid();
    final Http2Stream stream;
    try {
        stream = requireStream(streamId);
        // Verify that the stream is in the appropriate state for sending DATA frames.
        switch (stream.state()) {
            case OPEN:
            case HALF_CLOSED_REMOTE:
                // Allowed sending DATA frames in these states.
                break;
            default:
                throw new IllegalStateException("Stream " + stream.id() + " in unexpected state " + stream.state());
        }
    } catch (Throwable e) {
        // Failure path must still release the payload to avoid a buffer leak.
        data.release();
        return promise.setFailure(e);
    }

    // Hand control of the frame to the flow controller.
    flowController().addFlowControlled(stream, new FlowControlledData(stream, data, padding, endOfStream, promise));
    return promise;
}
@Test
public void dataWriteToHalfClosedLocalStreamShouldFail() throws Exception {
    createStream(STREAM_ID, true);
    final ByteBuf payload = mock(ByteBuf.class);
    final ChannelPromise writePromise = newPromise();

    encoder.writeData(ctx, STREAM_ID, payload, 0, false, writePromise);

    // The write must fail fast with IllegalStateException and release the payload.
    assertTrue(writePromise.isDone());
    assertFalse(writePromise.isSuccess());
    assertThat(writePromise.cause(), instanceOf(IllegalStateException.class));
    verify(payload).release();
}
/**
 * Builds the configuration from this builder's source, starting from a fresh
 * default {@code Config} instance.
 */
@Override public Config build() { return build(new Config()); }
@Override @Test public void testConfigurationWithFileName() throws Exception { File file = createTempFile("foo", "bar"); file.deleteOnExit(); String yaml = """ hazelcast: map: my-map: backup-count: 1"""; Writer writer = new PrintWriter(file, StandardCharsets.UTF_8); writer.write(yaml); writer.close(); String path = file.getAbsolutePath(); Config config = new YamlConfigBuilder(path).build(); assertEquals(path, config.getConfigurationFile().getAbsolutePath()); assertNull(config.getConfigurationUrl()); }
/**
 * Loads all streams whose hex-string IDs are in the given collection;
 * unknown IDs are silently skipped.
 */
@Override
public Set<Stream> loadByIds(Collection<String> streamIds) {
    // Translate the hex string IDs into ObjectIds for the Mongo query.
    final Set<ObjectId> ids = streamIds.stream()
            .map(ObjectId::new)
            .collect(Collectors.toSet());
    final DBObject idQuery = QueryBuilder.start(StreamImpl.FIELD_ID).in(ids).get();
    return ImmutableSet.copyOf(loadAll(idQuery));
}
@Test
@MongoDBFixtures("someStreamsWithAlertConditions.json")
public void loadByIds() {
    final String existingId = "565f02223b0c25a537197af2";
    final String unknownId = "565f02223b0c25a5deadbeef";

    // Known id resolves; unknown id yields nothing; a mix returns only the known one.
    assertThat(this.streamService.loadByIds(ImmutableSet.of(existingId))).hasSize(1);
    assertThat(this.streamService.loadByIds(ImmutableSet.of(unknownId))).isEmpty();
    assertThat(this.streamService.loadByIds(ImmutableSet.of(existingId, unknownId))).hasSize(1);
}
/**
 * Deserializes a GUI from JSON and migrates any legacy-format input definitions
 * in place before returning it.
 */
public static GUI fromJson(String json) {
    final GUI parsed = gson.fromJson(json, GUI.class);
    parsed.convertOldInput();
    return parsed;
}
// Verifies that legacy (pre-typed) form JSON is migrated by GUI.fromJson:
// a plain form with a defaultValue becomes a TextBox, and a form carrying an
// "options" array becomes a Select, each keeping its default value.
@Test void testOldGson_3() throws IOException { String oldJson = "{\n" + " \"params\": {\n" + " \"maxAge\": \"35\"\n" + " },\n" + " \"forms\": {\n" + " \"maxAge\": {\n" + " \"name\": \"maxAge\",\n" + " \"defaultValue\": \"30\",\n" + " \"hidden\": false\n" + " }\n" + " }\n" + " }"; GUI gui = GUI.fromJson(oldJson); assertEquals(1, gui.forms.size()); assertTrue(gui.forms.get("maxAge") instanceof TextBox); assertEquals("30", gui.forms.get("maxAge").getDefaultValue()); oldJson = "{\n" + " \"params\": {\n" + " \"marital\": \"single\"\n" + " },\n" + " \"forms\": {\n" + " \"marital\": {\n" + " \"name\": \"marital\",\n" + " \"defaultValue\": \"single\",\n" + " \"options\": [\n" + " {\n" + " \"value\": \"single\"\n" + " },\n" + " {\n" + " \"value\": \"divorced\"\n" + " },\n" + " {\n" + " \"value\": \"married\"\n" + " }\n" + " ],\n" + " \"hidden\": false\n" + " }\n" + " }\n" + " }"; gui = GUI.fromJson(oldJson); assertEquals(1, gui.forms.size()); assertTrue(gui.forms.get("marital") instanceof Select); assertEquals("single", gui.forms.get("marital").getDefaultValue()); }
public static boolean isChar(Object value) { //noinspection ConstantConditions return value instanceof Character || value.getClass() == char.class; }
@Test
public void isCharTest() {
    // A primitive char is autoboxed to Character, which isChar must accept.
    final char letter = 'a';
    assertTrue(CharUtil.isChar(letter));
}