| code (string, lengths 73–34.1k) | label (string, 1 class) | value |
|---|---|---|
/**
 * Queries the {@code simplex.playlists} table for a fixed playlist id and prints the
 * title/album/artist columns of every matching row as an aligned text table.
 *
 * <p>NOTE(review): the id is hard-coded; presumably demo code — confirm before reuse.
 */
public void querySchema() {
ResultSet results =
session.execute(
"SELECT * FROM simplex.playlists "
+ "WHERE id = 2cc9ccb7-6221-4ccb-8387-f22b6a1b354d;");
// Header row, then a separator line sized to match the three fixed-width columns.
System.out.printf("%-30s\t%-20s\t%-20s%n", "title", "album", "artist");
System.out.println(
"-------------------------------+-----------------------+--------------------");
for (Row row : results) {
System.out.printf(
"%-30s\t%-20s\t%-20s%n",
row.getString("title"), row.getString("album"), row.getString("artist"));
}
}
|
java
|
/**
 * Loads and initializes {@code className}, preferring the supplied loader.
 *
 * <p>Fallback order: explicit {@code classLoader} → current thread's context class loader →
 * the calling class' loader (via the single-argument {@link Class#forName(String)}).
 *
 * @param classLoader loader to use first, or {@code null} to fall back
 * @param className fully-qualified class name
 * @return the loaded class, or {@code null} if it cannot be found
 */
public static Class<?> loadClass(ClassLoader classLoader, String className) {
  ClassLoader effective =
      (classLoader == null) ? Thread.currentThread().getContextClassLoader() : classLoader;
  try {
    if (effective == null) {
      // No loader available at all: let Class.forName use the caller's loader.
      return Class.forName(className);
    }
    return Class.forName(className, true, effective);
  } catch (ClassNotFoundException e) {
    // Absence is an expected outcome for callers probing optional classes.
    return null;
  }
}
|
java
|
/**
 * Convenience overload of the five-argument {@code buildFromConfig} that uses no
 * specific profile (passes {@code null} as the profile name).
 *
 * @param context the driver context used for reflection-based instantiation
 * @param classNameOption config option holding the class name to instantiate
 * @param expectedSuperType type the instantiated component must be assignable to
 * @param defaultPackages packages searched when the configured name is not fully qualified
 * @return the instantiated component, or empty if the option is not defined
 */
public static <ComponentT> Optional<ComponentT> buildFromConfig(
InternalDriverContext context,
DriverOption classNameOption,
Class<ComponentT> expectedSuperType,
String... defaultPackages) {
return buildFromConfig(context, null, classNameOption, expectedSuperType, defaultPackages);
}
|
java
|
/**
 * Builds one component per <em>distinct</em> configuration under {@code rootOption}, and maps
 * every profile name to its component. Profiles that share the same configuration (same
 * comparison key) share a single component instance.
 *
 * @param context driver context used for instantiation
 * @param rootOption root of the config subtree that defines the component
 * @param expectedSuperType type each component must be assignable to
 * @param defaultPackages packages searched for unqualified class names
 * @return immutable map from profile name to component
 * @throws IllegalArgumentException if a profile lacks the required configuration
 */
public static <ComponentT> Map<String, ComponentT> buildFromConfigProfiles(
InternalDriverContext context,
DriverOption rootOption,
Class<ComponentT> expectedSuperType,
String... defaultPackages) {
// Find out how many distinct configurations we have
ListMultimap<Object, String> profilesByConfig =
MultimapBuilder.hashKeys().arrayListValues().build();
for (DriverExecutionProfile profile : context.getConfig().getProfiles().values()) {
// The comparison key groups profiles whose subtree under rootOption is identical.
profilesByConfig.put(profile.getComparisonKey(rootOption), profile.getName());
}
// Instantiate each distinct configuration, and associate it with the corresponding profiles
ImmutableMap.Builder<String, ComponentT> result = ImmutableMap.builder();
for (Collection<String> profiles : profilesByConfig.asMap().values()) {
// Since all profiles use the same config, we can use any of them
String profileName = profiles.iterator().next();
ComponentT policy =
buildFromConfig(
context, profileName, classOption(rootOption), expectedSuperType, defaultPackages)
.orElseThrow(
() ->
new IllegalArgumentException(
String.format(
"Missing configuration for %s in profile %s",
rootOption.getPath(), profileName)));
// Every profile in the group shares the single instance built above.
for (String profile : profiles) {
result.put(profile, policy);
}
}
return result.build();
}
|
java
|
/**
 * Parses the given system-table rows into user-defined types for one keyspace.
 *
 * <p>Rows are first topologically sorted so that any UDT referenced by another UDT's field is
 * parsed before its dependents; {@code types} accumulates already-parsed types so that
 * {@code parseType} can resolve nested references.
 *
 * @param typeRows raw rows describing the keyspace's UDTs
 * @param keyspaceId the keyspace the types belong to
 * @return an immutable, insertion-ordered map of type name to parsed type
 */
public Map<CqlIdentifier, UserDefinedType> parse(
Collection<AdminRow> typeRows, CqlIdentifier keyspaceId) {
if (typeRows.isEmpty()) {
return Collections.emptyMap();
} else {
// LinkedHashMap keeps dependency order from the topological sort.
Map<CqlIdentifier, UserDefinedType> types = new LinkedHashMap<>();
for (AdminRow row : topologicalSort(typeRows, keyspaceId)) {
UserDefinedType type = parseType(row, keyspaceId, types);
types.put(type.getName(), type);
}
return ImmutableMap.copyOf(types);
}
}
|
java
|
/**
 * Triggers initialization of the single-threaded delegate on the admin executor and returns
 * the future that completes when initialization finishes.
 *
 * <p>RunOrSchedule executes inline if we are already on {@code adminExecutor}, otherwise
 * schedules the init task on it; the returned future is completed by the delegate itself.
 */
public CompletionStage<Void> init(
boolean listenToClusterEvents,
boolean reconnectOnFailure,
boolean useInitialReconnectionSchedule) {
RunOrSchedule.on(
adminExecutor,
() ->
singleThreaded.init(
listenToClusterEvents, reconnectOnFailure, useInitialReconnectionSchedule));
return singleThreaded.initFuture;
}
|
java
|
/**
 * Switches the session keyspace by delegating to the single-threaded component on the admin
 * executor; the returned stage completes when the switch has been applied.
 */
public CompletionStage<Void> setKeyspace(CqlIdentifier newKeyspaceName) {
return RunOrSchedule.on(adminExecutor, () -> singleThreaded.setKeyspace(newKeyspaceName));
}
|
java
|
/**
 * Returns the pre-release identifiers of this version as a read-only list, or {@code null}
 * when the version has none (kept as {@code null} for API compatibility).
 */
public List<String> getPreReleaseLabels() {
  if (preReleases == null) {
    return null;
  }
  return Collections.unmodifiableList(Arrays.asList(preReleases));
}
|
java
|
/**
 * Forwards a node state change to every wrapped load-balancing policy, according to the
 * component's lifecycle state.
 *
 * <p>Event mapping (order of the if/else chain is significant): UP → onUp,
 * DOWN/FORCED_DOWN → onDown, UNKNOWN → onAdd (a newly discovered node), and a null new state
 * → onRemove (node left the cluster). Events arriving before init are a programming error;
 * events during close are silently dropped.
 */
private void processNodeStateEvent(NodeStateEvent event) {
switch (stateRef.get()) {
case BEFORE_INIT:
case DURING_INIT:
throw new AssertionError("Filter should not be marked ready until LBP init");
case CLOSING:
return; // ignore
case RUNNING:
for (LoadBalancingPolicy policy : policies) {
if (event.newState == NodeState.UP) {
policy.onUp(event.node);
} else if (event.newState == NodeState.DOWN || event.newState == NodeState.FORCED_DOWN) {
policy.onDown(event.node);
} else if (event.newState == NodeState.UNKNOWN) {
// UNKNOWN is used for nodes that were just added and not probed yet.
policy.onAdd(event.node);
} else if (event.newState == null) {
// A null new state signals removal from the cluster.
policy.onRemove(event.node);
} else {
LOG.warn("[{}] Unsupported event: {}", logPrefix, event);
}
}
break;
}
}
|
java
|
/**
 * Builds the reverse-DNS name for the given address, dispatching on address family.
 * A 4-byte raw address is IPv4; anything else is handled as IPv6.
 */
@VisibleForTesting
static String reverse(InetAddress address) {
  byte[] raw = address.getAddress();
  return (raw.length == 4) ? reverseIpv4(raw) : reverseIpv6(raw);
}
|
java
|
/**
 * Returns the index of the first column with the given id, or -1 if there is none
 * (mirroring {@code List.indexOf} semantics).
 */
public int firstIndexOf(CqlIdentifier id) {
  return byId.getOrDefault(id, -1);
}
|
java
|
/**
 * Demonstrates three ways to insert JSON with the core driver API: a simple statement with a
 * whole-row JSON payload, a prepared statement with a named JSON parameter, and
 * {@code fromJson()} to supply a single column as JSON.
 */
private static void insertWithCoreApi(CqlSession session) {
// Bind in a simple statement:
session.execute(
SimpleStatement.newInstance(
"INSERT INTO examples.querybuilder_json JSON ?",
"{ \"id\": 1, \"name\": \"Mouse\", \"specs\": { \"color\": \"silver\" } }"));
// Bind in a prepared statement:
// (subsequent calls to the prepare() method will return cached statement)
PreparedStatement pst = session.prepare("INSERT INTO examples.querybuilder_json JSON :payload");
session.execute(
pst.bind()
.setString(
"payload",
"{ \"id\": 2, \"name\": \"Keyboard\", \"specs\": { \"layout\": \"qwerty\" } }"));
// fromJson lets you provide individual columns as JSON:
session.execute(
SimpleStatement.newInstance(
"INSERT INTO examples.querybuilder_json "
+ "(id, name, specs) VALUES (?, ?, fromJson(?))",
3,
"Screen",
"{ \"size\": \"24-inch\" }"));
}
|
java
|
/**
 * Demonstrates reading JSON with the core driver API: {@code SELECT JSON} returns the whole
 * row in the special "[json]" column; {@code toJson()} converts one column to JSON.
 */
private static void selectWithCoreApi(CqlSession session) {
// Reading the whole row as a JSON object:
Row row =
session
.execute(
SimpleStatement.newInstance(
"SELECT JSON * FROM examples.querybuilder_json WHERE id = ?", 1))
.one();
assert row != null;
System.out.printf("Entry #1 as JSON: %s%n", row.getString("[json]"));
// Extracting a particular column as JSON:
row =
session
.execute(
SimpleStatement.newInstance(
"SELECT id, toJson(specs) AS json_specs FROM examples.querybuilder_json WHERE id = ?",
2))
.one();
assert row != null;
System.out.printf(
"Entry #%d's specs as JSON: %s%n", row.getInt("id"), row.getString("json_specs"));
}
|
java
|
/**
 * Appends a field definition (name + type, kept in parallel lists) and returns this builder
 * for chaining.
 */
public UserDefinedTypeBuilder withField(CqlIdentifier name, DataType type) {
  this.fieldNames.add(name);
  this.fieldTypes.add(type);
  return this;
}
|
java
|
/**
 * Returns a copy of this metadata with the given node list, rebuilding the token map as
 * needed. Keyspaces are carried over unchanged.
 *
 * @param newNodes the new node set, keyed by host id
 * @param tokenMapEnabled whether token map computation is enabled at all
 * @param tokensChanged whether at least one node's tokens changed
 * @param tokenFactory factory for the partitioner's token type
 * @param context driver context (for logging/config during the rebuild)
 */
public DefaultMetadata withNodes(
Map<UUID, Node> newNodes,
boolean tokenMapEnabled,
boolean tokensChanged,
TokenFactory tokenFactory,
InternalDriverContext context) {
// Force a rebuild if at least one node has different tokens, or there are new or removed nodes.
boolean forceFullRebuild = tokensChanged || !newNodes.equals(nodes);
return new DefaultMetadata(
ImmutableMap.copyOf(newNodes),
this.keyspaces,
rebuildTokenMap(
newNodes, keyspaces, tokenMapEnabled, forceFullRebuild, tokenFactory, context));
}
|
java
|
/**
 * Sends the request to a node, either a specific retried node or the next usable node from
 * the query plan.
 *
 * <p>Flow: if the overall result is already done, do nothing. Try the retried node's channel
 * first; if absent, walk the query plan until a node with an open channel is found. If the
 * plan is exhausted, decrement the count of in-flight executions and, if this was the last
 * one, fail the result with the accumulated per-node errors. Otherwise write the request and
 * register the callback for both the write outcome and the server response.
 *
 * @param retriedNode node to retry on, or null to pick from the query plan
 * @param queryPlan remaining candidate nodes
 * @param currentExecutionIndex index of this (possibly speculative) execution
 * @param retryCount number of retries so far on this execution
 * @param scheduleNextExecution whether to schedule the next speculative execution
 */
private void sendRequest(
Node retriedNode,
Queue<Node> queryPlan,
int currentExecutionIndex,
int retryCount,
boolean scheduleNextExecution) {
if (result.isDone()) {
return;
}
Node node = retriedNode;
DriverChannel channel = null;
// Note: the assignment inside the condition intentionally probes the retried node first.
if (node == null || (channel = session.getChannel(node, logPrefix)) == null) {
while (!result.isDone() && (node = queryPlan.poll()) != null) {
channel = session.getChannel(node, logPrefix);
if (channel != null) {
break;
}
}
}
if (channel == null) {
// We've reached the end of the query plan without finding any node to write to
if (!result.isDone() && activeExecutionsCount.decrementAndGet() == 0) {
// We're the last execution so fail the result
setFinalError(AllNodesFailedException.fromErrors(this.errors), null, -1);
}
} else {
NodeResponseCallback nodeResponseCallback =
new NodeResponseCallback(
node,
queryPlan,
channel,
currentExecutionIndex,
retryCount,
scheduleNextExecution,
logPrefix);
// The callback is both the response handler and the write-completion listener.
channel
.write(message, statement.isTracing(), statement.getCustomPayload(), nodeResponseCallback)
.addListener(nodeResponseCallback);
}
}
|
java
|
/**
 * Demonstrates SELECT JSON via the query builder, reading each row back as a JSR-353
 * {@code JsonObject}.
 */
private static void selectJsonRow(CqlSession session) {
// Reading the whole row as a JSON object
Statement stmt =
selectFrom("examples", "json_jsr353_row")
.json()
.all()
.whereColumn("id")
.in(literal(1), literal(2))
.build();
ResultSet rows = session.execute(stmt);
for (Row row : rows) {
// SELECT JSON returns only one column for each row, of type VARCHAR,
// containing the row as a JSON payload.
// Note that the codec requires that the type passed to the get() method
// be always JsonStructure, and not a subclass of it, such as JsonObject,
// hence the need to downcast to JsonObject manually
JsonObject user = (JsonObject) row.get(0, JsonStructure.class);
System.out.printf("Retrieved user: %s%n", user);
}
}
|
java
|
/**
 * Reads the entire contents of {@code file} into a flipped {@link ByteBuffer} ready for
 * consumption.
 *
 * <p>Fix: a single {@code FileChannel.read} call is not guaranteed to fill the buffer, so we
 * loop until the buffer is full or EOF is reached.
 *
 * @param file the file to read (must fit in a single int-sized buffer)
 * @return a buffer positioned at 0 with limit = file size
 * @throws IOException if the file cannot be opened or read
 */
private static ByteBuffer readAll(File file) throws IOException {
try (FileInputStream inputStream = new FileInputStream(file)) {
FileChannel channel = inputStream.getChannel();
ByteBuffer buffer = ByteBuffer.allocate((int) channel.size());
// read() may return fewer bytes than requested; keep reading until full or EOF.
while (buffer.hasRemaining()) {
if (channel.read(buffer) == -1) {
break;
}
}
buffer.flip();
return buffer;
}
}
|
java
|
/**
 * Computes a lower bound on the encoded size of a request frame: the fixed frame header plus,
 * if present, the custom payload map. The request body itself is not included.
 */
public static int minimumRequestSize(Request request) {
// Header and payload are common inside a Frame at the protocol level
// Frame header has a fixed size of 9 for protocol version >= V3, which includes Frame flags
// size
int size = FrameCodec.headerEncodedSize();
if (!request.getCustomPayload().isEmpty()) {
// Custom payload is not supported in v3, but assume user won't have a custom payload set if
// they use this version
size += PrimitiveSizes.sizeOfBytesMap(request.getCustomPayload());
}
return size;
}
|
java
|
/**
 * Computes the encoded size of a simple statement's bound values (positional or named;
 * a statement uses at most one of the two). Returns 0 when the statement has no values.
 *
 * <p>Values are encoded with the given codec registry/protocol version purely to measure
 * their serialized size.
 */
public static int sizeOfSimpleStatementValues(
SimpleStatement simpleStatement,
ProtocolVersion protocolVersion,
CodecRegistry codecRegistry) {
int size = 0;
if (!simpleStatement.getPositionalValues().isEmpty()) {
List<ByteBuffer> positionalValues =
new ArrayList<>(simpleStatement.getPositionalValues().size());
for (Object value : simpleStatement.getPositionalValues()) {
positionalValues.add(Conversions.encode(value, codecRegistry, protocolVersion));
}
size += Values.sizeOfPositionalValues(positionalValues);
} else if (!simpleStatement.getNamedValues().isEmpty()) {
// Named values are keyed by their internal (case-sensitive) CQL form on the wire.
Map<String, ByteBuffer> namedValues = new HashMap<>(simpleStatement.getNamedValues().size());
for (Map.Entry<CqlIdentifier, Object> value : simpleStatement.getNamedValues().entrySet()) {
namedValues.put(
value.getKey().asInternal(),
Conversions.encode(value.getValue(), codecRegistry, protocolVersion));
}
size += Values.sizeOfNamedValues(namedValues);
}
return size;
}
|
java
|
/**
 * Computes the encoded size of one inner statement of a BATCH message: the one-byte "kind"
 * marker plus either (query string + values) for a simple statement, or (prepared id + bound
 * values) for a bound statement. Other statement kinds contribute only the kind byte.
 */
public static Integer sizeOfInnerBatchStatementInBytes(
BatchableStatement statement, ProtocolVersion protocolVersion, CodecRegistry codecRegistry) {
int size = 0;
size +=
PrimitiveSizes
.BYTE; // for each inner statement, there is one byte for the "kind": prepared or string
if (statement instanceof SimpleStatement) {
size += PrimitiveSizes.sizeOfLongString(((SimpleStatement) statement).getQuery());
size +=
sizeOfSimpleStatementValues(
((SimpleStatement) statement), protocolVersion, codecRegistry);
} else if (statement instanceof BoundStatement) {
// Bound statements are referenced by their prepared id instead of the query string.
size +=
PrimitiveSizes.sizeOfShortBytes(
((BoundStatement) statement).getPreparedStatement().getId().array());
size += sizeOfBoundStatementValues(((BoundStatement) statement));
}
return size;
}
|
java
|
/**
 * Sorts the graph's vertices topologically using Kahn's algorithm. May be called at most
 * once per instance (the in-degree counts are consumed destructively).
 *
 * @return the vertices in dependency order
 * @throws IllegalArgumentException if the graph contains a cycle
 */
public List<VertexT> topologicalSort() {
  Preconditions.checkState(!wasSorted);
  wasSorted = true;
  // Seed the work queue with every vertex that has no incoming edges.
  Queue<VertexT> ready = new ArrayDeque<>();
  for (Map.Entry<VertexT, Integer> entry : vertices.entrySet()) {
    if (entry.getValue() == 0) {
      ready.add(entry.getKey());
    }
  }
  List<VertexT> sorted = Lists.newArrayList();
  while (!ready.isEmpty()) {
    VertexT current = ready.remove();
    sorted.add(current);
    // Removing `current` lowers each successor's in-degree; enqueue newly-free vertices.
    for (VertexT successor : adjacencyList.get(current)) {
      if (decrementAndGetCount(successor) == 0) {
        ready.add(successor);
      }
    }
  }
  // If some vertex was never freed, the remaining edges must form a cycle.
  if (sorted.size() != vertices.size()) {
    throw new IllegalArgumentException("failed to perform topological sort, graph has a cycle");
  }
  return sorted;
}
|
java
|
/**
 * Cancels a pending request on this channel. By convention, writing the callback object
 * itself (instead of a frame) through the coalescer signals cancellation; write failures are
 * only logged.
 */
public void cancel(ResponseCallback responseCallback) {
// To avoid creating an extra message, we adopt the convention that writing the callback
// directly means cancellation
writeCoalescer.writeAndFlush(channel, responseCallback).addListener(UncaughtExceptions::log);
}
|
java
|
/**
 * Emits schema-change events for one keyspace transition: a "created" event when there was no
 * previous version, otherwise an "updated" event if the top-level attributes differ, always
 * followed by a recursive diff of the keyspace's children.
 */
private void computeEvents(
    KeyspaceMetadata oldKeyspace,
    KeyspaceMetadata newKeyspace,
    ImmutableList.Builder<Object> events) {
  if (oldKeyspace == null) {
    // Brand-new keyspace: no child diff needed.
    events.add(KeyspaceChangeEvent.created(newKeyspace));
    return;
  }
  if (!shallowEquals(oldKeyspace, newKeyspace)) {
    events.add(KeyspaceChangeEvent.updated(oldKeyspace, newKeyspace));
  }
  // Children (tables, types, ...) are diffed even when the keyspace itself is unchanged.
  computeChildEvents(oldKeyspace, newKeyspace, events);
}
|
java
|
/**
 * Accepts one incoming element on the admin event loop. Flushes immediately when coalescing
 * is effectively disabled (zero window or batch size 1) or when the batch is full; otherwise
 * buffers the element and (re)schedules a delayed flush.
 */
public void receive(IncomingT element) {
// This component is confined to the admin thread; no locking needed.
assert adminExecutor.inEventLoop();
if (stopped) {
return;
}
if (window.isZero() || maxEvents == 1) {
LOG.debug(
"Received {}, flushing immediately (window = {}, maxEvents = {})",
element,
window,
maxEvents);
// Bypass the batch entirely: coalesce a singleton list and deliver.
onFlush.accept(coalescer.apply(ImmutableList.of(element)));
} else {
currentBatch.add(element);
if (currentBatch.size() == maxEvents) {
LOG.debug(
"Received {}, flushing immediately (because {} accumulated events)",
element,
maxEvents);
flushNow();
} else {
LOG.debug("Received {}, scheduling next flush in {}", element, window);
scheduleFlush();
}
}
}
|
java
|
/**
 * Builds the default session asynchronously and wraps it in the target session type.
 * Cancellation of the returned (wrapped) stage is explicitly propagated back to the
 * underlying build stage, which {@code thenApply} alone would not do.
 */
@NonNull
public CompletionStage<SessionT> buildAsync() {
CompletionStage<CqlSession> buildStage = buildDefaultSessionAsync();
CompletionStage<SessionT> wrapStage = buildStage.thenApply(this::wrap);
// thenApply does not propagate cancellation (!)
CompletableFutures.propagateCancellation(wrapStage, buildStage);
return wrapStage;
}
|
java
|
/**
 * Refills the row cache by pulling XML events from the parser until the cache holds
 * {@code rowCacheSize} rows or the stream is exhausted, then resets the cache iterator.
 *
 * @return true if at least one row was read into the cache
 * @throws ParseException wrapping any underlying XML stream error
 */
private boolean getRow() {
try {
rowCache.clear();
while(rowCache.size() < rowCacheSize && parser.hasNext()) {
// handleEvent appends completed rows to rowCache as it consumes events.
handleEvent(parser.nextEvent());
}
rowCacheIterator = rowCache.iterator();
return rowCacheIterator.hasNext();
} catch(XMLStreamException e) {
throw new ParseException("Error reading XML stream", e);
}
}
|
java
|
/**
 * Resolves the number-format of a cell from its style attribute ("s") and stores both the
 * format index and format string on the cell. Falls back to the first style in the styles
 * table when the cell has no style attribute, to a built-in format when the style has no
 * custom format string, and to nulls when no style is available at all.
 */
void setFormatString(StartElement startElement, StreamingCell cell) {
Attribute cellStyle = startElement.getAttributeByName(new QName("s"));
String cellStyleString = (cellStyle != null) ? cellStyle.getValue() : null;
XSSFCellStyle style = null;
if(cellStyleString != null) {
style = stylesTable.getStyleAt(Integer.parseInt(cellStyleString));
} else if(stylesTable.getNumCellStyles() > 0) {
// No explicit style: default to style 0, the workbook's default cell style.
style = stylesTable.getStyleAt(0);
}
if(style != null) {
cell.setNumericFormatIndex(style.getDataFormat());
String formatString = style.getDataFormatString();
if(formatString != null) {
cell.setNumericFormat(formatString);
} else {
// Custom string absent: look the index up among Excel's built-in formats.
cell.setNumericFormat(BuiltinFormats.getBuiltinFormat(cell.getNumericFormatIndex()));
}
} else {
cell.setNumericFormatIndex(null);
cell.setNumericFormat(null);
}
}
|
java
|
/**
 * Returns a supplier producing the formatted contents of the current cell based on its type
 * attribute: "s" (shared-string index), "inlineStr"/"str" (literal string), "e" (error),
 * "n" (numeric, formatted lazily), anything else falls back to the raw contents.
 *
 * <p>Numeric formatting is deferred and cached inside the returned supplier because
 * {@code formatRawCellContents} is expensive and not always needed.
 */
private Supplier getFormatterForType(String type) {
switch(type) {
case "s": //string stored in shared table
if (!lastContents.isEmpty()) {
// lastContents holds the index into the shared-strings table.
int idx = Integer.parseInt(lastContents);
return new StringSupplier(new XSSFRichTextString(sst.getEntryAt(idx)).toString());
}
return new StringSupplier(lastContents);
case "inlineStr": //inline string (not in sst)
case "str":
return new StringSupplier(new XSSFRichTextString(lastContents).toString());
case "e": //error type
return new StringSupplier("ERROR: " + lastContents);
case "n": //numeric type
if(currentCell.getNumericFormat() != null && lastContents.length() > 0) {
// the formatRawCellContents operation incurs a significant overhead on large sheets,
// and we want to defer the execution of this method until the value is actually needed.
// it is not needed in all cases..
// Capture the current state now; the fields will be overwritten by later cells.
final String currentLastContents = lastContents;
final int currentNumericFormatIndex = currentCell.getNumericFormatIndex();
final String currentNumericFormat = currentCell.getNumericFormat();
return new Supplier() {
String cachedContent;
@Override
public Object getContent() {
if (cachedContent == null) {
cachedContent = dataFormatter.formatRawCellContents(
Double.parseDouble(currentLastContents),
currentNumericFormatIndex,
currentNumericFormat);
}
return cachedContent;
}
};
} else {
return new StringSupplier(lastContents);
}
default:
return new StringSupplier(lastContents);
}
}
|
java
|
/**
 * Returns the current cell's contents without applying any number format: shared strings are
 * resolved through the shared-strings table, inline strings are unwrapped, everything else is
 * returned raw.
 */
String unformattedContents() {
switch(currentCell.getType()) {
case "s": //string stored in shared table
if (!lastContents.isEmpty()) {
// lastContents is the shared-strings table index for this cell.
int idx = Integer.parseInt(lastContents);
return new XSSFRichTextString(sst.getEntryAt(idx)).toString();
}
return lastContents;
case "inlineStr": //inline string (not in sst)
return new XSSFRichTextString(lastContents).toString();
default:
return lastContents;
}
}
|
java
|
/**
 * Maps the cell's raw XML type attribute to a POI {@link CellType}. Formula cells are flagged
 * independently of the type attribute; empty content or a missing attribute means BLANK.
 *
 * <p>Fix: the original had an {@code else if("str".equals(type)) return CellType.FORMULA;}
 * branch after the STRING branch — unreachable, because the STRING condition already matches
 * "str". The dead branch has been removed; observable behavior is unchanged.
 *
 * @throws UnsupportedOperationException for an unrecognized type attribute
 */
@Override
public CellType getCellType() {
if(formulaType) {
return CellType.FORMULA;
} else if(contentsSupplier.getContent() == null || type == null) {
return CellType.BLANK;
} else if("n".equals(type)) {
return CellType.NUMERIC;
} else if("s".equals(type) || "inlineStr".equals(type) || "str".equals(type)) {
// "str" (a formula's cached string result) is reported as STRING here, matching
// getCachedFormulaResultType below.
return CellType.STRING;
} else if("b".equals(type)) {
return CellType.BOOLEAN;
} else if("e".equals(type)) {
return CellType.ERROR;
} else {
throw new UnsupportedOperationException("Unsupported cell type '" + type + "'");
}
}
|
java
|
/**
 * Returns the cell contents as a string, or the empty string (never null) for an empty cell.
 */
@Override
public String getStringCellValue() {
  final Object content = contentsSupplier.getContent();
  if (content == null) {
    return "";
  }
  return content.toString();
}
|
java
|
/**
 * Interprets the cell's numeric value as an Excel date, honoring the workbook's 1904 date
 * system flag. Returns null for a cell with no raw contents.
 *
 * @throws IllegalStateException if the cell is a string cell
 */
@Override
public Date getDateCellValue() {
if(getCellType() == CellType.STRING){
throw new IllegalStateException("Cell type cannot be CELL_TYPE_STRING");
}
// NOTE(review): HSSFDateUtil is deprecated in newer POI; DateUtil is the replacement — confirm POI version.
return rawContents == null ? null : HSSFDateUtil.getJavaDate(getNumericCellValue(), use1904Dates);
}
|
java
|
/**
 * Returns the cell's boolean value: false for blank cells, the stored flag for boolean cells
 * (true only when the raw contents equal the TRUE marker).
 *
 * @throws NotSupportedException for formula cells (no cached evaluation here)
 */
@Override
public boolean getBooleanCellValue() {
  final CellType resolved = getCellType();
  if (resolved == CellType.BLANK) {
    return false;
  }
  if (resolved == CellType.BOOLEAN) {
    return rawContents != null && TRUE_AS_STRING.equals(rawContents);
  }
  if (resolved == CellType.FORMULA) {
    throw new NotSupportedException();
  }
  throw typeMismatch(CellType.BOOLEAN, resolved, false);
}
|
java
|
/** Returns the human-readable name of a cell type, as used in type-mismatch error messages. */
private static String getCellTypeName(CellType cellType) {
  switch (cellType) {
    case BLANK:
      return "blank";
    case STRING:
      return "text";
    case BOOLEAN:
      return "boolean";
    case ERROR:
      return "error";
    case NUMERIC:
      return "numeric";
    case FORMULA:
      return "formula";
    default:
      // Defensive: covers any future enum constant.
      return "#unknown cell type (" + cellType + ")#";
  }
}
|
java
|
/**
 * Returns the type of a formula cell's cached result, derived from the same type attribute
 * as {@code getCellType} (with "str" reported as STRING).
 *
 * @throws IllegalStateException if this is not a formula cell
 * @throws UnsupportedOperationException for an unrecognized type attribute
 */
@Override
public CellType getCachedFormulaResultType() {
if (formulaType) {
if(contentsSupplier.getContent() == null || type == null) {
return CellType.BLANK;
} else if("n".equals(type)) {
return CellType.NUMERIC;
} else if("s".equals(type) || "inlineStr".equals(type) || "str".equals(type)) {
return CellType.STRING;
} else if("b".equals(type)) {
return CellType.BOOLEAN;
} else if("e".equals(type)) {
return CellType.ERROR;
} else {
throw new UnsupportedOperationException("Unsupported cell type '" + type + "'");
}
} else {
throw new IllegalStateException("Only formula cells have cached results");
}
}
|
java
|
/**
 * Closes the underlying workbook and then deletes the temporary spill file, if any. The
 * delete runs in a finally block so cleanup is attempted even when closing throws.
 *
 * <p>Fix: the result of {@code tmp.delete()} was silently discarded; a failed deletion is
 * now logged so leaked temp files are diagnosable.
 *
 * @throws IOException if closing the workbook fails
 */
@Override
public void close() throws IOException {
try {
workbook.close();
} finally {
if(tmp != null) {
if (log.isDebugEnabled()) {
log.debug("Deleting tmp file [" + tmp.getAbsolutePath() + "]");
}
if (!tmp.delete()) {
log.debug("Failed to delete tmp file [" + tmp.getAbsolutePath() + "]");
}
}
}
}
|
java
|
/**
 * Checks whether a thumbnail matching the given identity (pages, size, bounds) is already
 * cached, by comparing a probe part against each cached thumbnail.
 */
public boolean containsThumbnail(int userPage, int page, float width, float height, RectF pageRelativeBounds) {
    // Probe carries only identity fields; the bitmap slot is null.
    PagePart probe = new PagePart(userPage, page, null, width, height, pageRelativeBounds, true, 0);
    for (PagePart cached : thumbnails) {
        // Keep cached.equals(probe) ordering to match the original comparison direction.
        if (cached.equals(probe)) {
            return true;
        }
    }
    return false;
}
|
java
|
/**
 * Computes the distance between the first two pointers of a multi-touch event (used for
 * pinch-zoom); returns 0 when fewer than two pointers are down.
 */
private float distance(MotionEvent event) {
    if (event.getPointerCount() < 2) {
        return 0;
    }
    float dx = event.getX(POINTER1) - event.getX(POINTER2);
    float dy = event.getY(POINTER1) - event.getY(POINTER2);
    return PointF.length(dx, dy);
}
|
java
|
/**
 * Decides whether a down/up gesture qualifies as a click: the press must be short and the
 * finger travel small. A null up-event is never a click.
 */
private boolean isClick(MotionEvent upEvent, float xDown, float yDown, float xUp, float yUp) {
    if (upEvent == null) {
        return false;
    }
    long pressDuration = upEvent.getEventTime() - upEvent.getDownTime();
    float travel = PointF.length(xDown - xUp, yDown - yUp);
    return pressDuration < MAX_CLICK_TIME && travel < MAX_CLICK_DISTANCE;
}
|
java
|
/**
 * Draws one rendered page part onto the canvas at the correct scaled position, skipping parts
 * that fall entirely outside the visible area. The canvas is translated to the part's page
 * and translated back before returning (including on the early-exit path).
 */
private void drawPart(Canvas canvas, PagePart part) {
// Can seem strange, but avoid lot of calls
RectF pageRelativeBounds = part.getPageRelativeBounds();
Bitmap renderedBitmap = part.getRenderedBitmap();
// Move to the target page
float localTranslationX = 0;
float localTranslationY = 0;
if (swipeVertical)
localTranslationY = toCurrentScale(part.getUserPage() * optimalPageHeight);
else
localTranslationX = toCurrentScale(part.getUserPage() * optimalPageWidth);
canvas.translate(localTranslationX, localTranslationY);
Rect srcRect = new Rect(0, 0, renderedBitmap.getWidth(), //
renderedBitmap.getHeight());
// Destination rectangle in page coordinates, scaled to the current zoom.
float offsetX = toCurrentScale(pageRelativeBounds.left * optimalPageWidth);
float offsetY = toCurrentScale(pageRelativeBounds.top * optimalPageHeight);
float width = toCurrentScale(pageRelativeBounds.width() * optimalPageWidth);
float height = toCurrentScale(pageRelativeBounds.height() * optimalPageHeight);
// If we use float values for this rectangle, there will be
// a possible gap between page parts, especially when
// the zoom level is high.
RectF dstRect = new RectF((int) offsetX, (int) offsetY, //
(int) (offsetX + width), //
(int) (offsetY + height));
// Check if bitmap is in the screen
float translationX = currentXOffset + localTranslationX;
float translationY = currentYOffset + localTranslationY;
if (translationX + dstRect.left >= getWidth() || translationX + dstRect.right <= 0 ||
translationY + dstRect.top >= getHeight() || translationY + dstRect.bottom <= 0) {
// Off-screen: undo the translation before bailing out.
canvas.translate(-localTranslationX, -localTranslationY);
return;
}
canvas.drawBitmap(renderedBitmap, srcRect, dstRect, paint);
if (Constants.DEBUG_MODE) {
debugPaint.setColor(part.getUserPage() % 2 == 0 ? Color.RED : Color.BLUE);
canvas.drawRect(dstRect, debugPaint);
}
// Restore the canvas position
canvas.translate(-localTranslationX, -localTranslationY);
}
|
java
|
/**
 * (Re)loads the page parts around the current page: cancels pending render tasks, starts a
 * fresh cache generation, then loads parts outward from the current page alternating forward
 * and backward (pattern [...][4][2][0][1][3][...]) until the part budget is spent.
 */
public void loadPages() {
// Not laid out yet: nothing can be sized or rendered.
if (optimalPageWidth == 0 || optimalPageHeight == 0) {
return;
}
// Cancel all current tasks
renderingAsyncTask.removeAllTasks();
cacheManager.makeANewSet();
// Find current index in filtered user pages
int index = currentPage;
if (filteredUserPageIndexes != null) {
index = filteredUserPageIndexes[currentPage];
}
// Loop through the pages like [...][4][2][0][1][3][...]
// loading as many parts as it can.
int parts = 0;
for (int i = 0; i <= Constants.LOADED_SIZE / 2 && parts < CACHE_SIZE; i++) {
parts += loadPage(index + i, CACHE_SIZE - parts);
if (i != 0 && parts < CACHE_SIZE) {
parts += loadPage(index - i, CACHE_SIZE - parts);
}
}
invalidate();
}
|
java
|
/**
 * Called when the PDF document has been decoded: records page count and dimensions (all pages
 * are assumed to share page 0's size), recomputes layout, jumps to the configured default
 * page, and notifies the optional load-complete listener.
 */
public void loadComplete(DecodeService decodeService) {
this.decodeService = decodeService;
this.documentPageCount = decodeService.getPageCount();
// We assume all the pages are the same size
this.pageWidth = decodeService.getPageWidth(0);
this.pageHeight = decodeService.getPageHeight(0);
state = State.LOADED;
calculateOptimalWidthAndHeight();
// Notify the listener
jumpTo(defaultPage);
if (onLoadCompleteListener != null) {
onLoadCompleteListener.loadComplete(documentPageCount);
}
}
|
java
|
/**
 * Stores a freshly rendered part in the appropriate cache (thumbnail vs. full-quality) and
 * requests a redraw so it becomes visible.
 */
public void onBitmapRendered(PagePart part) {
    final boolean isThumb = part.isThumbnail();
    if (isThumb) {
        cacheManager.cacheThumbnail(part);
    } else {
        cacheManager.cachePart(part);
    }
    invalidate();
}
|
java
|
/**
 * Clamps a user-supplied page number into the valid range [0, lastPage], where the last page
 * comes from the filtered page array when one is set, otherwise from the document page count.
 */
private int determineValidPageNumberFrom(int userPage) {
    if (userPage <= 0) {
        return 0;
    }
    int lastPage = (originalUserPages != null)
            ? originalUserPages.length - 1
            : documentPageCount - 1;
    return Math.min(userPage, lastPage);
}
|
java
|
/**
 * Fits the page into the view while preserving aspect ratio: first scale to full view width,
 * and if the resulting height overflows, scale to full view height instead. Then recomputes
 * the dependent mask and minimap bounds. No-op before layout or before a document is set.
 */
private void calculateOptimalWidthAndHeight() {
if (state == State.DEFAULT || getWidth() == 0) {
return;
}
float maxWidth = getWidth(), maxHeight = getHeight();
float w = pageWidth, h = pageHeight;
float ratio = w / h;
// Width-first fit; fall back to height fit when the page is too tall.
w = maxWidth;
h = (float) Math.floor(maxWidth / ratio);
if (h > maxHeight) {
h = maxHeight;
w = (float) Math.floor(maxHeight * ratio);
}
optimalPageWidth = w;
optimalPageHeight = h;
calculateMasksBounds();
calculateMinimapBounds();
}
|
java
|
/**
 * Positions the minimap in the top-right corner (5px margin), scaled so its larger dimension
 * equals MINIMAP_MAX_SIZE while keeping the page aspect ratio, then updates the highlighted
 * area within it.
 */
private void calculateMinimapBounds() {
float ratioX = Constants.MINIMAP_MAX_SIZE / optimalPageWidth;
float ratioY = Constants.MINIMAP_MAX_SIZE / optimalPageHeight;
// The smaller ratio guarantees both dimensions fit within MINIMAP_MAX_SIZE.
float ratio = Math.min(ratioX, ratioY);
float minimapWidth = optimalPageWidth * ratio;
float minimapHeight = optimalPageHeight * ratio;
minimapBounds = new RectF(getWidth() - 5 - minimapWidth, 5, getWidth() - 5, 5 + minimapHeight);
calculateMinimapAreaBounds();
}
|
java
|
/**
 * Recomputes the two side masks that cover the area left and right of the (horizontally
 * centered) page at the current scale.
 */
private void calculateMasksBounds() {
leftMask = new RectF(0, 0, getWidth() / 2 - toCurrentScale(optimalPageWidth) / 2, getHeight());
rightMask = new RectF(getWidth() / 2 + toCurrentScale(optimalPageWidth) / 2, 0, getWidth(), getHeight());
}
|
java
|
/**
 * Moves the view to the given scroll offsets, clamping them so the page stays within legal
 * bounds. The two halves mirror each other: in vertical-swipe mode X is the "cross" axis
 * (centered or edge-clamped) and Y the scroll axis; in horizontal mode the roles swap.
 * On the scroll axis, when zoomed, the offset is clamped to the current page (and the minimap
 * flag is toggled); when not zoomed, it is clamped between the centers of the neighbor pages.
 * Finally stores the offsets, updates the minimap area, and redraws.
 */
public void moveTo(float offsetX, float offsetY) {
if (swipeVertical) {
// Check X offset
if (toCurrentScale(optimalPageWidth) < getWidth()) {
// Page narrower than view: force horizontal centering.
offsetX = getWidth() / 2 - toCurrentScale(optimalPageWidth) / 2;
} else {
if (offsetX > 0) {
offsetX = 0;
} else if (offsetX + toCurrentScale(optimalPageWidth) < getWidth()) {
offsetX = getWidth() - toCurrentScale(optimalPageWidth);
}
}
// Check Y offset
if (isZooming()) {
if (toCurrentScale(optimalPageHeight) < getHeight()) {
miniMapRequired = false;
// Center the current page vertically.
offsetY = getHeight() / 2 - toCurrentScale((currentFilteredPage + 0.5f) * optimalPageHeight);
} else {
miniMapRequired = true;
// Keep the visible window inside the current page's vertical span.
if (offsetY + toCurrentScale(currentFilteredPage * optimalPageHeight) > 0) {
offsetY = -toCurrentScale(currentFilteredPage * optimalPageHeight);
} else if (offsetY + toCurrentScale((currentFilteredPage + 1) * optimalPageHeight) < getHeight()) {
offsetY = getHeight() - toCurrentScale((currentFilteredPage + 1) * optimalPageHeight);
}
}
} else {
// Not zoomed: limit scrolling to the neighbors' center offsets.
float maxY = calculateCenterOffsetForPage(currentFilteredPage + 1);
float minY = calculateCenterOffsetForPage(currentFilteredPage - 1);
if (offsetY < maxY) {
offsetY = maxY;
} else if (offsetY > minY) {
offsetY = minY;
}
}
} else {
// Check Y offset
if (toCurrentScale(optimalPageHeight) < getHeight()) {
// Page shorter than view: force vertical centering.
offsetY = getHeight() / 2 - toCurrentScale(optimalPageHeight) / 2;
} else {
if (offsetY > 0) {
offsetY = 0;
} else if (offsetY + toCurrentScale(optimalPageHeight) < getHeight()) {
offsetY = getHeight() - toCurrentScale(optimalPageHeight);
}
}
// Check X offset
if (isZooming()) {
if (toCurrentScale(optimalPageWidth) < getWidth()) {
miniMapRequired = false;
offsetX = getWidth() / 2 - toCurrentScale((currentFilteredPage + 0.5f) * optimalPageWidth);
} else {
miniMapRequired = true;
if (offsetX + toCurrentScale(currentFilteredPage * optimalPageWidth) > 0) {
offsetX = -toCurrentScale(currentFilteredPage * optimalPageWidth);
} else if (offsetX + toCurrentScale((currentFilteredPage + 1) * optimalPageWidth) < getWidth()) {
offsetX = getWidth() - toCurrentScale((currentFilteredPage + 1) * optimalPageWidth);
}
}
} else {
float maxX = calculateCenterOffsetForPage(currentFilteredPage + 1);
float minX = calculateCenterOffsetForPage(currentFilteredPage - 1);
if (offsetX < maxX) {
offsetX = maxX;
} else if (offsetX > minX) {
offsetX = minX;
}
}
}
currentXOffset = offsetX;
currentYOffset = offsetY;
calculateMinimapAreaBounds();
invalidate();
}
|
java
|
/**
 * Loads a PDF from the app's assets by first copying it to a file, then delegating to
 * {@link #fromFile(File)}. An I/O failure copying the asset is rethrown as a (library)
 * FileNotFoundException with the original cause attached.
 */
public Configurator fromAsset(String assetName) {
try {
File pdfFile = FileUtils.fileFromAsset(getContext(), assetName);
return fromFile(pdfFile);
} catch (IOException e) {
throw new FileNotFoundException(assetName + " does not exist.", e);
}
}
|
java
|
/**
 * Creates a configurator for the given PDF file, failing fast if it does not exist.
 *
 * <p>Fix: the error message concatenated the path directly with "does not exist." producing
 * e.g. "/sdcard/a.pdfdoes not exist."; a separating space has been added.
 *
 * @throws FileNotFoundException if the file is missing
 */
public Configurator fromFile(File file) {
    if (!file.exists()) {
        throw new FileNotFoundException(file.getAbsolutePath() + " does not exist.");
    }
    return new Configurator(Uri.fromFile(file));
}
|
java
|
/**
 * Benchmark: average cost of adding {@code data.size} attributes one at a time via
 * {@code putAttribute}. Returns the span so the JIT cannot eliminate the work.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public Span putAttribute(Data data) {
  Span benchSpan = data.attributeSpan;
  for (int idx = 0; idx < data.size; idx++) {
    benchSpan.putAttribute(data.attributeKeys[idx], data.attributeValues[idx]);
  }
  return benchSpan;
}
|
java
|
/**
 * Benchmark: average cost of adding a pre-built attribute map in one {@code putAttributes}
 * call (bulk counterpart of {@code putAttribute}). Returns the span to defeat dead-code
 * elimination.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public Span putAttributes(Data data) {
Span span = data.attributeSpan;
span.putAttributes(data.attributeMap);
return span;
}
|
java
|
/**
 * Benchmark: average cost of adding a description-only annotation (no attributes). Returns
 * the span to defeat dead-code elimination.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public Span addAnnotationEmpty(Data data) {
Span span = data.annotationSpanEmpty;
span.addAnnotation(ANNOTATION_DESCRIPTION);
return span;
}
|
java
|
/**
 * Benchmark: average cost of adding an annotation with an attached attribute map. Returns the
 * span to defeat dead-code elimination.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public Span addAnnotationWithAttributes(Data data) {
Span span = data.annotationSpanAttributes;
span.addAnnotation(ANNOTATION_DESCRIPTION, data.attributeMap);
return span;
}
|
java
|
/**
 * Benchmark: average cost of building an {@code Annotation} object and adding it to the span
 * (includes the Annotation construction in the measured time). Returns the span to defeat
 * dead-code elimination.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public Span addAnnotationWithAnnotation(Data data) {
Span span = data.annotationSpanAnnotation;
Annotation annotation =
Annotation.fromDescriptionAndAttributes(ANNOTATION_DESCRIPTION, data.attributeMap);
span.addAnnotation(annotation);
return span;
}
|
java
|
/**
 * Benchmark: average cost of adding {@code data.size} pre-built message events to a span.
 * Returns the span so the JIT cannot eliminate the work.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public Span addMessageEvent(Data data) {
  Span benchSpan = data.messageEventSpan;
  for (int idx = 0; idx < data.size; idx++) {
    benchSpan.addMessageEvent(data.messageEvents[idx]);
  }
  return benchSpan;
}
|
java
|
/**
 * Benchmark: average cost of adding {@code data.size} pre-built links to a span. Returns the
 * span to defeat dead-code elimination.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public Span addLink(Data data) {
Span span = data.linkSpan;
for (int i = 0; i < data.size; i++) {
span.addLink(data.links[i]);
}
return span;
}
|
java
|
/**
 * Wraps a callable so the current trace context continues inside it; outside an active trace
 * the callable is returned untouched.
 */
@Override
public <V> Callable<V> wrap(Callable<V> callable) {
  if (!isTracing()) {
    return callable;
  }
  return new SpanContinuingTraceCallable<V>(this, this.traceKeys, this.spanNamer, callable);
}
|
java
|
/**
 * Wraps a runnable so the current trace context continues inside it; outside an active trace
 * the runnable is returned untouched.
 */
@Override
public Runnable wrap(Runnable runnable) {
  if (!isTracing()) {
    return runnable;
  }
  return new SpanContinuingTraceRunnable(this, this.traceKeys, this.spanNamer, runnable);
}
|
java
|
/**
 * Java agent entry point. Puts the bootstrap helper classes on the bootstrap classpath
 * (verifying they actually load from there), then lets every discovered {@code Instrumenter}
 * contribute to a ByteBuddy agent builder and installs the result.
 *
 * @param agentArgs agent arguments from the command line (unused here)
 * @param instrumentation the JVM's instrumentation hook
 */
public static void premain(String agentArgs, Instrumentation instrumentation) throws Exception {
checkNotNull(instrumentation, "instrumentation");
logger.fine("Initializing.");
// The classes in bootstrap.jar, such as ContextManger and ContextStrategy, will be referenced
// from classes loaded by the bootstrap classloader. Thus, these classes have to be loaded by
// the bootstrap classloader, too.
instrumentation.appendToBootstrapClassLoaderSearch(
new JarFile(Resources.getResourceAsTempFile("bootstrap.jar")));
checkLoadedByBootstrapClassloader(ContextTrampoline.class);
checkLoadedByBootstrapClassloader(ContextStrategy.class);
Settings settings = Settings.load();
// ignore(none()) overrides ByteBuddy's default ignores so JDK classes can be instrumented.
AgentBuilder agentBuilder =
new AgentBuilder.Default()
.disableClassFormatChanges()
.with(AgentBuilder.RedefinitionStrategy.RETRANSFORMATION)
.with(new AgentBuilderListener())
.ignore(none());
// Each Instrumenter on the classpath gets a chance to add its transformations.
for (Instrumenter instrumenter : ServiceLoader.load(Instrumenter.class)) {
agentBuilder = instrumenter.instrument(agentBuilder, settings);
}
agentBuilder.installOn(instrumentation);
logger.fine("Initialized.");
}
|
java
|
/**
 * Creates an async gRPC stub for the trace service, using plaintext when
 * {@code useInsecure} is true and TLS (with the supplied SSL context) otherwise.
 *
 * <p>NOTE(review): {@code useInsecure} is a boxed Boolean and is unboxed in the condition; a
 * null value would throw NPE — confirm callers never pass null.
 */
private static TraceServiceGrpc.TraceServiceStub getTraceServiceStub(
String endPoint, Boolean useInsecure, SslContext sslContext) {
ManagedChannelBuilder<?> channelBuilder;
if (useInsecure) {
channelBuilder = ManagedChannelBuilder.forTarget(endPoint).usePlaintext();
} else {
channelBuilder =
NettyChannelBuilder.forTarget(endPoint)
.negotiationType(NegotiationType.TLS)
.sslContext(sslContext);
}
ManagedChannel channel = channelBuilder.build();
return TraceServiceGrpc.newStub(channel);
}
|
java
|
/**
 * Benchmark: average cost of recording {@code numValues} double-count measures in one measure
 * map and committing it against the prepared tag context. Returns the map to defeat dead-code
 * elimination.
 */
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public MeasureMap recordBatchedDoubleCount(Data data) {
MeasureMap map = data.recorder.newMeasureMap();
for (int i = 0; i < data.numValues; i++) {
map.put(StatsBenchmarksUtil.DOUBLE_COUNT_MEASURES[i], (double) i);
}
map.record(data.tags);
return map;
}
|
java
|
/**
 * Copies the named classpath resource into a fresh temp file and returns that file.
 *
 * <p>Improvement: the manual try/finally around the stream is replaced by try-with-resources,
 * which also guarantees the stream is closed (and close failures are not silently masked by
 * an in-flight exception — they become suppressed exceptions).
 *
 * @param resourceName non-empty resource name; also used as the temp-file prefix
 * @return the temp file containing the resource bytes
 * @throws IOException if the resource cannot be read or the file written
 */
static File getResourceAsTempFile(String resourceName) throws IOException {
checkArgument(!Strings.isNullOrEmpty(resourceName), "resourceName");
File file = File.createTempFile(resourceName, ".tmp");
try (OutputStream os = new FileOutputStream(file)) {
getResourceAsTempFile(resourceName, file, os);
return file;
}
}
|
java
|
public static void createAndRegister(DatadogTraceConfiguration configuration)
throws MalformedURLException {
synchronized (monitor) {
checkState(handler == null, "Datadog exporter is already registered.");
String agentEndpoint = configuration.getAgentEndpoint();
String service = configuration.getService();
String type = configuration.getType();
final DatadogExporterHandler exporterHandler =
new DatadogExporterHandler(agentEndpoint, service, type);
handler = exporterHandler;
Tracing.getExportComponent()
.getSpanExporter()
.registerHandler(REGISTER_NAME, exporterHandler);
}
}
|
java
|
@Deprecated
public static void createAndRegisterWithCredentialsAndProjectId(
Credentials credentials, String projectId, Duration exportInterval) throws IOException {
checkNotNull(credentials, "credentials");
checkNotNull(projectId, "projectId");
checkNotNull(exportInterval, "exportInterval");
createInternal(
credentials, projectId, exportInterval, DEFAULT_RESOURCE, null, DEFAULT_CONSTANT_LABELS);
}
|
java
|
@Deprecated
public static void createAndRegisterWithProjectId(String projectId, Duration exportInterval)
throws IOException {
checkNotNull(projectId, "projectId");
checkNotNull(exportInterval, "exportInterval");
createInternal(
null, projectId, exportInterval, DEFAULT_RESOURCE, null, DEFAULT_CONSTANT_LABELS);
}
|
java
|
@Deprecated
public static void createAndRegister(Duration exportInterval) throws IOException {
checkNotNull(exportInterval, "exportInterval");
checkArgument(
!DEFAULT_PROJECT_ID.isEmpty(), "Cannot find a project ID from application default.");
createInternal(
null, DEFAULT_PROJECT_ID, exportInterval, DEFAULT_RESOURCE, null, DEFAULT_CONSTANT_LABELS);
}
|
java
|
@Deprecated
public static void createAndRegisterWithProjectIdAndMonitoredResource(
String projectId, Duration exportInterval, MonitoredResource monitoredResource)
throws IOException {
checkNotNull(projectId, "projectId");
checkNotNull(exportInterval, "exportInterval");
checkNotNull(monitoredResource, "monitoredResource");
createInternal(
null, projectId, exportInterval, monitoredResource, null, DEFAULT_CONSTANT_LABELS);
}
|
java
|
@Deprecated
public static void createAndRegisterWithMonitoredResource(
Duration exportInterval, MonitoredResource monitoredResource) throws IOException {
checkNotNull(exportInterval, "exportInterval");
checkNotNull(monitoredResource, "monitoredResource");
checkArgument(
!DEFAULT_PROJECT_ID.isEmpty(), "Cannot find a project ID from application default.");
createInternal(
null, DEFAULT_PROJECT_ID, exportInterval, monitoredResource, null, DEFAULT_CONSTANT_LABELS);
}
|
java
|
@VisibleForTesting
static void unsafeResetExporter() {
synchronized (monitor) {
if (instance != null) {
instance.intervalMetricReader.stop();
}
instance = null;
}
}
|
java
|
private void export() {
if (exportRpcHandler == null || exportRpcHandler.isCompleted()) {
return;
}
ArrayList<Metric> metricsList = Lists.newArrayList();
for (MetricProducer metricProducer : metricProducerManager.getAllMetricProducer()) {
metricsList.addAll(metricProducer.getMetrics());
}
List<io.opencensus.proto.metrics.v1.Metric> metricProtos = Lists.newArrayList();
for (Metric metric : metricsList) {
// TODO(songya): determine if we should make the optimization on not sending already-existed
// MetricDescriptors.
// boolean registered = true;
// if (!registeredDescriptors.contains(metric.getMetricDescriptor())) {
// registered = false;
// registeredDescriptors.add(metric.getMetricDescriptor());
// }
metricProtos.add(MetricsProtoUtils.toMetricProto(metric, null));
}
exportRpcHandler.onExport(
// For now don't include Resource in the following messages, i.e don't allow Resource to
// mutate after the initial message.
ExportMetricsServiceRequest.newBuilder().addAllMetrics(metricProtos).build());
}
|
java
|
public static void setTraceStrategy(TraceStrategy traceStrategy) {
if (TraceTrampoline.traceStrategy != null) {
throw new IllegalStateException("traceStrategy was already set");
}
if (traceStrategy == null) {
throw new NullPointerException("traceStrategy");
}
TraceTrampoline.traceStrategy = traceStrategy;
}
|
java
|
public static <T> T createInstance(Class<?> rawClass, Class<T> superclass) {
try {
return rawClass.asSubclass(superclass).getConstructor().newInstance();
} catch (Exception e) {
throw new ServiceConfigurationError(
"Provider " + rawClass.getName() + " could not be instantiated.", e);
}
}
|
java
|
  /**
   * Wraps {@link TagKey#create}, translating an invalid key name into a checked
   * deserialization exception so wire-format errors surface uniformly to callers.
   *
   * @param name the candidate tag-key name read from the serialized form.
   * @throws TagContextDeserializationException if {@code name} is not a valid tag key.
   */
  private static final TagKey createTagKey(String name) throws TagContextDeserializationException {
    try {
      return TagKey.create(name);
    } catch (IllegalArgumentException e) {
      // Preserve the original cause for diagnostics.
      throw new TagContextDeserializationException("Invalid tag key: " + name, e);
    }
  }
|
java
|
  /**
   * Wraps {@link TagValue#create}, translating an invalid value into a checked
   * deserialization exception that names the offending key and value.
   *
   * @param key the tag key the value belongs to (used only in the error message).
   * @param value the candidate tag value read from the serialized form.
   * @throws TagContextDeserializationException if {@code value} is not a valid tag value.
   */
  private static final TagValue createTagValue(TagKey key, String value)
      throws TagContextDeserializationException {
    try {
      return TagValue.create(value);
    } catch (IllegalArgumentException e) {
      // Preserve the original cause for diagnostics.
      throw new TagContextDeserializationException(
          "Invalid tag value for key " + key + ": " + value, e);
    }
  }
|
java
|
  /**
   * Ensures the given metric descriptor is registered with Stackdriver.
   *
   * <p>Returns true when the descriptor was already registered with identical contents, when it
   * is a Stackdriver built-in metric (which must not be re-created), or when the remote create
   * call succeeds; returns false on a same-name conflict or on any API/creation failure.
   */
  private boolean registerMetricDescriptor(
      io.opencensus.metrics.export.MetricDescriptor metricDescriptor) {
    String metricName = metricDescriptor.getName();
    io.opencensus.metrics.export.MetricDescriptor existingMetricDescriptor =
        registeredMetricDescriptors.get(metricName);
    if (existingMetricDescriptor != null) {
      if (existingMetricDescriptor.equals(metricDescriptor)) {
        // Ignore metricDescriptor that are already registered.
        return true;
      } else {
        // Same name but different contents: refuse to overwrite the earlier registration.
        logger.log(
            Level.WARNING,
            "A different metric with the same name is already registered: "
                + existingMetricDescriptor);
        return false;
      }
    }
    // Record locally first so repeated calls short-circuit above even if the API call fails.
    registeredMetricDescriptors.put(metricName, metricDescriptor);
    if (isBuiltInMetric(metricName)) {
      return true; // skip creating metric descriptor for stackdriver built-in metrics.
    }
    // Trace the remote create call on the caller's current span.
    Span span = tracer.getCurrentSpan();
    span.addAnnotation("Create Stackdriver Metric.");
    MetricDescriptor stackDriverMetricDescriptor =
        StackdriverExportUtils.createMetricDescriptor(
            metricDescriptor, projectId, domain, displayNamePrefix, constantLabels);
    CreateMetricDescriptorRequest request =
        CreateMetricDescriptorRequest.newBuilder()
            .setName(projectName.toString())
            .setMetricDescriptor(stackDriverMetricDescriptor)
            .build();
    try {
      metricServiceClient.createMetricDescriptor(request);
      span.addAnnotation("Finish creating MetricDescriptor.");
      return true;
    } catch (ApiException e) {
      // Map the API status code onto the span status and report failure to the caller.
      logger.log(Level.WARNING, "ApiException thrown when creating MetricDescriptor.", e);
      span.setStatus(
          Status.CanonicalCode.valueOf(e.getStatusCode().getCode().name())
              .toStatus()
              .withDescription(
                  "ApiException thrown when creating MetricDescriptor: "
                      + StackdriverExportUtils.exceptionMessage(e)));
      return false;
    } catch (Throwable e) {
      // Defensive: any unexpected failure is logged and recorded as UNKNOWN, not propagated.
      logger.log(Level.WARNING, "Exception thrown when creating MetricDescriptor.", e);
      span.setStatus(
          Status.UNKNOWN.withDescription(
              "Exception thrown when creating MetricDescriptor: "
                  + StackdriverExportUtils.exceptionMessage(e)));
      return false;
    }
  }
|
java
|
static String generateFullMetricName(String name, String type) {
return SOURCE + DELIMITER + name + DELIMITER + type;
}
|
java
|
static String generateFullMetricDescription(String metricName, Metric metric) {
return "Collected from "
+ SOURCE
+ " (metric="
+ metricName
+ ", type="
+ metric.getClass().getName()
+ ")";
}
|
java
|
public static void setContextStrategy(ContextStrategy contextStrategy) {
if (ContextTrampoline.contextStrategy != null) {
throw new IllegalStateException("contextStrategy was already set");
}
if (contextStrategy == null) {
throw new NullPointerException("contextStrategy");
}
ContextTrampoline.contextStrategy = contextStrategy;
}
|
java
|
private static Attributes toAttributesProto(
io.opencensus.trace.export.SpanData.Attributes attributes,
Map<String, AttributeValue> resourceLabels,
Map<String, AttributeValue> fixedAttributes) {
Attributes.Builder attributesBuilder =
toAttributesBuilderProto(
attributes.getAttributeMap(), attributes.getDroppedAttributesCount());
attributesBuilder.putAttributeMap(AGENT_LABEL_KEY, AGENT_LABEL_VALUE);
for (Entry<String, AttributeValue> entry : resourceLabels.entrySet()) {
attributesBuilder.putAttributeMap(entry.getKey(), entry.getValue());
}
for (Entry<String, AttributeValue> entry : fixedAttributes.entrySet()) {
attributesBuilder.putAttributeMap(entry.getKey(), entry.getValue());
}
return attributesBuilder.build();
}
|
java
|
public static void createAndRegister(String agentEndpoint) throws MalformedURLException {
synchronized (monitor) {
checkState(handler == null, "Instana exporter is already registered.");
Handler newHandler = new InstanaExporterHandler(new URL(agentEndpoint));
handler = newHandler;
register(Tracing.getExportComponent().getSpanExporter(), newHandler);
}
}
|
java
|
public synchronized Collection<T> getAll() {
List<T> all = new ArrayList<T>(size);
for (T e = head; e != null; e = e.getNext()) {
all.add(e);
}
return all;
}
|
java
|
public final void handleMessageSent(HttpRequestContext context, long bytes) {
checkNotNull(context, "context");
context.sentMessageSize.addAndGet(bytes);
if (context.span.getOptions().contains(Options.RECORD_EVENTS)) {
// record compressed size
recordMessageEvent(context.span, context.sentSeqId.addAndGet(1L), Type.SENT, bytes, 0L);
}
}
|
java
|
public final void handleMessageReceived(HttpRequestContext context, long bytes) {
checkNotNull(context, "context");
context.receiveMessageSize.addAndGet(bytes);
if (context.span.getOptions().contains(Options.RECORD_EVENTS)) {
// record compressed size
recordMessageEvent(
context.span, context.receviedSeqId.addAndGet(1L), Type.RECEIVED, bytes, 0L);
}
}
|
java
|
@GuardedBy("monitor")
private /*@Nullable*/ TreeNode findNode(/*@Nullable*/ String path) {
if (Strings.isNullOrEmpty(path) || "/".equals(path)) { // Go back to the root directory.
return root;
} else {
List<String> dirs = PATH_SPLITTER.splitToList(path);
TreeNode node = root;
for (int i = 0; i < dirs.size(); i++) {
String dir = dirs.get(i);
if ("".equals(dir) && i == 0) {
continue; // Skip the first "", the path of root node.
}
if (!node.children.containsKey(dir)) {
return null;
} else {
node = node.children.get(dir);
}
}
return node;
}
}
|
java
|
  /**
   * Adds a single measurement to the aggregation bucketed by {@code tagValues}, creating the
   * per-tag-values aggregation on first use.
   *
   * <p>NOTE(review): containsKey + put + get performs up to three map lookups per record;
   * presumably this runs under an external lock so the pattern is safe — confirm before
   * replacing it with a single-lookup variant.
   */
  void record(
      List</*@Nullable*/ TagValue> tagValues,
      double value,
      Map<String, AttachmentValue> attachments,
      Timestamp timestamp) {
    if (!tagValueAggregationMap.containsKey(tagValues)) {
      // First measurement for this tag-value combination: create its mutable aggregation.
      tagValueAggregationMap.put(
          tagValues, RecordUtils.createMutableAggregation(aggregation, measure));
    }
    tagValueAggregationMap.get(tagValues).add(value, attachments, timestamp);
  }
|
java
|
static MetricDescriptor createMetricDescriptor(
io.opencensus.metrics.export.MetricDescriptor metricDescriptor,
String projectId,
String domain,
String displayNamePrefix,
Map<LabelKey, LabelValue> constantLabels) {
MetricDescriptor.Builder builder = MetricDescriptor.newBuilder();
String type = generateType(metricDescriptor.getName(), domain);
// Name format refers to
// cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors/create
builder.setName("projects/" + projectId + "/metricDescriptors/" + type);
builder.setType(type);
builder.setDescription(metricDescriptor.getDescription());
builder.setDisplayName(createDisplayName(metricDescriptor.getName(), displayNamePrefix));
for (LabelKey labelKey : metricDescriptor.getLabelKeys()) {
builder.addLabels(createLabelDescriptor(labelKey));
}
for (LabelKey labelKey : constantLabels.keySet()) {
builder.addLabels(createLabelDescriptor(labelKey));
}
builder.setUnit(metricDescriptor.getUnit());
builder.setMetricKind(createMetricKind(metricDescriptor.getType()));
builder.setValueType(createValueType(metricDescriptor.getType()));
return builder.build();
}
|
java
|
@VisibleForTesting
static LabelDescriptor createLabelDescriptor(LabelKey labelKey) {
LabelDescriptor.Builder builder = LabelDescriptor.newBuilder();
builder.setKey(labelKey.getKey());
builder.setDescription(labelKey.getDescription());
// Now we only support String tags
builder.setValueType(ValueType.STRING);
return builder.build();
}
|
java
|
@VisibleForTesting
static MetricKind createMetricKind(Type type) {
if (type == Type.GAUGE_INT64 || type == Type.GAUGE_DOUBLE) {
return MetricKind.GAUGE;
} else if (type == Type.CUMULATIVE_INT64
|| type == Type.CUMULATIVE_DOUBLE
|| type == Type.CUMULATIVE_DISTRIBUTION) {
return MetricKind.CUMULATIVE;
}
return MetricKind.UNRECOGNIZED;
}
|
java
|
@VisibleForTesting
static MetricDescriptor.ValueType createValueType(Type type) {
if (type == Type.CUMULATIVE_DOUBLE || type == Type.GAUGE_DOUBLE) {
return MetricDescriptor.ValueType.DOUBLE;
} else if (type == Type.GAUGE_INT64 || type == Type.CUMULATIVE_INT64) {
return MetricDescriptor.ValueType.INT64;
} else if (type == Type.GAUGE_DISTRIBUTION || type == Type.CUMULATIVE_DISTRIBUTION) {
return MetricDescriptor.ValueType.DISTRIBUTION;
}
return MetricDescriptor.ValueType.UNRECOGNIZED;
}
|
java
|
@VisibleForTesting
static Metric createMetric(
io.opencensus.metrics.export.MetricDescriptor metricDescriptor,
List<LabelValue> labelValues,
String domain,
Map<LabelKey, LabelValue> constantLabels) {
Metric.Builder builder = Metric.newBuilder();
builder.setType(generateType(metricDescriptor.getName(), domain));
Map<String, String> stringTagMap = Maps.newHashMap();
List<LabelKey> labelKeys = metricDescriptor.getLabelKeys();
for (int i = 0; i < labelValues.size(); i++) {
String value = labelValues.get(i).getValue();
if (value == null) {
continue;
}
stringTagMap.put(labelKeys.get(i).getKey(), value);
}
for (Map.Entry<LabelKey, LabelValue> constantLabel : constantLabels.entrySet()) {
String constantLabelKey = constantLabel.getKey().getKey();
String constantLabelValue = constantLabel.getValue().getValue();
constantLabelValue = constantLabelValue == null ? "" : constantLabelValue;
stringTagMap.put(constantLabelKey, constantLabelValue);
}
builder.putAllLabels(stringTagMap);
return builder.build();
}
|
java
|
  /**
   * Converts an OpenCensus point value into a Stackdriver TypedValue, dispatching on the value's
   * runtime variant (double, long, distribution, or summary). Any other variant is a programming
   * error and throws IllegalArgumentException via the default case.
   */
  @VisibleForTesting
  static TypedValue createTypedValue(Value value) {
    return value.match(
        typedValueDoubleFunction,
        typedValueLongFunction,
        typedValueDistributionFunction,
        typedValueSummaryFunction,
        Functions.<TypedValue>throwIllegalArgumentException());
  }
|
java
|
@VisibleForTesting
static Distribution createDistribution(io.opencensus.metrics.export.Distribution distribution) {
Distribution.Builder builder =
Distribution.newBuilder()
.setBucketOptions(createBucketOptions(distribution.getBucketOptions()))
.setCount(distribution.getCount())
.setMean(
distribution.getCount() == 0 ? 0 : distribution.getSum() / distribution.getCount())
.setSumOfSquaredDeviation(distribution.getSumOfSquaredDeviations());
setBucketCountsAndExemplars(distribution.getBuckets(), builder);
return builder.build();
}
|
java
|
@VisibleForTesting
static Timestamp convertTimestamp(io.opencensus.common.Timestamp censusTimestamp) {
if (censusTimestamp.getSeconds() < 0) {
// StackDriver doesn't handle negative timestamps.
return Timestamp.newBuilder().build();
}
return Timestamp.newBuilder()
.setSeconds(censusTimestamp.getSeconds())
.setNanos(censusTimestamp.getNanos())
.build();
}
|
java
|
  /** Benchmark: time to build a TagContext containing {@code data.numTags} tags. */
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.NANOSECONDS)
  public TagContext tagContextCreation(Data data) {
    return TagsBenchmarksUtil.createTagContext(data.tagger.emptyBuilder(), data.numTags);
  }
|
java
|
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public Scope scopeTagContext(Data data) {
Scope scope = data.tagger.withTagContext(data.tagContext);
scope.close();
return scope;
}
|
java
|
  /** Benchmark: time to read the current TagContext from the implicit context. */
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.NANOSECONDS)
  public TagContext getCurrentTagContext(Data data) {
    return data.tagger.getCurrentTagContext();
  }
|
java
|
  /** Benchmark: time to serialize a TagContext to its binary wire format. */
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.NANOSECONDS)
  public byte[] serializeTagContext(Data data) throws Exception {
    return data.serializer.toByteArray(data.tagContext);
  }
|
java
|
  /** Benchmark: time to deserialize a TagContext from its binary wire format. */
  @Benchmark
  @BenchmarkMode(Mode.AverageTime)
  @OutputTimeUnit(TimeUnit.NANOSECONDS)
  public TagContext deserializeTagContext(Data data) throws Exception {
    return data.serializer.fromByteArray(data.serializedTagContext);
  }
|
java
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.