code
stringlengths 4
1.01M
| language
stringclasses 2
values |
|---|---|
//
// AppDelegate.h
// DemoApp
//
// Created by Stephen Anderson on 8/27/13.
// Copyright (c) 2013 Product & Technology. All rights reserved.
//
#import <UIKit/UIKit.h>
// Forward declarations keep this header lightweight; the .m should #import the real headers.
@class ViewController;
@class AdObserver;
/// Application delegate. Owns the main window, the root view controller, and
/// an AdObserver instance (semantics defined in AdObserver.h — not visible here).
@interface AppDelegate : UIResponder <UIApplicationDelegate>
@property (strong, nonatomic) UIWindow *window;
@property (strong, nonatomic) ViewController *viewController;
@property (strong, nonatomic) AdObserver *adObserver;
@end
|
Java
|
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Nov 22 2016 05:57:16).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
/// Class-dumped Xcode (DVT) protocol: the adopter invokes the block with a
/// C-string buffer and an unsigned 64-bit value — presumably the file-system
/// representation of a path and its length. NOTE(review): argument semantics
/// inferred from the signature only; confirm against DVTFoundation.
@protocol DVTFileSystemRepresentationProviding
- (void)dvt_provideFileSystemRepresentationToBlock:(void (^)(char *, unsigned long long))arg1;
@end
|
Java
|
/* Flash-message styling (success / notice / error banners). */
#flash-messages {position: absolute;}
/* NOTE(review): in each grouped rule below only the FIRST selector is scoped
   to #flash-messages; ".notice ul" and ".error ul" apply page-wide. Confirm
   this is intentional, otherwise prefix every selector. */
#flash-messages .success ul, .notice ul, .error ul { list-style: none; margin: 0; padding: 0; }
#flash-messages .success ul li, .notice ul li, .error ul li { float: none; display: block; }
/* NOTE(review): the next two rules repeat the two above, adding only
   font-weight and text-align — candidates for merging. */
#flash-messages .success ul, .notice ul, .error ul { list-style: none; margin: 0; padding: 0; font-weight: bold; }
#flash-messages .success ul li, .notice ul li, .error ul li { float: none; display: block; text-align: center;}
/* Rounded, drop-shadowed banner box (vendor prefixes for old WebKit/Gecko). */
#flash-messages div.success, div.notice, div.error {
-webkit-border-radius: 8px;
-moz-border-radius: 8px;
border-radius: 8px;
-moz-box-shadow: 0 2px 4px #ccc;
-webkit-box-shadow: 0 2px 4px #ccc;
box-shadow: 0 2px 4px #ccc;
position: relative;
z-index: 1;
}
/* Close ("x") control pinned to the banner's top-right corner. */
#flash-messages span.close {
position: absolute;
top: 1px;
right: 6px;
cursor: pointer;
font-weight: bold;
}
|
Java
|
# dpconverge
|
Java
|
import {InputWidget, InputWidgetView} from "./input_widget"
import {input} from "core/dom"
import * as p from "core/properties"
import {bk_input} from "styles/widgets/inputs"
// View for a single-line text input widget: creates the <input type="text">
// element and keeps it in sync with the model's properties.
export class TextInputView extends InputWidgetView {
model: TextInput
protected input_el: HTMLInputElement
// Patch the live element on property change rather than re-rendering.
connect_signals(): void {
super.connect_signals()
this.connect(this.model.properties.name.change, () => this.input_el.name = this.model.name || "")
this.connect(this.model.properties.value.change, () => this.input_el.value = this.model.value)
this.connect(this.model.properties.disabled.change, () => this.input_el.disabled = this.model.disabled)
this.connect(this.model.properties.placeholder.change, () => this.input_el.placeholder = this.model.placeholder)
}
// Build the DOM: one <input> appended to the group element created by the base class.
render(): void {
super.render()
this.input_el = input({
type: "text",
class: bk_input,
name: this.model.name,
value: this.model.value,
disabled: this.model.disabled,
placeholder: this.model.placeholder,
})
// "change" fires on commit (blur/Enter), not on every keystroke.
this.input_el.addEventListener("change", () => this.change_input())
this.group_el.appendChild(this.input_el)
}
// Push the DOM value back into the model, then run base-class change handling.
change_input(): void {
this.model.value = this.input_el.value
super.change_input()
}
}
// Model definition: declares the properties TextInput adds on top of InputWidget.
export namespace TextInput {
export type Attrs = p.AttrsOf<Props>
export type Props = InputWidget.Props & {
value: p.Property<string>
placeholder: p.Property<string>
}
}
// Merges the declared attrs into the class interface (bokehjs model pattern).
export interface TextInput extends TextInput.Attrs {}
// Model for a single-line text entry widget; rendered by TextInputView.
export class TextInput extends InputWidget {
properties: TextInput.Props
constructor(attrs?: Partial<TextInput.Attrs>) {
super(attrs)
}
// Registers the default view and the property defaults (both default to "").
static initClass(): void {
this.prototype.default_view = TextInputView
this.define<TextInput.Props>({
value: [ p.String, "" ],
placeholder: [ p.String, "" ],
})
}
}
TextInput.initClass()
|
Java
|
#
# Debian "jessie" ARM root filesystem image, run via QEMU user-mode emulation.
# https://github.com/andreyk0/haskell-on-arm
#
# User: debian
# Passwd: /dev/null
#
# Start from an empty base: the whole filesystem comes from the tarball below.
FROM scratch
# Unpack the Debian jessie rootfs at /.
ADD jessie.tgz /
# Statically linked QEMU lets the host kernel execute ARM binaries (binfmt_misc).
ADD qemu-arm-static /usr/bin/qemu-arm-static
CMD ["/bin/bash"]
|
Java
|
#!/usr/bin/python
# Copyright (c) 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A chain with four possible intermediates with different notBefore and notAfter
dates, for testing path building prioritization.
"""
import sys
sys.path += ['../..']
import gencerts
# Four ordered timestamps (YYMMDDHHMMSSZ): A < B < C < D.
DATE_A = '150101120000Z'
DATE_B = '150102120000Z'
DATE_C = '180101120000Z'
DATE_D = '180102120000Z'
# Root spans the full A..D window so it validates whichever intermediate is chosen.
root = gencerts.create_self_signed_root_certificate('Root')
root.set_validity_range(DATE_A, DATE_D)
# Four intermediates with the same subject and the same key (shared from
# int_ac) but different validity windows, so path building must pick among them.
int_ac = gencerts.create_intermediate_certificate('Intermediate', root)
int_ac.set_validity_range(DATE_A, DATE_C)
int_ad = gencerts.create_intermediate_certificate('Intermediate', root)
int_ad.set_validity_range(DATE_A, DATE_D)
int_ad.set_key(int_ac.get_key())
int_bc = gencerts.create_intermediate_certificate('Intermediate', root)
int_bc.set_validity_range(DATE_B, DATE_C)
int_bc.set_key(int_ac.get_key())
int_bd = gencerts.create_intermediate_certificate('Intermediate', root)
int_bd.set_validity_range(DATE_B, DATE_D)
int_bd.set_key(int_ac.get_key())
# Leaf issued by int_ac; any of the intermediates can verify it (same key).
target = gencerts.create_end_entity_certificate('Target', int_ac)
target.set_validity_range(DATE_A, DATE_D)
# Emit one PEM per certificate for the test harness to assemble chains from.
gencerts.write_chain('The root', [root], out_pem='root.pem')
gencerts.write_chain('Intermediate with validity range A..C',
[int_ac], out_pem='int_ac.pem')
gencerts.write_chain('Intermediate with validity range A..D',
[int_ad], out_pem='int_ad.pem')
gencerts.write_chain('Intermediate with validity range B..C',
[int_bc], out_pem='int_bc.pem')
gencerts.write_chain('Intermediate with validity range B..D',
[int_bd], out_pem='int_bd.pem')
gencerts.write_chain('The target', [target], out_pem='target.pem')
|
Java
|
<?php
# Hansard day page: given a YYYY-MM-DD date in the "d" query parameter, render
# that day's debate listings plus next/previous sitting-day navigation.
# Redirects to the site root when the parameter is missing or malformed.
include_once "../../includes/easyparliament/init.php";
if (($date = get_http_var('d')) && preg_match('#^\d\d\d\d-\d\d-\d\d$#', $date)) {
$this_page = 'hansard_date';
$PAGE->set_hansard_headings(array('date'=>$date));
$URL = new URL($this_page);
$db = new ParlDB;
# Next sitting day strictly after $date. Interpolating $date here is guarded
# by the strict \d{4}-\d{2}-\d{2} regex above; still, parameterized queries
# would be safer if ParlDB supports them — NOTE(review).
$q = $db->query("SELECT MIN(hdate) AS hdate FROM hansard WHERE hdate > '$date'");
if ($q->rows() > 0 && $q->field(0, 'hdate') != NULL) {
$URL->insert( array( 'd'=>$q->field(0, 'hdate') ) );
$title = format_date($q->field(0, 'hdate'), SHORTDATEFORMAT);
$nextprevdata['next'] = array (
'hdate' => $q->field(0, 'hdate'),
'url' => $URL->generate(),
'body' => 'Next day',
'title' => $title
);
}
# Previous sitting day strictly before $date (same regex guard applies).
$q = $db->query("SELECT MAX(hdate) AS hdate FROM hansard WHERE hdate < '$date'");
if ($q->rows() > 0 && $q->field(0, 'hdate') != NULL) {
$URL->insert( array( 'd'=>$q->field(0, 'hdate') ) );
$title = format_date($q->field(0, 'hdate'), SHORTDATEFORMAT);
$nextprevdata['prev'] = array (
'hdate' => $q->field(0, 'hdate'),
'url' => $URL->generate(),
'body' => 'Previous day',
'title' => $title
);
}
# Disabled "up to year" navigation link, kept for reference.
# $year = substr($date, 0, 4);
# $URL = new URL($hansardmajors[1]['page_year']);
# $URL->insert(array('y'=>$year));
# $nextprevdata['up'] = array (
# 'body' => "All of $year",
# 'title' => '',
# 'url' => $URL->generate()
# );
$DATA->set_page_metadata($this_page, 'nextprev', $nextprevdata);
$PAGE->page_start();
$PAGE->stripe_start();
include_once INCLUDESPATH . 'easyparliament/recess.php';
# Explain empty days: recess, weekend, or show the per-major listings.
$time = strtotime($date);
$dayofweek = date('w', $time);
$recess = recess_prettify(date('j', $time), date('n', $time), date('Y', $time), 1);
if ($recess[0]) {
print '<p>The Houses of Parliament are in their ' . $recess[0] . ' at this time.</p>';
} elseif ($dayofweek == 0 || $dayofweek == 6) {
print '<p>The Houses of Parliament do not meet at weekends.</p>';
} else {
$data = array(
'date' => $date
);
# One "all debates on this date" link per hansard major section.
foreach (array_keys($hansardmajors) as $major) {
$URL = new URL($hansardmajors[$major]['page_all']);
$URL->insert(array('d'=>$date));
$data[$major] = array('listurl'=>$URL->generate());
}
major_summary($data);
}
$PAGE->stripe_end(array(
array (
'type' => 'nextprev'
),
));
$PAGE->page_end();
} else {
header("Location: http://" . DOMAIN . "/");
exit;
}
|
Java
|
/*
* Copyright 2019 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "src/gpu/dawn/GrDawnBuffer.h"
#include "src/gpu/dawn/GrDawnStagingBuffer.h"
#include "src/gpu/dawn/GrDawnGpu.h"
namespace {

// Maps a Skia buffer type onto the matching dawn (WebGPU) usage flag.
// Unsupported types assert in debug builds and fall back to Vertex.
wgpu::BufferUsage GrGpuBufferTypeToDawnUsageBit(GrGpuBufferType type) {
    if (type == GrGpuBufferType::kVertex) {
        return wgpu::BufferUsage::Vertex;
    }
    if (type == GrGpuBufferType::kIndex) {
        return wgpu::BufferUsage::Index;
    }
    if (type == GrGpuBufferType::kXferCpuToGpu) {
        return wgpu::BufferUsage::CopySrc;
    }
    if (type == GrGpuBufferType::kXferGpuToCpu) {
        return wgpu::BufferUsage::CopyDst;
    }
    SkASSERT(!"buffer type not supported by Dawn");
    return wgpu::BufferUsage::Vertex;
}

}  // namespace
// Creates a GPU-side dawn buffer of |sizeInBytes| bytes. CopyDst is always
// OR'd into the usage so staging-buffer uploads (see onUnmap) can target it.
GrDawnBuffer::GrDawnBuffer(GrDawnGpu* gpu, size_t sizeInBytes, GrGpuBufferType type,
GrAccessPattern pattern)
: INHERITED(gpu, sizeInBytes, type, pattern) {
wgpu::BufferDescriptor bufferDesc;
bufferDesc.size = sizeInBytes;
bufferDesc.usage = GrGpuBufferTypeToDawnUsageBit(type) | wgpu::BufferUsage::CopyDst;
fBuffer = this->getDawnGpu()->device().CreateBuffer(&bufferDesc);
this->registerWithCache(SkBudgeted::kYes);
}
// No explicit cleanup needed: members release their resources in their own destructors.
GrDawnBuffer::~GrDawnBuffer() {
}
// "Maps" the buffer by allocating a CPU-visible staging slice the size of this
// buffer. fMapPtr points into the slice; the actual GPU copy is deferred to
// onUnmap(). No-op once the resource has been destroyed.
void GrDawnBuffer::onMap() {
if (this->wasDestroyed()) {
return;
}
GrStagingBuffer::Slice slice = getGpu()->allocateStagingBufferSlice(this->size());
fStagingBuffer = static_cast<GrDawnStagingBuffer*>(slice.fBuffer)->buffer();
fStagingOffset = slice.fOffset;
fMapPtr = slice.fData;
}
// Flushes the staged bytes: invalidates the map pointer and records a
// staging-buffer -> fBuffer copy on the gpu's shared copy encoder.
void GrDawnBuffer::onUnmap() {
if (this->wasDestroyed()) {
return;
}
fMapPtr = nullptr;
getDawnGpu()->getCopyEncoder()
.CopyBufferToBuffer(fStagingBuffer, fStagingOffset, fBuffer, 0, this->size());
}
// Uploads |srcSizeInBytes| bytes from |src| into the buffer via the staging
// path (map, copy, unmap). Returns false if the resource was destroyed or the
// write does not fit.
bool GrDawnBuffer::onUpdateData(const void* src, size_t srcSizeInBytes) {
    if (this->wasDestroyed()) {
        return false;
    }
    // Guard the memcpy: onMap() allocates a staging slice of exactly
    // this->size() bytes, so a larger write would stomp unrelated staging memory.
    if (srcSizeInBytes > this->size()) {
        return false;
    }
    this->onMap();
    if (!fMapPtr) {
        return false;
    }
    memcpy(fMapPtr, src, srcSizeInBytes);
    this->onUnmap();
    return true;
}
// Returns the owning gpu downcast to the Dawn backend type; only valid while
// the resource is alive (asserted in debug builds).
GrDawnGpu* GrDawnBuffer::getDawnGpu() const {
    SkASSERT(!this->wasDestroyed());
    GrGpu* gpu = this->getGpu();
    return static_cast<GrDawnGpu*>(gpu);
}
|
Java
|
<?php
/*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* and is licensed under the LGPL. For more information, see
* <http://www.phpdoctrine.org>.
*/
namespace Doctrine\ORM\Mapping;
/**
* A MappingException indicates that something is wrong with the mapping setup.
*
* @since 2.0
*/
class MappingException extends \Doctrine\ORM\ORMException
{
    /** No entity paths were configured for the AnnotationDriver. */
    public static function pathRequired()
    {
        return new self("Specifying the paths to your entities is required ".
            "in the AnnotationDriver to retrieve all class names.");
    }

    /** The entity declares no identifier/primary key. */
    public static function identifierRequired($entityName)
    {
        return new self("No identifier/primary key specified for Entity '$entityName'."
            . " Every Entity must have an identifier/primary key.");
    }

    public static function invalidInheritanceType($entityName, $type)
    {
        return new self("The inheritance type '$type' specified for '$entityName' does not exist.");
    }

    public static function generatorNotAllowedWithCompositeId()
    {
        return new self("Id generators can't be used with a composite id.");
    }

    public static function missingFieldName()
    {
        return new self("The association mapping misses the 'fieldName' attribute.");
    }

    public static function missingTargetEntity($fieldName)
    {
        return new self("The association mapping '$fieldName' misses the 'targetEntity' attribute.");
    }

    public static function missingSourceEntity($fieldName)
    {
        return new self("The association mapping '$fieldName' misses the 'sourceEntity' attribute.");
    }

    public static function mappingFileNotFound($entityName, $fileName)
    {
        return new self("No mapping file found named '$fileName' for class '$entityName'.");
    }

    public static function mappingNotFound($className, $fieldName)
    {
        return new self("No mapping found for field '$fieldName' on class '$className'.");
    }

    public static function oneToManyRequiresMappedBy($fieldName)
    {
        return new self("OneToMany mapping on field '$fieldName' requires the 'mappedBy' attribute.");
    }

    public static function joinTableRequired($fieldName)
    {
        // Fixed message grammar: was "requires an the 'joinTable' attribute".
        return new self("The mapping of field '$fieldName' requires the 'joinTable' attribute.");
    }

    /**
     * Called if a required option was not found but is required
     *
     * @param string $field which field cannot be processed?
     * @param string $expectedOption which option is required
     * @param string $hint Can optionally be used to supply a tip for common mistakes,
     *                     e.g. "Did you think of the plural s?"
     * @return MappingException
     */
    public static function missingRequiredOption($field, $expectedOption, $hint = '')
    {
        $message = "The mapping of field '{$field}' is invalid: The option '{$expectedOption}' is required.";
        if ( ! empty($hint)) {
            $message .= ' (Hint: ' . $hint . ')';
        }
        return new self($message);
    }

    /**
     * Generic exception for invalid mappings.
     *
     * @param string $fieldName
     */
    public static function invalidMapping($fieldName)
    {
        return new self("The mapping of field '$fieldName' is invalid.");
    }

    /**
     * Exception for reflection exceptions - adds the entity name,
     * because there might be long classnames that will be shortened
     * within the stacktrace
     *
     * @param string $entity The entity's name
     * @param \ReflectionException $previousException
     */
    public static function reflectionFailure($entity, \ReflectionException $previousException)
    {
        return new self('An error occurred in ' . $entity, 0, $previousException);
    }

    public static function joinColumnMustPointToMappedField($className, $joinColumn)
    {
        return new self('The column ' . $joinColumn . ' must be mapped to a field in class '
            . $className . ' since it is referenced by a join column of another class.');
    }

    public static function classIsNotAValidEntityOrMappedSuperClass($className)
    {
        return new self('Class '.$className.' is not a valid entity or mapped super class.');
    }

    public static function propertyTypeIsRequired($className, $propertyName)
    {
        return new self("The attribute 'type' is required for the column description of property ".$className."::\$".$propertyName.".");
    }

    public static function tableIdGeneratorNotImplemented($className)
    {
        return new self("TableIdGenerator is not yet implemented for use with class ".$className);
    }

    /**
     * @param string $entity    The entity's name
     * @param string $fieldName The name of the field that was already declared
     */
    public static function duplicateFieldMapping($entity, $fieldName) {
        return new self('Property "'.$fieldName.'" in "'.$entity.'" was already declared, but it must be declared only once');
    }

    public static function duplicateAssociationMapping($entity, $fieldName) {
        return new self('Property "'.$fieldName.'" in "'.$entity.'" was already declared, but it must be declared only once');
    }

    public static function singleIdNotAllowedOnCompositePrimaryKey($entity) {
        return new self('Single id is not allowed on composite primary key in entity '.$entity);
    }

    public static function unsupportedOptimisticLockingType($entity, $fieldName, $unsupportedType) {
        return new self('Locking type "'.$unsupportedType.'" (specified in "'.$entity.'", field "'.$fieldName.'") '
            .'is not supported by Doctrine.'
        );
    }

    public static function fileMappingDriversRequireConfiguredDirectoryPath($path = null)
    {
        if ( ! empty($path)) {
            $path = '[' . $path . ']';
        }
        return new self(
            'File mapping drivers must have a valid directory path, ' .
            'however the given path ' . $path . ' seems to be incorrect!'
        );
    }

    /**
     * Throws an exception that indicates that a class used in a discriminator map does not exist.
     * An example would be an outdated (maybe renamed) classname.
     *
     * @param string $className The class that could not be found
     * @param string $owningClass The class that declares the discriminator map.
     * @return self
     */
    public static function invalidClassInDiscriminatorMap($className, $owningClass) {
        return new self(
            "Entity class '$className' used in the discriminator map of class '$owningClass' ".
            "does not exist."
        );
    }

    public static function missingDiscriminatorMap($className)
    {
        return new self("Entity class '$className' is using inheritance but no discriminator map was defined.");
    }

    public static function missingDiscriminatorColumn($className)
    {
        return new self("Entity class '$className' is using inheritance but no discriminator column was defined.");
    }

    public static function invalidDiscriminatorColumnType($className, $type)
    {
        return new self("Discriminator column type on entity class '$className' is not allowed to be '$type'. 'string' or 'integer' type variables are suggested!");
    }

    public static function cannotVersionIdField($className, $fieldName)
    {
        // Fixed typo in message: was "versionale".
        return new self("Setting Id field '$fieldName' as versionable in entity class '$className' is not supported.");
    }

    /**
     * @param string $className
     * @param string $columnName
     * @return self
     */
    public static function duplicateColumnName($className, $columnName)
    {
        return new self("Duplicate definition of column '".$columnName."' on entity '".$className."' in a field or discriminator column mapping.");
    }
}
|
Java
|
# Try to find hwloc libraries and headers.
#
# Usage of this module:
#
#  find_package(hwloc)
#
# Variables defined by this module:
#
#  HWLOC_FOUND         System has hwloc libraries and headers
#  HWLOC_LIBRARIES     The hwloc library
#  HWLOC_INCLUDE_DIRS  The location of HWLOC headers

find_path(
  HWLOC_PREFIX
  NAMES include/hwloc.h
)

# Fall back to the HWLOC_BASE environment variable when the header was not
# found on the default search path.
# FIX: the $ENV{...} expansion must be quoted — when the variable is unset it
# expands to nothing and the unquoted form makes the if() malformed.
if (NOT HWLOC_PREFIX AND NOT "$ENV{HWLOC_BASE}" STREQUAL "")
  set(HWLOC_PREFIX $ENV{HWLOC_BASE})
endif()

message(STATUS "Searching for hwloc library in path " ${HWLOC_PREFIX})

find_library(
  HWLOC_LIBRARIES
  NAMES hwloc
  HINTS ${HWLOC_PREFIX}/lib
)

find_path(
  HWLOC_INCLUDE_DIRS
  NAMES hwloc.h
  HINTS ${HWLOC_PREFIX}/include
)

# Sets HWLOC_FOUND and honours REQUIRED/QUIET arguments of find_package().
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(
  HWLOC DEFAULT_MSG
  HWLOC_LIBRARIES
  HWLOC_INCLUDE_DIRS
)

mark_as_advanced(
  HWLOC_LIBRARIES
  HWLOC_INCLUDE_DIRS
)

if (HWLOC_FOUND)
  # Same quoting fix as above for the optional HWLOC_LIB override.
  if (NOT "$ENV{HWLOC_LIB}" STREQUAL "")
    # set(HWLOC_LIBRARIES "$ENV{HWLOC_LIB}")
  endif()
  message(STATUS "hwloc includes: " ${HWLOC_INCLUDE_DIRS})
  message(STATUS "hwloc libraries: " ${HWLOC_LIBRARIES})
endif()
|
Java
|
/*
* Copyright (c) 2016, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of Salesforce.com nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.dva.argus.service.metric.transform;
import com.salesforce.dva.argus.entity.Metric;
import com.salesforce.dva.argus.system.SystemAssert;
import java.util.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* COUNT, GROUP, UNION.
*
* @author rzhang
*/
public class MetricUnionTransform implements Transform {
//~ Static fields/initializers *******************************************************************************************************************
/** The metric name for this transform is result. */
public static final String RESULT_METRIC_NAME = "result";
//~ Instance fields ******************************************************************************************************************************
// Reducer applied to the values of timestamps shared by every input metric.
private final ValueReducer valueUnionReducer;
private final String defaultScope;
private final String defaultMetricName;
//~ Constructors *********************************************************************************************************************************
/**
* Creates a new MetricUnionTransform object.
*
* @param valueUnionReducer The reducer used to merge values at timestamps common to all metrics.
*/
protected MetricUnionTransform(ValueReducer valueUnionReducer) {
this.defaultScope = TransformFactory.Function.UNION.name();
this.defaultMetricName = TransformFactory.DEFAULT_METRIC_NAME;
this.valueUnionReducer = valueUnionReducer;
}
//~ Methods **************************************************************************************************************************************
@Override
public String getResultScopeName() {
return defaultScope;
}
/**
* Applies the union reduction to the metrics list (delegates to {@link #union(List)}).
*
* @param metrics The metrics to transform.
*
* @return The transformed metrics.
*/
@Override
public List<Metric> transform(List<Metric> metrics) {
return union(metrics);
}
/**
* Performs a columnar union of metrics.
*
* @param metrics The metrics to merge.
*
* @return The merged metrics.
*/
public List<Metric> union(List<Metric> metrics) {
SystemAssert.requireArgument(metrics != null, "Cannot transform empty metric/metrics");
if (metrics.isEmpty()) {
return metrics;
}
// reduce() produces datapoints only for timestamps present in EVERY metric.
Metric newMetric = reduce(metrics);
Map<Long, String> reducedDatapoints = newMetric.getDatapoints();
Set<Long> sharedTimestamps = reducedDatapoints.keySet();
// For all other timestamps, take a value from one of the metrics
// (later metrics in the list overwrite earlier ones).
Map<Long, String> unionDatapoints = new TreeMap<Long, String>();
for (Metric metric : metrics) {
for (Map.Entry<Long, String> entry : metric.getDatapoints().entrySet()) {
if (!sharedTimestamps.contains(entry.getKey())) {
unionDatapoints.put(entry.getKey(), entry.getValue());
}
}
}
newMetric.addDatapoints(unionDatapoints);
return Arrays.asList(newMetric);
}
/**
* Reduce transform for the list of metrics.
*
* @param metrics The list of metrics to reduce.
*
* @return The reduced metric.
*/
protected Metric reduce(List<Metric> metrics) {
SystemAssert.requireArgument(metrics != null, "Cannot transform empty metric/metrics");
/*
* if (metrics.isEmpty()) { return new Metric(defaultScope, defaultMetricName); }
*/
// Distiller extracts the attributes (name, tags, units...) common to all inputs.
MetricDistiller distiller = new MetricDistiller();
distiller.distill(metrics);
Map<Long, List<String>> collated = collate(metrics);
Map<Long, String> minDatapoints = reduce(collated, metrics);
String newMetricName = distiller.getMetric() == null ? defaultMetricName : distiller.getMetric();
Metric newMetric = new Metric(defaultScope, newMetricName);
newMetric.setDisplayName(distiller.getDisplayName());
newMetric.setUnits(distiller.getUnits());
newMetric.setTags(distiller.getTags());
newMetric.setDatapoints(minDatapoints);
return newMetric;
}
// Groups all values across metrics by timestamp.
private Map<Long, List<String>> collate(List<Metric> metrics) {
Map<Long, List<String>> collated = new HashMap<Long, List<String>>();
for (Metric metric : metrics) {
for (Map.Entry<Long, String> point : metric.getDatapoints().entrySet()) {
if (!collated.containsKey(point.getKey())) {
collated.put(point.getKey(), new ArrayList<String>());
}
collated.get(point.getKey()).add(point.getValue());
}
}
return collated;
}
// Reduces only timestamps that have a value in every metric; others are skipped
// here and handled by union().
private Map<Long, String> reduce(Map<Long, List<String>> collated, List<Metric> metrics) {
Map<Long, String> reducedDatapoints = new HashMap<>();
for (Map.Entry<Long, List<String>> entry : collated.entrySet()) {
if (entry.getValue().size() < metrics.size()) {
continue;
}
reducedDatapoints.put(entry.getKey(), this.valueUnionReducer.reduce(entry.getValue()));
}
return reducedDatapoints;
}
@Override
public List<Metric> transform(List<Metric> metrics, List<String> constants) {
throw new UnsupportedOperationException("Union transform can't be used with constants!");
}
@Override
public List<Metric> transform(List<Metric>... listOfList) {
throw new UnsupportedOperationException("Union doesn't need list of list");
}
}
/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
|
Java
|
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/url_request/url_fetcher_core.h"
#include <stdint.h>
#include "base/bind.h"
#include "base/logging.h"
#include "base/metrics/histogram.h"
#include "base/profiler/scoped_tracker.h"
#include "base/sequenced_task_runner.h"
#include "base/single_thread_task_runner.h"
#include "base/stl_util.h"
#include "base/thread_task_runner_handle.h"
#include "base/tracked_objects.h"
#include "net/base/elements_upload_data_stream.h"
#include "net/base/io_buffer.h"
#include "net/base/load_flags.h"
#include "net/base/net_errors.h"
#include "net/base/request_priority.h"
#include "net/base/upload_bytes_element_reader.h"
#include "net/base/upload_data_stream.h"
#include "net/base/upload_file_element_reader.h"
#include "net/http/http_response_headers.h"
#include "net/url_request/redirect_info.h"
#include "net/url_request/url_fetcher_delegate.h"
#include "net/url_request/url_fetcher_response_writer.h"
#include "net/url_request/url_request_context.h"
#include "net/url_request/url_request_context_getter.h"
#include "net/url_request/url_request_throttler_manager.h"
namespace {
const int kBufferSize = 4096;
const int kUploadProgressTimerInterval = 100;
bool g_ignore_certificate_requests = false;
void EmptyCompletionCallback(int result) {}
} // namespace
namespace net {
// URLFetcherCore::Registry ---------------------------------------------------
// Registry tracks every live URLFetcherCore so CancelAll() can abort them.
URLFetcherCore::Registry::Registry() {}
URLFetcherCore::Registry::~Registry() {}
// Registers |core|; each core must be added at most once.
void URLFetcherCore::Registry::AddURLFetcherCore(URLFetcherCore* core) {
DCHECK(!ContainsKey(fetchers_, core));
fetchers_.insert(core);
}
// Unregisters |core|; it must currently be registered.
void URLFetcherCore::Registry::RemoveURLFetcherCore(URLFetcherCore* core) {
DCHECK(ContainsKey(fetchers_, core));
fetchers_.erase(core);
}
// Aborts every outstanding fetch. Cancelling removes the core from
// |fetchers_|, so loop by repeatedly cancelling the first element rather than
// iterating (iterators would be invalidated).
void URLFetcherCore::Registry::CancelAll() {
while (!fetchers_.empty())
(*fetchers_.begin())->CancelURLRequest(ERR_ABORTED);
}
// URLFetcherCore -------------------------------------------------------------
// static
base::LazyInstance<URLFetcherCore::Registry>
URLFetcherCore::g_registry = LAZY_INSTANCE_INITIALIZER;
// Constructed on the delegate's thread: captures that thread's task runner
// (ThreadTaskRunnerHandle::Get()) so results can be posted back to it.
// The original URL must be valid (CHECKed).
URLFetcherCore::URLFetcherCore(URLFetcher* fetcher,
const GURL& original_url,
URLFetcher::RequestType request_type,
URLFetcherDelegate* d)
: fetcher_(fetcher),
original_url_(original_url),
request_type_(request_type),
delegate_(d),
delegate_task_runner_(base::ThreadTaskRunnerHandle::Get()),
load_flags_(LOAD_NORMAL),
response_code_(URLFetcher::RESPONSE_CODE_INVALID),
buffer_(new IOBuffer(kBufferSize)),
url_request_data_key_(NULL),
was_fetched_via_proxy_(false),
upload_content_set_(false),
upload_range_offset_(0),
upload_range_length_(0),
referrer_policy_(
URLRequest::CLEAR_REFERRER_ON_TRANSITION_FROM_SECURE_TO_INSECURE),
is_chunked_upload_(false),
was_cancelled_(false),
stop_on_redirect_(false),
stopped_on_redirect_(false),
automatically_retry_on_5xx_(true),
num_retries_on_5xx_(0),
max_retries_on_5xx_(0),
num_retries_on_network_changes_(0),
max_retries_on_network_changes_(0),
current_upload_bytes_(-1),
current_response_bytes_(0),
total_response_bytes_(-1) {
CHECK(original_url_.is_valid());
}
// Kicks off the fetch. Resolves the network task runner from the request
// context getter (or verifies a pre-set one matches it) and posts the real
// work (StartOnIOThread) to that runner.
void URLFetcherCore::Start() {
DCHECK(delegate_task_runner_.get());
DCHECK(request_context_getter_.get()) << "We need an URLRequestContext!";
if (network_task_runner_.get()) {
DCHECK_EQ(network_task_runner_,
request_context_getter_->GetNetworkTaskRunner());
} else {
network_task_runner_ = request_context_getter_->GetNetworkTaskRunner();
}
DCHECK(network_task_runner_.get()) << "We need an IO task runner";
network_task_runner_->PostTask(
FROM_HERE, base::Bind(&URLFetcherCore::StartOnIOThread, this));
}
// Detaches the delegate/fetcher and cancels the request. Called on the
// delegate thread; the actual cancel runs on the network task runner (directly
// if we're already on it, otherwise via PostTask).
void URLFetcherCore::Stop() {
if (delegate_task_runner_.get()) // May be NULL in tests.
DCHECK(delegate_task_runner_->BelongsToCurrentThread());
delegate_ = NULL;
fetcher_ = NULL;
if (!network_task_runner_.get())
return;
if (network_task_runner_->RunsTasksOnCurrentThread()) {
CancelURLRequest(ERR_ABORTED);
} else {
network_task_runner_->PostTask(
FROM_HERE,
base::Bind(&URLFetcherCore::CancelURLRequest, this, ERR_ABORTED));
}
}
// Sets an in-memory request body. Mutually exclusive with the file-path,
// stream-factory, and chunked upload modes (DCHECKed).
void URLFetcherCore::SetUploadData(const std::string& upload_content_type,
const std::string& upload_content) {
AssertHasNoUploadData();
DCHECK(!is_chunked_upload_);
DCHECK(upload_content_type_.empty());
// Empty |upload_content_type| is allowed iff the |upload_content| is empty.
DCHECK(upload_content.empty() || !upload_content_type.empty());
upload_content_type_ = upload_content_type;
upload_content_ = upload_content;
upload_content_set_ = true;
}
// Sets a file-backed request body: |range_offset|/|range_length| select the
// byte range to upload; the file is read on |file_task_runner|. Mutually
// exclusive with the other upload modes (DCHECKed).
void URLFetcherCore::SetUploadFilePath(
const std::string& upload_content_type,
const base::FilePath& file_path,
uint64 range_offset,
uint64 range_length,
scoped_refptr<base::TaskRunner> file_task_runner) {
AssertHasNoUploadData();
DCHECK(!is_chunked_upload_);
DCHECK_EQ(upload_range_offset_, 0ULL);
DCHECK_EQ(upload_range_length_, 0ULL);
DCHECK(upload_content_type_.empty());
DCHECK(!upload_content_type.empty());
upload_content_type_ = upload_content_type;
upload_file_path_ = file_path;
upload_range_offset_ = range_offset;
upload_range_length_ = range_length;
upload_file_task_runner_ = file_task_runner;
upload_content_set_ = true;
}
// Sets a callback that creates the upload stream on demand (allows the body to
// be rebuilt on retries). Mutually exclusive with the other upload modes.
void URLFetcherCore::SetUploadStreamFactory(
const std::string& upload_content_type,
const URLFetcher::CreateUploadStreamCallback& factory) {
AssertHasNoUploadData();
DCHECK(!is_chunked_upload_);
DCHECK(upload_content_type_.empty());
upload_content_type_ = upload_content_type;
upload_stream_factory_ = factory;
upload_content_set_ = true;
}
// Switches the request body to chunked-transfer mode; chunks are supplied
// later via AppendChunkToUpload(). May be called repeatedly; the exclusivity
// checks only apply on the first call.
void URLFetcherCore::SetChunkedUpload(const std::string& content_type) {
if (!is_chunked_upload_) {
AssertHasNoUploadData();
DCHECK(upload_content_type_.empty());
}
// Empty |content_type| is not allowed here, because it is impossible
// to ensure non-empty upload content as it is not yet supplied.
DCHECK(!content_type.empty());
upload_content_type_ = content_type;
upload_content_.clear();
is_chunked_upload_ = true;
}
// Queues one chunk of a chunked upload; the append itself happens on the
// network task runner.
void URLFetcherCore::AppendChunkToUpload(const std::string& content,
bool is_last_chunk) {
DCHECK(delegate_task_runner_.get());
DCHECK(network_task_runner_.get());
network_task_runner_->PostTask(
FROM_HERE,
base::Bind(&URLFetcherCore::CompleteAddingUploadDataChunk, this, content,
is_last_chunk));
}
// Stores the net::LOAD_* flags to apply to the URLRequest.
void URLFetcherCore::SetLoadFlags(int load_flags) {
load_flags_ = load_flags;
}
// Returns the flags previously set with SetLoadFlags().
int URLFetcherCore::GetLoadFlags() const {
return load_flags_;
}
// Stores the referrer string to send with the request.
void URLFetcherCore::SetReferrer(const std::string& referrer) {
referrer_ = referrer;
}
// Overrides the default referrer policy (clear-on-secure-to-insecure).
void URLFetcherCore::SetReferrerPolicy(
URLRequest::ReferrerPolicy referrer_policy) {
referrer_policy_ = referrer_policy;
}
// Replaces all extra request headers with the ones parsed from the given
// \r\n-delimited header string.
void URLFetcherCore::SetExtraRequestHeaders(
const std::string& extra_request_headers) {
extra_request_headers_.Clear();
extra_request_headers_.AddHeadersFromString(extra_request_headers);
}
// Appends a single "Name: value" header line to the extra request headers.
void URLFetcherCore::AddExtraRequestHeader(const std::string& header_line) {
extra_request_headers_.AddHeaderFromString(header_line);
}
// Sets the request context getter; must be non-null and may only be set once.
void URLFetcherCore::SetRequestContext(
URLRequestContextGetter* request_context_getter) {
DCHECK(!request_context_getter_.get());
DCHECK(request_context_getter);
request_context_getter_ = request_context_getter;
}
// Sets the first-party URL used for cookie decisions; may only be set once.
void URLFetcherCore::SetFirstPartyForCookies(
const GURL& first_party_for_cookies) {
DCHECK(first_party_for_cookies_.is_empty());
first_party_for_cookies_ = first_party_for_cookies;
}
void URLFetcherCore::SetURLRequestUserData(
const void* key,
const URLFetcher::CreateDataCallback& create_data_callback) {
DCHECK(key);
DCHECK(!create_data_callback.is_null());
url_request_data_key_ = key;
url_request_create_data_callback_ = create_data_callback;
}
void URLFetcherCore::SetStopOnRedirect(bool stop_on_redirect) {
stop_on_redirect_ = stop_on_redirect;
}
void URLFetcherCore::SetAutomaticallyRetryOn5xx(bool retry) {
automatically_retry_on_5xx_ = retry;
}
void URLFetcherCore::SetMaxRetriesOn5xx(int max_retries) {
max_retries_on_5xx_ = max_retries;
}
int URLFetcherCore::GetMaxRetriesOn5xx() const {
return max_retries_on_5xx_;
}
base::TimeDelta URLFetcherCore::GetBackoffDelay() const {
return backoff_delay_;
}
void URLFetcherCore::SetAutomaticallyRetryOnNetworkChanges(int max_retries) {
max_retries_on_network_changes_ = max_retries;
}
// Directs the response body into a file at |file_path| (file I/O runs on
// |file_task_runner|). Must be called on the delegate thread before Start().
void URLFetcherCore::SaveResponseToFileAtPath(
    const base::FilePath& file_path,
    scoped_refptr<base::SequencedTaskRunner> file_task_runner) {
  DCHECK(delegate_task_runner_->BelongsToCurrentThread());
  SaveResponseWithWriter(scoped_ptr<URLFetcherResponseWriter>(
      new URLFetcherFileWriter(file_task_runner, file_path)));
}
// Same as above, but an empty FilePath makes URLFetcherFileWriter pick a
// temporary file.
void URLFetcherCore::SaveResponseToTemporaryFile(
    scoped_refptr<base::SequencedTaskRunner> file_task_runner) {
  DCHECK(delegate_task_runner_->BelongsToCurrentThread());
  SaveResponseWithWriter(scoped_ptr<URLFetcherResponseWriter>(
      new URLFetcherFileWriter(file_task_runner, base::FilePath())));
}
// Installs an arbitrary response writer; takes ownership.
void URLFetcherCore::SaveResponseWithWriter(
    scoped_ptr<URLFetcherResponseWriter> response_writer) {
  DCHECK(delegate_task_runner_->BelongsToCurrentThread());
  response_writer_ = response_writer.Pass();
}
HttpResponseHeaders* URLFetcherCore::GetResponseHeaders() const {
  return response_headers_.get();
}
// TODO(panayiotis): socket_address_ is written in the IO thread,
// if this is accessed in the UI thread, this could result in a race.
// Same for response_headers_ above and was_fetched_via_proxy_ below.
HostPortPair URLFetcherCore::GetSocketAddress() const {
  return socket_address_;
}
bool URLFetcherCore::WasFetchedViaProxy() const {
  return was_fetched_via_proxy_;
}
const GURL& URLFetcherCore::GetOriginalURL() const {
  return original_url_;
}
// Returns the current URL, which may differ from the original after
// redirects (see OnReadCompleted()/OnReceivedRedirect()).
const GURL& URLFetcherCore::GetURL() const {
  return url_;
}
const URLRequestStatus& URLFetcherCore::GetStatus() const {
  return status_;
}
int URLFetcherCore::GetResponseCode() const {
  return response_code_;
}
const ResponseCookies& URLFetcherCore::GetCookies() const {
  return cookies_;
}
// Lets the delegate report that the fetched content was unusable; forwarded
// to the network thread so the throttler can count it as a failure.
void URLFetcherCore::ReceivedContentWasMalformed() {
  DCHECK(delegate_task_runner_->BelongsToCurrentThread());
  if (network_task_runner_.get()) {
    network_task_runner_->PostTask(
        FROM_HERE, base::Bind(&URLFetcherCore::NotifyMalformedContent, this));
  }
}
// Copies the response body into |out_response_string|. Only succeeds when
// the installed writer is the in-memory string writer (the default).
bool URLFetcherCore::GetResponseAsString(
    std::string* out_response_string) const {
  URLFetcherStringWriter* string_writer =
      response_writer_ ? response_writer_->AsStringWriter() : NULL;
  if (!string_writer)
    return false;
  *out_response_string = string_writer->data();
  UMA_HISTOGRAM_MEMORY_KB("UrlFetcher.StringResponseSize",
                          (string_writer->data().length() / 1024));
  return true;
}
// Returns the path of the file the response was written to. Only succeeds
// when a file writer is installed. With |take_ownership|, the caller becomes
// responsible for deleting the file.
bool URLFetcherCore::GetResponseAsFilePath(bool take_ownership,
                                           base::FilePath* out_response_path) {
  DCHECK(delegate_task_runner_->BelongsToCurrentThread());
  URLFetcherFileWriter* file_writer =
      response_writer_ ? response_writer_->AsFileWriter() : NULL;
  if (!file_writer)
    return false;
  *out_response_path = file_writer->file_path();
  if (take_ownership) {
    // Intentionally calling a file_writer_ method directly without posting
    // the task to network_task_runner_.
    //
    // This is for correctly handling the case when file_writer_->DisownFile()
    // is soon followed by URLFetcherCore::Stop(). We have to make sure that
    // DisownFile takes effect before Stop deletes file_writer_.
    //
    // This direct call should be thread-safe, since DisownFile itself does no
    // file operation. It just flips the state to be referred in destruction.
    file_writer->DisownFile();
  }
  return true;
}
// URLRequest::Delegate override. When configured to stop on redirects,
// records the redirect target and response data, cancels the request, and
// funnels completion through OnReadCompleted() with zero bytes.
void URLFetcherCore::OnReceivedRedirect(URLRequest* request,
                                        const RedirectInfo& redirect_info,
                                        bool* defer_redirect) {
  DCHECK_EQ(request, request_.get());
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  if (stop_on_redirect_) {
    stopped_on_redirect_ = true;
    url_ = redirect_info.new_url;
    response_code_ = request_->GetResponseCode();
    was_fetched_via_proxy_ = request_->was_fetched_via_proxy();
    request->Cancel();
    OnReadCompleted(request, 0);
  }
}
// URLRequest::Delegate override. Snapshots response metadata on success,
// then begins reading the body.
void URLFetcherCore::OnResponseStarted(URLRequest* request) {
  DCHECK_EQ(request, request_.get());
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  if (request_->status().is_success()) {
    response_code_ = request_->GetResponseCode();
    response_headers_ = request_->response_headers();
    socket_address_ = request_->GetSocketAddress();
    was_fetched_via_proxy_ = request_->was_fetched_via_proxy();
    total_response_bytes_ = request_->GetExpectedContentSize();
  }
  ReadResponse();
}
// URLRequest::Delegate override. Client certificates are not supported by
// URLFetcher: either continue without one (test mode) or abort the request.
void URLFetcherCore::OnCertificateRequested(
    URLRequest* request,
    SSLCertRequestInfo* cert_request_info) {
  DCHECK_EQ(request, request_.get());
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  if (g_ignore_certificate_requests) {
    request->ContinueWithCertificate(NULL);
  } else {
    request->Cancel();
  }
}
// URLRequest::Delegate override: one body read has finished with
// |bytes_read| bytes (0 = EOF/cancelled, negative = pending or error).
// Drains as much synchronously-available data as possible into the response
// writer, then finalizes the fetch when the request is no longer pending.
void URLFetcherCore::OnReadCompleted(URLRequest* request,
                                     int bytes_read) {
  // Use the same DCHECK_EQ form as the other delegate callbacks
  // (OnReceivedRedirect, OnResponseStarted, OnCertificateRequested).
  DCHECK_EQ(request, request_.get());
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  // Track the final URL unless a redirect already froze |url_|.
  if (!stopped_on_redirect_)
    url_ = request->url();
  URLRequestThrottlerManager* throttler_manager =
      request->context()->throttler_manager();
  if (throttler_manager)
    url_throttler_entry_ = throttler_manager->RegisterRequestUrl(url_);
  // Consume data until the request reports an error/EOF or a read goes
  // asynchronous (Read() returning false with io_pending status).
  do {
    if (!request_->status().is_success() || bytes_read <= 0)
      break;
    current_response_bytes_ += bytes_read;
    InformDelegateDownloadProgress();
    const int result =
        WriteBuffer(new DrainableIOBuffer(buffer_.get(), bytes_read));
    if (result < 0) {
      // Write failed or waiting for write completion.
      return;
    }
  } while (request_->Read(buffer_.get(), kBufferSize, &bytes_read));
  const URLRequestStatus status = request_->status();
  if (status.is_success())
    request_->GetResponseCookies(&cookies_);
  // See comments re: HEAD requests in ReadResponse().
  if (!status.is_io_pending() || request_type_ == URLFetcher::HEAD) {
    status_ = status;
    ReleaseRequest();
    // No more data to write.
    const int result = response_writer_->Finish(
        base::Bind(&URLFetcherCore::DidFinishWriting, this));
    if (result != ERR_IO_PENDING)
      DidFinishWriting(result);
  }
}
// Cancels every live URLFetcherCore registered in the global registry.
void URLFetcherCore::CancelAll() {
  g_registry.Get().CancelAll();
}
// Number of live URLFetcherCore instances (used by tests/shutdown checks).
int URLFetcherCore::GetNumFetcherCores() {
  return g_registry.Get().size();
}
// Test hook: when set, client certificate requests are answered with no
// certificate instead of cancelling the request (see
// OnCertificateRequested()).
void URLFetcherCore::SetIgnoreCertificateRequests(bool ignored) {
  g_ignore_certificate_requests = ignored;
}
URLFetcherCore::~URLFetcherCore() {
  // |request_| should be NULL. If not, it's unsafe to delete it here since we
  // may not be on the IO thread.
  DCHECK(!request_.get());
}
// First stage of starting (and of each retry): ensure a response writer
// exists (defaulting to the in-memory string writer) and initialize it,
// continuing in DidInitializeWriter().
void URLFetcherCore::StartOnIOThread() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  if (!response_writer_)
    response_writer_.reset(new URLFetcherStringWriter);
  const int result = response_writer_->Initialize(
      base::Bind(&URLFetcherCore::DidInitializeWriter, this));
  if (result != ERR_IO_PENDING)
    DidInitializeWriter(result);
}
// Builds and starts the underlying URLRequest from all the configuration
// recorded by the setters: load flags, referrer, cookies policy, user data,
// method, upload body and extra headers.
void URLFetcherCore::StartURLRequest() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  if (was_cancelled_) {
    // Since StartURLRequest() is posted as a *delayed* task, it may
    // run after the URLFetcher was already stopped.
    return;
  }
  DCHECK(request_context_getter_.get());
  DCHECK(!request_.get());
  g_registry.Get().AddURLFetcherCore(this);
  current_response_bytes_ = 0;
  request_ = request_context_getter_->GetURLRequestContext()->CreateRequest(
      original_url_, DEFAULT_PRIORITY, this);
  request_->set_stack_trace(stack_trace_);
  int flags = request_->load_flags() | load_flags_;
  if (is_chunked_upload_)
    request_->EnableChunkedUpload();
  request_->SetLoadFlags(flags);
  request_->SetReferrer(referrer_);
  request_->set_referrer_policy(referrer_policy_);
  // Default the cookie first-party to the request URL when none was set.
  request_->set_first_party_for_cookies(first_party_for_cookies_.is_empty() ?
      original_url_ : first_party_for_cookies_);
  if (url_request_data_key_ && !url_request_create_data_callback_.is_null()) {
    request_->SetUserData(url_request_data_key_,
                          url_request_create_data_callback_.Run());
  }
  switch (request_type_) {
    case URLFetcher::GET:
      break;
    case URLFetcher::POST:
    case URLFetcher::PUT:
    case URLFetcher::PATCH: {
      // Upload content must be set.
      DCHECK(is_chunked_upload_ || upload_content_set_);
      request_->set_method(
          request_type_ == URLFetcher::POST ? "POST" :
          request_type_ == URLFetcher::PUT ? "PUT" : "PATCH");
      if (!upload_content_type_.empty()) {
        extra_request_headers_.SetHeader(HttpRequestHeaders::kContentType,
                                         upload_content_type_);
      }
      // Pick the upload source: in-memory bytes, a file range, or a
      // caller-supplied stream factory (mutually exclusive by construction).
      if (!upload_content_.empty()) {
        scoped_ptr<UploadElementReader> reader(new UploadBytesElementReader(
            upload_content_.data(), upload_content_.size()));
        request_->set_upload(
            ElementsUploadDataStream::CreateWithReader(reader.Pass(), 0));
      } else if (!upload_file_path_.empty()) {
        scoped_ptr<UploadElementReader> reader(
            new UploadFileElementReader(upload_file_task_runner_.get(),
                                        upload_file_path_,
                                        upload_range_offset_,
                                        upload_range_length_,
                                        base::Time()));
        request_->set_upload(
            ElementsUploadDataStream::CreateWithReader(reader.Pass(), 0));
      } else if (!upload_stream_factory_.is_null()) {
        scoped_ptr<UploadDataStream> stream = upload_stream_factory_.Run();
        DCHECK(stream);
        request_->set_upload(stream.Pass());
      }
      current_upload_bytes_ = -1;
      // TODO(kinaba): http://crbug.com/118103. Implement upload callback in the
      // layer and avoid using timer here.
      upload_progress_checker_timer_.reset(
          new base::RepeatingTimer<URLFetcherCore>());
      upload_progress_checker_timer_->Start(
          FROM_HERE,
          base::TimeDelta::FromMilliseconds(kUploadProgressTimerInterval),
          this,
          &URLFetcherCore::InformDelegateUploadProgress);
      break;
    }
    case URLFetcher::HEAD:
      request_->set_method("HEAD");
      break;
    case URLFetcher::DELETE_REQUEST:
      request_->set_method("DELETE");
      break;
    default:
      NOTREACHED();
  }
  if (!extra_request_headers_.IsEmpty())
    request_->SetExtraRequestHeaders(extra_request_headers_);
  request_->Start();
}
// Completion callback for response_writer_->Initialize(); on failure aborts
// the fetch and notifies the delegate, otherwise proceeds to start the
// request (possibly after a throttling delay).
void URLFetcherCore::DidInitializeWriter(int result) {
  if (result != OK) {
    CancelURLRequest(result);
    delegate_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&URLFetcherCore::InformDelegateFetchIsComplete, this));
    return;
  }
  StartURLRequestWhenAppropriate();
}
// Starts the URLRequest immediately, or schedules it after the exponential
// back-off delay reserved with the URL throttler.
void URLFetcherCore::StartURLRequestWhenAppropriate() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  if (was_cancelled_)
    return;
  DCHECK(request_context_getter_.get());
  int64 delay = 0;
  if (!original_url_throttler_entry_.get()) {
    URLRequestThrottlerManager* manager =
        request_context_getter_->GetURLRequestContext()->throttler_manager();
    if (manager) {
      original_url_throttler_entry_ =
          manager->RegisterRequestUrl(original_url_);
    }
  }
  if (original_url_throttler_entry_.get()) {
    delay = original_url_throttler_entry_->ReserveSendingTimeForNextRequest(
        GetBackoffReleaseTime());
  }
  if (delay == 0) {
    StartURLRequest();
  } else {
    base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
        FROM_HERE, base::Bind(&URLFetcherCore::StartURLRequest, this),
        base::TimeDelta::FromMilliseconds(delay));
  }
}
// Cancels the in-flight request (if any) with |error|, records a CANCELED
// status, and drops references that must not outlive the fetch.
void URLFetcherCore::CancelURLRequest(int error) {
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  if (request_.get()) {
    request_->CancelWithError(error);
    ReleaseRequest();
  }
  // Set the error manually.
  // Normally, calling URLRequest::CancelWithError() results in calling
  // OnReadCompleted() with bytes_read = -1 via an asynchronous task posted by
  // URLRequestJob::NotifyDone(). But, because the request was released
  // immediately after being canceled, the request could not call
  // OnReadCompleted() which overwrites |status_| with the error status.
  status_.set_status(URLRequestStatus::CANCELED);
  status_.set_error(error);
  // Release the reference to the request context. There could be multiple
  // references to URLFetcher::Core at this point so it may take a while to
  // delete the object, but we cannot delay the destruction of the request
  // context.
  request_context_getter_ = NULL;
  first_party_for_cookies_ = GURL();
  url_request_data_key_ = NULL;
  url_request_create_data_callback_.Reset();
  was_cancelled_ = true;
}
// Runs on the delegate thread after the fetch finished on the network
// thread; publishes the back-off delay and notifies the delegate.
void URLFetcherCore::OnCompletedURLRequest(
    base::TimeDelta backoff_delay) {
  DCHECK(delegate_task_runner_->BelongsToCurrentThread());
  // Save the status and backoff_delay so that delegates can read it.
  if (delegate_) {
    backoff_delay_ = backoff_delay;
    InformDelegateFetchIsComplete();
  }
}
void URLFetcherCore::InformDelegateFetchIsComplete() {
  DCHECK(delegate_task_runner_->BelongsToCurrentThread());
  if (delegate_)
    delegate_->OnURLFetchComplete(fetcher_);
}
// Network-thread half of ReceivedContentWasMalformed(): feeds the event to
// the throttler so its failure count (and hence back-off) increases.
void URLFetcherCore::NotifyMalformedContent() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  if (url_throttler_entry_.get()) {
    int status_code = response_code_;
    if (status_code == URLFetcher::RESPONSE_CODE_INVALID) {
      // The status code will generally be known by the time clients
      // call the |ReceivedContentWasMalformed()| function (which ends up
      // calling the current function) but if it's not, we need to assume
      // the response was successful so that the total failure count
      // used to calculate exponential back-off goes up.
      status_code = 200;
    }
    url_throttler_entry_->ReceivedContentWasMalformed(status_code);
  }
}
// Completion callback for response_writer_->Finish(). A write-finalization
// error aborts the fetch; otherwise the fetch moves to retry/completion.
void URLFetcherCore::DidFinishWriting(int result) {
  if (result != OK) {
    CancelURLRequest(result);
    delegate_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&URLFetcherCore::InformDelegateFetchIsComplete, this));
    return;
  }
  // If the file was successfully closed, then the URL request is complete.
  RetryOrCompleteUrlFetch();
}
// Decides whether the finished request should be retried (5xx with back-off,
// or a network change) or reported to the delegate as complete.
void URLFetcherCore::RetryOrCompleteUrlFetch() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  base::TimeDelta backoff_delay;
  // Checks the response from server.
  if (response_code_ >= 500 ||
      status_.error() == ERR_TEMPORARILY_THROTTLED) {
    // When encountering a server error, we will send the request again
    // after backoff time.
    ++num_retries_on_5xx_;
    // Note that backoff_delay may be 0 because (a) the
    // URLRequestThrottlerManager and related code does not
    // necessarily back off on the first error, (b) it only backs off
    // on some of the 5xx status codes, (c) not all URLRequestContexts
    // have a throttler manager.
    base::TimeTicks backoff_release_time = GetBackoffReleaseTime();
    backoff_delay = backoff_release_time - base::TimeTicks::Now();
    if (backoff_delay < base::TimeDelta())
      backoff_delay = base::TimeDelta();
    if (automatically_retry_on_5xx_ &&
        num_retries_on_5xx_ <= max_retries_on_5xx_) {
      StartOnIOThread();
      return;
    }
  } else {
    backoff_delay = base::TimeDelta();
  }
  // Retry if the request failed due to network changes.
  if (status_.error() == ERR_NETWORK_CHANGED &&
      num_retries_on_network_changes_ < max_retries_on_network_changes_) {
    ++num_retries_on_network_changes_;
    // Retry soon, after flushing all the current tasks which may include
    // further network change observers.
    network_task_runner_->PostTask(
        FROM_HERE, base::Bind(&URLFetcherCore::StartOnIOThread, this));
    return;
  }
  // No retry: drop per-request references and hand the result back to the
  // delegate thread.
  request_context_getter_ = NULL;
  first_party_for_cookies_ = GURL();
  url_request_data_key_ = NULL;
  url_request_create_data_callback_.Reset();
  bool posted = delegate_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(&URLFetcherCore::OnCompletedURLRequest, this, backoff_delay));
  // If the delegate message loop does not exist any more, then the delegate
  // should be gone too.
  DCHECK(posted || !delegate_);
}
// Tears down the URLRequest and its progress timer and unregisters this
// core from the global registry.
void URLFetcherCore::ReleaseRequest() {
  upload_progress_checker_timer_.reset();
  request_.reset();
  g_registry.Get().RemoveURLFetcherCore(this);
}
// Returns the later of the back-off release times of the original and the
// (post-redirect) destination URL, or the null TimeTicks when throttling is
// not in effect.
base::TimeTicks URLFetcherCore::GetBackoffReleaseTime() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  if (!original_url_throttler_entry_.get())
    return base::TimeTicks();
  base::TimeTicks original_url_backoff =
      original_url_throttler_entry_->GetExponentialBackoffReleaseTime();
  base::TimeTicks destination_url_backoff;
  if (url_throttler_entry_.get() &&
      original_url_throttler_entry_.get() != url_throttler_entry_.get()) {
    destination_url_backoff =
        url_throttler_entry_->GetExponentialBackoffReleaseTime();
  }
  return original_url_backoff > destination_url_backoff ?
      original_url_backoff : destination_url_backoff;
}
// Network-thread half of AppendChunkToUpload(): forwards one chunk to the
// underlying URLRequest.
void URLFetcherCore::CompleteAddingUploadDataChunk(
    const std::string& content, bool is_last_chunk) {
  if (was_cancelled_) {
    // Since CompleteAddingUploadDataChunk() is posted as a *delayed* task, it
    // may run after the URLFetcher was already stopped.
    return;
  }
  DCHECK(is_chunked_upload_);
  DCHECK(request_.get());
  DCHECK(!content.empty());
  request_->AppendChunkToUpload(content.data(),
                                static_cast<int>(content.length()),
                                is_last_chunk);
}
// Writes |data| to the response writer until it is drained. Returns OK when
// fully written synchronously, ERR_IO_PENDING when a write went
// asynchronous (completion continues in DidWriteBuffer()), or a negative
// error code on failure (DidWriteBuffer() is invoked with it directly).
int URLFetcherCore::WriteBuffer(scoped_refptr<DrainableIOBuffer> data) {
  while (data->BytesRemaining() > 0) {
    const int result = response_writer_->Write(
        data.get(),
        data->BytesRemaining(),
        base::Bind(&URLFetcherCore::DidWriteBuffer, this, data));
    if (result < 0) {
      if (result != ERR_IO_PENDING)
        DidWriteBuffer(data, result);
      return result;
    }
    data->DidConsume(result);
  }
  return OK;
}
// Completion callback for an asynchronous (or failed) writer Write():
// aborts on error, otherwise keeps draining |data| and resumes reading the
// response once the buffer is fully written.
void URLFetcherCore::DidWriteBuffer(scoped_refptr<DrainableIOBuffer> data,
                                    int result) {
  if (result < 0) {  // Handle errors.
    CancelURLRequest(result);
    response_writer_->Finish(base::Bind(&EmptyCompletionCallback));
    delegate_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&URLFetcherCore::InformDelegateFetchIsComplete, this));
    return;
  }
  // Continue writing.
  data->DidConsume(result);
  if (WriteBuffer(data) < 0)
    return;
  // Finished writing buffer_. Read some more, unless the request has been
  // cancelled and deleted.
  DCHECK_EQ(0, data->BytesRemaining());
  if (request_.get())
    ReadResponse();
}
// Issues one read of the response body into |buffer_| and funnels the
// result through OnReadCompleted().
void URLFetcherCore::ReadResponse() {
  // Some servers may treat HEAD requests as GET requests. To free up the
  // network connection as soon as possible, signal that the request has
  // completed immediately, without trying to read any data back (all we care
  // about is the response code and headers, which we already have).
  int bytes_read = 0;
  if (request_->status().is_success() &&
      (request_type_ != URLFetcher::HEAD)) {
    if (!request_->Read(buffer_.get(), kBufferSize, &bytes_read))
      bytes_read = -1;  // Match OnReadCompleted() interface contract.
  }
  OnReadCompleted(request_.get(), bytes_read);
}
// Timer callback on the network thread: forwards upload progress to the
// delegate thread whenever the uploaded byte count changed. For chunked
// uploads the total is reported as -1 (unknown).
void URLFetcherCore::InformDelegateUploadProgress() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  if (request_.get()) {
    int64 current = request_->GetUploadProgress().position();
    if (current_upload_bytes_ != current) {
      current_upload_bytes_ = current;
      int64 total = -1;
      if (!is_chunked_upload_) {
        total = static_cast<int64>(request_->GetUploadProgress().size());
        // Total may be zero if the UploadDataStream::Init has not been called
        // yet. Don't send the upload progress until the size is initialized.
        if (!total)
          return;
      }
      delegate_task_runner_->PostTask(
          FROM_HERE,
          base::Bind(
              &URLFetcherCore::InformDelegateUploadProgressInDelegateThread,
              this, current, total));
    }
  }
}
void URLFetcherCore::InformDelegateUploadProgressInDelegateThread(
    int64 current, int64 total) {
  DCHECK(delegate_task_runner_->BelongsToCurrentThread());
  if (delegate_)
    delegate_->OnURLFetchUploadProgress(fetcher_, current, total);
}
// Forwards download progress (current/total response bytes) to the delegate
// thread; called after every successful body read.
void URLFetcherCore::InformDelegateDownloadProgress() {
  DCHECK(network_task_runner_->BelongsToCurrentThread());
  // TODO(pkasting): Remove ScopedTracker below once crbug.com/455952 is fixed.
  tracked_objects::ScopedTracker tracking_profile2(
      FROM_HERE_WITH_EXPLICIT_FUNCTION(
          "455952 delegate_task_runner_->PostTask()"));
  delegate_task_runner_->PostTask(
      FROM_HERE,
      base::Bind(
          &URLFetcherCore::InformDelegateDownloadProgressInDelegateThread,
          this, current_response_bytes_, total_response_bytes_));
}
void URLFetcherCore::InformDelegateDownloadProgressInDelegateThread(
    int64 current, int64 total) {
  DCHECK(delegate_task_runner_->BelongsToCurrentThread());
  if (delegate_)
    delegate_->OnURLFetchDownloadProgress(fetcher_, current, total);
}
// Debug helper: verifies no upload source (bytes, file or stream factory)
// has been configured yet, so the different Set*Upload* calls stay mutually
// exclusive.
void URLFetcherCore::AssertHasNoUploadData() const {
  DCHECK(!upload_content_set_);
  DCHECK(upload_content_.empty());
  DCHECK(upload_file_path_.empty());
  DCHECK(upload_stream_factory_.is_null());
}
} // namespace net
|
Java
|
<?php
/**
 * Registry that attaches the SortWeight ordering extension to a DataObject
 * relation (legacy SilverStripe 2.x API: Object::get_static/set_static/
 * add_extension).
 *
 * Call SortWeightRegistry::decorate('MyClass', 'MyRelation') to make the
 * related class sortable via a SortWeight column.
 */
class SortWeightRegistry {
    // When true, the related class's default_sort is replaced by
    // '[SortWeight]' even if it already had one (the original is preserved
    // in $default_sorts).
    public static $override_default_sort = true;
    // Map of decorated class => (relation name => related class).
    public static $relations = array();
    public static $default_sorts = array(); // original default_sort
    public static $add_weight_columns = array();
    public static $direction = 'ASC'; // ASC || DESC
    // Filesystem path of this module, set by set_module_path().
    public static $module_path;
    public static function set_module_path($directory)
    {
        self::$module_path = $directory;
    }
    /**
     * Decorates the component $relationName of $class with SortWeight
     * ordering. Emits an E_USER_WARNING (without throwing) on invalid input:
     * unknown class, missing relation name, missing component, or duplicate
     * decoration.
     */
    public static function decorate($class, $relationName = null) {
        if(!isset(self::$relations[$class]))
        {
            self::$relations[$class] = array();
        }
        // if relationName is false, enable the sorting on object itself (skip SortWeight map)
        if(!class_exists($class) || !$sng = new $class())
        {
            user_error('Unknown class passed (' . $class .')', E_USER_WARNING);
        }
        elseif($relationName === null )
        {
            user_error('You must provide the Component to order for ' . $class, E_USER_WARNING);
        }
        elseif(!$sng->hasMethod($relationName) || !$component = $sng->$relationName())
        {
            user_error('Component "' . $relationName . '" must exist on ' . $class,E_USER_WARNING);
        }
        elseif(isset(self::$relations[$class][$relationName]))
        {
            user_error('Component "' . $relationName . '" already decorates ' . $class,E_USER_WARNING);
        }
        else
        {
            // For a has_many/many_many ComponentSet the sortable class is the
            // child class; for a has_one it is the component's own class.
            $relationClass = ($component->is_a('ComponentSet')) ?
                $component->childClass : $component->class;
            self::$relations[$class][$relationName] = $relationClass;
            $current_sort = Object::get_static($relationClass, 'default_sort');
            if(self::$override_default_sort || empty($current_sort))
            {
                Object::set_static($relationClass,'default_sort','[SortWeight]');
                if($current_sort != '[SortWeight]')
                {
                    // Remember the original sort so it could be restored.
                    self::$default_sorts[$relationClass] = $current_sort;
                }
            }
            if(!Object::has_extension($relationClass,'SortWeightDecoration'))
            {
                Object::add_extension($relationClass,'SortWeightDecoration');
            }
            return;
        }
        return user_error('SortWeight decoration failed for ' . __CLASS__ . '::' . __FUNCTION__ . "(\"$class\",\"$relationName\")",E_USER_WARNING);
    }
}
|
Java
|
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef UI_GFX_MOJOM_RRECT_F_MOJOM_TRAITS_H_
#define UI_GFX_MOJOM_RRECT_F_MOJOM_TRAITS_H_
#include "ui/gfx/geometry/mojom/geometry_mojom_traits.h"
#include "ui/gfx/mojom/rrect_f.mojom-shared.h"
#include "ui/gfx/rrect_f.h"
#include "ui/gfx/rrect_f_builder.h"
namespace mojo {
namespace {
// One-to-one mapping from gfx::RRectF::Type to its mojom counterpart. The
// switch is exhaustive so the compiler warns if a new Type value is added.
gfx::mojom::RRectFType GfxRRectFTypeToMojo(gfx::RRectF::Type type) {
  switch (type) {
    case gfx::RRectF::Type::kEmpty:
      return gfx::mojom::RRectFType::kEmpty;
    case gfx::RRectF::Type::kRect:
      return gfx::mojom::RRectFType::kRect;
    case gfx::RRectF::Type::kSingle:
      return gfx::mojom::RRectFType::kSingle;
    case gfx::RRectF::Type::kSimple:
      return gfx::mojom::RRectFType::kSimple;
    case gfx::RRectF::Type::kOval:
      return gfx::mojom::RRectFType::kOval;
    case gfx::RRectF::Type::kComplex:
      return gfx::mojom::RRectFType::kComplex;
  }
  NOTREACHED();
  return gfx::mojom::RRectFType::kEmpty;
}
// Inverse of GfxRRectFTypeToMojo().
gfx::RRectF::Type MojoRRectFTypeToGfx(gfx::mojom::RRectFType type) {
  switch (type) {
    case gfx::mojom::RRectFType::kEmpty:
      return gfx::RRectF::Type::kEmpty;
    case gfx::mojom::RRectFType::kRect:
      return gfx::RRectF::Type::kRect;
    case gfx::mojom::RRectFType::kSingle:
      return gfx::RRectF::Type::kSingle;
    case gfx::mojom::RRectFType::kSimple:
      return gfx::RRectF::Type::kSimple;
    case gfx::mojom::RRectFType::kOval:
      return gfx::RRectF::Type::kOval;
    case gfx::mojom::RRectFType::kComplex:
      return gfx::RRectF::Type::kComplex;
  }
  NOTREACHED();
  return gfx::RRectF::Type::kEmpty;
}
} // namespace
// Mojo serialization for gfx::RRectF. Serialization always sends all four
// corner radii; deserialization reads only as many fields as the rect's
// |type| requires, reconstructing the rest from the type's invariants.
template <>
struct StructTraits<gfx::mojom::RRectFDataView, gfx::RRectF> {
  static gfx::mojom::RRectFType type(const gfx::RRectF& input) {
    return GfxRRectFTypeToMojo(input.GetType());
  }
  static gfx::RectF rect(const gfx::RRectF& input) { return input.rect(); }
  static gfx::Vector2dF upper_left(const gfx::RRectF& input) {
    return input.GetCornerRadii(gfx::RRectF::Corner::kUpperLeft);
  }
  static gfx::Vector2dF upper_right(const gfx::RRectF& input) {
    return input.GetCornerRadii(gfx::RRectF::Corner::kUpperRight);
  }
  static gfx::Vector2dF lower_right(const gfx::RRectF& input) {
    return input.GetCornerRadii(gfx::RRectF::Corner::kLowerRight);
  }
  static gfx::Vector2dF lower_left(const gfx::RRectF& input) {
    return input.GetCornerRadii(gfx::RRectF::Corner::kLowerLeft);
  }
  // Deserializes into |out|; returns false if any nested field fails to read.
  static bool Read(gfx::mojom::RRectFDataView data, gfx::RRectF* out) {
    gfx::RRectF::Type type(MojoRRectFTypeToGfx(data.type()));
    gfx::RectF rect;
    if (!data.ReadRect(&rect))
      return false;
    // kEmpty/kRect carry no radii: the rect alone suffices.
    if (type <= gfx::RRectF::Type::kRect) {
      *out = gfx::RRectFBuilder().set_rect(rect).Build();
      return true;
    }
    gfx::Vector2dF upper_left;
    if (!data.ReadUpperLeft(&upper_left))
      return false;
    // kSingle/kSimple use one radius pair applied to all corners.
    if (type <= gfx::RRectF::Type::kSimple) {
      *out = gfx::RRectFBuilder()
                 .set_rect(rect)
                 .set_radius(upper_left.x(), upper_left.y())
                 .Build();
      return true;
    }
    // kOval/kComplex need all four per-corner radii.
    gfx::Vector2dF upper_right;
    gfx::Vector2dF lower_right;
    gfx::Vector2dF lower_left;
    if (!data.ReadUpperRight(&upper_right) ||
        !data.ReadLowerRight(&lower_right) ||
        !data.ReadLowerLeft(&lower_left)) {
      return false;
    }
    *out = gfx::RRectFBuilder()
               .set_rect(rect)
               .set_upper_left(upper_left.x(), upper_left.y())
               .set_upper_right(upper_right.x(), upper_right.y())
               .set_lower_right(lower_right.x(), lower_right.y())
               .set_lower_left(lower_left.x(), lower_left.y())
               .Build();
    return true;
  }
};
} // namespace mojo
#endif // UI_GFX_MOJOM_RRECT_F_MOJOM_TRAITS_H_
|
Java
|
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/password_manager/core/browser/sql_table_builder.h"
#include <algorithm>
#include <set>
#include <utility>
#include "base/numerics/safe_conversions.h"
#include "base/stl_util.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "sql/database.h"
#include "sql/transaction.h"
namespace password_manager {
namespace {
// Appends |name| to |list_of_names|, inserting ", " between items.
void Append(const std::string& name, std::string* list_of_names) {
  if (!list_of_names->empty())
    list_of_names->append(", ");
  list_of_names->append(name);
}
} // namespace
// static
unsigned constexpr SQLTableBuilder::kInvalidVersion;
// Description of one column across the table's version history.
struct SQLTableBuilder::Column {
  std::string name;
  // SQL type of the column (e.g. "VARCHAR", "INTEGER").
  std::string type;
  // Whether this column is the table's PRIMARY KEY.
  bool is_primary_key;
  // Whether this column is part of the table's UNIQUE constraint.
  bool part_of_unique_key;
  // The first version this column is part of.
  unsigned min_version;
  // The last version this column is part of. The value of kInvalidVersion
  // means that it is part of all versions since |min_version|.
  unsigned max_version;
  // Renaming of a column is stored as a sequence of one removed and one added
  // column in |columns_|. To distinguish it from an unrelated removal and
  // addition, the following bit is set to true for the added columns which
  // are part of renaming. Those columns will get the data of their
  // predecessors. If the bit is false, the column will be filled with the
  // default value on creation.
  bool gets_previous_data;
};
// Description of one index across the table's version history.
struct SQLTableBuilder::Index {
  // The name of this index.
  std::string name;
  // The names of columns this index is built from.
  std::vector<std::string> columns;
  // The first version this index is part of.
  unsigned min_version;
  // The last version this index is part of. The value of kInvalidVersion
  // means that it is part of all versions since |min_version|.
  unsigned max_version;
};
SQLTableBuilder::SQLTableBuilder(const std::string& table_name)
    : table_name_(table_name) {}
SQLTableBuilder::~SQLTableBuilder() = default;
// Adds a column named |name| of SQL type |type| to the currently unsealed
// version. |name| must not already be in use.
void SQLTableBuilder::AddColumn(std::string name, std::string type) {
  DCHECK(FindLastColumnByName(name) == columns_.rend());
  columns_.push_back({std::move(name), std::move(type), false, false,
                      sealed_version_ + 1, kInvalidVersion, false});
}
// Adds the (single) INTEGER PRIMARY KEY column. Only one primary key column
// may ever be added (enforced by the DCHECK loop).
void SQLTableBuilder::AddPrimaryKeyColumn(std::string name) {
  for (const Column& column : columns_) {
    DCHECK(!column.is_primary_key);
  }
  AddColumn(std::move(name), "INTEGER");
  columns_.back().is_primary_key = true;
}
// Adds a column that also participates in the table's UNIQUE constraint.
void SQLTableBuilder::AddColumnToUniqueKey(std::string name, std::string type) {
  AddColumn(std::move(name), std::move(type));
  columns_.back().part_of_unique_key = true;
}
// Renames column |old_name| to |new_name| in the currently unsealed version.
// If the column already shipped in a sealed version, the rename is recorded
// as a drop + linked add (gets_previous_data = true) so migration code can
// carry the data over; otherwise the column entry is edited in place.
void SQLTableBuilder::RenameColumn(const std::string& old_name,
                                   const std::string& new_name) {
  auto old_column = FindLastColumnByName(old_name);
  DCHECK(old_column != columns_.rend());
  if (old_name == new_name)  // The easy case.
    return;
  DCHECK(FindLastColumnByName(new_name) == columns_.rend());
  // Check there is no index in the current version that references |old_name|.
  DCHECK(std::none_of(indices_.begin(), indices_.end(),
                      [&old_name](const Index& index) {
                        return index.max_version == kInvalidVersion &&
                               base::Contains(index.columns, old_name);
                      }));
  if (sealed_version_ != kInvalidVersion &&
      old_column->min_version <= sealed_version_) {
    // This column exists in the last sealed version. Therefore it cannot be
    // just replaced, it needs to be kept for generating the migration code.
    Column new_column = {new_name,
                         old_column->type,
                         old_column->is_primary_key,
                         old_column->part_of_unique_key,
                         sealed_version_ + 1,
                         kInvalidVersion,
                         true};
    old_column->max_version = sealed_version_;
    auto past_old =
        old_column.base();  // Points one element after |old_column|.
    columns_.insert(past_old, std::move(new_column));
  } else {
    // This column was just introduced in the currently unsealed version. To
    // rename it, it is enough just to modify the entry in columns_.
    old_column->name = new_name;
  }
}
// Removes column |name|. |name| must have been added in the past. A column
// that shipped in a sealed version is only capped at that version (so
// migration code can still reference it); an unsealed one is erased.
void SQLTableBuilder::DropColumn(const std::string& name) {
  auto column = FindLastColumnByName(name);
  DCHECK(column != columns_.rend());
  // Check there is no index in the current version that references |name|.
  DCHECK(std::none_of(indices_.begin(), indices_.end(),
                      [&name](const Index& index) {
                        return index.max_version == kInvalidVersion &&
                               base::Contains(index.columns, name);
                      }));
  if (sealed_version_ != kInvalidVersion &&
      column->min_version <= sealed_version_) {
    // This column exists in the last sealed version. Therefore it cannot be
    // just deleted, it needs to be kept for generating the migration code.
    column->max_version = sealed_version_;
  } else {
    // This column was just introduced in the currently unsealed version. It
    // can be just erased from |columns_|.
    columns_.erase(
        --(column.base()));  // base() points one element after |column|.
  }
}
// Adds an index named |name| over |columns| to the currently unsealed
// version. All referenced columns must exist in the latest version, column
// names must be unique within the index, and |name| must be unused.
void SQLTableBuilder::AddIndex(std::string name,
                               std::vector<std::string> columns) {
  DCHECK(!columns.empty());
  // Check if all entries of |columns| are unique.
  DCHECK_EQ(std::set<std::string>(columns.begin(), columns.end()).size(),
            columns.size());
  // |name| must not have been added before.
  DCHECK(FindLastIndexByName(name) == indices_.rend());
  // Check that all referenced columns are present in the last version by making
  // sure that the inner predicate applies to all columns names in |columns|.
  DCHECK(std::all_of(
      columns.begin(), columns.end(), [this](const std::string& column_name) {
        // Check if there is any column with the required name which is also
        // present in the latest version. Note that we don't require the last
        // version to be sealed.
        return std::any_of(columns_.begin(), columns_.end(),
                           [&column_name](const Column& column) {
                             return column.name == column_name &&
                                    column.max_version == kInvalidVersion;
                           });
      }));
  indices_.push_back({std::move(name), std::move(columns), sealed_version_ + 1,
                      kInvalidVersion});
}
// Builds the table-level constraint clause for the table as it existed at
// |version| — currently just "UNIQUE (col, ...)" over the unique-key
// columns — or the empty string when there is no constraint.
std::string SQLTableBuilder::ComputeConstraints(unsigned version) const {
  std::string unique_key;
  for (const Column& column : columns_) {
    // A column belongs to |version| iff it was introduced no later than
    // |version| and not dropped before it.
    const bool in_version =
        column.min_version <= version && version <= column.max_version;
    if (in_version && column.part_of_unique_key)
      Append(column.name, &unique_key);
  }
  std::string constraints;
  if (!unique_key.empty())
    Append("UNIQUE (" + unique_key + ")", &constraints);
  return constraints;
}
// Closes the current version and returns the number that was just sealed.
unsigned SQLTableBuilder::SealVersion() {
  sealed_version_ += 1;
  return sealed_version_;
}
// Migrates the database schema one version at a time from |old_version| up to
// |sealed_version_|. After each successful step, |post_migration_step_callback|
// (if non-null) is invoked with the version just reached; any failure aborts
// the remaining steps and returns false.
bool SQLTableBuilder::MigrateFrom(
    unsigned old_version,
    sql::Database* db,
    const base::RepeatingCallback<bool(sql::Database*, unsigned)>&
        post_migration_step_callback) const {
  for (; old_version < sealed_version_; ++old_version) {
    if (!MigrateToNextFrom(old_version, db))
      return false;
    if (post_migration_step_callback &&
        !post_migration_step_callback.Run(db, old_version + 1))
      return false;
  }
  return true;
}
// Creates the table (and its indices) at the latest sealed version inside a
// single transaction. Succeeds trivially if the table already exists.
bool SQLTableBuilder::CreateTable(sql::Database* db) const {
  DCHECK(IsVersionLastAndSealed(sealed_version_));
  if (db->DoesTableExist(table_name_.c_str()))
    return true;
  std::string constraints = ComputeConstraints(sealed_version_);
  // The table must have either a UNIQUE constraint or a primary key;
  // otherwise there is no way to address rows.
  DCHECK(!constraints.empty() || std::any_of(columns_.begin(), columns_.end(),
                                             [](const Column& column) {
                                               return column.is_primary_key;
                                             }));
  std::string names;  // Names and types of the current columns.
  for (const Column& column : columns_) {
    if (IsColumnInLastVersion(column)) {
      std::string suffix;
      if (column.is_primary_key)
        suffix = " PRIMARY KEY AUTOINCREMENT";
      Append(column.name + " " + column.type + suffix, &names);
    }
  }
  std::vector<std::string>
      create_index_sqls;  // CREATE INDEX statements for the current indices.
  for (const Index& index : indices_) {
    if (IsIndexInLastVersion(index)) {
      create_index_sqls.push_back(base::StringPrintf(
          "CREATE INDEX %s ON %s (%s)", index.name.c_str(), table_name_.c_str(),
          base::JoinString(index.columns, ", ").c_str()));
    }
  }
  std::string create_table_statement =
      constraints.empty()
          ? base::StringPrintf("CREATE TABLE %s (%s)", table_name_.c_str(),
                               names.c_str())
          : base::StringPrintf("CREATE TABLE %s (%s, %s)", table_name_.c_str(),
                               names.c_str(), constraints.c_str());
  // All DDL runs in one transaction so a partially-created table never
  // becomes visible; std::all_of short-circuits on the first failing index.
  sql::Transaction transaction(db);
  return transaction.Begin() && db->Execute(create_table_statement.c_str()) &&
         std::all_of(create_index_sqls.begin(), create_index_sqls.end(),
                     [&db](const std::string& sql) {
                       return db->Execute(sql.c_str());
                     }) &&
         transaction.Commit();
}
// Returns a comma-separated list of the names of all columns present in the
// latest (sealed) version.
std::string SQLTableBuilder::ListAllColumnNames() const {
  DCHECK(IsVersionLastAndSealed(sealed_version_));
  std::string names;
  for (const Column& column : columns_) {
    if (!IsColumnInLastVersion(column))
      continue;
    Append(column.name, &names);
  }
  return names;
}
// Returns a comma-separated list of "name=?" placeholders for every current
// column that is neither the primary key nor part of the unique key.
std::string SQLTableBuilder::ListAllNonuniqueKeyNames() const {
  DCHECK(IsVersionLastAndSealed(sealed_version_));
  std::string assignments;
  for (const Column& column : columns_) {
    if (!IsColumnInLastVersion(column))
      continue;
    if (column.is_primary_key || column.part_of_unique_key)
      continue;
    Append(column.name + "=?", &assignments);
  }
  return assignments;
}
// Returns an AND-joined list of "name=?" placeholders for every current
// column that belongs to the unique key, suitable for a WHERE clause.
std::string SQLTableBuilder::ListAllUniqueKeyNames() const {
  DCHECK(IsVersionLastAndSealed(sealed_version_));
  std::string clause;
  for (const Column& column : columns_) {
    if (!(IsColumnInLastVersion(column) && column.part_of_unique_key))
      continue;
    if (!clause.empty())
      clause += " AND ";
    clause += column.name + "=?";
  }
  return clause;
}
// Returns the names of all primary-key columns in the latest version.
// The schema supports at most one primary key, so the result has size 0 or 1.
std::vector<base::StringPiece> SQLTableBuilder::AllPrimaryKeyNames() const {
  DCHECK(IsVersionLastAndSealed(sealed_version_));
  std::vector<base::StringPiece> result;
  result.reserve(columns_.size());
  for (const Column& column : columns_) {
    if (IsColumnInLastVersion(column) && column.is_primary_key) {
      result.emplace_back(column.name);
    }
  }
  // Use the comparison macro so a failure logs the actual size, unlike
  // DCHECK(result.size() < 2) which only reports the condition text.
  DCHECK_LE(result.size(), 1u);
  return result;
}
// Counts the columns present in the latest (sealed) version.
size_t SQLTableBuilder::NumberOfColumns() const {
  DCHECK(IsVersionLastAndSealed(sealed_version_));
  size_t count = 0;
  for (const Column& column : columns_) {
    if (IsColumnInLastVersion(column))
      ++count;
  }
  return count;
}
// Migrates the table schema by exactly one step, from |old_version| to
// |old_version| + 1. Column additions use ALTER TABLE ADD COLUMN; drops and
// renames require rebuilding the table through a temporary copy (SQLite has
// no direct support for them). Finishes by migrating the indices.
bool SQLTableBuilder::MigrateToNextFrom(unsigned old_version,
                                        sql::Database* db) const {
  DCHECK_LT(old_version, sealed_version_);
  // NOTE(review): this check is vacuous for an unsigned argument.
  DCHECK_GE(old_version, 0u);
  DCHECK(IsVersionLastAndSealed(sealed_version_));
  // Names of columns from old version, values of which are copied. This
  // contains only the names without their types.
  std::string old_names_of_existing_columns_without_types;
  // Names of columns in new version, except for added ones.
  std::string new_names_of_existing_columns;
  // Names of columns in new version, except for added ones. This contains only
  // the names without their types.
  std::string new_names_of_existing_columns_without_types;
  std::vector<std::string>
      names_of_new_columns_list;  // Names of added columns.
  // A temporary table will be needed if some columns are dropped or renamed,
  // because that is not supported by a single SQLite command.
  bool needs_temp_table = false;
  bool has_primary_key = false;
  for (auto column = columns_.begin(); column != columns_.end(); ++column) {
    if (column->max_version == old_version) {
      // This column was deleted after |old_version|. It can have two reasons:
      needs_temp_table = true;
      auto next_column = std::next(column);
      if (next_column != columns_.end() && next_column->gets_previous_data) {
        // (1) The column is being renamed. The old name feeds the SELECT and
        // the new name appears in the rebuilt table's schema.
        DCHECK_EQ(column->type, next_column->type);
        DCHECK_NE(column->name, next_column->name);
        Append(column->name, &old_names_of_existing_columns_without_types);
        Append(next_column->name + " " + next_column->type,
               &new_names_of_existing_columns);
        Append(next_column->name, &new_names_of_existing_columns_without_types);
        ++column;  // Avoid processing next_column in the next loop.
      } else {
        // (2) The column is being dropped.
      }
    } else if (column->min_version == old_version + 1) {
      // This column was added after old_version. Adding a key column cannot
      // be done via ALTER TABLE, so it also forces a table rebuild.
      if (column->is_primary_key || column->part_of_unique_key)
        needs_temp_table = true;
      std::string suffix;
      if (column->is_primary_key) {
        suffix = " PRIMARY KEY AUTOINCREMENT";
        has_primary_key = true;
      }
      names_of_new_columns_list.push_back(column->name + " " + column->type +
                                          suffix);
    } else if (column->min_version <= old_version &&
               (column->max_version == kInvalidVersion ||
                column->max_version > old_version)) {
      std::string suffix;
      if (column->is_primary_key) {
        suffix = " PRIMARY KEY AUTOINCREMENT";
        has_primary_key = true;
      }
      // This column stays.
      Append(column->name, &old_names_of_existing_columns_without_types);
      Append(column->name + " " + column->type + suffix,
             &new_names_of_existing_columns);
      Append(column->name, &new_names_of_existing_columns_without_types);
    }
  }
  if (old_names_of_existing_columns_without_types.empty()) {
    // Table didn't exist in this version, and nothing to migrate.
    return true;
  }
  if (needs_temp_table) {
    // Following the instructions from
    // https://www.sqlite.org/lang_altertable.html#otheralter, this code works
    // around the fact that SQLite does not allow dropping or renaming
    // columns. Instead, a new table is constructed, with the new column
    // names, and data from all but dropped columns from the current table are
    // copied into it. After that, the new table is renamed to the current
    // one.
    std::string constraints = ComputeConstraints(old_version + 1);
    DCHECK(has_primary_key || !constraints.empty());
    // Foreign key constraints are not enabled for the login database, so no
    // PRAGMA foreign_keys=off needed.
    const std::string temp_table_name = "temp_" + table_name_;
    std::string names_of_all_columns = new_names_of_existing_columns;
    for (const std::string& new_column : names_of_new_columns_list) {
      Append(new_column, &names_of_all_columns);
    }
    std::string create_table_statement =
        constraints.empty()
            ? base::StringPrintf("CREATE TABLE %s (%s)",
                                 temp_table_name.c_str(),
                                 names_of_all_columns.c_str())
            : base::StringPrintf(
                  "CREATE TABLE %s (%s, %s)", temp_table_name.c_str(),
                  names_of_all_columns.c_str(), constraints.c_str());
    // CREATE temp table -> copy surviving data -> DROP old -> RENAME temp,
    // all inside one transaction so a failure leaves the old table intact.
    sql::Transaction transaction(db);
    if (!(transaction.Begin() && db->Execute(create_table_statement.c_str()) &&
          db->Execute(base::StringPrintf(
                          "INSERT OR REPLACE INTO %s (%s) SELECT %s FROM %s",
                          temp_table_name.c_str(),
                          new_names_of_existing_columns_without_types.c_str(),
                          old_names_of_existing_columns_without_types.c_str(),
                          table_name_.c_str())
                          .c_str()) &&
          db->Execute(base::StringPrintf("DROP TABLE %s", table_name_.c_str())
                          .c_str()) &&
          db->Execute(base::StringPrintf("ALTER TABLE %s RENAME TO %s",
                                         temp_table_name.c_str(),
                                         table_name_.c_str())
                          .c_str()) &&
          transaction.Commit())) {
      return false;
    }
  } else if (!names_of_new_columns_list.empty()) {
    // If no new table has been created, we need to add the new columns here if
    // any.
    sql::Transaction transaction(db);
    if (!(transaction.Begin() &&
          std::all_of(names_of_new_columns_list.begin(),
                      names_of_new_columns_list.end(),
                      [this, &db](const std::string& name) {
                        return db->Execute(
                            base::StringPrintf("ALTER TABLE %s ADD COLUMN %s",
                                               table_name_.c_str(),
                                               name.c_str())
                                .c_str());
                      }) &&
          transaction.Commit())) {
      return false;
    }
  }
  return MigrateIndicesToNextFrom(old_version, db);
}
// Migrates the indices by one version step: drops indices that end at or
// before |old_version| and creates indices that exist in |old_version| + 1.
// Both operations are idempotent (IF EXISTS / IF NOT EXISTS) and run inside
// one transaction.
bool SQLTableBuilder::MigrateIndicesToNextFrom(unsigned old_version,
                                               sql::Database* db) const {
  DCHECK_LT(old_version, sealed_version_);
  DCHECK(IsVersionLastAndSealed(sealed_version_));
  sql::Transaction transaction(db);
  if (!transaction.Begin())
    return false;
  for (const auto& index : indices_) {
    std::string sql;
    if (index.max_version <= old_version) {
      // Index is not supposed to exist in the new version.
      sql = base::StringPrintf("DROP INDEX IF EXISTS %s", index.name.c_str());
    } else if (index.min_version <= old_version + 1) {
      // Index is supposed to exist in the new version.
      sql = base::StringPrintf("CREATE INDEX IF NOT EXISTS %s ON %s (%s)",
                               index.name.c_str(), table_name_.c_str(),
                               base::JoinString(index.columns, ", ").c_str());
    } else {
      // Index only becomes relevant in a later version; nothing to do now.
      continue;
    }
    if (!db->Execute(sql.c_str()))
      return false;
  }
  return transaction.Commit();
}
// Finds the most recently added column called |name|; returns
// columns_.rend() when no such column exists.
std::vector<SQLTableBuilder::Column>::reverse_iterator
SQLTableBuilder::FindLastColumnByName(const std::string& name) {
  auto has_name = [&name](const Column& column) { return column.name == name; };
  return std::find_if(columns_.rbegin(), columns_.rend(), has_name);
}
// Finds the most recently added index called |name|; returns
// indices_.rend() when no such index exists.
std::vector<SQLTableBuilder::Index>::reverse_iterator
SQLTableBuilder::FindLastIndexByName(const std::string& name) {
  auto has_name = [&name](const Index& index) { return index.name == name; };
  return std::find_if(indices_.rbegin(), indices_.rend(), has_name);
}
// Returns true when |version| equals the sealed version and no column or
// index has been modified since it was sealed.
bool SQLTableBuilder::IsVersionLastAndSealed(unsigned version) const {
  // Is |version| the last sealed one?
  if (sealed_version_ != version)
    return false;
  // Is the current version the last sealed one? In other words, is there
  // neither a column nor an index added past the sealed version (min_version >
  // sealed) nor deleted one version after the sealed (max_version == sealed)?
  return std::none_of(columns_.begin(), columns_.end(),
                      [this](const Column& column) {
                        return column.min_version > sealed_version_ ||
                               column.max_version == sealed_version_;
                      }) &&
         std::none_of(indices_.begin(), indices_.end(),
                      [this](const Index& index) {
                        return index.min_version > sealed_version_ ||
                               index.max_version == sealed_version_;
                      });
}
// Returns true when |column| is part of the latest (sealed) version.
bool SQLTableBuilder::IsColumnInLastVersion(const Column& column) const {
  DCHECK(IsVersionLastAndSealed(sealed_version_));
  if (column.min_version > sealed_version_)
    return false;
  return column.max_version == kInvalidVersion ||
         column.max_version >= sealed_version_;
}
// Returns true when |index| is part of the latest (sealed) version.
bool SQLTableBuilder::IsIndexInLastVersion(const Index& index) const {
  DCHECK(IsVersionLastAndSealed(sealed_version_));
  if (index.min_version > sealed_version_)
    return false;
  return index.max_version == kInvalidVersion ||
         index.max_version >= sealed_version_;
}
} // namespace password_manager
|
Java
|
# Copyright (c) 2017 David Sorokin <david.sorokin@gmail.com>
#
# Licensed under BSD3. See the LICENSE.txt file in the root of this distribution.
from simulation.aivika.modeler.model import *
from simulation.aivika.modeler.port import *
from simulation.aivika.modeler.stream import *
from simulation.aivika.modeler.data_type import *
from simulation.aivika.modeler.pdf import *
def uniform_random_stream(transact_type, min_delay, max_delay):
    """Return a new stream of transacts with random delays distributed uniformly."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomUniformStream', str(min_delay), str(max_delay)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def uniform_int_random_stream(transact_type, min_delay, max_delay):
    """Return a new stream of transacts with integer random delays distributed uniformly."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomUniformIntStream', str(min_delay), str(max_delay)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def triangular_random_stream(transact_type, min_delay, median_delay, max_delay):
    """Return a new stream of transacts with random delays having the triangular distribution."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomTriangularStream', str(min_delay),
                     str(median_delay), str(max_delay)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def normal_random_stream(transact_type, mean_delay, delay_deviation):
    """Return a new stream of transacts with random delays having the normal distribution."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomNormalStream', str(mean_delay),
                     str(delay_deviation)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def lognormal_random_stream(transact_type, normal_mean_delay, normal_delay_deviation):
    """Return a new stream of transacts with random delays having the lognormal distribution.

       The numerical parameters are related to the normal distribution that
       this distribution is derived from.
    """
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomLogNormalStream', str(normal_mean_delay),
                     str(normal_delay_deviation)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def exponential_random_stream(transact_type, mean_delay):
    """Return a new stream of transacts with random delays having the exponential distribution with the specified mean (a reciprocal of the rate)."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomExponentialStream', str(mean_delay)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def erlang_random_stream(transact_type, scale, shape):
    """Return a new stream of transacts with random delays having the Erlang distribution with the specified scale (a reciprocal of the rate) and shape parameters."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomErlangStream', str(scale), str(shape)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def poisson_random_stream(transact_type, mean_delay):
    """Return a new stream of transacts with random delays having the Poisson distribution with the specified mean."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomPoissonStream', str(mean_delay)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def binomial_random_stream(transact_type, probability, trials):
    """Return a new stream of transacts with random delays having the binomial distribution with the specified probability and trials."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomBinomialStream', str(probability), str(trials)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def gamma_random_stream(transact_type, shape, scale):
    """Return a new stream of transacts with random delays having the Gamma distribution by the specified shape and scale."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomGammaStream', str(shape), str(scale)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def beta_random_stream(transact_type, alpha, beta):
    """Return a new stream of transacts with random delays having the Beta distribution by the specified shape parameters (alpha and beta)."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomBetaStream', str(alpha), str(beta)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def weibull_random_stream(transact_type, shape, scale):
    """Return a new stream of transacts with random delays having the Weibull distribution by the specified shape and scale."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomWeibullStream', str(shape), str(scale)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
def discrete_random_stream(transact_type, pdf):
    """Return a new stream of transacts with random delays having the discrete distribution by the specified probability density function."""
    expect_transact_type(transact_type)
    model = transact_type.get_model()
    coerced = transact_type.coerce_arrival('a')
    code = ' '.join(['return $ mapStream (\\a ->', coerced + ') $',
                     'randomDiscreteStream', encode_pdf(pdf)])
    port = StreamPort(model, transact_type.get_data_type())
    port.bind_to_input()
    port.write(code)
    return port
|
Java
|
# apis_v1/documentation_source/voter_star_on_save_doc.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
def voter_star_on_save_doc_template_values(url_root):
    """
    Show documentation about voterStarOnSave.

    Returns a dict of template values (parameter lists, status codes, and an
    example response) used to render the API documentation page for this
    endpoint. ``url_root`` is the base URL the docs page links against.
    """
    # Query parameters that every caller must supply.
    required_query_parameter_list = [
        {
            'name':         'api_key',
            'value':        'string (from post, cookie, or get (in that order))',  # boolean, integer, long, string
            'description':  'The unique key provided to any organization using the WeVoteServer APIs',
        },
        {
            'name':         'voter_device_id',
            'value':        'string',  # boolean, integer, long, string
            'description':  'An 88 character unique identifier linked to a voter record on the server',
        },
        {
            'name':         'kind_of_ballot_item',
            'value':        'string',  # boolean, integer, long, string
            'description':  'What is the type of ballot item for which we are saving the \'on\' status? '
                            '(kind_of_ballot_item is either "OFFICE", "CANDIDATE", "POLITICIAN" or "MEASURE")',
        },
        {
            'name':         'ballot_item_id',
            'value':        'integer',  # boolean, integer, long, string
            'description':  'The unique internal identifier for this ballot_item '
                            '(either ballot_item_id OR ballot_item_we_vote_id required -- not both. '
                            'If it exists, ballot_item_id is used instead of ballot_item_we_vote_id)',
        },
        {
            'name':         'ballot_item_we_vote_id',
            'value':        'string',  # boolean, integer, long, string
            'description':  'The unique identifier for this ballot_item across all networks '
                            '(either ballot_item_id OR ballot_item_we_vote_id required -- not both. '
                            'NOTE: In the future we might support other identifiers used in the industry.',
        },
    ]
    # This endpoint takes no optional parameters.
    optional_query_parameter_list = [
    ]
    # Status strings the endpoint may return, shown in the docs.
    potential_status_codes_list = [
        {
            'code':         'VALID_VOTER_DEVICE_ID_MISSING',
            'description':  'Cannot proceed. A valid voter_device_id parameter was not included.',
        },
        {
            'code':         'VALID_VOTER_ID_MISSING',
            'description':  'Cannot proceed. Missing voter_id while trying to save.',
        },
        {
            'code':         'STAR_ON_OFFICE CREATE/UPDATE ITEM_STARRED',
            'description':  '',
        },
        {
            'code':         'STAR_ON_CANDIDATE CREATE/UPDATE ITEM_STARRED',
            'description':  '',
        },
        {
            'code':         'STAR_ON_MEASURE CREATE/UPDATE ITEM_STARRED',
            'description':  '',
        },
    ]
    # Pre-filled values for the "try it now" link on the docs page.
    try_now_link_variables_dict = {
        'kind_of_ballot_item': 'CANDIDATE',
        'ballot_item_id': '5655',
    }
    # Example JSON response body rendered verbatim in the documentation.
    api_response = '{\n' \
                   '  "status": string (description of what happened),\n' \
                   '  "success": boolean (did the save happen?),\n' \
                   '  "ballot_item_id": integer,\n' \
                   '  "ballot_item_we_vote_id": string,\n' \
                   '  "kind_of_ballot_item": string (CANDIDATE, MEASURE),\n' \
                   '}'
    template_values = {
        'api_name': 'voterStarOnSave',
        'api_slug': 'voterStarOnSave',
        'api_introduction':
            "Save or create private 'star on' state for the current voter for a measure, an office or candidate.",
        'try_now_link': 'apis_v1:voterStarOnSaveView',
        'try_now_link_variables_dict': try_now_link_variables_dict,
        'url_root': url_root,
        'get_or_post': 'GET',
        'required_query_parameter_list': required_query_parameter_list,
        'optional_query_parameter_list': optional_query_parameter_list,
        'api_response': api_response,
        'api_response_notes':
            "",
        'potential_status_codes_list': potential_status_codes_list,
    }
    return template_values
|
Java
|
define(['../Property', '../Model', 'dojo/_base/declare', 'json-schema/lib/validate'],
		function (Property, Model, declare, jsonSchemaValidator) {
	// module:
	//		dstore/extensions/JsonSchema
	// summary:
	//		This module generates a dstore schema from a JSON Schema to enabled validation of objects
	//		and property changes with JSON Schema
	return function (jsonSchema) {
		// create the schema that can be used by dstore/Model
		var modelSchema = {};
		// A JSON Schema may wrap its fields in a "properties" object or be a
		// flat map of property definitions; accept both shapes.
		var properties = jsonSchema.properties || jsonSchema;
		// the validation function, this can be used for all the properties
		// (invoked with |this| bound to the dstore property instance, so
		// |this.valueOf()| and |this.name| identify the value under test)
		function checkForErrors() {
			var value = this.valueOf();
			var key = this.name;
			// get the current value and test it against the property's definition
			var validation = jsonSchemaValidator.validate(value, properties[key]);
			// set any errors
			var errors = validation.errors;
			if (errors) {
				// assign the property names to the errors
				for (var i = 0; i < errors.length; i++) {
					errors[i].property = key;
				}
			}
			return errors;
		}
		// iterate through the schema properties, creating property validators
		for (var i in properties) {
			var jsDefinition = properties[i];
			var definition = modelSchema[i] = new Property({
				checkForErrors: checkForErrors
			});
			if (typeof jsDefinition.type === 'string') {
				// copy the type so it can be used for coercion
				definition.type = jsDefinition.type;
			}
			if (typeof jsDefinition['default'] === 'string') {
				// and copy the default
				definition['default'] = jsDefinition['default'];
			}
		}
		// Produce a Model subclass carrying the generated schema.
		return declare(Model, {
			schema: modelSchema
		});
	};
});
|
Java
|
using System;
using System.Collections.Generic;
using System.Linq;
using MonoTouch.Foundation;
using MonoTouch.UIKit;
namespace SqliteExpressionsTest.iOS
{
	/// <summary>
	/// Process entry point for the iOS app bundle; hands control to the
	/// MonoTouch/UIKit runtime.
	/// </summary>
	public class Application
	{
		// This is the main entry point of the application.
		static void Main(string[] args)
		{
			// if you want to use a different Application Delegate class from "AppDelegate"
			// you can specify it here.
			UIApplication.Main(args, null, "AppDelegate");
		}
	}
}
|
Java
|
<?php
/**
* Zend Framework (http://framework.zend.com/)
*
* @link http://github.com/zendframework/ZendSkeletonApplication for the canonical source repository
* @copyright Copyright (c) 2005-2013 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
namespace Competitions;
use Zend\Mvc\ModuleRouteListener;
use Zend\Mvc\MvcEvent;
class Module
{
    /**
     * Standard ZF2 bootstrap hook: primes the translator service and wires
     * the module route listener into the application's event manager.
     *
     * @param MvcEvent $e
     */
    public function onBootstrap(MvcEvent $e)
    {
        $e->getApplication()->getServiceManager()->get('translator');
        $eventManager        = $e->getApplication()->getEventManager();
        $moduleRouteListener = new ModuleRouteListener();
        $moduleRouteListener->attach($eventManager);
    }

    /**
     * Returns the module's configuration array.
     *
     * @return array
     */
    public function getConfig()
    {
        return include __DIR__ . '/config/module.config.php';
    }

    /**
     * Returns the autoloader configuration mapping this namespace to src/.
     *
     * @return array
     */
    public function getAutoloaderConfig()
    {
        return array(
            'Zend\Loader\StandardAutoloader' => array(
                'namespaces' => array(
                    __NAMESPACE__ => __DIR__ . '/src/' . __NAMESPACE__,
                ),
            ),
        );
    }
}
|
Java
|
#include "stdafx.h"
#include "CGGuildApply.h"
// Deserializes a guild-application packet. Wire format:
// [name length:BYTE][name bytes][desc length:BYTE][desc bytes].
BOOL CGGuildApply::Read( SocketInputStream& iStream )
{
	__ENTER_FUNCTION
	iStream.Read( (CHAR*)(&m_GuildNameSize), sizeof(BYTE) );
	if(m_GuildNameSize<MAX_GUILD_NAME_SIZE)
	{
		iStream.Read( (CHAR*)(m_GuildName), m_GuildNameSize );
	}
	// NOTE(review): when a size field is >= the maximum, the payload bytes
	// are left unread, so the stream is misaligned for the next field --
	// confirm the caller discards such packets.
	iStream.Read( (CHAR*)(&m_GuildDescSize), sizeof(BYTE) );
	if(m_GuildDescSize<MAX_GUILD_DESC_SIZE)
	{
		iStream.Read( (CHAR*)(m_GuildDesc), m_GuildDescSize);
	}
	return TRUE;
	__LEAVE_FUNCTION
	return FALSE;
}
// Serializes a guild-application packet; mirrors Read() field-for-field:
// [name length:BYTE][name bytes][desc length:BYTE][desc bytes].
BOOL CGGuildApply::Write( SocketOutputStream& oStream ) const
{
	__ENTER_FUNCTION
	oStream.Write( (CHAR*)(&m_GuildNameSize), sizeof(BYTE) );
	if(m_GuildNameSize<MAX_GUILD_NAME_SIZE)
	{
		oStream.Write( (CHAR*)(m_GuildName), m_GuildNameSize );
	}
	// NOTE(review): like Read(), an out-of-range size writes the length byte
	// but no payload -- the peer will then misparse the stream; confirm sizes
	// are validated before Write() is called.
	oStream.Write( (CHAR*)(&m_GuildDescSize), sizeof(BYTE) );
	if(m_GuildDescSize<MAX_GUILD_DESC_SIZE)
	{
		oStream.Write( (CHAR*)(m_GuildDesc), m_GuildDescSize);
	}
	return TRUE;
	__LEAVE_FUNCTION
	return FALSE;
}
// Dispatches this packet to its handler; returns FALSE if the handler
// frame unwinds via the __LEAVE_FUNCTION guard.
UINT CGGuildApply::Execute( Player* pPlayer )
{
	__ENTER_FUNCTION
	return CGGuildApplyHandler::Execute( this, pPlayer );
	__LEAVE_FUNCTION
	return FALSE;
}
|
Java
|
<?php namespace Laravella\Ravel\Facades;
class Facade
{
    /**
     * Cache of resolved facade root instances, keyed by accessor name.
     * Initialized to an array: the original relied on PHP auto-vivifying
     * a null static into an array on first write, which is fragile and
     * discouraged in modern PHP.
     *
     * @var array
     */
    protected static $resolvedInstance = [];

    /**
     * Get the registered name of the component.
     *
     * Subclasses must override this; the base implementation always throws.
     *
     * @return string
     * @throws \RuntimeException
     */
    protected static function getFacadeAccessor()
    {
        throw new \RuntimeException("Facade does not implement getFacadeAccessor method.");
    }

    /**
     * Resolve the facade root instance from the container.
     *
     * Objects are returned as-is; string accessors are resolved to a class
     * under the Laravella\Ravel namespace and memoized.
     *
     * @param  string|object  $name
     * @return mixed
     */
    protected static function resolveFacadeInstance($name)
    {
        if (is_object($name)) return $name;

        if (isset(static::$resolvedInstance[$name]))
        {
            return static::$resolvedInstance[$name];
        }

        $NP = ucfirst($name);
        // Accessor "foo" maps to class Laravella\Ravel\Foo\Foo.
        $classInstance = "Laravella\\Ravel\\$NP\\$NP";
        return static::$resolvedInstance[$name] = new $classInstance;
    }

    /**
     * Forward a method call to the resolved facade root.
     *
     * @param  string  $method
     * @param  array   $args
     * @return mixed
     */
    public static function resolveMethod($method, $args)
    {
        $instance = static::resolveFacadeInstance(static::getFacadeAccessor());
        return call_user_func_array(array($instance, $method), $args);
    }

    /**
     * Handle static calls by forwarding them to the facade root.
     */
    public static function __callStatic($method, $args)
    {
        return static::resolveMethod($method, $args);
    }

    /**
     * Handle instance calls by forwarding them to the facade root.
     */
    public function __call($method, $args)
    {
        return static::resolveMethod($method, $args);
    }
}
|
Java
|
import numpy as np
from nose.tools import (assert_true, assert_false, assert_equal,
assert_almost_equal)
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
assert_)
from dipy.sims.voxel import (_check_directions, SingleTensor, MultiTensor,
multi_tensor_odf, all_tensor_evecs, add_noise,
single_tensor, sticks_and_ball, multi_tensor_dki,
kurtosis_element, DKI_signal)
from dipy.core.geometry import (vec2vec_rotmat, sphere2cart)
from dipy.data import get_data, get_sphere
from dipy.core.gradients import gradient_table
from dipy.io.gradients import read_bvals_bvecs
# Module-level fixtures shared by the tests below: a 64-direction gradient
# table loaded from dipy's bundled data.
fimg, fbvals, fbvecs = get_data('small_64D')
bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)
gtab = gradient_table(bvals, bvecs)
# 2 shells for techniques that requires multishell data
bvals_2s = np.concatenate((bvals, bvals * 2), axis=0)
bvecs_2s = np.concatenate((bvecs, bvecs), axis=0)
gtab_2s = gradient_table(bvals_2s, bvecs_2s)
def diff2eigenvectors(dx, dy, dz):
    """ numerical derivatives 2 eigenvectors
    """
    # NOTE(review): ``basis`` is not defined anywhere in this module, so
    # calling this helper as written raises NameError. Presumably it was a
    # module-level reference frame in an earlier revision -- confirm before
    # using this function.
    u = np.array([dx, dy, dz])
    u = u / np.linalg.norm(u)
    # Rotation taking the basis' first eigenvector onto the normalized u.
    R = vec2vec_rotmat(basis[:, 0], u)
    eig0 = u
    eig1 = np.dot(R, basis[:, 1])
    eig2 = np.dot(R, basis[:, 2])
    eigs = np.zeros((3, 3))
    eigs[:, 0] = eig0
    eigs[:, 1] = eig1
    eigs[:, 2] = eig2
    return eigs, R
def test_check_directions():
    """Check _check_directions' conversion of (theta, phi) angles to unit
    sticks, its pass-through of cartesian input, and multi-direction input."""
    # Testing spherical angles for two principal coordinate axis
    angles = [(0, 0)]  # axis z
    sticks = _check_directions(angles)
    assert_array_almost_equal(sticks, [[0, 0, 1]])
    angles = [(0, 90)]  # axis z again (phi can be anything if theta is zero)
    sticks = _check_directions(angles)
    assert_array_almost_equal(sticks, [[0, 0, 1]])
    angles = [(90, 0)]  # axis x
    sticks = _check_directions(angles)
    assert_array_almost_equal(sticks, [[1, 0, 0]])
    # Testing if directions are already given in cartesian coordinates
    angles = [(0, 0, 1)]
    sticks = _check_directions(angles)
    assert_array_almost_equal(sticks, [[0, 0, 1]])
    # Testing more than one direction simultaneously
    angles = np.array([[90, 0], [30, 0]])
    sticks = _check_directions(angles)
    ref_vec = [np.sin(np.pi*30/180), 0, np.cos(np.pi*30/180)]
    assert_array_almost_equal(sticks, [[1, 0, 0], ref_vec])
    # Testing directions not aligned to planes x = 0, y = 0, or z = 0
    the1 = 0
    phi1 = 90
    the2 = 30
    phi2 = 45
    angles = np.array([(the1, phi1), (the2, phi2)])
    sticks = _check_directions(angles)
    # Expected sticks from the standard spherical-to-cartesian formulas.
    ref_vec1 = (np.sin(np.pi*the1/180) * np.cos(np.pi*phi1/180),
                np.sin(np.pi*the1/180) * np.sin(np.pi*phi1/180),
                np.cos(np.pi*the1/180))
    ref_vec2 = (np.sin(np.pi*the2/180) * np.cos(np.pi*phi2/180),
                np.sin(np.pi*the2/180) * np.sin(np.pi*phi2/180),
                np.cos(np.pi*the2/180))
    assert_array_almost_equal(sticks, [ref_vec1, ref_vec2])
def test_sticks_and_ball():
    """A single 100%-fraction stick along z should match the equivalent
    single-tensor signal with only one nonzero eigenvalue."""
    d = 0.0015
    S, sticks = sticks_and_ball(gtab, d=d, S0=1, angles=[(0, 0), ],
                                fractions=[100], snr=None)
    assert_array_equal(sticks, [[0, 0, 1]])
    S_st = SingleTensor(gtab, 1, evals=[d, 0, 0], evecs=[[0, 0, 0],
                                                         [0, 0, 0],
                                                         [1, 0, 0]])
    assert_array_almost_equal(S, S_st)
def test_single_tensor():
    """Simulate a prolate tensor, check b0 signal is preserved, diffusion
    attenuates the rest, and a DTI fit recovers the expected FA."""
    evals = np.array([1.4, .35, .35]) * 10 ** (-3)
    evecs = np.eye(3)
    S = SingleTensor(gtab, 100, evals, evecs, snr=None)
    # Unweighted (b0) volumes keep the S0 value.
    assert_array_almost_equal(S[gtab.b0s_mask], 100)
    assert_(np.mean(S[~gtab.b0s_mask]) < 100)
    from dipy.reconst.dti import TensorModel
    m = TensorModel(gtab)
    t = m.fit(S)
    assert_array_almost_equal(t.fa, 0.707, decimal=3)
def test_multi_tensor():
    """A 50/50 MultiTensor signal should equal the average of the two
    corresponding single-tensor signals."""
    sphere = get_sphere('symmetric724')
    vertices = sphere.vertices
    mevals = np.array(([0.0015, 0.0003, 0.0003],
                       [0.0015, 0.0003, 0.0003]))
    e0 = np.array([np.sqrt(2) / 2., np.sqrt(2) / 2., 0])
    e1 = np.array([0, np.sqrt(2) / 2., np.sqrt(2) / 2.])
    mevecs = [all_tensor_evecs(e0), all_tensor_evecs(e1)]
    # odf = multi_tensor_odf(vertices, [0.5, 0.5], mevals, mevecs)
    # assert_(odf.shape == (len(vertices),))
    # assert_(np.all(odf <= 1) & np.all(odf >= 0))
    # Use a local 101-direction table; shadows the module-level gtab.
    fimg, fbvals, fbvecs = get_data('small_101D')
    bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)
    gtab = gradient_table(bvals, bvecs)
    s1 = single_tensor(gtab, 100, mevals[0], mevecs[0], snr=None)
    s2 = single_tensor(gtab, 100, mevals[1], mevecs[1], snr=None)
    Ssingle = 0.5*s1 + 0.5*s2
    S, sticks = MultiTensor(gtab, mevals, S0=100, angles=[(90, 45), (45, 90)],
                            fractions=[50, 50], snr=None)
    assert_array_almost_equal(S, Ssingle)
def test_snr():
    """Check that Rician noise added at a given SNR produces the expected
    signal variance (var ~= (1/SNR)**2 for reasonably large SNR)."""
    np.random.seed(1978)
    s = single_tensor(gtab)
    # For reasonably large SNR, var(signal) ~= sigma**2, where sigma = 1/SNR
    for snr in [5, 10, 20]:
        sigma = 1.0 / snr
        # Each iteration draws a fresh noise realization and checks its
        # variance across the signal array to 2 decimals.
        for j in range(1000):
            s_noise = add_noise(s, snr, 1, noise_type='rician')
            assert_array_almost_equal(np.var(s_noise - s), sigma ** 2, decimal=2)
def test_all_tensor_evecs():
    """all_tensor_evecs should build a right-handed orthonormal frame whose
    first axis is the given principal direction."""
    e0 = np.array([1/np.sqrt(2), 1/np.sqrt(2), 0])
    # Columns: e0 itself, its in-plane perpendicular, and the z axis.
    desired = np.array([[1/np.sqrt(2), 1/np.sqrt(2), 0],
                        [-1/np.sqrt(2), 1/np.sqrt(2), 0],
                        [0, 0, 1]]).T
    assert_array_almost_equal(all_tensor_evecs(e0), desired)
def test_kurtosis_elements():
    """ Testing symmetry of the elements of the KT

    As a 4th order tensor, KT has 81 elements. However, due to diffusion
    symmetry the KT is fully characterized by 15 independent elements. This
    test checks for this property.
    """
    # two fiber not aligned to planes x = 0, y = 0, or z = 0
    mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087],
                       [0.00099, 0, 0], [0.00226, 0.00087, 0.00087]])
    angles = [(80, 10), (80, 10), (20, 30), (20, 30)]
    fie = 0.49  # intra axonal water fraction
    frac = [fie * 50, (1-fie) * 50, fie * 50, (1-fie) * 50]
    sticks = _check_directions(angles)
    mD = np.zeros((len(frac), 3, 3))
    # Per-compartment diffusion tensors: rotate each eigenvalue set into the
    # frame defined by its stick direction.
    for i in range(len(frac)):
        R = all_tensor_evecs(sticks[i])
        mD[i] = np.dot(np.dot(R, np.diag(mevals[i])), R.T)
    # compute global DT as the fraction-weighted sum of compartment tensors
    D = np.zeros((3, 3))
    for i in range(len(frac)):
        D = D + frac[i]*mD[i]
    # compute voxel's MD (mean of the diagonal / trace over 3)
    MD = (D[0][0] + D[1][1] + D[2][2]) / 3
    # Reference dictionary with the 15 independent elements.
    # Note: the product of the indexes (i+1) * (j+1) * (k+1) * (l+1) is the
    # same for two elements if and only if those elements are related by the
    # tensor's index symmetry. The product is therefore used as the key of
    # the reference dictionary.
    kt_ref = {1: kurtosis_element(mD, frac, 0, 0, 0, 0),
              16: kurtosis_element(mD, frac, 1, 1, 1, 1),
              81: kurtosis_element(mD, frac, 2, 2, 2, 2),
              2: kurtosis_element(mD, frac, 0, 0, 0, 1),
              3: kurtosis_element(mD, frac, 0, 0, 0, 2),
              8: kurtosis_element(mD, frac, 0, 1, 1, 1),
              24: kurtosis_element(mD, frac, 1, 1, 1, 2),
              27: kurtosis_element(mD, frac, 0, 2, 2, 2),
              54: kurtosis_element(mD, frac, 1, 2, 2, 2),
              4: kurtosis_element(mD, frac, 0, 0, 1, 1),
              9: kurtosis_element(mD, frac, 0, 0, 2, 2),
              36: kurtosis_element(mD, frac, 1, 1, 2, 2),
              6: kurtosis_element(mD, frac, 0, 0, 1, 2),
              12: kurtosis_element(mD, frac, 0, 1, 1, 2),
              18: kurtosis_element(mD, frac, 0, 1, 2, 2)}
    # Testing all 81 possible elements; note the (i, k, j, l) permutation in
    # the call, which additionally exercises symmetry under index exchange.
    xyz = [0, 1, 2]
    for i in xyz:
        for j in xyz:
            for k in xyz:
                for l in xyz:
                    key = (i+1) * (j+1) * (k+1) * (l+1)
                    assert_almost_equal(kurtosis_element(mD, frac, i, k, j, l),
                                        kt_ref[key])
    # Testing optional function inputs (precomputed D and MD); uses the
    # final loop values i = j = k = l = 2.
    assert_almost_equal(kurtosis_element(mD, frac, i, k, j, l),
                        kurtosis_element(mD, frac, i, k, j, l,
                                         D, MD))
def test_DKI_simulations_aligned_fibers():
    """
    Testing DKI simulations when aligning the same fiber to different axis.

    If biological parameters don't change, kt[0] of a fiber aligned to axis x
    has to be equal to kt[1] of a fiber aligned to the axis y and equal to
    kt[2] of a fiber aligned to axis z. The same is applicable for dt
    """
    # Defining parameters based on Neto Henriques et al., 2015. NeuroImage 111
    mevals = np.array([[0.00099, 0, 0],                  # Intra-cellular
                       [0.00226, 0.00087, 0.00087]])     # Extra-cellular
    frac = [49, 51]  # Compartment volume fraction
    # axis x: (theta, phi) = (90, 0)
    angles = [(90, 0), (90, 0)]
    signal_fx, dt_fx, kt_fx = multi_tensor_dki(gtab_2s, mevals, angles=angles,
                                               fractions=frac)
    # axis y: (theta, phi) = (90, 90)
    angles = [(90, 90), (90, 90)]
    signal_fy, dt_fy, kt_fy = multi_tensor_dki(gtab_2s, mevals, angles=angles,
                                               fractions=frac)
    # axis z: (theta, phi) = (0, 0)
    angles = [(0, 0), (0, 0)]
    signal_fz, dt_fz, kt_fz = multi_tensor_dki(gtab_2s, mevals, angles=angles,
                                               fractions=frac)
    # Swapping the fiber axis must merely permute the diagonal KT elements...
    assert_array_equal([kt_fx[0], kt_fx[1], kt_fx[2]],
                       [kt_fy[1], kt_fy[0], kt_fy[2]])
    assert_array_equal([kt_fx[0], kt_fx[1], kt_fx[2]],
                       [kt_fz[2], kt_fz[0], kt_fz[1]])
    # ... and the diagonal DT elements (indices 0, 2, 5 = xx, yy, zz).
    assert_array_equal([dt_fx[0], dt_fx[2], dt_fx[5]],
                       [dt_fy[2], dt_fy[0], dt_fy[5]])
    assert_array_equal([dt_fx[0], dt_fx[2], dt_fx[5]],
                       [dt_fz[5], dt_fz[0], dt_fz[2]])
    # testing S signal along axis x, y and z
    bvals = np.array([0, 0, 0, 1000, 1000, 1000, 2000, 2000, 2000])
    bvecs = np.asarray([[1, 0, 0], [0, 1, 0], [0, 0, 1],
                        [1, 0, 0], [0, 1, 0], [0, 0, 1],
                        [1, 0, 0], [0, 1, 0], [0, 0, 1]])
    gtab_axis = gradient_table(bvals, bvecs)
    # axis x
    S_fx = DKI_signal(gtab_axis, dt_fx, kt_fx, S0=100)
    assert_array_almost_equal(S_fx[0:3], [100, 100, 100])  # test S for b=0
    # axis y
    S_fy = DKI_signal(gtab_axis, dt_fy, kt_fy, S0=100)
    assert_array_almost_equal(S_fy[0:3], [100, 100, 100])  # test S for b=0
    # axis z
    S_fz = DKI_signal(gtab_axis, dt_fz, kt_fz, S0=100)
    assert_array_almost_equal(S_fz[0:3], [100, 100, 100])  # test S for b=0
    # test S for b = 1000: per-direction signals permute with the fiber axis
    assert_array_almost_equal([S_fx[3], S_fx[4], S_fx[5]],
                              [S_fy[4], S_fy[3], S_fy[5]])
    assert_array_almost_equal([S_fx[3], S_fx[4], S_fx[5]],
                              [S_fz[5], S_fz[3], S_fz[4]])
    # test S for b = 2000
    assert_array_almost_equal([S_fx[6], S_fx[7], S_fx[8]],
                              [S_fy[7], S_fy[6], S_fy[8]])
    assert_array_almost_equal([S_fx[6], S_fx[7], S_fx[8]],
                              [S_fz[8], S_fz[6], S_fz[7]])
def test_DKI_crossing_fibers_simulations():
    """DKI simulation of two crossing fibers reproduces reference values
    computed with the UDKI package (Neto Henriques et al., 2015)."""
    # Two fibers, neither aligned with the x = 0, y = 0 or z = 0 planes.
    eigenvalues = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087],
                            [0.00099, 0, 0], [0.00226, 0.00087, 0.00087]])
    directions = [(80, 10), (80, 10), (20, 30), (20, 30)]
    fie = 0.49  # intra-axonal water fraction
    fractions = [fie * 50, (1 - fie) * 50, fie * 50, (1 - fie) * 50]
    signal, dt, kt = multi_tensor_dki(gtab_2s, eigenvalues, angles=directions,
                                      fractions=fractions, snr=None)
    # For this geometry no diffusion or kurtosis tensor element may vanish.
    for element in dt:
        assert element != 0
    for element in kt:
        assert element != 0
    # Reference values computed with another DKI package - UDKI
    # (Neto Henriques et al., 2015).
    dt_ref = [1.0576161e-3, 0.1292542e-3, 0.4786179e-3,
              0.2667081e-3, 0.1136643e-3, 0.9888660e-3]
    kt_ref = [2.3529944, 0.8226448, 2.3011221, 0.2017312, -0.0437535,
              0.0404011, 0.0355281, 0.2449859, 0.2157668, 0.3495910,
              0.0413366, 0.3461519, -0.0537046, 0.0133414, -0.017441]
    assert_array_almost_equal(dt, dt_ref)
    assert_array_almost_equal(kt, kt_ref)
    expected_signal = DKI_signal(gtab_2s, dt_ref, kt_ref, S0=100, snr=None)
    assert_array_almost_equal(signal, expected_signal, decimal=5)
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    test_multi_tensor()
|
Java
|
import Ember from 'ember';
import ajax from 'ic-ajax';
import config from '../config/environment';
import SlydApi from '../utils/slyd-api';
import Timer from '../utils/timer';
import ApplicationUtils from '../mixins/application-utils';
var UUID = Ember.Object.extend(ApplicationUtils, {});

// Fetches the server capabilities before the app boots, then registers the
// slyd API object plus capability/customization singletons in the container
// and wires up the dependency injections.
export function initialize(container, application) {
    application.deferReadiness();

    var baseUrl = config.SLYD_URL ||
        window.location.protocol + '//' + window.location.host;
    ajax({ type: 'GET', url: baseUrl + '/server_capabilities' }).then(function(settings) {
        application.set('serverCapabilities', settings['capabilities']);
        application.set('serverCustomization', settings['custom']);

        container.register('api:capabilities',
            Ember.Object.create().setProperties(application.get('serverCapabilities')),
            { instantiate: false });
        container.register('app:custom',
            Ember.Object.create().setProperties(application.get('serverCustomization')),
            { instantiate: false });

        var api = new SlydApi();
        api.set('username', settings.username);
        api.set('sessionid', new UUID().shortGuid());
        api.set('serverCapabilities', container.lookup('api:capabilities'));
        api.set('timer', new Timer());
        container.register('api:slyd', api, { instantiate: false });

        ['route', 'adapter', 'controller', 'component'].forEach(function(type) {
            application.inject(type, 'slyd', 'api:slyd');
        });
        ['controller', 'component'].forEach(function(type) {
            application.inject(type, 'customizations', 'app:custom');
        });
        ['controller', 'route'].forEach(function(type) {
            application.inject(type, 'capabilities', 'api:capabilities');
        });

        application.advanceReadiness();
    });
}

export default {
    name: 'register-api',
    initialize: initialize
};
|
Java
|
# License: BSD 3 clause <https://opensource.org/licenses/BSD-3-Clause>
# Copyright (c) 2016, Fabricio Vargas Matos <fabriciovargasmatos@gmail.com>
# All rights reserved.
''''
Tune the 3 most promissing algorithms and compare them
'''
# Load libraries
import os
import time
import pandas
import numpy
import matplotlib.pyplot as plt
from pandas.tools.plotting import scatter_matrix
from pandas import DataFrame
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import MinMaxScaler
from sklearn import cross_validation
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.pipeline import Pipeline
from sklearn.grid_search import GridSearchCV
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.datasets import load_digits
from sklearn.model_selection import GridSearchCV
from sklearn.decomposition import PCA, NMF
from sklearn.feature_selection import SelectKBest, chi2
import lib.eda1 as eda1
import lib.eda3 as eda3
#constants
N_DIGITS = 3            # decimal places used when zero-padding image indexes
NUM_FOLDS = 10          # k for k-fold cross validation
RAND_SEED = 7           # fixed seed so CV splits are reproducible
SCORING = 'accuracy'    # GridSearchCV scoring metric
VALIDATION_SIZE = 0.20  # hold-out fraction -- presumably consumed by eda1's split; TODO confirm
N_JOBS = 6              # parallel workers for grid search
#global variables
start = time.clock()    # wall-clock reference for the whole run
imageidx = 1            # sequence number embedded in saved image file names
createImages = True     # toggled via set_createImages()
results = []            # per-model mean CV test scores (appended by tune*)
names = []              # model names, parallel to results
params = []             # NOTE(review): declared global in tune* but never written here
bestResults = []        # best {name, mean, std, params} entry per tuned model
# RandomForestClassifier
def tuneRF(X_train, Y_train, outputPath):
global results, names, params, bestResults
print 'tune LR (Random Forest Classifier)'
pipeline = Pipeline([('PCA', PCA()),('MinMaxScaler', MinMaxScaler(feature_range=(0, 1))),('Scaler', StandardScaler())])
scaler = pipeline.fit(X_train)
rescaledX = scaler.transform(X_train)
#tune para meters
# http://scikit-learn.org/stable/modules/generated/sklearn.ensemble.RandomForestClassifier.html
#n_estimators_values = [5, 10, 100, 1000, 3000]
n_estimators_values = [1000]
max_features_values = [0.1, 'auto', 'sqrt', 'log2', None] # (float)0.1=>10%
criterion_values = ['gini', 'entropy']
param_grid = dict(n_estimators=n_estimators_values, max_features=max_features_values, criterion=criterion_values)
model = RandomForestClassifier()
kfold = cross_validation.KFold(n=len(X_train), n_folds=NUM_FOLDS, random_state=RAND_SEED)
grid = GridSearchCV(n_jobs=N_JOBS, verbose=10, estimator=model, param_grid=param_grid, scoring=SCORING, cv=kfold)
grid_result = grid.fit(rescaledX, Y_train)
print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
best_idx = grid_result.best_index_
#TODO: check it out if 'mean_test_score' is really what I want here
cv_results = grid_result.cv_results_['mean_test_score']
results.append(cv_results)
grid_scores = sorted(grid_result.grid_scores_, key=lambda x: x[2].mean(), reverse=True)
first = True
for param, mean_score, scores in grid_scores:
if first:
bestResults.append({'name':'RF', 'mean':scores.mean(), 'std':scores.std(), 'params':param})
first = False
print("%f (%f) with: %r" % (scores.mean(), scores.std(), param))
# ExtraTreesClassifier
def tuneET(X_train, Y_train, outputPath):
    """Grid-search ExtraTreesClassifier hyper-parameters on the scaled
    training data; records scores in the globals ``results``/``bestResults``."""
    global results, names, params, bestResults
    print 'tune ET (Extra Trees Classifier)'
    # Scale features before fitting: PCA -> [0, 1] rescale -> standardize.
    pipeline = Pipeline([('PCA', PCA()),('MinMaxScaler', MinMaxScaler(feature_range=(0, 1))),('Scaler', StandardScaler())])
    scaler = pipeline.fit(X_train)
    rescaledX = scaler.transform(X_train)
    # tune parameters
    # http://scikit-learn.org/stable/modules/generated/sklearn.ensemble.ExtraTreesClassifier.html
    #n_estimators_values = [5, 10, 100, 1000, 3000]
    n_estimators_values = [1000]
    max_features_values = [0.1, 'auto', 'sqrt', 'log2', None] # (float)0.1=>10%
    criterion_values = ['gini', 'entropy']
    param_grid = dict(n_estimators=n_estimators_values, max_features=max_features_values, criterion=criterion_values)
    model = ExtraTreesClassifier()
    kfold = cross_validation.KFold(n=len(X_train), n_folds=NUM_FOLDS, random_state=RAND_SEED)
    grid = GridSearchCV(n_jobs=N_JOBS, verbose=10, estimator=model, param_grid=param_grid, scoring=SCORING, cv=kfold)
    grid_result = grid.fit(rescaledX, Y_train)
    print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
    best_idx = grid_result.best_index_
    # NOTE(review): mixes new-style cv_results_ with legacy grid_scores_;
    # only works on sklearn versions exposing both -- confirm.
    cv_results = grid_result.cv_results_['mean_test_score']
    results.append(cv_results)
    # Sort candidates by mean CV score (descending); record the best one.
    grid_scores = sorted(grid_result.grid_scores_, key=lambda x: x[2].mean(), reverse=True)
    first = True
    for param, mean_score, scores in grid_scores:
        if first:
            bestResults.append({'name':'ET', 'mean':scores.mean(), 'std':scores.std(), 'params':param})
            first = False
        print("%f (%f) with: %r" % (scores.mean(), scores.std(), param))
# Tune scaled SVM
def tuneSVM(X_train, Y_train, outputPath):
    """Grid-search SVC hyper-parameters (C, kernel) on the scaled training
    data; records scores in the globals ``results``/``bestResults``."""
    global results, names, params, bestResults
    print 'tune SVM (Support Vector Machines Classifier)'
    # Scale features before fitting: PCA -> [0, 1] rescale -> standardize.
    pipeline = Pipeline([('PCA', PCA()),('MinMaxScaler', MinMaxScaler(feature_range=(0, 1))),('Scaler', StandardScaler())])
    scaler = pipeline.fit(X_train)
    rescaledX = scaler.transform(X_train)
    #c_values = [0.1, 1.0, 100.0, 10000.0, 100000.0]
    c_values = [10000.0, 100000.0]
    kernel_values = ['linear', 'poly', 'rbf', 'sigmoid']
    param_grid = dict(C=c_values, kernel=kernel_values)
    model = SVC()
    kfold = cross_validation.KFold(n=len(X_train), n_folds=NUM_FOLDS, random_state=RAND_SEED)
    grid = GridSearchCV(n_jobs=N_JOBS, verbose=10, estimator=model, param_grid=param_grid, scoring=SCORING, cv=kfold)
    grid_result = grid.fit(rescaledX, Y_train)
    print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
    best_idx = grid_result.best_index_
    # NOTE(review): mixes new-style cv_results_ with legacy grid_scores_;
    # only works on sklearn versions exposing both -- confirm.
    cv_results = grid_result.cv_results_['mean_test_score']
    results.append(cv_results)
    # Sort candidates by mean CV score (descending); record the best one.
    grid_scores = sorted(grid_result.grid_scores_, key=lambda x: x[2].mean(), reverse=True)
    first = True
    for param, mean_score, scores in grid_scores:
        if first:
            bestResults.append({'name':'SVM', 'mean':scores.mean(), 'std':scores.std(), 'params':param})
            first = False
        print("%f (%f) with: %r" % (scores.mean(), scores.std(), param))
def drawTunedAlgorithmsComparison(results, names, outputPath):
    """Print the best result per tuned model and, when image generation is
    enabled, save a boxplot comparing the CV score distributions.

    Note: the ``results``/``names`` parameters shadow the module globals of
    the same name; ``bestResults`` is read from the global directly.
    """
    global imageidx
    print '\n === Tuned Algorithms Comparison ===\n'
    #print bestResults
    for x in bestResults:
        print x
    # Compare Algorithms
    if (createImages):
        fig = plt.figure()
        fig.suptitle('Final Tuned-Algorithms Comparison')
        ax = fig.add_subplot(111)
        plt.boxplot(results)
        ax.set_xticklabels(names)
        #plt.show()
        plt.savefig(outputPath + str(imageidx).zfill(N_DIGITS) + '-Tuned-Algorithm-Comparison.png')
        imageidx += 1
    plt.close('all')
def set_createImages(value):
    """Enable/disable writing comparison images to disk (module-wide flag)."""
    global createImages
    createImages = value
# ===================================================
# ================== main function ==================
# ===================================================
def run(inputFilePath, outputPath, createImagesFlag, dropColumns):
    """Entry point for EDA #4: load the CSV, clean it, split train/validation,
    tune the candidate models and plot the comparison.

    Note: the RF and ET tuning calls are currently commented out, so only
    SVM is tuned in this revision.
    """
    global start
    print '####################################################################'
    print '############### Running Exploratory Data Analysis #4 ###############'
    print '####################################################################'
    print ''
    set_createImages(createImagesFlag)
    start = time.clock()
    # Reset eda1's shared image counter/flag so file names restart from 001.
    eda1.reset_imageidx()
    eda1.set_createImages(createImagesFlag)
    if not os.path.exists(outputPath):
        os.makedirs(outputPath)
    # Load dataset
    dataframe = eda1.loadDataframe(inputFilePath)
    # drop out 'not fair' features
    dataframe = eda1.dataCleansing(dataframe, dropColumns)
    #Split-out train/validation dataset
    X_train, X_validation, Y_train, Y_validation = eda1.splitoutValidationDataset(dataframe)
    '''
    # tune each algorithm
    try:
        tuneRF(X_train, Y_train, outputPath)
    except Exception as e:
        print "ERROR: couldn't tune RF"
        print "Message: %s" % str(e)
    try:
        tuneET(X_train, Y_train, outputPath)
    except Exception as e:
        print "ERROR: couldn't tune ET"
        print "Message: %s" % str(e)
    '''
    try:
        tuneSVM(X_train, Y_train, outputPath)
    except Exception as e:
        print "ERROR: couldn't tune SVM"
        print "Message: %s" % str(e)
    #print the results comparing the algorithms with the best tune for each one
    drawTunedAlgorithmsComparison(results, names, outputPath)
    print '\n<<< THEN END - Running Exploratory Data Analysis #4 >>>'
|
Java
|
/* $KAME: fsm.h,v 1.2 2005/05/25 01:49:24 keiichi Exp $ */
/*
* Copyright (C) 2004 WIDE Project. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the project nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE PROJECT OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#ifndef _SHISAD_FSM_H_
#define _SHISAD_FSM_H_
/*
 * State machines for the Mobile IPv6 binding update list (BUL): a primary
 * registration fsm and a secondary return routability (RR) fsm.
 */
/* states for the primary fsm. */
#define MIP6_BUL_REG_FSM_STATE_IDLE 0
#define MIP6_BUL_REG_FSM_STATE_RRINIT 1
#define MIP6_BUL_REG_FSM_STATE_RRREDO 2
#define MIP6_BUL_REG_FSM_STATE_RRDEL 3
#define MIP6_BUL_REG_FSM_STATE_WAITA 4
#define MIP6_BUL_REG_FSM_STATE_WAITAR 5
#define MIP6_BUL_REG_FSM_STATE_WAITD 6
#define MIP6_BUL_REG_FSM_STATE_BOUND 7
#define MIP6_BUL_REG_FSM_STATE_DHAAD 8
/* states for the secondary fsm. */
#define MIP6_BUL_RR_FSM_STATE_START 0
#define MIP6_BUL_RR_FSM_STATE_WAITHC 1
#define MIP6_BUL_RR_FSM_STATE_WAITH 2
#define MIP6_BUL_RR_FSM_STATE_WAITC 3
/* True while the RR fsm is in any of its waiting states. */
#define MIP6_BUL_IS_RR_FSM_RUNNING(bul) \
 (((bul)->bul_rr_fsm_state == MIP6_BUL_RR_FSM_STATE_WAITHC) \
 || ((bul)->bul_rr_fsm_state == MIP6_BUL_RR_FSM_STATE_WAITH) \
 || ((bul)->bul_rr_fsm_state == MIP6_BUL_RR_FSM_STATE_WAITC))
/* events for the registration fsm. */
#define MIP6_BUL_FSM_EVENT_MOVEMENT 0
#define MIP6_BUL_FSM_EVENT_RETURNING_HOME 1
#define MIP6_BUL_FSM_EVENT_REVERSE_PACKET 2
#define MIP6_BUL_FSM_EVENT_RR_DONE 3
#define MIP6_BUL_FSM_EVENT_RR_FAILED 4
#define MIP6_BUL_FSM_EVENT_BRR 5
#define MIP6_BUL_FSM_EVENT_BACK 6
#define MIP6_BUL_FSM_EVENT_REGISTERED 7
#define MIP6_BUL_FSM_EVENT_DEREGISTERED 8
#define MIP6_BUL_FSM_EVENT_UNKNOWN_HAO 9
#define MIP6_BUL_FSM_EVENT_UNKNOWN_MH 10
#define MIP6_BUL_FSM_EVENT_ICMP6_PARAM_PROB 11
#define MIP6_BUL_FSM_EVENT_EXPIRE_TIMER 12
#define MIP6_BUL_FSM_EVENT_DHAAD_REPLY 13
/* True when ev falls in the registration fsm's event range (0..13). */
#define MIP6_BUL_IS_REG_FSM_EVENT(ev) \
 (((ev) >= 0) \
 && ((ev) <= MIP6_BUL_FSM_EVENT_DHAAD_REPLY))
/* events for the rr fsm (20..24, disjoint from the registration range). */
#define MIP6_BUL_FSM_EVENT_START_RR 20
#define MIP6_BUL_FSM_EVENT_START_HOME_RR 21
#define MIP6_BUL_FSM_EVENT_STOP_RR 22
#define MIP6_BUL_FSM_EVENT_HOT 23
#define MIP6_BUL_FSM_EVENT_COT 24
#define MIP6_BUL_IS_RR_FSM_EVENT(ev) \
 (((ev) >= MIP6_BUL_FSM_EVENT_START_RR) \
 && (((ev) <= MIP6_BUL_FSM_EVENT_COT)))
/* timeout events */
#define MIP6_BUL_FSM_EVENT_RETRANS_TIMER 30
/* Payload handed to the fsm with each kicked event. */
struct fsm_message {
	struct in6_addr *fsmm_src;     /* source address */
	struct in6_addr *fsmm_dst;     /* destination address */
	struct in6_addr *fsmm_hoa;     /* home address */
	struct in6_addr *fsmm_rtaddr;  /* presumably the routable/care-of address -- confirm */
	void *fsmm_data;               /* event-specific payload */
	size_t fsmm_datalen;           /* length of fsmm_data in bytes */
};
/* Locate the BUL entry for the given addresses and feed it a mobility
 * header event. */
int bul_kick_fsm_by_mh(struct in6_addr *, struct in6_addr *, struct in6_addr *,
    struct in6_addr *, struct ip6_mh *, int);
/* Drive the fsm of one BUL entry with an event and optional message. */
int bul_kick_fsm(struct binding_update_list *, int, struct fsm_message *);
/* Timer callbacks. */
void bul_retrans_timer(void *);
void bul_expire_timer(void *);
#endif /* !_SHISAD_FSM_H_ */
|
Java
|
import numpy as np
from scipy.linalg import norm
from .base import AppearanceLucasKanade
class SimultaneousForwardAdditive(AppearanceLucasKanade):
    """Simultaneous Forward Additive appearance-based Lucas-Kanade: shape
    and appearance parameters are updated jointly each iteration."""

    @property
    def algorithm(self):
        return 'Simultaneous-FA'

    def _fit(self, lk_fitting, max_iters=20, project=True):
        """Iteratively fit, additively updating the warp parameters.

        lk_fitting : fitting object carrying the image and result history.
        project : if True, initial appearance weights come from projecting
            the warped image onto the appearance model; otherwise zeros
            (the model mean) are used.
        """
        # Initial error > eps
        error = self.eps + 1
        image = lk_fitting.image
        lk_fitting.weights = []
        n_iters = 0

        # Number of shape weights
        n_params = self.transform.n_parameters

        # Initial appearance weights
        if project:
            # Obtained weights by projection
            IWxp = image.warp_to(self.template.mask, self.transform,
                                 interpolator=self.interpolator)
            weights = self.appearance_model.project(IWxp)
            # Reset template
            self.template = self.appearance_model.instance(weights)
        else:
            # Set all weights to 0 (yielding the mean)
            weights = np.zeros(self.appearance_model.n_active_components)
        lk_fitting.weights.append(weights)

        # Compute appearance model Jacobian wrt weights
        appearance_jacobian = self.appearance_model._jacobian.T

        # Forward Additive Algorithm
        while n_iters < max_iters and error > self.eps:
            # Compute warped image with current weights
            IWxp = image.warp_to(self.template.mask, self.transform,
                                 interpolator=self.interpolator)

            # Compute warp Jacobian
            dW_dp = self.transform.jacobian(
                self.template.mask.true_indices)

            # Compute steepest descent images, VI_dW_dp
            J = self.residual.steepest_descent_images(
                image, dW_dp, forward=(self.template, self.transform,
                                       self.interpolator))

            # Concatenate VI_dW_dp with appearance model Jacobian
            self._J = np.hstack((J, appearance_jacobian))

            # Compute Hessian and inverse
            self._H = self.residual.calculate_hessian(self._J)

            # Compute steepest descent parameter updates
            sd_delta_p = self.residual.steepest_descent_update(
                self._J, self.template, IWxp)

            # Compute gradient descent parameter updates
            delta_p = np.real(self._calculate_delta_p(sd_delta_p))

            # Update warp weights (additive update of the first n_params)
            parameters = self.transform.as_vector() + delta_p[:n_params]
            self.transform.from_vector_inplace(parameters)
            lk_fitting.parameters.append(parameters)

            # Update appearance weights
            # NOTE(review): `-=` mutates the array in place, so every entry
            # appended to lk_fitting.weights references the same object and
            # the recorded history is uniform; consider
            # `weights = weights - delta_p[n_params:]` -- confirm intent.
            weights -= delta_p[n_params:]
            self.template = self.appearance_model.instance(weights)
            lk_fitting.weights.append(weights)

            # Test convergence
            error = np.abs(norm(delta_p))
            n_iters += 1

        lk_fitting.fitted = True
        return lk_fitting
class SimultaneousForwardCompositional(AppearanceLucasKanade):
    """Simultaneous Forward Compositional Lucas-Kanade: the warp Jacobian is
    precomputed once and warp updates are composed rather than added."""

    @property
    def algorithm(self):
        return 'Simultaneous-FC'

    def _set_up(self):
        # Compute warp Jacobian (constant for the compositional variant)
        self._dW_dp = self.transform.jacobian(
            self.template.mask.true_indices)

    def _fit(self, lk_fitting, max_iters=20, project=True):
        """Iteratively fit, composing each warp update onto the transform.

        project : if True, initial appearance weights come from projecting
            the warped image onto the appearance model; otherwise zeros
            (the model mean) are used.
        """
        # Initial error > eps
        error = self.eps + 1
        image = lk_fitting.image
        lk_fitting.weights = []
        n_iters = 0

        # Number of shape weights
        n_params = self.transform.n_parameters

        # Initial appearance weights
        if project:
            # Obtained weights by projection
            IWxp = image.warp_to(self.template.mask, self.transform,
                                 interpolator=self.interpolator)
            weights = self.appearance_model.project(IWxp)
            # Reset template
            self.template = self.appearance_model.instance(weights)
        else:
            # Set all weights to 0 (yielding the mean)
            weights = np.zeros(self.appearance_model.n_active_components)
        lk_fitting.weights.append(weights)

        # Compute appearance model Jacobian wrt weights
        appearance_jacobian = self.appearance_model._jacobian.T

        # Forward Compositional Algorithm
        while n_iters < max_iters and error > self.eps:
            # Compute warped image with current weights
            IWxp = image.warp_to(self.template.mask, self.transform,
                                 interpolator=self.interpolator)

            # Compute steepest descent images, VI_dW_dp
            J = self.residual.steepest_descent_images(IWxp, self._dW_dp)

            # Concatenate VI_dW_dp with appearance model Jacobian
            self._J = np.hstack((J, appearance_jacobian))

            # Compute Hessian and inverse
            self._H = self.residual.calculate_hessian(self._J)

            # Compute steepest descent parameter updates
            sd_delta_p = self.residual.steepest_descent_update(
                self._J, self.template, IWxp)

            # Compute gradient descent parameter updates
            delta_p = np.real(self._calculate_delta_p(sd_delta_p))

            # Update warp weights (compositional update)
            self.transform.compose_after_from_vector_inplace(delta_p[:n_params])
            lk_fitting.parameters.append(self.transform.as_vector())

            # Update appearance weights
            # NOTE(review): in-place `-=` means all history entries in
            # lk_fitting.weights alias one array -- confirm intent.
            weights -= delta_p[n_params:]
            self.template = self.appearance_model.instance(weights)
            lk_fitting.weights.append(weights)

            # Test convergence
            error = np.abs(norm(delta_p))
            n_iters += 1

        lk_fitting.fitted = True
        return lk_fitting
class SimultaneousInverseCompositional(AppearanceLucasKanade):
    """Simultaneous Inverse Compositional Lucas-Kanade: steepest descent
    images are built from the template and the update sign is inverted."""

    @property
    def algorithm(self):
        # NOTE(review): the class is named InverseCompositional and uses a
        # compositional update below, yet this reports 'IA' -- confirm
        # whether 'Simultaneous-IC' was intended.
        return 'Simultaneous-IA'

    def _set_up(self):
        # Compute the Jacobian of the warp (constant, at the model mean mask)
        self._dW_dp = self.transform.jacobian(
            self.appearance_model.mean.mask.true_indices)

    def _fit(self, lk_fitting, max_iters=20, project=True):
        """Iteratively fit with the inverse compositional update.

        project : if True, initial appearance weights come from projecting
            the warped image onto the appearance model; otherwise zeros
            (the model mean) are used.
        """
        # Initial error > eps
        error = self.eps + 1
        image = lk_fitting.image
        lk_fitting.weights = []
        n_iters = 0

        # Number of shape weights
        n_params = self.transform.n_parameters

        # Initial appearance weights
        if project:
            # Obtained weights by projection
            IWxp = image.warp_to(self.template.mask, self.transform,
                                 interpolator=self.interpolator)
            weights = self.appearance_model.project(IWxp)
            # Reset template
            self.template = self.appearance_model.instance(weights)
        else:
            # Set all weights to 0 (yielding the mean)
            weights = np.zeros(self.appearance_model.n_active_components)
        lk_fitting.weights.append(weights)

        # Compute appearance model Jacobian wrt weights (negated for the
        # inverse formulation)
        appearance_jacobian = -self.appearance_model._jacobian.T

        # Baker-Matthews, Inverse Compositional Algorithm
        while n_iters < max_iters and error > self.eps:
            # Compute warped image with current weights
            IWxp = image.warp_to(self.template.mask, self.transform,
                                 interpolator=self.interpolator)

            # Compute steepest descent images, VT_dW_dp (from the template,
            # not the input image -- the inverse formulation)
            J = self.residual.steepest_descent_images(self.template,
                                                      self._dW_dp)

            # Concatenate VI_dW_dp with appearance model Jacobian
            self._J = np.hstack((J, appearance_jacobian))

            # Compute Hessian and inverse
            self._H = self.residual.calculate_hessian(self._J)

            # Compute steepest descent parameter updates (template/IWxp roles
            # swapped relative to the forward variants)
            sd_delta_p = self.residual.steepest_descent_update(
                self._J, IWxp, self.template)

            # Compute gradient descent parameter updates (negated)
            delta_p = -np.real(self._calculate_delta_p(sd_delta_p))

            # Update warp weights (compositional update)
            self.transform.compose_after_from_vector_inplace(delta_p[:n_params])
            lk_fitting.parameters.append(self.transform.as_vector())

            # Update appearance weights
            # NOTE(review): in-place `-=` means all history entries in
            # lk_fitting.weights alias one array -- confirm intent.
            weights -= delta_p[n_params:]
            self.template = self.appearance_model.instance(weights)
            lk_fitting.weights.append(weights)

            # Test convergence
            error = np.abs(norm(delta_p))
            n_iters += 1

        lk_fitting.fitted = True
        return lk_fitting
|
Java
|
package org.cagrid.gme.common.exceptions;
import java.io.IOException;
@SuppressWarnings("serial")
public class SchemaParsingException extends IOException {
public SchemaParsingException() {
super();
}
public SchemaParsingException(String s) {
super(s);
}
}
|
Java
|
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef UI_EVENTS_ANDROID_EVENT_HANDLER_ANDROID_H_
#define UI_EVENTS_ANDROID_EVENT_HANDLER_ANDROID_H_
#include "ui/events/events_export.h"
namespace ui {
class DragEventAndroid;
class GestureEventAndroid;
class KeyEventAndroid;
class MotionEventAndroid;
// Dispatches events to appropriate targets. The default implementations of
// all of the specific handlers do nothing. Implementations should set
// themselves to the ViewAndroid in the view tree to get the calls routed.
// Use bool return type to stop propagating the call i.e. overriden method
// should return true to indicate that the event was handled and stop
// the processing.
class EVENTS_EXPORT EventHandlerAndroid {
 public:
  // Per-event dispatch hooks; return true to mark the event handled and
  // stop further propagation (see class comment above).
  virtual bool OnDragEvent(const DragEventAndroid& event);
  virtual bool OnTouchEvent(const MotionEventAndroid& event);
  virtual bool OnMouseEvent(const MotionEventAndroid& event);
  virtual bool OnMouseWheelEvent(const MotionEventAndroid& event);
  virtual bool OnGestureEvent(const GestureEventAndroid& event);
  // Geometry-change notifications (no handled/unhandled result).
  virtual void OnSizeChanged();
  virtual void OnPhysicalBackingSizeChanged();
  virtual void OnBrowserControlsHeightChanged();
  virtual bool OnGenericMotionEvent(const MotionEventAndroid& event);
  virtual bool OnKeyUp(const KeyEventAndroid& event);
  virtual bool DispatchKeyEvent(const KeyEventAndroid& event);
  // Scroll requests; return true if the scroll was consumed.
  virtual bool ScrollBy(float delta_x, float delta_y);
  virtual bool ScrollTo(float x, float y);
};
} // namespace ui
#endif // UI_EVENTS_ANDROID_EVENT_HANDLER_ANDROID_H_
|
Java
|
/*=========================================================================
Program: Visualization Toolkit
Module: vtkDynamic2DLabelMapper.h
Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
/*-------------------------------------------------------------------------
Copyright 2008 Sandia Corporation.
Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
the U.S. Government retains certain rights in this software.
-------------------------------------------------------------------------*/
// .NAME vtkDynamic2DLabelMapper - draw text labels at 2D dataset points
// .SECTION Description
// vtkDynamic2DLabelMapper is a mapper that renders text at dataset
// points such that the labels do not overlap.
// Various items can be labeled including point ids, scalars,
// vectors, normals, texture coordinates, tensors, and field data components.
// This mapper assumes that the points are located on the x-y plane
// and that the camera remains perpendicular to that plane with a y-up
// axis (this can be constrained using vtkImageInteractor).
// On the first render, the mapper computes the visibility of all labels
// at all scales, and queries this information on successive renders.
// This causes the first render to be much slower. The visibility algorithm
// is a greedy approach based on the point id, so the label for a point
// will be drawn unless the label for a point with lower id overlaps it.
// .SECTION Caveats
// Use this filter in combination with vtkSelectVisiblePoints if you want
// to label only points that are visible. If you want to label cells rather
// than points, use the filter vtkCellCenters to generate points at the
// center of the cells. Also, you can use the class vtkIdFilter to
// generate ids as scalars or field data, which can then be labeled.
// .SECTION See Also
// vtkLabeledDataMapper
// .SECTION Thanks
// This algorithm was developed in the paper
// Ken Been and Chee Yap. Dynamic Map Labeling. IEEE Transactions on
// Visualization and Computer Graphics, Vol. 12, No. 5, 2006. pp. 773-780.
#ifndef __vtkDynamic2DLabelMapper_h
#define __vtkDynamic2DLabelMapper_h
#include "vtkRenderingCoreExport.h" // For export macro
#include "vtkLabeledDataMapper.h"
class VTKRENDERINGCORE_EXPORT vtkDynamic2DLabelMapper : public vtkLabeledDataMapper
{
public:
  // Description:
  // Instantiate object with %%-#6.3g label format. By default, point ids
  // are labeled.
  static vtkDynamic2DLabelMapper *New();
  vtkTypeMacro(vtkDynamic2DLabelMapper, vtkLabeledDataMapper);
  void PrintSelf(ostream& os, vtkIndent indent);

  // Description:
  // Set the points array name to use to give priority to labels.
  // Defaults to "priority".
  void SetPriorityArrayName(const char* name);

  // Description:
  // Whether to reverse the priority order (i.e. low values have high priority).
  // Default is off.
  vtkSetMacro(ReversePriority, bool);
  vtkGetMacro(ReversePriority, bool);
  vtkBooleanMacro(ReversePriority, bool);

  // Description:
  // Set the label height padding as a percentage. The percentage
  // is a percentage of your label height.
  // Default is 50%.
  vtkSetMacro(LabelHeightPadding, float);
  vtkGetMacro(LabelHeightPadding, float);

  // Description:
  // Set the label width padding as a percentage. The percentage
  // is a percentage of your label ^height^ (yes, not a typo).
  // Default is 50%.
  vtkSetMacro(LabelWidthPadding, float);
  vtkGetMacro(LabelWidthPadding, float);

  // Description:
  // Draw non-overlapping labels to the screen.
  void RenderOpaqueGeometry(vtkViewport* viewport, vtkActor2D* actor);
  void RenderOverlay(vtkViewport *viewport, vtkActor2D *actor);

protected:
  vtkDynamic2DLabelMapper();
  ~vtkDynamic2DLabelMapper();

  // Description:
  // Calculate the current zoom scale of the viewport.
  double GetCurrentScale(vtkViewport *viewport);

  float* LabelWidth;        // per-label widths
  float* LabelHeight;       // per-label heights
  float* Cutoff;            // per-label scale cutoffs for visibility
  float ReferenceScale;
  float LabelHeightPadding; // see setter comments above
  float LabelWidthPadding;  // see setter comments above
  bool ReversePriority;

private:
  vtkDynamic2DLabelMapper(const vtkDynamic2DLabelMapper&); // Not implemented.
  void operator=(const vtkDynamic2DLabelMapper&); // Not implemented.
};
#endif
|
Java
|
require 'erb'
describe "ERB.new" do
before :all do
@eruby_str = <<'END'
<ul>
<% list = [1,2,3] %>
<% for item in list %>
<% if item %>
<li><%= item %></li>
<% end %>
<% end %>
</ul>
END
@eruby_str2 = <<'END'
<ul>
% list = [1,2,3]
%for item in list
% if item
<li><%= item %>
<% end %>
<% end %>
</ul>
%%%
END
end
it "compiles eRuby script into ruby code when trim mode is 0 or not specified" do
expected = "<ul>\n\n\n\n<li>1</li>\n\n\n\n<li>2</li>\n\n\n\n<li>3</li>\n\n\n</ul>\n"
[0, '', nil].each do |trim_mode|
ERB.new(@eruby_str, nil, trim_mode).result.should == expected
end
end
it "removes '\n' when trim_mode is 1 or '>'" do
expected = "<ul>\n<li>1</li>\n<li>2</li>\n<li>3</li>\n</ul>\n"
[1, '>'].each do |trim_mode|
ERB.new(@eruby_str, nil, trim_mode).result.should == expected
end
end
it "removes spaces at beginning of line and '\n' when trim_mode is 2 or '<>'" do
expected = "<ul>\n<li>1</li>\n<li>2</li>\n<li>3</li>\n</ul>\n"
[2, '<>'].each do |trim_mode|
ERB.new(@eruby_str, nil, trim_mode).result.should == expected
end
end
it "removes spaces around '<%- -%>' when trim_mode is '-'" do
expected = "<ul>\n <li>1 <li>2 <li>3</ul>\n"
input = <<'END'
<ul>
<%- for item in [1,2,3] -%>
<%- if item -%>
<li><%= item -%>
<%- end -%>
<%- end -%>
</ul>
END
ERB.new(input, nil, '-').result.should == expected
end
it "not support '<%-= expr %> even when trim_mode is '-'" do
input = <<'END'
<p>
<%= expr -%>
<%-= expr -%>
</p>
END
lambda { ERB.new(input, nil, '-').result }.should raise_error
end
ruby_bug "#213", "1.8.7" do
it "regards lines starting with '%' as '<% ... %>' when trim_mode is '%'" do
expected = "<ul>\n <li>1\n \n <li>2\n \n <li>3\n \n\n</ul>\n%%\n"
ERB.new(@eruby_str2, nil, "%").result.should == expected
end
end
it "regards lines starting with '%' as '<% ... %>' and remove \"\\n\" when trim_mode is '%>'" do
expected = "<ul>\n <li>1 <li>2 <li>3 </ul>\n%%\n"
ERB.new(@eruby_str2, nil, '%>').result.should == expected
end
it "regard lines starting with '%' as '<% ... %>' and remove \"\\n\" when trim_mode is '%<>'" do
expected = "<ul>\n <li>1\n \n <li>2\n \n <li>3\n \n</ul>\n%%\n"
ERB.new(@eruby_str2, nil, '%<>').result.should == expected
end
# The local `trim_mode` was assigned but never used; the literal '%-' was
# repeated in the ERB.new call. Use the variable so the mode under test is
# stated exactly once.
it "regard lines starting with '%' as '<% ... %>' and spaces around '<%- -%>' when trim_mode is '%-'" do
  expected = "<ul>\n<li>1</li>\n<li>2</li>\n</ul>\n%%\n"
  input = <<'END'
<ul>
%list = [1,2]
%for item in list
<li><%= item %></li>
<% end %></ul>
%%%
END
  trim_mode = '%-'
  ERB.new(input, nil, trim_mode).result.should == expected
end
not_compliant_on :rubinius do
it "accepts a safe level as second argument" do
input = "<b><%=- 2+2 %>"
safe_level = 3
lambda { ERB.new(input, safe_level).result }.should_not raise_error
end
end
it "changes '_erbout' variable name in the produced source" do
input = @eruby_str
match_erbout = ERB.new(input, nil, nil).src
match_buf = ERB.new(input, nil, nil, 'buf').src
match_erbout.gsub("_erbout", "buf").should == match_buf
end
it "ignores '<%# ... %>'" do
input = <<'END'
<%# for item in list %>
<b><%#= item %></b>
<%# end %>
END
ERB.new(input).result.should == "\n<b></b>\n\n"
ERB.new(input, nil, '<>').result.should == "<b></b>\n"
end
ruby_version_is ""..."2.0" do
it "remember local variables defined previous one" do
ERB.new(@eruby_str).result
ERB.new("<%= list.inspect %>").result.should == "[1, 2, 3]"
end
end
ruby_version_is "2.0" do
it "forget local variables defined previous one" do
ERB.new(@eruby_str).result
lambda{ ERB.new("<%= list %>").result }.should raise_error(NameError)
end
end
end
|
Java
|
/*
* Copyright (C) 2013 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "HTMLMediaSource.h"
namespace WebCore {

// Process-wide registry for media source object URLs; null until installed.
URLRegistry* HTMLMediaSource::s_registry = 0;

// Installs the URL registry. Intended to be called exactly once:
// the ASSERT guards against re-registration.
void HTMLMediaSource::setRegistry(URLRegistry* registry)
{
    ASSERT(!s_registry);
    s_registry = registry;
}

}
|
Java
|
/***************************************************************************
* Copyright (C) 2008 by Ralf Kaestner *
* ralf.kaestner@gmail.com *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
***************************************************************************/
#ifndef CAN_H
#define CAN_H
/** \file
* \brief Generic CAN communication
* Common commands used to communicate via the CAN protocol.
* These methods are implemented by all CAN communication backends.
*/
#include <sys/types.h>    /* ssize_t, used by can_message_t and can_device_t */

#include <tulibs/config.h>
/** Predefined CAN constants
*/
#define CAN_CONFIG_ARG_PREFIX "can"
/** Predefined CAN error codes
*/
#define CAN_ERROR_NONE 0
#define CAN_ERROR_OPEN 1
#define CAN_ERROR_SETUP 2
#define CAN_ERROR_CLOSE 3
#define CAN_ERROR_SEND 4
#define CAN_ERROR_RECEIVE 5
/** \brief Predefined CAN error descriptions
*/
extern const char* can_errors[];
/** \brief Structure defining a CAN message
*/
typedef struct can_message_t {
int id; //!< The CAN message identifier.
unsigned char content[8]; //!< The actual CAN message content.
ssize_t length; //!< The length of the CAN message.
} can_message_t, *can_message_p;
/** \brief Structure defining a CAN device
*/
typedef struct can_device_t {
void* comm_dev; //!< The CAN communication device.
config_t config; //!< The CAN configuration parameters.
ssize_t num_references; //!< Number of references to this device.
ssize_t num_sent; //!< The number of CAN messages sent.
ssize_t num_received; //!< The number of CAN messages read.
} can_device_t, *can_device_p;
/** \brief Predefined CAN default configuration
*/
extern config_t can_default_config;
/** \brief Initialize CAN device
* \param[in] dev The CAN device to be initialized.
* \param[in] config The optional CAN device configuration parameters.
* Can be null.
*/
void can_init(
can_device_p dev,
config_p config);
/** \brief Initialize CAN device from command line arguments
* \param[in] dev The CAN device to be initialized.
* \param[in] argc The number of supplied command line arguments.
* \param[in] argv The list of supplied command line arguments.
* \param[in] prefix An optional argument prefix.
*/
void can_init_arg(
can_device_p dev,
int argc,
char **argv,
const char* prefix);
/** \brief Destroy an existing CAN device
* \param[in] dev The CAN device to be destroyed.
*/
void can_destroy(
can_device_p dev);
/** \brief Open CAN communication
* \note This method is implemented by the CAN communication backend.
* \param[in] dev The initialized CAN device to be opened.
* \return The resulting error code.
*/
int can_open(
can_device_p dev);
/** \brief Close CAN communication
* \note This method is implemented by the CAN communication backend.
* \param[in] dev The opened CAN device to be closed.
* \return The resulting error code.
*/
int can_close(
can_device_p dev);
/** \brief Send a CAN message
* \note This method is implemented by the CAN communication backend.
* \param[in] dev The CAN device to be used for sending the message.
* \param[in] message The CAN message to be sent.
* \return The resulting error code.
*/
int can_send_message(
can_device_p dev,
can_message_p message);
/** \brief Synchronously receive a CAN message
* \note This method is implemented by the CAN communication backend.
* \param[in] dev The CAN device to be used for receiving the message.
* \param[in,out] message The sent CAN message that will be transformed
* into the CAN message received.
* \return The resulting error code.
*/
int can_receive_message(
can_device_p dev,
can_message_p message);
#endif
|
Java
|
/* Copyright (c) 2009, University of Oslo, Norway
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of the University of Oslo nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package vtk.text.tl.expr;
import java.math.BigDecimal;
import vtk.text.tl.Symbol;
/**
 * Numeric multiplication operator for the template-language expression
 * evaluator. Operand coercion and dispatch live in {@link NumericOperator};
 * this class supplies only the arithmetic.
 */
public class Multiply extends NumericOperator {

    /**
     * @param symbol the operator symbol this instance is bound to
     */
    public Multiply(Symbol symbol) {
        super(symbol);
    }

    /** Multiplies the two operands using exact decimal arithmetic. */
    @Override
    protected Object evalNumeric(BigDecimal n1, BigDecimal n2) {
        final BigDecimal product = n1.multiply(n2);
        return product;
    }
}
|
Java
|
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Definition of ModelTypePayloadMap and various utility functions.
#ifndef SYNC_INTERNAL_PUBLIC_API_BASE_MODEL_TYPE_PAYLOAD_MAP_H_
#define SYNC_INTERNAL_PUBLIC_API_BASE_MODEL_TYPE_PAYLOAD_MAP_H_
#include <map>
#include <string>
#include "sync/base/sync_export.h"
#include "sync/internal_api/public/base/model_type.h"
// TODO(akalin): Move the non-exported functions in this file to a
// private header.
namespace base {
class DictionaryValue;
}
namespace syncer {
// A container mapping each datatype in a set to an optional string payload.
typedef std::map<ModelType, std::string> ModelTypePayloadMap;

// Helper functions for building ModelTypePayloadMaps.

// Make a TypePayloadMap from all the types in a ModelTypeSet using a
// default payload.
SYNC_EXPORT ModelTypePayloadMap ModelTypePayloadMapFromEnumSet(
    ModelTypeSet model_types, const std::string& payload);

// Extracts just the set of types, discarding the payloads.
ModelTypeSet ModelTypePayloadMapToEnumSet(
    const ModelTypePayloadMap& payload_map);

// Human-readable rendering, for logging/debugging.
std::string ModelTypePayloadMapToString(
    const ModelTypePayloadMap& model_type_payloads);

// Caller takes ownership of the returned dictionary.
base::DictionaryValue* ModelTypePayloadMapToValue(
    const ModelTypePayloadMap& model_type_payloads);

// Coalesce |update| into |original|, overwriting only when |update| has
// a non-empty payload.
void CoalescePayloads(ModelTypePayloadMap* original,
                      const ModelTypePayloadMap& update);
} // namespace syncer
#endif // SYNC_INTERNAL_PUBLIC_API_BASE_MODEL_TYPE_PAYLOAD_MAP_H_
|
Java
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides fakes for several of Telemetry's internal objects.
These allow code like story_runner and Benchmark to be run and tested
without compiling or starting a browser. Class names prepended with an
underscore are intended to be implementation details, and should not
be subclassed; however, some, like _FakeBrowser, have public APIs that
may need to be called in tests.
"""
from telemetry.internal.backends.chrome_inspector import websocket
from telemetry.internal.browser import browser_options
from telemetry.internal.platform import system_info
from telemetry.page import shared_page_state
from telemetry.util import image_util
from telemetry.testing.internal import fake_gpu_info
from types import ModuleType
# Classes and functions which are intended to be part of the public
# fakes API.
class FakePlatform(object):
  """Fake of telemetry's platform object.

  State (OS name, device type, BattOr presence, svelte/AOSP flags) is set
  via the Set* helpers so tests can simulate environments without a real
  device. Methods a given test is not expected to reach raise
  NotImplementedError.
  """

  def __init__(self):
    self._network_controller = None  # created lazily; see network_controller
    self._tracing_controller = None  # created lazily; see tracing_controller
    self._has_battor = False
    self._os_name = 'FakeOS'
    self._device_type_name = 'abc'
    self._is_svelte = False
    self._is_aosp = True

  @property
  def is_host_platform(self):
    raise NotImplementedError

  @property
  def network_controller(self):
    # Instantiated on first access so tests that never touch networking
    # pay no setup cost.
    if self._network_controller is None:
      self._network_controller = _FakeNetworkController()
    return self._network_controller

  @property
  def tracing_controller(self):
    if self._tracing_controller is None:
      self._tracing_controller = _FakeTracingController()
    return self._tracing_controller

  def Initialize(self):
    pass

  def CanMonitorThermalThrottling(self):
    return False

  def IsThermallyThrottled(self):
    return False

  def HasBeenThermallyThrottled(self):
    return False

  def GetArchName(self):
    raise NotImplementedError

  def SetOSName(self, name):
    self._os_name = name

  def GetOSName(self):
    return self._os_name

  def GetOSVersionName(self):
    raise NotImplementedError

  def GetOSVersionDetailString(self):
    raise NotImplementedError

  def StopAllLocalServers(self):
    pass

  def WaitForBatteryTemperature(self, _):
    pass

  def HasBattOrConnected(self):
    return self._has_battor

  def SetBattOrDetected(self, b):
    assert isinstance(b, bool)
    self._has_battor = b

  # TODO(rnephew): Investigate moving from setters to @property.
  def SetDeviceTypeName(self, name):
    self._device_type_name = name

  def GetDeviceTypeName(self):
    return self._device_type_name

  def SetIsSvelte(self, b):
    assert isinstance(b, bool)
    self._is_svelte = b

  def IsSvelte(self):
    # Svelte is queried only for Android; refuse to answer for other OSes.
    if self._os_name != 'android':
      raise NotImplementedError
    return self._is_svelte

  def SetIsAosp(self, b):
    assert isinstance(b, bool)
    self._is_aosp = b

  def IsAosp(self):
    # AOSP only makes sense on Android, hence the extra OS check.
    return self._is_aosp and self._os_name == 'android'
class FakeLinuxPlatform(FakePlatform):
  """FakePlatform that reports itself as a desktop Linux host.

  Assign base64 PNG data to screenshot_png_data to enable TakeScreenshot.
  """

  def __init__(self):
    super(FakeLinuxPlatform, self).__init__()
    self.screenshot_png_data = None    # base64-encoded PNG, or None
    self.http_server_directories = []  # records SetHTTPServerDirectories args
    self.http_server = FakeHTTPServer()

  @property
  def is_host_platform(self):
    return True

  def GetDeviceTypeName(self):
    return 'Desktop'

  def GetArchName(self):
    return 'x86_64'

  def GetOSName(self):
    return 'linux'

  def GetOSVersionName(self):
    return 'trusty'

  def GetOSVersionDetailString(self):
    return ''

  def CanTakeScreenshot(self):
    return bool(self.screenshot_png_data)

  def TakeScreenshot(self, file_path):
    # Decodes the canned base64 PNG and writes it to file_path.
    if not self.CanTakeScreenshot():
      raise NotImplementedError
    img = image_util.FromBase64Png(self.screenshot_png_data)
    image_util.WritePngFile(img, file_path)
    return True

  def SetHTTPServerDirectories(self, paths):
    self.http_server_directories.append(paths)
class FakeHTTPServer(object):
  """Stand-in for a local HTTP server; every lookup maps to one file URL."""

  _DUMMY_URL = 'file:///foo'

  def UrlOf(self, url):
    del url  # accepted for interface compatibility only; value is ignored
    return self._DUMMY_URL
class FakePossibleBrowser(object):
  """Stand-in for a PossibleBrowser; Create() returns a canned _FakeBrowser.

  The optional hooks let tests run code when the browser "starts" and after
  it is "created".
  """

  def __init__(self, execute_on_startup=None,
               execute_after_browser_creation=None):
    self._returned_browser = _FakeBrowser(FakeLinuxPlatform())
    self.browser_type = 'linux'
    self.supports_tab_control = False
    self.is_remote = False
    self.execute_on_startup = execute_on_startup
    self.execute_after_browser_creation = execute_after_browser_creation

  @property
  def returned_browser(self):
    """The browser object that will be returned through later API calls."""
    return self._returned_browser

  def Create(self, finder_options):
    # Hooks fire in launch order: startup first, then post-creation with the
    # browser instance.
    if self.execute_on_startup is not None:
      self.execute_on_startup()
    del finder_options  # unused
    if self.execute_after_browser_creation is not None:
      self.execute_after_browser_creation(self._returned_browser)
    return self.returned_browser

  @property
  def platform(self):
    """The platform object from the returned browser.

    To change this or set it up, change the returned browser's
    platform.
    """
    return self.returned_browser.platform

  def IsRemote(self):
    return self.is_remote

  def SetCredentialsPath(self, _):
    pass
class FakeSharedPageState(shared_page_state.SharedPageState):
  """SharedPageState that skips real browser discovery.

  _GetPossibleBrowser hands back a FakePossibleBrowser, configurable via
  ConfigurePossibleBrowser.
  """

  def __init__(self, test, finder_options, story_set):
    super(FakeSharedPageState, self).__init__(test, finder_options, story_set)

  def _GetPossibleBrowser(self, test, finder_options):
    # Bypass real browser discovery; hand back a configurable fake.
    p = FakePossibleBrowser()
    self.ConfigurePossibleBrowser(p)
    return p

  def ConfigurePossibleBrowser(self, possible_browser):
    """Override this to configure the PossibleBrowser.

    Can make changes to the browser's configuration here via e.g.:
    possible_browser.returned_browser.returned_system_info = ...
    """
    pass

  def DidRunStory(self, results):
    # TODO(kbr): add a test which throws an exception from DidRunStory
    # to verify the fix from https://crrev.com/86984d5fc56ce00e7b37ebe .
    super(FakeSharedPageState, self).DidRunStory(results)
class FakeSystemInfo(system_info.SystemInfo):
  """SystemInfo pre-populated with canned GPU data.

  Args:
    model_name: hardware model string; defaults to ''.
    gpu_dict: GPU description dict; defaults to fake_gpu_info.FAKE_GPU_INFO.
    command_line: browser command line string; defaults to ''.
  """

  def __init__(self, model_name='', gpu_dict=None, command_line=''):
    # 'is None' rather than '== None': identity is the correct (and PEP 8
    # mandated) test for the missing-argument sentinel, and avoids invoking
    # a custom __eq__ on the passed-in dict.
    if gpu_dict is None:
      gpu_dict = fake_gpu_info.FAKE_GPU_INFO
    super(FakeSystemInfo, self).__init__(model_name, gpu_dict, command_line)
class _FakeBrowserFinderOptions(browser_options.BrowserFinderOptions):
  """BrowserFinderOptions carrying a ready-made FakePossibleBrowser."""

  def __init__(self, execute_on_startup=None,
               execute_after_browser_creation=None, *args, **kwargs):
    browser_options.BrowserFinderOptions.__init__(self, *args, **kwargs)
    # The fake browser the finder machinery will "discover"; hooks are
    # forwarded so tests can observe the simulated launch.
    self.fake_possible_browser = \
      FakePossibleBrowser(
          execute_on_startup=execute_on_startup,
          execute_after_browser_creation=execute_after_browser_creation)
def CreateBrowserFinderOptions(browser_type=None, execute_on_startup=None,
                               execute_after_browser_creation=None):
  """Creates fake browser finder options for discovering a browser.

  Args:
    browser_type: forwarded to BrowserFinderOptions.
    execute_on_startup: optional zero-arg callable run when Create() starts.
    execute_after_browser_creation: optional one-arg callable run with the
        created browser.
  """
  return _FakeBrowserFinderOptions(
      browser_type=browser_type,
      execute_on_startup=execute_on_startup,
      execute_after_browser_creation=execute_after_browser_creation)
# Internal classes. Note that end users may still need to both call
# and mock out methods of these classes, but they should not be
# subclassed.
class _FakeBrowser(object):
  """Fake of a Browser: canned tabs, platform, and system info.

  Methods may be called and mocked out, but the class should not be
  subclassed.
  """

  def __init__(self, platform):
    self._tabs = _FakeTabList(self)
    # Fake the creation of the first tab.
    self._tabs.New()
    self._returned_system_info = FakeSystemInfo()
    self._platform = platform
    self._browser_type = 'release'
    self._is_crashed = False

  @property
  def platform(self):
    return self._platform

  @platform.setter
  def platform(self, incoming):
    """Allows overriding of the fake browser's platform object."""
    assert isinstance(incoming, FakePlatform)
    self._platform = incoming

  @property
  def returned_system_info(self):
    """The object which will be returned from calls to GetSystemInfo."""
    return self._returned_system_info

  @returned_system_info.setter
  def returned_system_info(self, incoming):
    """Allows overriding of the returned SystemInfo object.

    Incoming argument must be an instance of FakeSystemInfo."""
    assert isinstance(incoming, FakeSystemInfo)
    self._returned_system_info = incoming

  @property
  def browser_type(self):
    """The browser_type this browser claims to be ('debug', 'release', etc.)"""
    return self._browser_type

  @browser_type.setter
  def browser_type(self, incoming):
    """Allows setting of the browser_type."""
    self._browser_type = incoming

  @property
  def credentials(self):
    # A fresh stub on each access; it never warns.
    return _FakeCredentials()

  def Close(self):
    # Closing clears any simulated crash state (see _FakeTab.Navigate).
    self._is_crashed = False

  @property
  def supports_system_info(self):
    return True

  def GetSystemInfo(self):
    return self.returned_system_info

  @property
  def supports_tab_control(self):
    return True

  @property
  def tabs(self):
    return self._tabs

  def DumpStateUponFailure(self):
    pass
class _FakeCredentials(object):
  """Credentials stub; never warns about missing credentials."""

  def WarnIfMissingCredentials(self, _):
    pass
class _FakeTracingController(object):
def __init__(self):
self._is_tracing = False
def StartTracing(self, tracing_config, timeout=10):
self._is_tracing = True
del tracing_config
del timeout
def StopTracing(self):
self._is_tracing = False
@property
def is_tracing_running(self):
return self._is_tracing
def ClearStateIfNeeded(self):
pass
def IsChromeTracingSupported(self):
return True
class _FakeNetworkController(object):
def __init__(self):
self.wpr_mode = None
self.extra_wpr_args = None
self.is_initialized = False
self.is_open = False
self.use_live_traffic = None
def InitializeIfNeeded(self, use_live_traffic=False):
self.use_live_traffic = use_live_traffic
def UpdateTrafficSettings(self, round_trip_latency_ms=None,
download_bandwidth_kbps=None, upload_bandwidth_kbps=None):
pass
def Open(self, wpr_mode, extra_wpr_args, use_wpr_go=False):
del use_wpr_go # Unused.
self.wpr_mode = wpr_mode
self.extra_wpr_args = extra_wpr_args
self.is_open = True
def Close(self):
self.wpr_mode = None
self.extra_wpr_args = None
self.is_initialized = False
self.is_open = False
def StartReplay(self, archive_path, make_javascript_deterministic=False):
del make_javascript_deterministic # Unused.
assert self.is_open
self.is_initialized = archive_path is not None
def StopReplay(self):
self.is_initialized = False
class _FakeTab(object):
  """Fake browser tab.

  Navigating to 'chrome://crash' marks the owning browser as crashed and
  raises, letting tests exercise crash handling. Assign base64 PNG data to
  test_png to enable Screenshot().
  """

  def __init__(self, browser, tab_id):
    self._browser = browser
    self._tab_id = str(tab_id)
    self._collect_garbage_count = 0
    self.test_png = None

  @property
  def collect_garbage_count(self):
    # Number of times CollectGarbage() has been called.
    return self._collect_garbage_count

  @property
  def id(self):
    return self._tab_id

  @property
  def browser(self):
    return self._browser

  def WaitForDocumentReadyStateToBeComplete(self, timeout=0):
    pass

  def Navigate(self, url, script_to_evaluate_on_commit=None,
               timeout=0):
    del script_to_evaluate_on_commit, timeout  # unused
    if url == 'chrome://crash':
      # Simulate a renderer crash, as the real chrome://crash URL would.
      self.browser._is_crashed = True
      raise Exception

  def WaitForDocumentReadyStateToBeInteractiveOrBetter(self, timeout=0):
    pass

  def WaitForFrameToBeDisplayed(self, timeout=0):
    pass

  def IsAlive(self):
    return True

  def CloseConnections(self):
    pass

  def CollectGarbage(self):
    self._collect_garbage_count += 1

  def Close(self):
    pass

  @property
  def screenshot_supported(self):
    return self.test_png is not None

  def Screenshot(self):
    assert self.screenshot_supported, 'Screenshot is not supported'
    return image_util.FromBase64Png(self.test_png)
class _FakeTabList(object):
  """List of _FakeTabs; indexing raises once the owning browser has crashed."""

  # Class-level counter so tab ids are unique across all tab lists.
  _current_tab_id = 0

  def __init__(self, browser):
    self._tabs = []
    self._browser = browser

  def New(self, timeout=300):
    """Creates a _FakeTab with a fresh id, appends it, and returns it."""
    del timeout  # unused
    type(self)._current_tab_id += 1
    t = _FakeTab(self._browser, type(self)._current_tab_id)
    self._tabs.append(t)
    return t

  def __iter__(self):
    return self._tabs.__iter__()

  def __len__(self):
    return len(self._tabs)

  def __getitem__(self, index):
    # Mirror the real browser: tab access fails after a crash.
    if self._tabs[index].browser._is_crashed:
      raise Exception
    else:
      return self._tabs[index]

  def GetTabById(self, identifier):
    """The identifier of a tab can be accessed with tab.id."""
    for tab in self._tabs:
      if tab.id == identifier:
        return tab
    return None
class FakeInspectorWebsocket(object):
  """A fake InspectorWebsocket.

  A fake that allows tests to send pregenerated data. Normal
  InspectorWebsockets allow for any number of domain handlers. This fake only
  allows up to 1 domain handler, and assumes that the domain of the response
  always matches that of the handler.
  """
  # The docstring must be the first statement in the class body to be picked
  # up as __doc__; it previously sat below these constants as a dead string
  # expression.

  _NOTIFICATION_EVENT = 1     # queued item goes to the domain handler
  _NOTIFICATION_CALLBACK = 2  # queued item answers a pending AsyncRequest

  def __init__(self, mock_timer):
    self._mock_timer = mock_timer
    self._notifications = []      # [(response, time, kind)], ascending time
    self._response_handlers = {}  # method name -> handler for SyncRequest
    self._pending_callbacks = {}  # method name -> FIFO of async callbacks
    self._handler = None          # the single registered domain handler

  def RegisterDomain(self, _, handler):
    # Only one domain handler is supported; the domain name is ignored.
    self._handler = handler

  def AddEvent(self, method, params, time):
    """Queues an event notification to be delivered at |time|."""
    if self._notifications:
      assert self._notifications[-1][1] < time, (
          'Current response is scheduled earlier than previous response.')
    response = {'method': method, 'params': params}
    self._notifications.append((response, time, self._NOTIFICATION_EVENT))

  def AddAsyncResponse(self, method, result, time):
    """Queues a reply to a pending AsyncRequest, delivered at |time|."""
    if self._notifications:
      assert self._notifications[-1][1] < time, (
          'Current response is scheduled earlier than previous response.')
    response = {'method': method, 'result': result}
    self._notifications.append((response, time, self._NOTIFICATION_CALLBACK))

  def AddResponseHandler(self, method, handler):
    self._response_handlers[method] = handler

  def SyncRequest(self, request, *args, **kwargs):
    del args, kwargs  # unused
    handler = self._response_handlers[request['method']]
    return handler(request) if handler else None

  def AsyncRequest(self, request, callback):
    self._pending_callbacks.setdefault(request['method'], []).append(callback)

  def SendAndIgnoreResponse(self, request):
    pass

  def Connect(self, _):
    pass

  def DispatchNotifications(self, timeout):
    """Delivers the next queued notification, advancing the mock clock.

    Raises websocket.WebSocketTimeoutException if nothing is due within
    |timeout| of the current mock time.
    """
    current_time = self._mock_timer.time()
    if not self._notifications:
      self._mock_timer.SetTime(current_time + timeout + 1)
      raise websocket.WebSocketTimeoutException()

    response, time, kind = self._notifications[0]
    if time - current_time > timeout:
      self._mock_timer.SetTime(current_time + timeout + 1)
      raise websocket.WebSocketTimeoutException()

    self._notifications.pop(0)
    self._mock_timer.SetTime(time + 1)
    if kind == self._NOTIFICATION_EVENT:
      self._handler(response)
    elif kind == self._NOTIFICATION_CALLBACK:
      callback = self._pending_callbacks.get(response['method']).pop(0)
      callback(response)
    else:
      raise Exception('Unexpected response type')
class FakeTimer(object):
  """A fake timer to fake out the timing for a module.

  When constructed with a module, the fake installs itself as that module's
  `time` attribute and puts the real attribute back on Restore()/deletion.

  Args:
    module: module to fake out the time
  """

  def __init__(self, module=None):
    self._now = 0
    self._module = module
    self._actual_time = None
    if not module:
      return
    assert isinstance(module, ModuleType)
    self._actual_time = module.time
    self._module.time = self

  def sleep(self, time):
    """Advances fake time instead of blocking."""
    self._now = self._now + time

  def time(self):
    return self._now

  def SetTime(self, time):
    self._now = time

  def __del__(self):
    self.Restore()

  def Restore(self):
    """Puts the patched module back the way it was found. Idempotent."""
    if not self._module:
      return
    self._module.time = self._actual_time
    self._module = None
    self._actual_time = None
|
Java
|
/* Copyright (c) 2015-2020 The Khronos Group Inc.
* Copyright (c) 2015-2020 Valve Corporation
* Copyright (c) 2015-2020 LunarG, Inc.
* Copyright (C) 2015-2020 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author: Courtney Goeltzenleuchter <courtneygo@google.com>
* Author: Tobin Ehlis <tobine@google.com>
* Author: Chris Forbes <chrisf@ijw.co.nz>
* Author: Mark Lobodzinski <mark@lunarg.com>
* Author: Dave Houlton <daveh@lunarg.com>
* Author: John Zulauf <jzulauf@lunarg.com>
*/
#ifndef CORE_VALIDATION_TYPES_H_
#define CORE_VALIDATION_TYPES_H_
#include "cast_utils.h"
#include "hash_vk_types.h"
#include "sparse_containers.h"
#include "vk_safe_struct.h"
#include "vulkan/vulkan.h"
#include "vk_layer_logging.h"
#include "vk_object_types.h"
#include "vk_extension_helper.h"
#include "vk_typemap_helper.h"
#include "convert_to_renderpass2.h"
#include "layer_chassis_dispatch.h"
#include "image_layout_map.h"
#include <array>
#include <atomic>
#include <functional>
#include <list>
#include <map>
#include <memory>
#include <set>
#include <string.h>
#include <unordered_map>
#include <unordered_set>
#include <vector>
#include <memory>
#include <list>
#ifdef VK_USE_PLATFORM_ANDROID_KHR
#include "android_ndk_types.h"
#endif // VK_USE_PLATFORM_ANDROID_KHR
// Fwd declarations -- including descriptor_set.h creates an ugly include loop
namespace cvdescriptorset {
class DescriptorSetLayoutDef;
class DescriptorSetLayout;
class DescriptorSet;
} // namespace cvdescriptorset
// Only CoreChecks uses this, but the state tracker stores it.
constexpr static auto kInvalidLayout = image_layout_map::kInvalidLayout;
using ImageSubresourceLayoutMap = image_layout_map::ImageSubresourceLayoutMap;
struct CMD_BUFFER_STATE;
class CoreChecks;
class ValidationStateTracker;
enum CALL_STATE {
UNCALLED, // Function has not been called
QUERY_COUNT, // Function called once to query a count
QUERY_DETAILS, // Function called w/ a count to query details
};
// Common base for tracked Vulkan object state (e.g. COMMAND_POOL_STATE
// below): in-flight usage count, command-buffer back-bindings, and a
// destroyed flag that outlives the API-level handle.
class BASE_NODE {
  public:
    // Track when object is being used by an in-flight command buffer
    std::atomic_int in_use;
    // Track command buffers that this object is bound to
    //  binding initialized when cmd referencing object is bound to command buffer
    //  binding removed when command buffer is reset or destroyed
    // When an object is destroyed, any bound cbs are set to INVALID.
    // "int" value is an index into object_bindings where the corresponding
    // backpointer to this node is stored.
    small_unordered_map<CMD_BUFFER_STATE *, int, 8> cb_bindings;
    // Set to true when the API-level object is destroyed, but this object may
    // hang around until its shared_ptr refcount goes to zero.
    bool destroyed;

    BASE_NODE() {
        in_use.store(0);
        destroyed = false;
    };
};
// Track command pools and their command buffers
struct COMMAND_POOL_STATE : public BASE_NODE {
VkCommandPoolCreateFlags createFlags;
uint32_t queueFamilyIndex;
// Cmd buffers allocated from this pool
std::unordered_set<VkCommandBuffer> commandBuffers;
};
// Utilities for barriers and the commmand pool
template <typename Barrier>
static bool IsTransferOp(const Barrier *barrier) {
return barrier->srcQueueFamilyIndex != barrier->dstQueueFamilyIndex;
}
// Queue family ownership "release" check: the pool recording the barrier owns
// the source queue family. assume_transfer skips the src!=dst transfer test.
template <typename Barrier, bool assume_transfer = false>
static bool TempIsReleaseOp(const COMMAND_POOL_STATE *pool, const Barrier *barrier) {
    if (!assume_transfer && !IsTransferOp(barrier)) return false;
    return pool->queueFamilyIndex == barrier->srcQueueFamilyIndex;
}
// Queue family ownership "acquire" check: the pool recording the barrier owns
// the destination queue family. assume_transfer skips the src!=dst transfer test.
template <typename Barrier, bool assume_transfer = false>
static bool IsAcquireOp(const COMMAND_POOL_STATE *pool, const Barrier *barrier) {
    if (!assume_transfer && !IsTransferOp(barrier)) return false;
    return pool->queueFamilyIndex == barrier->dstQueueFamilyIndex;
}
// "Special" queue families are the external/foreign pseudo-families used for
// ownership transfers to/from non-Vulkan or cross-instance owners.
static inline bool QueueFamilyIsSpecial(const uint32_t queue_family_index) {
    switch (queue_family_index) {
        case VK_QUEUE_FAMILY_EXTERNAL_KHR:
        case VK_QUEUE_FAMILY_FOREIGN_EXT:
            return true;
        default:
            return false;
    }
}
// VK_QUEUE_FAMILY_IGNORED in a barrier's queue family fields means no
// ownership transfer is requested.
static inline bool QueueFamilyIsIgnored(uint32_t queue_family_index) {
    return VK_QUEUE_FAMILY_IGNORED == queue_family_index;
}
// Intentionally ignore VulkanTypedHandle::node, it is optional.
// Equality is (handle, type) only -- must stay in sync with the std::hash
// specialization below.
inline bool operator==(const VulkanTypedHandle &a, const VulkanTypedHandle &b) NOEXCEPT {
    if (a.handle != b.handle) return false;
    return a.type == b.type;
}
namespace std {
// Hash over (handle, type) only, matching operator== above which also ignores
// the optional node pointer.
template <>
struct hash<VulkanTypedHandle> {
    size_t operator()(VulkanTypedHandle obj) const NOEXCEPT {
        const size_t handle_hash = hash<uint64_t>()(obj.handle);
        const size_t type_hash = hash<uint32_t>()(obj.type);
        return handle_hash ^ type_hash;
    }
};
}  // namespace std
// Flags describing requirements imposed by the pipeline on a descriptor. These
// can't be checked at pipeline creation time as they depend on the Image or
// ImageView bound.
enum descriptor_req {
    // Low bits: one bit per VkImageViewType value.
    DESCRIPTOR_REQ_VIEW_TYPE_1D = 1 << VK_IMAGE_VIEW_TYPE_1D,
    DESCRIPTOR_REQ_VIEW_TYPE_1D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_1D_ARRAY,
    DESCRIPTOR_REQ_VIEW_TYPE_2D = 1 << VK_IMAGE_VIEW_TYPE_2D,
    DESCRIPTOR_REQ_VIEW_TYPE_2D_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_2D_ARRAY,
    DESCRIPTOR_REQ_VIEW_TYPE_3D = 1 << VK_IMAGE_VIEW_TYPE_3D,
    DESCRIPTOR_REQ_VIEW_TYPE_CUBE = 1 << VK_IMAGE_VIEW_TYPE_CUBE,
    DESCRIPTOR_REQ_VIEW_TYPE_CUBE_ARRAY = 1 << VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,

    // Mask covering all of the view-type bits above.
    DESCRIPTOR_REQ_ALL_VIEW_TYPE_BITS = (1 << (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY + 1)) - 1,

    // Sample-count requirement bits (start just above the view-type bits).
    DESCRIPTOR_REQ_SINGLE_SAMPLE = 2 << VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,
    DESCRIPTOR_REQ_MULTI_SAMPLE = DESCRIPTOR_REQ_SINGLE_SAMPLE << 1,

    // Numeric component-type requirement bits.
    DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT = DESCRIPTOR_REQ_MULTI_SAMPLE << 1,
    DESCRIPTOR_REQ_COMPONENT_TYPE_SINT = DESCRIPTOR_REQ_COMPONENT_TYPE_FLOAT << 1,
    DESCRIPTOR_REQ_COMPONENT_TYPE_UINT = DESCRIPTOR_REQ_COMPONENT_TYPE_SINT << 1,
};
extern unsigned DescriptorRequirementsBitsFromFormat(VkFormat fmt);
typedef std::map<uint32_t, descriptor_req> BindingReqMap;
// State for a VkDescriptorPool: creation info, the sets allocated from it, and
// per-descriptor-type capacity bookkeeping used to validate allocations.
struct DESCRIPTOR_POOL_STATE : BASE_NODE {
    VkDescriptorPool pool;
    uint32_t maxSets;        // Max descriptor sets allowed in this pool
    uint32_t availableSets;  // Available descriptor sets in this pool

    safe_VkDescriptorPoolCreateInfo createInfo;
    std::unordered_set<cvdescriptorset::DescriptorSet *> sets;  // Collection of all sets in this pool
    // Keyed by VkDescriptorType cast to uint32_t.
    std::map<uint32_t, uint32_t> maxDescriptorTypeCount;        // Max # of descriptors of each type in this pool
    std::map<uint32_t, uint32_t> availableDescriptorTypeCount;  // Available # of descriptors of each type in this pool

    DESCRIPTOR_POOL_STATE(const VkDescriptorPool pool, const VkDescriptorPoolCreateInfo *pCreateInfo)
        : pool(pool),
          maxSets(pCreateInfo->maxSets),
          availableSets(pCreateInfo->maxSets),
          createInfo(pCreateInfo),
          maxDescriptorTypeCount(),
          availableDescriptorTypeCount() {
        // Collect maximums per descriptor type.
        for (uint32_t i = 0; i < createInfo.poolSizeCount; ++i) {
            uint32_t typeIndex = static_cast<uint32_t>(createInfo.pPoolSizes[i].type);
            // Same descriptor types can appear several times
            maxDescriptorTypeCount[typeIndex] += createInfo.pPoolSizes[i].descriptorCount;
            availableDescriptorTypeCount[typeIndex] = maxDescriptorTypeCount[typeIndex];
        }
    }
};
// A byte range (offset, size) within a VkDeviceMemory allocation.
struct MemRange {
    VkDeviceSize offset = 0;
    VkDeviceSize size = 0;
};
// Data struct for tracking memory object
struct DEVICE_MEMORY_STATE : public BASE_NODE {
    void *object;  // Dispatchable object used to create this memory (device of swapchain)
    VkDeviceMemory mem;
    safe_VkMemoryAllocateInfo alloc_info;
    // Dedicated-allocation info (VkMemoryDedicatedAllocateInfo), if provided.
    bool is_dedicated;
    VkBuffer dedicated_buffer;
    VkImage dedicated_image;
    // External-memory export info, if provided at allocation time.
    bool is_export;
    VkExternalMemoryHandleTypeFlags export_handle_type_flags;

    std::unordered_set<VulkanTypedHandle> obj_bindings;  // objects bound to this memory
    // Convenience vectors of handles to speed up iterating over objects independently
    std::unordered_set<VkImage> bound_images;
    std::unordered_set<VkBuffer> bound_buffers;
    std::unordered_set<VkAccelerationStructureNV> bound_acceleration_structures;

    // vkMapMemory tracking, including the optional shadow-copy guard bands used
    // to detect out-of-bounds writes to the mapped pointer.
    MemRange mapped_range;
    void *shadow_copy_base;    // Base of layer's allocation for guard band, data, and alignment space
    void *shadow_copy;         // Pointer to start of guard-band data before mapped region
    uint64_t shadow_pad_size;  // Size of the guard-band data before and after actual data. It MUST be a
                               // multiple of limits.minMemoryMapAlignment
    void *p_driver_data;       // Pointer to application's actual memory

    DEVICE_MEMORY_STATE(void *disp_object, const VkDeviceMemory in_mem, const VkMemoryAllocateInfo *p_alloc_info)
        : object(disp_object),
          mem(in_mem),
          alloc_info(p_alloc_info),
          is_dedicated(false),
          dedicated_buffer(VK_NULL_HANDLE),
          dedicated_image(VK_NULL_HANDLE),
          is_export(false),
          export_handle_type_flags(0),
          mapped_range{},
          shadow_copy_base(0),
          shadow_copy(0),
          shadow_pad_size(0),
          p_driver_data(0){};
};
// Generic memory binding struct to track objects bound to objects
// (which DEVICE_MEMORY_STATE a resource is bound to, and at what range).
struct MEM_BINDING {
    std::shared_ptr<DEVICE_MEMORY_STATE> mem_state;  // Null when no memory is bound
    VkDeviceSize offset;
    VkDeviceSize size;
};
// A bound buffer: handle plus the bound size and offset.
struct BufferBinding {
    VkBuffer buffer;
    VkDeviceSize size;
    VkDeviceSize offset;
};
// An index buffer binding: a BufferBinding plus the element index type.
struct IndexBufferBinding : BufferBinding {
    VkIndexType index_type;
};
// MEM_BINDING equality: same memory object (shared_ptr compare) and same range.
// Must stay in sync with the std::hash<MEM_BINDING> specialization below.
inline bool operator==(MEM_BINDING a, MEM_BINDING b) NOEXCEPT {
    const bool same_memory = (a.mem_state == b.mem_state);
    const bool same_range = (a.offset == b.offset) && (a.size == b.size);
    return same_memory && same_range;
}
namespace std {
// Hash for MEM_BINDING over (mem_state, offset, size) -- the same fields that
// operator== compares.
// NOTE: hash the shared_ptr's stored pointer via get() rather than
// reinterpret_cast<uint64_t &> of the shared_ptr object itself; the old
// type-punning form read the first word of the shared_ptr's implementation-
// defined layout, which is undefined behavior and not portable.
template <>
struct hash<MEM_BINDING> {
    size_t operator()(MEM_BINDING mb) const NOEXCEPT {
        auto intermediate = hash<uint64_t>()(reinterpret_cast<uint64_t>(mb.mem_state.get())) ^ hash<uint64_t>()(mb.offset);
        return intermediate ^ hash<uint64_t>()(mb.size);
    }
};
}  // namespace std
// Superclass for bindable object state (currently images and buffers)
class BINDABLE : public BASE_NODE {
  public:
    bool sparse;  // Is this object being bound with sparse memory or not?
    // Non-sparse binding data
    MEM_BINDING binding;
    // Memory requirements for this BINDABLE
    VkMemoryRequirements requirements;
    // bool to track if memory requirements were checked
    bool memory_requirements_checked;
    // Tracks external memory types creating resource
    VkExternalMemoryHandleTypeFlags external_memory_handle;
    // Sparse binding data, initially just tracking MEM_BINDING per mem object
    // There's more data for sparse bindings so need better long-term solution
    // TODO : Need to update solution to track all sparse binding data
    std::unordered_set<MEM_BINDING> sparse_bindings;
    // Cached flat set of the memory objects bound (see UpdateBoundMemorySet).
    small_unordered_set<DEVICE_MEMORY_STATE *, 1> bound_memory_set_;

    BINDABLE()
        : sparse(false),
          binding{},
          requirements{},
          memory_requirements_checked(false),
          external_memory_handle(0),
          sparse_bindings{},
          bound_memory_set_{} {};

    // Update the cached set of memory bindings.
    // Code that changes binding.mem or sparse_bindings must call UpdateBoundMemorySet()
    void UpdateBoundMemorySet() {
        bound_memory_set_.clear();
        if (!sparse) {
            // Non-sparse: at most one binding; skip if nothing is bound.
            if (binding.mem_state) bound_memory_set_.insert(binding.mem_state.get());
        } else {
            for (auto sb : sparse_bindings) {
                bound_memory_set_.insert(sb.mem_state.get());
            }
        }
    }

    // Return unordered set of memory objects that are bound
    // Instead of creating a set from scratch each query, return the cached one
    const small_unordered_set<DEVICE_MEMORY_STATE *, 1> &GetBoundMemory() const { return bound_memory_set_; }
};
// State for a VkBuffer: creation info plus the (optional) captured device address.
class BUFFER_STATE : public BINDABLE {
  public:
    VkBuffer buffer;
    VkBufferCreateInfo createInfo;
    VkDeviceAddress deviceAddress;

    BUFFER_STATE(VkBuffer buff, const VkBufferCreateInfo *pCreateInfo) : buffer(buff), createInfo(*pCreateInfo) {
        // Deep-copy the queue family index array for concurrent sharing, since
        // the app-owned pointer copied by *pCreateInfo may not outlive us.
        // The matching delete[] is in the destructor.
        if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
            uint32_t *pQueueFamilyIndices = new uint32_t[createInfo.queueFamilyIndexCount];
            for (uint32_t i = 0; i < createInfo.queueFamilyIndexCount; i++) {
                pQueueFamilyIndices[i] = pCreateInfo->pQueueFamilyIndices[i];
            }
            createInfo.pQueueFamilyIndices = pQueueFamilyIndices;
        }

        if (createInfo.flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) {
            sparse = true;
        }

        auto *externalMemoryInfo = lvl_find_in_chain<VkExternalMemoryBufferCreateInfo>(pCreateInfo->pNext);
        if (externalMemoryInfo) {
            external_memory_handle = externalMemoryInfo->handleTypes;
        }
    };

    BUFFER_STATE(BUFFER_STATE const &rh_obj) = delete;  // Non-copyable: owns pQueueFamilyIndices

    ~BUFFER_STATE() {
        // Free the queue family index array deep-copied by the constructor.
        if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
            delete[] createInfo.pQueueFamilyIndices;
            createInfo.pQueueFamilyIndices = nullptr;
        }
    };
};
// State for a VkBufferView; holds a shared_ptr to the underlying buffer state
// so the buffer state stays alive while the view exists.
class BUFFER_VIEW_STATE : public BASE_NODE {
  public:
    VkBufferView buffer_view;
    VkBufferViewCreateInfo create_info;
    std::shared_ptr<BUFFER_STATE> buffer_state;

    BUFFER_VIEW_STATE(const std::shared_ptr<BUFFER_STATE> &bf, VkBufferView bv, const VkBufferViewCreateInfo *ci)
        : buffer_view(bv), create_info(*ci), buffer_state(bf){};
    BUFFER_VIEW_STATE(const BUFFER_VIEW_STATE &rh_obj) = delete;
};
// State for a VkSampler, including any ycbcr conversion or custom border color
// provided through the create-info pNext chain.
struct SAMPLER_STATE : public BASE_NODE {
    VkSampler sampler;
    VkSamplerCreateInfo createInfo;
    VkSamplerYcbcrConversion samplerConversion = VK_NULL_HANDLE;     // From VkSamplerYcbcrConversionInfo, if chained
    VkSamplerCustomBorderColorCreateInfoEXT customCreateInfo = {};   // Copied if chained, else zero-initialized

    SAMPLER_STATE(const VkSampler *ps, const VkSamplerCreateInfo *pci) : sampler(*ps), createInfo(*pci) {
        auto *conversionInfo = lvl_find_in_chain<VkSamplerYcbcrConversionInfo>(pci->pNext);
        if (conversionInfo) samplerConversion = conversionInfo->conversion;
        auto cbci = lvl_find_in_chain<VkSamplerCustomBorderColorCreateInfoEXT>(pci->pNext);
        if (cbci) customCreateInfo = *cbci;
    }
};
// State for a VkImage: creation info, swapchain/sparse/external-memory
// bookkeeping, per-plane memory requirements for disjoint images, and helpers
// used for alias-compatibility checks.
class IMAGE_STATE : public BINDABLE {
  public:
    VkImage image;
    safe_VkImageCreateInfo safe_create_info;
    VkImageCreateInfo &createInfo;  // Alias into safe_create_info (see constructor in the .cpp)
    bool valid;                   // If this is a swapchain image backing memory track valid here as it doesn't have DEVICE_MEMORY_STATE
    bool acquired;                // If this is a swapchain image, has it been acquired by the app.
    bool shared_presentable;      // True for a front-buffered swapchain image
    bool layout_locked;           // A front-buffered image that has been presented can never have layout transitioned
    bool get_sparse_reqs_called;  // Track if GetImageSparseMemoryRequirements() has been called for this image
    bool sparse_metadata_required;  // Track if sparse metadata aspect is required for this image
    bool sparse_metadata_bound;     // Track if sparse metadata aspect is bound to this image
    bool external_ahb;              // True if image will be imported/exported from/to an Android Hardware Buffer
    bool has_ahb_format;            // True if image was created with an external Android format
    bool is_swapchain_image;        // True if image is a swapchain image
    uint64_t ahb_format;            // External Android format, if provided
    VkImageSubresourceRange full_range;  // The normalized ISR for all levels, layers (slices), and aspects
    VkSwapchainKHR create_from_swapchain;
    VkSwapchainKHR bind_swapchain;
    uint32_t bind_swapchain_imageIndex;
    image_layout_map::Encoder range_encoder;
    VkFormatFeatureFlags format_features = 0;
    // Need the memory requirements for each plane if the image is disjoint
    bool disjoint;  // True if image was created with VK_IMAGE_CREATE_DISJOINT_BIT
    VkMemoryRequirements plane0_requirements;
    bool plane0_memory_requirements_checked;
    VkMemoryRequirements plane1_requirements;
    bool plane1_memory_requirements_checked;
    VkMemoryRequirements plane2_requirements;
    bool plane2_memory_requirements_checked;

    std::vector<VkSparseImageMemoryRequirements> sparse_requirements;

    IMAGE_STATE(VkImage img, const VkImageCreateInfo *pCreateInfo);
    IMAGE_STATE(IMAGE_STATE const &rh_obj) = delete;

    // Other images aliasing the same memory as this one.
    std::unordered_set<VkImage> aliasing_images;
    bool IsCompatibleAliasing(IMAGE_STATE *other_image_state);

    bool IsCreateInfoEqual(const VkImageCreateInfo &other_createInfo) const;
    bool IsCreateInfoDedicatedAllocationImageAliasingCompatible(const VkImageCreateInfo &other_createInfo) const;

    // Field-wise comparisons against another create info, used by the
    // IsCreateInfo* compatibility checks above.
    inline bool IsImageTypeEqual(const VkImageCreateInfo &other_createInfo) const {
        return createInfo.imageType == other_createInfo.imageType;
    }
    inline bool IsFormatEqual(const VkImageCreateInfo &other_createInfo) const {
        return createInfo.format == other_createInfo.format;
    }
    inline bool IsMipLevelsEqual(const VkImageCreateInfo &other_createInfo) const {
        return createInfo.mipLevels == other_createInfo.mipLevels;
    }
    inline bool IsUsageEqual(const VkImageCreateInfo &other_createInfo) const { return createInfo.usage == other_createInfo.usage; }
    inline bool IsSamplesEqual(const VkImageCreateInfo &other_createInfo) const {
        return createInfo.samples == other_createInfo.samples;
    }
    inline bool IsTilingEqual(const VkImageCreateInfo &other_createInfo) const {
        return createInfo.tiling == other_createInfo.tiling;
    }
    inline bool IsArrayLayersEqual(const VkImageCreateInfo &other_createInfo) const {
        return createInfo.arrayLayers == other_createInfo.arrayLayers;
    }
    inline bool IsInitialLayoutEqual(const VkImageCreateInfo &other_createInfo) const {
        return createInfo.initialLayout == other_createInfo.initialLayout;
    }
    inline bool IsSharingModeEqual(const VkImageCreateInfo &other_createInfo) const {
        return createInfo.sharingMode == other_createInfo.sharingMode;
    }
    inline bool IsExtentEqual(const VkImageCreateInfo &other_createInfo) const {
        return (createInfo.extent.width == other_createInfo.extent.width) &&
               (createInfo.extent.height == other_createInfo.extent.height) &&
               (createInfo.extent.depth == other_createInfo.extent.depth);
    }
    inline bool IsQueueFamilyIndicesEqual(const VkImageCreateInfo &other_createInfo) const {
        // Equal when both counts match and either both are empty or the arrays
        // compare equal element-for-element.
        return (createInfo.queueFamilyIndexCount == other_createInfo.queueFamilyIndexCount) &&
               (createInfo.queueFamilyIndexCount == 0 ||
                memcmp(createInfo.pQueueFamilyIndices, other_createInfo.pQueueFamilyIndices,
                       createInfo.queueFamilyIndexCount * sizeof(createInfo.pQueueFamilyIndices[0])) == 0);
    }

    ~IMAGE_STATE() {
        // Free the queue family index array deep-copied at construction
        // (same ownership pattern as BUFFER_STATE).
        if ((createInfo.sharingMode == VK_SHARING_MODE_CONCURRENT) && (createInfo.queueFamilyIndexCount > 0)) {
            delete[] createInfo.pQueueFamilyIndices;
            createInfo.pQueueFamilyIndices = nullptr;
        }
    };
};
// State for a VkImageView; holds a shared_ptr to the underlying image state so
// the image state stays alive while the view exists.
class IMAGE_VIEW_STATE : public BASE_NODE {
  public:
    VkImageView image_view;
    VkImageViewCreateInfo create_info;
    const VkImageSubresourceRange normalized_subresource_range;  // Create-info range with remaining-counts resolved
    const image_layout_map::RangeGenerator range_generator;
    VkSampleCountFlagBits samples;
    unsigned descriptor_format_bits;  // descriptor_req component-type bits derived from the view format
    VkSamplerYcbcrConversion samplerConversion;  // Handle of the ycbcr sampler conversion the image was created with, if any
    VkFormatFeatureFlags format_features;
    std::shared_ptr<IMAGE_STATE> image_state;

    IMAGE_VIEW_STATE(const std::shared_ptr<IMAGE_STATE> &image_state, VkImageView iv, const VkImageViewCreateInfo *ci);
    IMAGE_VIEW_STATE(const IMAGE_VIEW_STATE &rh_obj) = delete;
};
// State for a ray tracing acceleration structure. Supports both the NV and KHR
// create paths; exactly one of create_infoNV / create_infoKHR is populated,
// depending on which constructor was used.
class ACCELERATION_STRUCTURE_STATE : public BINDABLE {
  public:
    VkAccelerationStructureNV acceleration_structure;
    safe_VkAccelerationStructureCreateInfoNV create_infoNV;
    safe_VkAccelerationStructureCreateInfoKHR create_infoKHR;

    // Lazily-queried memory requirements, with "was it queried" flags.
    bool memory_requirements_checked = false;
    VkMemoryRequirements2KHR memory_requirements;
    bool build_scratch_memory_requirements_checked = false;
    VkMemoryRequirements2KHR build_scratch_memory_requirements;
    bool update_scratch_memory_requirements_checked = false;
    VkMemoryRequirements2KHR update_scratch_memory_requirements;

    bool built = false;  // Has the acceleration structure been built
    safe_VkAccelerationStructureInfoNV build_info;
    uint64_t opaque_handle = 0;

    ACCELERATION_STRUCTURE_STATE(VkAccelerationStructureNV as, const VkAccelerationStructureCreateInfoNV *ci)
        : acceleration_structure(as),
          create_infoNV(ci),
          memory_requirements{},
          build_scratch_memory_requirements_checked{},
          update_scratch_memory_requirements_checked{} {}
    ACCELERATION_STRUCTURE_STATE(VkAccelerationStructureKHR as, const VkAccelerationStructureCreateInfoKHR *ci)
        : acceleration_structure(as),
          create_infoKHR(ci),
          memory_requirements{},
          build_scratch_memory_requirements_checked{},
          update_scratch_memory_requirements_checked{} {}
    ACCELERATION_STRUCTURE_STATE(const ACCELERATION_STRUCTURE_STATE &rh_obj) = delete;
};
// One image belonging to a swapchain, plus the set of other images bound
// against it.
struct SWAPCHAIN_IMAGE {
    VkImage image;
    std::unordered_set<VkImage> bound_images;
};
// State for a VkSwapchainKHR and its images.
class SWAPCHAIN_NODE : public BASE_NODE {
  public:
    safe_VkSwapchainCreateInfoKHR createInfo;
    VkSwapchainKHR swapchain;
    std::vector<SWAPCHAIN_IMAGE> images;  // Indexed by swapchain image index
    bool retired = false;                 // True once replaced/destroyed (e.g. used as oldSwapchain)
    bool shared_presentable = false;      // True for shared-presentable surface swapchains
    // Call-twice protocol tracking for vkGetSwapchainImagesKHR.
    CALL_STATE vkGetSwapchainImagesKHRState = UNCALLED;
    uint32_t get_swapchain_image_count = 0;

    SWAPCHAIN_NODE(const VkSwapchainCreateInfoKHR *pCreateInfo, VkSwapchainKHR swapchain)
        : createInfo(pCreateInfo), swapchain(swapchain) {}
};
extern bool ImageLayoutMatches(const VkImageAspectFlags aspect_mask, VkImageLayout a, VkImageLayout b);
// Store the DAG.
// One graph node per pass, with edges to its predecessor and successor passes.
struct DAGNode {
    uint32_t pass;               // Pass index this node represents
    std::vector<uint32_t> prev;  // Predecessor pass indices
    std::vector<uint32_t> next;  // Successor pass indices
};
// State for a VkRenderPass. The create info is always stored in the "2" (KHR)
// form; the v1 constructor converts on the way in.
struct RENDER_PASS_STATE : public BASE_NODE {
    VkRenderPass renderPass;
    safe_VkRenderPassCreateInfo2 createInfo;
    // Per-subpass list of its self-dependency indices into pDependencies.
    std::vector<std::vector<uint32_t>> self_dependencies;
    std::vector<DAGNode> subpassToNode;  // Subpass dependency DAG, one node per subpass
    std::unordered_map<uint32_t, bool> attachment_first_read;  // attachment index -> first use is a read

    RENDER_PASS_STATE(VkRenderPassCreateInfo2KHR const *pCreateInfo) : createInfo(pCreateInfo) {}
    RENDER_PASS_STATE(VkRenderPassCreateInfo const *pCreateInfo) {
        // Normalize v1 create info to the v2 representation.
        ConvertVkRenderPassCreateInfoToV2KHR(*pCreateInfo, &createInfo);
    }
};
// Autogenerated as part of the vk_validation_error_message.h codegen:
// expands the generated X-macro list into one CMD_-prefixed enumerator per command.
enum CMD_TYPE { VUID_CMD_ENUM_LIST(CMD_) };
// Lifecycle state of a command buffer, from allocation through recording,
// completion, and invalidation.
enum CB_STATE {
    CB_NEW,                 // Newly created CB w/o any cmds
    CB_RECORDING,           // BeginCB has been called on this CB
    CB_RECORDED,            // EndCB has been called on this CB
    CB_INVALID_COMPLETE,    // had a complete recording, but was since invalidated
    CB_INVALID_INCOMPLETE,  // fouled before recording was completed
};
// CB Status -- used to track status of various bindings on cmd buffer objects
typedef VkFlags CBStatusFlags;
enum CBStatusFlagBits {
    // clang-format off
    CBSTATUS_NONE                     = 0x00000000,   // No status is set
    CBSTATUS_LINE_WIDTH_SET           = 0x00000001,   // Line width has been set
    CBSTATUS_DEPTH_BIAS_SET           = 0x00000002,   // Depth bias has been set
    CBSTATUS_BLEND_CONSTANTS_SET      = 0x00000004,   // Blend constants state has been set
    CBSTATUS_DEPTH_BOUNDS_SET         = 0x00000008,   // Depth bounds state object has been set
    CBSTATUS_STENCIL_READ_MASK_SET    = 0x00000010,   // Stencil read mask has been set
    CBSTATUS_STENCIL_WRITE_MASK_SET   = 0x00000020,   // Stencil write mask has been set
    CBSTATUS_STENCIL_REFERENCE_SET    = 0x00000040,   // Stencil reference has been set
    CBSTATUS_VIEWPORT_SET             = 0x00000080,   // Viewport has been set
    CBSTATUS_SCISSOR_SET              = 0x00000100,   // Scissor has been set
    CBSTATUS_INDEX_BUFFER_BOUND       = 0x00000200,   // Index buffer has been set
    CBSTATUS_EXCLUSIVE_SCISSOR_SET    = 0x00000400,   // Exclusive scissor has been set
    CBSTATUS_SHADING_RATE_PALETTE_SET = 0x00000800,   // Shading rate palette has been set
    CBSTATUS_LINE_STIPPLE_SET         = 0x00001000,   // Line stipple has been set
    CBSTATUS_VIEWPORT_W_SCALING_SET   = 0x00002000,   // Viewport W scaling has been set
    CBSTATUS_ALL_STATE_SET            = 0x00003DFF,   // All state set (intentionally exclude index buffer)
    // clang-format on
};
// Identifies a single query slot within a query pool. Identity (for ==, <, and
// hash) is (pool, query, perf_pass); the remaining fields are payload.
struct QueryObject {
    VkQueryPool pool;
    uint32_t query;
    // These next two fields are *not* used in hash or comparison, they are effectively a data payload
    uint32_t index;  // must be zero if !indexed
    uint32_t perf_pass;  // NOTE: perf_pass *is* part of identity (see operator== / operator< / hash)
    bool indexed;
    // Command index in the command buffer where the end of the query was
    // recorded (equal to the number of commands in the command buffer before
    // the end of the query).
    uint64_t endCommandIndex;

    QueryObject(VkQueryPool pool_, uint32_t query_)
        : pool(pool_), query(query_), index(0), perf_pass(0), indexed(false), endCommandIndex(0) {}
    QueryObject(VkQueryPool pool_, uint32_t query_, uint32_t index_)
        : pool(pool_), query(query_), index(index_), perf_pass(0), indexed(true), endCommandIndex(0) {}
    QueryObject(const QueryObject &obj)
        : pool(obj.pool),
          query(obj.query),
          index(obj.index),
          perf_pass(obj.perf_pass),
          indexed(obj.indexed),
          endCommandIndex(obj.endCommandIndex) {}
    // Copy with a different performance-query pass index.
    QueryObject(const QueryObject &obj, uint32_t perf_pass_)
        : pool(obj.pool),
          query(obj.query),
          index(obj.index),
          perf_pass(perf_pass_),
          indexed(obj.indexed),
          endCommandIndex(obj.endCommandIndex) {}
    // Lexicographic ordering on (pool, query, perf_pass).
    bool operator<(const QueryObject &rhs) const {
        return (pool == rhs.pool) ? ((query == rhs.query) ? (perf_pass < rhs.perf_pass) : (query < rhs.query)) : pool < rhs.pool;
    }
};
// Equality on the identity fields only: (pool, query, perf_pass). The payload
// fields (index/indexed/endCommandIndex) are intentionally ignored, matching
// operator< and the std::hash specialization.
inline bool operator==(const QueryObject &query1, const QueryObject &query2) {
    if (query1.pool != query2.pool) return false;
    if (query1.query != query2.query) return false;
    return query1.perf_pass == query2.perf_pass;
}
// Lifecycle state of a query slot, as tracked by the validation layers.
enum QueryState {
    QUERYSTATE_UNKNOWN,    // Initial state.
    QUERYSTATE_RESET,      // After resetting.
    QUERYSTATE_RUNNING,    // Query running.
    QUERYSTATE_ENDED,      // Query ended but results may not be available.
    QUERYSTATE_AVAILABLE,  // Results available.
};
// Expected outcome of fetching results for a query in a given QueryState.
enum QueryResultType {
    QUERYRESULT_UNKNOWN,          // State of the query cannot be determined
    QUERYRESULT_NO_DATA,          // Query may return no data
    QUERYRESULT_SOME_DATA,        // Query will return some data or availability bit
    QUERYRESULT_WAIT_ON_RESET,    // Would wait on a query that was reset but never issued
    QUERYRESULT_WAIT_ON_RUNNING,  // Would wait on a query that has not ended yet
};

// Map a QueryResultType to the human-readable phrase used in validation messages.
inline const char *string_QueryResultType(QueryResultType result_type) {
    switch (result_type) {
        case QUERYRESULT_UNKNOWN: return "query may be in an unknown state";
        case QUERYRESULT_NO_DATA: return "query may return no data";
        case QUERYRESULT_SOME_DATA: return "query will return some data or availability bit";
        case QUERYRESULT_WAIT_ON_RESET: return "waiting on a query that has been reset and not issued yet";
        case QUERYRESULT_WAIT_ON_RUNNING: return "waiting on a query that has not ended yet";
    }
    // Every enumerator is handled above; reaching here is a programming error.
    assert(false);
    return "UNKNOWN QUERY STATE";  // Unreachable.
}
namespace std {
// Hash over the identity fields (pool, query, perf_pass) -- the same fields
// operator== compares. query and perf_pass are packed into one 64-bit value.
// NOTE: uses NOEXCEPT like the file's other hash specializations; the previous
// dynamic exception specification `throw()` is deprecated since C++11 and
// ill-formed in C++20.
template <>
struct hash<QueryObject> {
    size_t operator()(QueryObject query) const NOEXCEPT {
        return hash<uint64_t>()((uint64_t)(query.pool)) ^
               hash<uint64_t>()(static_cast<uint64_t>(query.query) | (static_cast<uint64_t>(query.perf_pass) << 32));
    }
};
}  // namespace std
// Vertex buffer bindings recorded into a command buffer.
struct CBVertexBufferBindingInfo {
    std::vector<BufferBinding> vertex_buffer_bindings;
};
// Field-wise equality for VkImageSubresource (aspect mask, mip level, array layer).
static inline bool operator==(const VkImageSubresource &lhs, const VkImageSubresource &rhs) {
    if (lhs.aspectMask != rhs.aspectMask) return false;
    if (lhs.mipLevel != rhs.mipLevel) return false;
    return lhs.arrayLayer == rhs.arrayLayer;
}
// Canonical dictionary for PushConstantRanges
using PushConstantRangesDict = hash_util::Dictionary<PushConstantRanges>;
using PushConstantRangesId = PushConstantRangesDict::Id;
// Canonical dictionary for the pipeline layout's layout of descriptorsetlayouts
using DescriptorSetLayoutDef = cvdescriptorset::DescriptorSetLayoutDef;
using DescriptorSetLayoutId = std::shared_ptr<const DescriptorSetLayoutDef>;
using PipelineLayoutSetLayoutsDef = std::vector<DescriptorSetLayoutId>;
using PipelineLayoutSetLayoutsDict =
hash_util::Dictionary<PipelineLayoutSetLayoutsDef, hash_util::IsOrderedContainer<PipelineLayoutSetLayoutsDef>>;
using PipelineLayoutSetLayoutsId = PipelineLayoutSetLayoutsDict::Id;
// Defines/stores a compatibility definition for set N
// The "layout layout" must store at least set+1 entries, but only the first set+1 are considered for hash and equality testing
// Note: the "canonical" data are referenced by Id, not including handle or device specific state
// Note: hash and equality only consider layout_id entries [0, set] for determining uniqueness
struct PipelineLayoutCompatDef {
    uint32_t set;  // The set number this compatibility record is for
    PushConstantRangesId push_constant_ranges;
    PipelineLayoutSetLayoutsId set_layouts_id;

    PipelineLayoutCompatDef(const uint32_t set_index, const PushConstantRangesId pcr_id, const PipelineLayoutSetLayoutsId sl_id)
        : set(set_index), push_constant_ranges(pcr_id), set_layouts_id(sl_id) {}
    // hash/== are defined out-of-line; both consider only entries [0, set].
    size_t hash() const;
    bool operator==(const PipelineLayoutCompatDef &other) const;
};
// Canonical dictionary for PipelineLayoutCompat records
using PipelineLayoutCompatDict = hash_util::Dictionary<PipelineLayoutCompatDef, hash_util::HasHashMember<PipelineLayoutCompatDef>>;
using PipelineLayoutCompatId = PipelineLayoutCompatDict::Id;
// Store layouts and pushconstants for PipelineLayout
struct PIPELINE_LAYOUT_STATE : public BASE_NODE {
    VkPipelineLayout layout;
    std::vector<std::shared_ptr<cvdescriptorset::DescriptorSetLayout const>> set_layouts;  // Indexed by set number
    PushConstantRangesId push_constant_ranges;
    // Per-set canonical compatibility ids, used by CompatForSet below.
    std::vector<PipelineLayoutCompatId> compat_for_set;

    PIPELINE_LAYOUT_STATE() : layout(VK_NULL_HANDLE), set_layouts{}, push_constant_ranges{}, compat_for_set{} {}

    // Return the object to its default-constructed (empty) state.
    void reset() {
        layout = VK_NULL_HANDLE;
        set_layouts.clear();
        push_constant_ranges.reset();
        compat_for_set.clear();
    }
};
// Shader typedefs needed to store StageState below
// A shader interface variable (SPIR-V id plus location/decoration info).
struct interface_var {
    uint32_t id;       // SPIR-V result id of the variable
    uint32_t type_id;  // SPIR-V id of the variable's type
    uint32_t offset;
    bool is_patch;
    bool is_block_member;
    bool is_relaxed_precision;
    // TODO: collect the name, too? Isn't required to be present.
};
typedef std::pair<unsigned, unsigned> descriptor_slot_t;
// Safe struct that spans NV and KHR VkRayTracingPipelineCreateInfo structures.
// It is a safe_VkRayTracingPipelineCreateInfoKHR and supports construction from
// a VkRayTracingPipelineCreateInfoNV.
class safe_VkRayTracingPipelineCreateInfoCommon : public safe_VkRayTracingPipelineCreateInfoKHR {
  public:
    safe_VkRayTracingPipelineCreateInfoCommon() : safe_VkRayTracingPipelineCreateInfoKHR() {}
    safe_VkRayTracingPipelineCreateInfoCommon(const VkRayTracingPipelineCreateInfoNV *pCreateInfo)
        : safe_VkRayTracingPipelineCreateInfoKHR() {
        initialize(pCreateInfo);
    }
    // Populate this KHR-shaped struct from an NV create info. A temporary
    // safe_VkRayTracingPipelineCreateInfoNV does the deep copy; ownership of
    // its heap allocations (pNext chain, pStages) is then stolen by nulling
    // the temporary's pointers so its destructor does not free them.
    void initialize(const VkRayTracingPipelineCreateInfoNV *pCreateInfo) {
        safe_VkRayTracingPipelineCreateInfoNV nvStruct;
        nvStruct.initialize(pCreateInfo);
        sType = nvStruct.sType;
        // Take ownership of the pointer and null it out in nvStruct
        pNext = nvStruct.pNext;
        nvStruct.pNext = nullptr;
        flags = nvStruct.flags;
        stageCount = nvStruct.stageCount;
        // Take ownership of the stage array as well.
        pStages = nvStruct.pStages;
        nvStruct.pStages = nullptr;
        groupCount = nvStruct.groupCount;
        maxRecursionDepth = nvStruct.maxRecursionDepth;
        layout = nvStruct.layout;
        basePipelineHandle = nvStruct.basePipelineHandle;
        basePipelineIndex = nvStruct.basePipelineIndex;
        assert(pGroups == nullptr);
        if (nvStruct.groupCount && nvStruct.pGroups) {
            // Convert NV shader groups to the KHR layout field-by-field.
            pGroups = new safe_VkRayTracingShaderGroupCreateInfoKHR[groupCount];
            for (uint32_t i = 0; i < groupCount; ++i) {
                pGroups[i].sType = nvStruct.pGroups[i].sType;
                pGroups[i].pNext = nvStruct.pGroups[i].pNext;
                pGroups[i].type = nvStruct.pGroups[i].type;
                pGroups[i].generalShader = nvStruct.pGroups[i].generalShader;
                pGroups[i].closestHitShader = nvStruct.pGroups[i].closestHitShader;
                pGroups[i].anyHitShader = nvStruct.pGroups[i].anyHitShader;
                pGroups[i].intersectionShader = nvStruct.pGroups[i].intersectionShader;
                // KHR-only field with no NV equivalent.
                pGroups[i].pShaderGroupCaptureReplayHandle = nullptr;
            }
        }
    }
    void initialize(const VkRayTracingPipelineCreateInfoKHR *pCreateInfo) {
        safe_VkRayTracingPipelineCreateInfoKHR::initialize(pCreateInfo);
    }
};
// State for a VkPipeline. Exactly one of graphicsPipelineCI / computePipelineCI
// / raytracingPipelineCI is populated, as reported by getPipelineType().
class PIPELINE_STATE : public BASE_NODE {
  public:
    // Per-shader-stage data gathered from SPIR-V analysis.
    struct StageState {
        std::unordered_set<uint32_t> accessible_ids;
        std::vector<std::pair<descriptor_slot_t, interface_var>> descriptor_uses;
        bool has_writable_descriptor;
    };

    VkPipeline pipeline;
    safe_VkGraphicsPipelineCreateInfo graphicsPipelineCI;
    safe_VkComputePipelineCreateInfo computePipelineCI;
    safe_VkRayTracingPipelineCreateInfoCommon raytracingPipelineCI;
    // Hold shared ptr to RP in case RP itself is destroyed
    std::shared_ptr<const RENDER_PASS_STATE> rp_state;
    // Flag of which shader stages are active for this pipeline
    uint32_t active_shaders;
    uint32_t duplicate_shaders;
    // Capture which slots (set#->bindings) are actually used by the shaders of this pipeline
    std::unordered_map<uint32_t, BindingReqMap> active_slots;
    uint32_t max_active_slot;  // the highest set number in active_slots for pipeline layout compatibility checks
    // Additional metadata needed by pipeline_state initialization and validation
    std::vector<StageState> stage_state;
    // Vtx input info (if any)
    std::vector<VkVertexInputBindingDescription> vertex_binding_descriptions_;
    std::vector<VkVertexInputAttributeDescription> vertex_attribute_descriptions_;
    std::vector<VkDeviceSize> vertex_attribute_alignments_;
    std::unordered_map<uint32_t, uint32_t> vertex_binding_to_index_map_;  // binding# -> index in descriptions above
    std::vector<VkPipelineColorBlendAttachmentState> attachments;
    bool blendConstantsEnabled;  // Blend constants enabled for any attachments
    std::shared_ptr<const PIPELINE_LAYOUT_STATE> pipeline_layout;
    VkPrimitiveTopology topology_at_rasterizer;
    VkBool32 sample_location_enabled;

    // Default constructor
    // (vectors/containers not named here are simply default-constructed empty)
    PIPELINE_STATE()
        : pipeline{},
          graphicsPipelineCI{},
          computePipelineCI{},
          raytracingPipelineCI{},
          rp_state(nullptr),
          active_shaders(0),
          duplicate_shaders(0),
          active_slots(),
          max_active_slot(0),
          vertex_binding_descriptions_(),
          vertex_attribute_descriptions_(),
          vertex_binding_to_index_map_(),
          attachments(),
          blendConstantsEnabled(false),
          pipeline_layout(),
          topology_at_rasterizer{},
          sample_location_enabled(VK_FALSE) {}

    // Re-initialize all three create infos to empty and drop stage data.
    void reset() {
        VkGraphicsPipelineCreateInfo emptyGraphicsCI = {};
        graphicsPipelineCI.initialize(&emptyGraphicsCI, false, false);
        VkComputePipelineCreateInfo emptyComputeCI = {};
        computePipelineCI.initialize(&emptyComputeCI);
        VkRayTracingPipelineCreateInfoKHR emptyRayTracingCI = {};
        raytracingPipelineCI.initialize(&emptyRayTracingCI);
        stage_state.clear();
    }

    // Populate state from one of the create-info flavors (defined in the .cpp).
    void initGraphicsPipeline(const ValidationStateTracker *state_data, const VkGraphicsPipelineCreateInfo *pCreateInfo,
                              std::shared_ptr<const RENDER_PASS_STATE> &&rpstate);
    void initComputePipeline(const ValidationStateTracker *state_data, const VkComputePipelineCreateInfo *pCreateInfo);
    template <typename CreateInfo>
    void initRayTracingPipeline(const ValidationStateTracker *state_data, const CreateInfo *pCreateInfo);

    // Bind point implied by whichever create info was populated (based on its sType).
    inline VkPipelineBindPoint getPipelineType() const {
        if (graphicsPipelineCI.sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO)
            return VK_PIPELINE_BIND_POINT_GRAPHICS;
        else if (computePipelineCI.sType == VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO)
            return VK_PIPELINE_BIND_POINT_COMPUTE;
        else if (raytracingPipelineCI.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV)
            return VK_PIPELINE_BIND_POINT_RAY_TRACING_NV;
        else if (raytracingPipelineCI.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR)
            return VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR;
        else
            return VK_PIPELINE_BIND_POINT_MAX_ENUM;
    }

    // Create flags from whichever create info was populated; 0 if none.
    inline VkPipelineCreateFlags getPipelineCreateFlags() const {
        if (graphicsPipelineCI.sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO)
            return graphicsPipelineCI.flags;
        else if (computePipelineCI.sType == VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO)
            return computePipelineCI.flags;
        else if (raytracingPipelineCI.sType == VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV)
            return raytracingPipelineCI.flags;
        else
            return 0;
    }
};
// Track last states that are bound per pipeline bind point (Gfx & Compute)
struct LAST_BOUND_STATE {
    LAST_BOUND_STATE() { reset(); }  // must define default constructor for portability reasons
    PIPELINE_STATE *pipeline_state;
    VkPipelineLayout pipeline_layout;
    // The push descriptor set (if any) is owned by this struct.
    std::unique_ptr<cvdescriptorset::DescriptorSet> push_descriptor_set;

    // Ordered bound set tracking where index is set# that given set is bound to
    struct PER_SET {
        PER_SET()
            : bound_descriptor_set(nullptr),
              compat_id_for_set(0),
              validated_set(nullptr),
              validated_set_change_count(~0ULL),
              validated_set_image_layout_change_count(~0ULL),
              validated_set_binding_req_map() {}

        cvdescriptorset::DescriptorSet *bound_descriptor_set;
        // one dynamic offset per dynamic descriptor bound to this CB
        std::vector<uint32_t> dynamicOffsets;
        PipelineLayoutCompatId compat_id_for_set;

        // Cache most recently validated descriptor state for ValidateCmdBufDrawState/UpdateDrawState
        const cvdescriptorset::DescriptorSet *validated_set;
        uint64_t validated_set_change_count;
        uint64_t validated_set_image_layout_change_count;
        BindingReqMap validated_set_binding_req_map;
    };

    std::vector<PER_SET> per_set;

    // Clear all bound state (also frees any owned push descriptor set).
    void reset() {
        pipeline_state = nullptr;
        pipeline_layout = VK_NULL_HANDLE;
        push_descriptor_set = nullptr;
        per_set.clear();
    }

    // Replace the owned push descriptor set with ds, first unbinding the old
    // one from any per-set slot that still points at it.
    void UnbindAndResetPushDescriptorSet(cvdescriptorset::DescriptorSet *ds) {
        if (push_descriptor_set) {
            for (std::size_t i = 0; i < per_set.size(); i++) {
                if (per_set[i].bound_descriptor_set == push_descriptor_set.get()) {
                    per_set[i].bound_descriptor_set = nullptr;
                }
            }
        }
        push_descriptor_set.reset(ds);
    }
};
// True when set# 'set' exists in both the bound state and the candidate layout
// compat list, and their canonical compatibility ids match.
static inline bool CompatForSet(uint32_t set, const LAST_BOUND_STATE &a, const std::vector<PipelineLayoutCompatId> &b) {
    if (set >= a.per_set.size()) return false;
    if (set >= b.size()) return false;
    return a.per_set[set].compat_id_for_set == b[set];
}
// True when both (non-null) pipeline layouts cover set# and agree on its
// compatibility id, per the Vulkan pipeline-layout-compatibility rules.
static inline bool CompatForSet(uint32_t set, const PIPELINE_LAYOUT_STATE *a, const PIPELINE_LAYOUT_STATE *b) {
    // Intentionally have a result variable to simplify debugging
    bool result = a && b && (set < a->compat_for_set.size()) && (set < b->compat_for_set.size()) &&
                  (a->compat_for_set[set] == b->compat_for_set[set]);
    return result;
}
// Types to store queue family ownership (QFO) Transfers
// Common to image and buffer memory barriers
template <typename Handle, typename Barrier>
struct QFOTransferBarrierBase {
    using HandleType = Handle;
    using BarrierType = Barrier;
    // Empty tag type used for compile-time overload selection (see GetQFOBarrierSets).
    struct Tag {};
    HandleType handle = VK_NULL_HANDLE;
    uint32_t srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    uint32_t dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    QFOTransferBarrierBase() = default;
    QFOTransferBarrierBase(const BarrierType &barrier, const HandleType &resource_handle)
        : handle(resource_handle),
          srcQueueFamilyIndex(barrier.srcQueueFamilyIndex),
          dstQueueFamilyIndex(barrier.dstQueueFamilyIndex) {}
    // Hash over the fields common to all QFO barriers; derived types fold in
    // their own fields on top of this combiner.
    hash_util::HashCombiner base_hash_combiner() const {
        hash_util::HashCombiner hc;
        hc << srcQueueFamilyIndex << dstQueueFamilyIndex << handle;
        return hc;
    }
    // Equality over the common fields only; derived types extend this.
    bool operator==(const QFOTransferBarrierBase &rhs) const {
        return (srcQueueFamilyIndex == rhs.srcQueueFamilyIndex) && (dstQueueFamilyIndex == rhs.dstQueueFamilyIndex) &&
               (handle == rhs.handle);
    }
};
// Primary template is intentionally empty; only the image/buffer
// specializations below are usable.
template <typename Barrier>
struct QFOTransferBarrier {};

// Image barrier specific implementation
template <>
struct QFOTransferBarrier<VkImageMemoryBarrier> : public QFOTransferBarrierBase<VkImage, VkImageMemoryBarrier> {
    using BaseType = QFOTransferBarrierBase<VkImage, VkImageMemoryBarrier>;
    VkImageLayout oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    VkImageLayout newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    VkImageSubresourceRange subresourceRange;
    QFOTransferBarrier() = default;
    QFOTransferBarrier(const BarrierType &barrier)
        : BaseType(barrier, barrier.image),
          oldLayout(barrier.oldLayout),
          newLayout(barrier.newLayout),
          subresourceRange(barrier.subresourceRange) {}
    size_t hash() const {
        // Ignoring the layout information for the purpose of the hash, as we're interested in QFO release/acquisition w.r.t.
        // the subresource affected, and layout transitions are currently validated on another path
        auto hc = base_hash_combiner() << subresourceRange;
        return hc.Value();
    }
    bool operator==(const QFOTransferBarrier<BarrierType> &rhs) const {
        // Ignoring layout w.r.t. equality. See comment in hash above.
        return (static_cast<BaseType>(*this) == static_cast<BaseType>(rhs)) && (subresourceRange == rhs.subresourceRange);
    }
    // TODO: codegen a comprehensive compile time type -> string (and or other traits) template family
    static const char *BarrierName() { return "VkImageMemoryBarrier"; }
    static const char *HandleName() { return "VkImage"; }
    // UNASSIGNED-VkImageMemoryBarrier-image-00001 QFO transfer image barrier must not duplicate QFO recorded in command buffer
    static const char *ErrMsgDuplicateQFOInCB() { return "UNASSIGNED-VkImageMemoryBarrier-image-00001"; }
    // UNASSIGNED-VkImageMemoryBarrier-image-00002 QFO transfer image barrier must not duplicate QFO submitted in batch
    static const char *ErrMsgDuplicateQFOInSubmit() { return "UNASSIGNED-VkImageMemoryBarrier-image-00002"; }
    // UNASSIGNED-VkImageMemoryBarrier-image-00003 QFO transfer image barrier must not duplicate QFO submitted previously
    static const char *ErrMsgDuplicateQFOSubmitted() { return "UNASSIGNED-VkImageMemoryBarrier-image-00003"; }
    // UNASSIGNED-VkImageMemoryBarrier-image-00004 QFO acquire image barrier must have matching QFO release submitted previously
    static const char *ErrMsgMissingQFOReleaseInSubmit() { return "UNASSIGNED-VkImageMemoryBarrier-image-00004"; }
};
// Buffer barrier specific implementation
template <>
struct QFOTransferBarrier<VkBufferMemoryBarrier> : public QFOTransferBarrierBase<VkBuffer, VkBufferMemoryBarrier> {
    using BaseType = QFOTransferBarrierBase<VkBuffer, VkBufferMemoryBarrier>;
    VkDeviceSize offset = 0;
    VkDeviceSize size = 0;
    // Consistency fix: the image specialization declares a defaulted default
    // constructor, but here the user-declared converting constructor suppressed
    // the implicit one. Restore it; all members have in-class initializers.
    QFOTransferBarrier() = default;
    QFOTransferBarrier(const VkBufferMemoryBarrier &barrier)
        : BaseType(barrier, barrier.buffer), offset(barrier.offset), size(barrier.size) {}
    // Hash folds the affected buffer range on top of the common QFO fields.
    size_t hash() const {
        auto hc = base_hash_combiner() << offset << size;
        return hc.Value();
    }
    bool operator==(const QFOTransferBarrier<BarrierType> &rhs) const {
        return (static_cast<BaseType>(*this) == static_cast<BaseType>(rhs)) && (offset == rhs.offset) && (size == rhs.size);
    }
    static const char *BarrierName() { return "VkBufferMemoryBarrier"; }
    static const char *HandleName() { return "VkBuffer"; }
    // UNASSIGNED-VkBufferMemoryBarrier-buffer-00001 QFO transfer buffer barrier must not duplicate QFO recorded in command buffer
    static const char *ErrMsgDuplicateQFOInCB() { return "UNASSIGNED-VkBufferMemoryBarrier-buffer-00001"; }
    // UNASSIGNED-VkBufferMemoryBarrier-buffer-00002 QFO transfer buffer barrier must not duplicate QFO submitted in batch
    static const char *ErrMsgDuplicateQFOInSubmit() { return "UNASSIGNED-VkBufferMemoryBarrier-buffer-00002"; }
    // UNASSIGNED-VkBufferMemoryBarrier-buffer-00003 QFO transfer buffer barrier must not duplicate QFO submitted previously
    static const char *ErrMsgDuplicateQFOSubmitted() { return "UNASSIGNED-VkBufferMemoryBarrier-buffer-00003"; }
    // UNASSIGNED-VkBufferMemoryBarrier-buffer-00004 QFO acquire buffer barrier must have matching QFO release submitted previously
    static const char *ErrMsgMissingQFOReleaseInSubmit() { return "UNASSIGNED-VkBufferMemoryBarrier-buffer-00004"; }
};
// Hash functor that forwards to QFOTransferBarrier<Barrier>::hash().
template <typename Barrier>
using QFOTransferBarrierHash = hash_util::HasHashMember<QFOTransferBarrier<Barrier>>;

// Command buffers store the set of barriers recorded
template <typename Barrier>
using QFOTransferBarrierSet = std::unordered_set<QFOTransferBarrier<Barrier>, QFOTransferBarrierHash<Barrier>>;

// Paired release/acquire barrier sets recorded into a single command buffer.
template <typename Barrier>
struct QFOTransferBarrierSets {
    QFOTransferBarrierSet<Barrier> release;
    QFOTransferBarrierSet<Barrier> acquire;
    void Reset() {
        acquire.clear();
        release.clear();
    }
};

// The layer_data stores the map of pending release barriers
template <typename Barrier>
using GlobalQFOTransferBarrierMap =
    std::unordered_map<typename QFOTransferBarrier<Barrier>::HandleType, QFOTransferBarrierSet<Barrier>>;

// Submit queue uses the Scoreboard to track all release/acquire operations in a batch.
template <typename Barrier>
using QFOTransferCBScoreboard =
    std::unordered_map<QFOTransferBarrier<Barrier>, const CMD_BUFFER_STATE *, QFOTransferBarrierHash<Barrier>>;

// Acquire/release scoreboards used while validating one queue submission.
template <typename Barrier>
struct QFOTransferCBScoreboards {
    QFOTransferCBScoreboard<Barrier> acquire;
    QFOTransferCBScoreboard<Barrier> release;
};

// Query state keyed by query object; ordered map so iteration is deterministic.
typedef std::map<QueryObject, QueryState> QueryMap;
typedef std::unordered_map<VkEvent, VkPipelineStageFlags> EventToStageMap;
typedef ImageSubresourceLayoutMap::LayoutMap GlobalImageLayoutRangeMap;
typedef std::unordered_map<VkImage, std::unique_ptr<GlobalImageLayoutRangeMap>> GlobalImageLayoutMap;
typedef std::unordered_map<VkImage, std::unique_ptr<ImageSubresourceLayoutMap>> CommandBufferImageLayoutMap;
// Cmd Buffer Wrapper Struct - TODO : This desperately needs its own class
struct CMD_BUFFER_STATE : public BASE_NODE {
    VkCommandBuffer commandBuffer;
    VkCommandBufferAllocateInfo createInfo = {};
    VkCommandBufferBeginInfo beginInfo;
    VkCommandBufferInheritanceInfo inheritanceInfo;
    VkDevice device;  // device this CB belongs to
    std::shared_ptr<const COMMAND_POOL_STATE> command_pool;
    // Flags noting which command categories have been recorded into this CB.
    bool hasDrawCmd;
    bool hasTraceRaysCmd;
    bool hasBuildAccelerationStructureCmd;
    bool hasDispatchCmd;
    CB_STATE state;         // Track cmd buffer update state
    uint64_t commandCount;  // Number of commands recorded
    uint64_t submitCount;   // Number of times CB has been submitted
    typedef uint64_t ImageLayoutUpdateCount;
    ImageLayoutUpdateCount image_layout_change_count;  // The sequence number for changes to image layout (for cached validation)
    CBStatusFlags status;                              // Track status of various bindings on cmd buffer
    CBStatusFlags static_status;                       // All state bits provided by current graphics pipeline
                                                       // rather than dynamic state
    // Currently storing "lastBound" objects on per-CB basis
    // long-term may want to create caches of "lastBound" states and could have
    // each individual CMD_NODE referencing its own "lastBound" state
    // Store last bound state for Gfx & Compute pipeline bind points
    std::map<uint32_t, LAST_BOUND_STATE> lastBound;
    using Bindings = std::map<uint32_t, descriptor_req>;
    using Pipelines_Bindings = std::map<VkPipeline, Bindings>;
    // Descriptor sets whose validation is deferred until queue submit time.
    std::unordered_map<VkDescriptorSet, Pipelines_Bindings> validate_descriptorsets_in_queuesubmit;
    uint32_t viewportMask;
    uint32_t scissorMask;
    uint32_t initial_device_mask;
    // Active render pass state (valid between vkCmdBeginRenderPass and ...EndRenderPass).
    safe_VkRenderPassBeginInfo activeRenderPassBeginInfo;
    RENDER_PASS_STATE *activeRenderPass;
    VkSubpassContents activeSubpassContents;
    uint32_t active_render_pass_device_mask;
    uint32_t activeSubpass;
    VkFramebuffer activeFramebuffer;
    std::unordered_set<VkFramebuffer> framebuffers;
    // Unified data structs to track objects bound to this command buffer as well as object
    // dependencies that have been broken : either destroyed objects, or updated descriptor sets
    std::vector<VulkanTypedHandle> object_bindings;
    std::vector<VulkanTypedHandle> broken_bindings;
    // Queue-family-ownership transfer barriers recorded into this CB.
    QFOTransferBarrierSets<VkBufferMemoryBarrier> qfo_transfer_buffer_barriers;
    QFOTransferBarrierSets<VkImageMemoryBarrier> qfo_transfer_image_barriers;
    std::unordered_set<VkEvent> waitedEvents;
    std::vector<VkEvent> writeEventsBeforeWait;
    std::vector<VkEvent> events;
    // Query bookkeeping: currently active, ever started, and reset queries.
    std::unordered_set<QueryObject> activeQueries;
    std::unordered_set<QueryObject> startedQueries;
    std::unordered_set<QueryObject> resetQueries;
    CommandBufferImageLayoutMap image_layout_map;
    CBVertexBufferBindingInfo current_vertex_buffer_binding_info;
    bool vertex_buffer_used;  // Track for perf warning to make sure any bound vtx buffer used
    VkCommandBuffer primaryCommandBuffer;
    // If primary, the secondary command buffers we will call.
    // If secondary, the primary command buffers we will be called by.
    std::unordered_set<CMD_BUFFER_STATE *> linkedCommandBuffers;
    // Validation functions run at primary CB queue submit time
    std::vector<std::function<bool(const ValidationStateTracker *device_data, const class QUEUE_STATE *queue_state)>>
        queue_submit_functions;
    // Validation functions run when secondary CB is executed in primary
    std::vector<std::function<bool(const CMD_BUFFER_STATE *, VkFramebuffer)>> cmd_execute_commands_functions;
    // Deferred event-state updates, replayed (validate-only or for real) at submit.
    std::vector<
        std::function<bool(const ValidationStateTracker *device_data, bool do_validate, EventToStageMap *localEventToStageMap)>>
        eventUpdates;
    // Deferred query-state updates, replayed at submit time per performance pass.
    std::vector<std::function<bool(const ValidationStateTracker *device_data, bool do_validate, VkQueryPool &firstPerfQueryPool,
                                   uint32_t perfQueryPass, QueryMap *localQueryToStateMap)>>
        queryUpdates;
    std::unordered_set<cvdescriptorset::DescriptorSet *> validated_descriptor_sets;
    // Contents valid only after an index buffer is bound (CBSTATUS_INDEX_BUFFER_BOUND set)
    IndexBufferBinding index_buffer_binding;
    // VK_KHR_performance_query profiling-lock bookkeeping.
    bool performance_lock_acquired = false;
    bool performance_lock_released = false;
    // Cache of current insert label...
    LoggingLabel debug_label;
    std::vector<uint8_t> push_constant_data;
    PushConstantRangesId push_constant_data_ranges;
    // Used for Best Practices tracking
    uint32_t small_indexed_draw_call_count;
    std::vector<IMAGE_VIEW_STATE *> imagelessFramebufferAttachments;
};
// Tag-dispatch accessors: select the image- or buffer-barrier set of a command
// buffer at compile time via the barrier type's nested Tag. Const and
// non-const overloads for each barrier flavor.
static inline const QFOTransferBarrierSets<VkImageMemoryBarrier> &GetQFOBarrierSets(
    const CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) {
    return cb->qfo_transfer_image_barriers;
}
static inline const QFOTransferBarrierSets<VkBufferMemoryBarrier> &GetQFOBarrierSets(
    const CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) {
    return cb->qfo_transfer_buffer_barriers;
}
static inline QFOTransferBarrierSets<VkImageMemoryBarrier> &GetQFOBarrierSets(
    CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkImageMemoryBarrier>::Tag &type_tag) {
    return cb->qfo_transfer_image_barriers;
}
static inline QFOTransferBarrierSets<VkBufferMemoryBarrier> &GetQFOBarrierSets(
    CMD_BUFFER_STATE *cb, const QFOTransferBarrier<VkBufferMemoryBarrier>::Tag &type_tag) {
    return cb->qfo_transfer_buffer_barriers;
}
// A pending wait on a semaphore: which queue waits, the timeline payload
// value waited for, and the submission sequence number of the signal.
struct SEMAPHORE_WAIT {
    VkSemaphore semaphore;
    VkQueue queue;
    uint64_t payload;
    uint64_t seq;
};
// A pending signal of a semaphore with its timeline payload and the
// submission sequence number that produces it.
struct SEMAPHORE_SIGNAL {
    VkSemaphore semaphore;
    uint64_t payload;
    uint64_t seq;
};
// One batch of a queue submission: the command buffers plus the semaphores
// waited/signaled, the optional fence, and the performance-query pass index.
struct CB_SUBMISSION {
    CB_SUBMISSION(std::vector<VkCommandBuffer> const &cbs, std::vector<SEMAPHORE_WAIT> const &waitSemaphores,
                  std::vector<SEMAPHORE_SIGNAL> const &signalSemaphores, std::vector<VkSemaphore> const &externalSemaphores,
                  VkFence fence, uint32_t perf_submit_pass)
        : cbs(cbs),
          waitSemaphores(waitSemaphores),
          signalSemaphores(signalSemaphores),
          externalSemaphores(externalSemaphores),
          fence(fence),
          perf_submit_pass(perf_submit_pass) {}
    std::vector<VkCommandBuffer> cbs;
    std::vector<SEMAPHORE_WAIT> waitSemaphores;
    std::vector<SEMAPHORE_SIGNAL> signalSemaphores;
    std::vector<VkSemaphore> externalSemaphores;
    VkFence fence;
    uint32_t perf_submit_pass;
};
// Framebuffer attachment info: the image view state plus the image it views.
struct MT_FB_ATTACHMENT_INFO {
    IMAGE_VIEW_STATE *view_state;
    VkImage image;
};
// State wrapper for a VkFramebuffer: its create info and the render pass it
// was created against.
class FRAMEBUFFER_STATE : public BASE_NODE {
  public:
    VkFramebuffer framebuffer;
    safe_VkFramebufferCreateInfo createInfo;
    std::shared_ptr<const RENDER_PASS_STATE> rp_state;
    // Fix: rpstate arrives by rvalue reference but was previously copied into
    // rp_state; std::move honors the && contract and avoids a refcount bump.
    FRAMEBUFFER_STATE(VkFramebuffer fb, const VkFramebufferCreateInfo *pCreateInfo, std::shared_ptr<RENDER_PASS_STATE> &&rpstate)
        : framebuffer(fb), createInfo(pCreateInfo), rp_state(std::move(rpstate)){};
};
struct SHADER_MODULE_STATE;
struct DeviceExtensions;

// Aggregated snapshot of every physical-device feature struct the validation
// layers query: the core 1.0/1.1/1.2 features plus one member per extension
// feature struct.
struct DeviceFeatures {
    VkPhysicalDeviceFeatures core;
    VkPhysicalDeviceVulkan11Features core11;
    VkPhysicalDeviceVulkan12Features core12;
    VkPhysicalDeviceExclusiveScissorFeaturesNV exclusive_scissor;
    VkPhysicalDeviceShadingRateImageFeaturesNV shading_rate_image;
    VkPhysicalDeviceMeshShaderFeaturesNV mesh_shader;
    VkPhysicalDeviceInlineUniformBlockFeaturesEXT inline_uniform_block;
    VkPhysicalDeviceTransformFeedbackFeaturesEXT transform_feedback_features;
    VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT vtx_attrib_divisor_features;
    VkPhysicalDeviceBufferDeviceAddressFeaturesEXT buffer_device_address_ext;
    VkPhysicalDeviceCooperativeMatrixFeaturesNV cooperative_matrix_features;
    VkPhysicalDeviceComputeShaderDerivativesFeaturesNV compute_shader_derivatives_features;
    VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV fragment_shader_barycentric_features;
    VkPhysicalDeviceShaderImageFootprintFeaturesNV shader_image_footprint_features;
    VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT fragment_shader_interlock_features;
    VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT demote_to_helper_invocation_features;
    VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT texel_buffer_alignment_features;
    VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR pipeline_exe_props_features;
    VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV dedicated_allocation_image_aliasing_features;
    VkPhysicalDevicePerformanceQueryFeaturesKHR performance_query_features;
    VkPhysicalDeviceCoherentMemoryFeaturesAMD device_coherent_memory_features;
    VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ycbcr_image_array_features;
    VkPhysicalDeviceRayTracingFeaturesKHR ray_tracing_features;
    VkPhysicalDeviceRobustness2FeaturesEXT robustness2_features;
    VkPhysicalDeviceFragmentDensityMapFeaturesEXT fragment_density_map_features;
    VkPhysicalDeviceCustomBorderColorFeaturesEXT custom_border_color_features;
};
// Distinguish render passes created with the v1 vs. v2 (KHR/core 1.2) entry points.
enum RenderPassCreateVersion { RENDER_PASS_VERSION_1 = 0, RENDER_PASS_VERSION_2 = 1 };
// Distinguish v1 vs. v2 flavors of commands that have both entry points.
enum CommandVersion { CMD_VERSION_1 = 0, CMD_VERSION_2 = 1 };
// Classification of a group of barrier operations for QFO validation.
enum BarrierOperationsType {
    kAllAcquire,  // All Barrier operations are "ownership acquire" operations
    kAllRelease,  // All Barrier operations are "ownership release" operations
    kGeneral,     // Either no ownership operations or a mix of ownership operation types and/or non-ownership operations
};

// Lookup (creating on demand) the per-CB subresource layout map for an image.
ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(CMD_BUFFER_STATE *cb_state, const IMAGE_STATE &image_state);
// Const lookup of the per-CB subresource layout map; nullptr if none recorded.
const ImageSubresourceLayoutMap *GetImageSubresourceLayoutMap(const CMD_BUFFER_STATE *cb_state, VkImage image);
// Seed image_layout_map with the image's initial layouts.
void AddInitialLayoutintoImageLayoutMap(const IMAGE_STATE &image_state, GlobalImageLayoutMap &image_layout_map);
#endif  // CORE_VALIDATION_TYPES_H_
|
Java
|
# Generated clean rules for the dynamic_object_test target: removes the
# compiled object, the PDB, and the linked test binary on "make clean".
FILE(REMOVE_RECURSE
"CMakeFiles/dynamic_object_test.dir/unittests/dynamic_object_test.cpp.o"
"dynamic_object_test.pdb"
"dynamic_object_test"
)
# Per-language clean rules from dependency scanning.
FOREACH(lang CXX)
INCLUDE(CMakeFiles/dynamic_object_test.dir/cmake_clean_${lang}.cmake OPTIONAL)
ENDFOREACH(lang)
|
Java
|
/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.Jaguar;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.Servo;
import edu.wpi.first.wpilibj.SimpleRobot;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the SimpleRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class RobotTemplate extends SimpleRobot {

    /** Driver joystick on USB port 1. */
    private Joystick joystick = new Joystick(1);
    private Drivetrain drivetrain;
    private BowlerArm arm;
    Compressor compressor;
    Pan pan;
    //int port_1 = 7; //these ports were placeholders, no longer applicable
    //int port_2 = 7;

    public RobotTemplate() {
        drivetrain = new Drivetrain();
        arm = new BowlerArm();
        pan = new Pan();
        compressor = new Compressor(7, 7); //7 for the switch, 7 for the relay
    }

    /**
     * This function is called once each time the robot enters autonomous mode.
     * Drives forward at full speed for 5 seconds, then stops.
     */
    public void autonomous() {
        drivetrain.set(1, 1);
        sleep(5000);
        drivetrain.set(0, 0);
        // arm.auto();
    }

    /**
     * This function is called once each time the robot enters operator control.
     * Runs tank drive, the pan servo, and the bowler arm off the joystick
     * until operator control ends.
     */
    public void operatorControl() {
        compressor.start();
        arm.setSolenoid(-1);
        while (isOperatorControl()) {
            // Tank drive with squared inputs: Math.abs(v) * v squares the
            // magnitude while preserving sign, giving finer low-speed control.
            double lstick = -joystick.getRawAxis(2);
            double rstick = -joystick.getRawAxis(4);
            drivetrain.set(Math.abs(lstick) * lstick, Math.abs(rstick) * rstick);

            // Pan servo: hold button 10 for the end-game position, otherwise reset.
            if (joystick.getRawButton(10)) {
                pan.endGame();
            } else {
                pan.resetServo();
            }

            // Bowler arm ramp: button 7 lowers, button 5 raises, otherwise hold still.
            if (joystick.getRawButton(7)) {
                arm.rampDown();
            } else if (joystick.getRawButton(5)) {
                arm.rampUp();
            } else {
                arm.setRamp(0);
            }
            arm.setSolenoid((int) joystick.getRawAxis(6));
        }
    }

    /**
     * This function is called once each time the robot enters test mode.
     */
    public void test() {
    }

    public void updateDrivetrain() {
    }

    public void updateArm() {
    }

    public void updatePan() {
    }

    /**
     * Pauses the calling thread for approximately ms milliseconds.
     *
     * Fix: the original implementation busy-waited on
     * System.currentTimeMillis(), pegging the CPU for the whole duration.
     * Thread.sleep yields the processor instead.
     */
    public static void sleep(long ms) {
        try {
            Thread.sleep(ms);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve interrupt status for callers
        }
    }
}
|
Java
|
/*
* Copyright (c) 2010, Anima Games, Benjamin Karaban, Laurent Schneider,
* Jérémie Comarmond, Didier Colin.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* - Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "PtyPartEffectTranslate.moc.h"
#include <QtToolbox/CollapsibleWidget.moc.h>
#include <QtToolbox/SingleSlidingValue.moc.h>
#include <QtToolbox/SingleSlidingHDR.moc.h>
#include <QGridLayout>
#include <QPushButton>
namespace EPI
{
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Wraps an existing translate effect already attached to the emitter, then
// pulls its current speeds into this property.
PtyPartEffectTranslate::PtyPartEffectTranslate( const Ptr<Universe::NodeEmitter>& pNodeE,
                                                const Ptr<Universe::PartEffectTranslate>& pEffect,
                                                const Core::String& title)
:   PtyPartEffect(pNodeE, pEffect, title)
{
    updateProperty();
}
//-----------------------------------------------------------------------------
// Creates a brand-new (default-constructed) translate effect for the emitter
// and initializes this property from it.
PtyPartEffectTranslate::PtyPartEffectTranslate(const Ptr<Universe::NodeEmitter>& pNodeE, const Core::String& title)
:   PtyPartEffect(
        pNodeE,
        Ptr<Universe::PartEffectTranslate>(new Universe::PartEffectTranslate()),
        title)
{
    updateProperty();
}
//-----------------------------------------------------------------------------
// Nothing to release; base class owns the effect reference.
PtyPartEffectTranslate::~PtyPartEffectTranslate()
{}
//-----------------------------------------------------------------------------
// Factory for the editor widget bound to this property's data proxy.
Ptr<PropertyWidget> PtyPartEffectTranslate::internalCreatePropertyWidget(const Ptr<PropertyWidgetDataProxy>& pDataProxy, QWidget * parent)
{
    Ptr<PtyWidgetPartEffectTranslate> pPW (new PtyWidgetPartEffectTranslate(pDataProxy, parent));
    return pPW;
}
//-----------------------------------------------------------------------------
// Pushes the property's cached speeds into the underlying universe effect
// (property -> engine direction).
void PtyPartEffectTranslate::updateData()
{
    Ptr<Universe::PartEffectTranslate> pEffet = LM_DEBUG_PTR_CAST<Universe::PartEffectTranslate> (getEffect());
    pEffet->setConstSpeed(_constSpeed);
    pEffet->setRandSpeed(_randSpeed);
}
//-----------------------------------------------------------------------------
// Pulls the current speeds from the underlying universe effect into the
// property's cached members (engine -> property direction).
void PtyPartEffectTranslate::updateProperty()
{
    Ptr<Universe::PartEffectTranslate> pEffet = LM_DEBUG_PTR_CAST<Universe::PartEffectTranslate> (getEffect());
    _constSpeed = pEffet->getConstSpeed();
    _randSpeed = pEffet->getRandSpeed();
}
//-----------------------------------------------------------------------------
// Recreates the universe effect after an undo/redo "resurrection" and
// re-attaches it to the emitter with the cached speeds.
// NOTE(review): pWorld, pWorldInfoContent and pty are unused here — this
// effect needs no world context to rebuild; part of the generic interface.
void PtyPartEffectTranslate::internalResurrect(const Ptr<Universe::World>& pWorld, const Ptr<Universe::World>& pWorldInfoContent, const Ptr<Property>& pty)
{
    LM_ASSERT(getEffect()==null);
    Ptr<Universe::IPartEffect> pEffet = Ptr<Universe::PartEffectTranslate>(new Universe::PartEffectTranslate());
    setEffect(pEffet);
    getUniverseNodeEmitter()->addEffect(getEffect());
    updateData();
}
//-----------------------------------------------------------------------------
// Deep-copies this property via the copy constructor.
Ptr<Property> PtyPartEffectTranslate::clone() const
{
    return Ptr<Property>(new PtyPartEffectTranslate( *this ));
}
//-----------------------------------------------------------------------------
// Copies the speed members from pSrc (after base-class copy) and syncs the
// underlying effect with the new values.
void PtyPartEffectTranslate::internalCopy(const Ptr<Property>& pSrc)
{
    PtyPartEffect::internalCopy(pSrc);
    Ptr<PtyPartEffectTranslate> pPty = LM_DEBUG_PTR_CAST<PtyPartEffectTranslate>(pSrc);
    _constSpeed = pPty->_constSpeed;
    _randSpeed = pPty->_randSpeed;
    updateData();
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Builds the editor widget; all child-widget construction happens in setupUi().
PtyWidgetPartEffectTranslate::PtyWidgetPartEffectTranslate(const Ptr<PropertyWidgetDataProxy>& data, QWidget * parent)
:   PropertyWidget(data, parent)
{
    setupUi();
}
//-----------------------------------------------------------------------------
// Child widgets are parented to this widget; Qt deletes them automatically.
PtyWidgetPartEffectTranslate::~PtyWidgetPartEffectTranslate()
{}
//-----------------------------------------------------------------------------
// Loads the property's const/rand speed vectors into the six sliders
// (property -> UI direction).
void PtyWidgetPartEffectTranslate::readProperty()
{
    Ptr<PtyPartEffectTranslate> pP = LM_DEBUG_PTR_CAST<PtyPartEffectTranslate>(getDataProxy()->getProperty());
    _constSpeedX->setSingleValue(pP->_constSpeed.x);
    _constSpeedY->setSingleValue(pP->_constSpeed.y);
    _constSpeedZ->setSingleValue(pP->_constSpeed.z);
    _randSpeedX->setSingleValue(pP->_randSpeed.x);
    _randSpeedY->setSingleValue(pP->_randSpeed.y);
    _randSpeedZ->setSingleValue(pP->_randSpeed.z);
}
//-----------------------------------------------------------------------------
// Pushes the six slider values back into the property as two Vector3f's
// (UI -> property direction). pWidget is unused: every widget is written
// back unconditionally.
void PtyWidgetPartEffectTranslate::writeProperty(QWidget* pWidget)
{
    Ptr<PtyPartEffectTranslate> pP = LM_DEBUG_PTR_CAST<PtyPartEffectTranslate>(getDataProxy()->getProperty());

    double cx = 0.0;
    double cy = 0.0;
    double cz = 0.0;
    _constSpeedX->getSingleValue(cx);
    _constSpeedY->getSingleValue(cy);
    _constSpeedZ->getSingleValue(cz);
    pP->_constSpeed = Core::Vector3f(float(cx), float(cy), float(cz));

    double rx = 0.0;
    double ry = 0.0;
    double rz = 0.0;
    _randSpeedX->getSingleValue(rx);
    _randSpeedY->getSingleValue(ry);
    _randSpeedZ->getSingleValue(rz);
    pP->_randSpeed = Core::Vector3f(float(rx), float(ry), float(rz));
}
//-----------------------------------------------------------------------------
// Builds the widget tree: a collapsible group box holding six HDR sliders
// (const XYZ, rand XYZ) plus a delete button in the title bar, and registers
// the sliders for undo/redo.
void PtyWidgetPartEffectTranslate::setupUi()
{
    _layout = new QGridLayout(this);
    _layout->setContentsMargins(0, 0, 0, 0);
    _layout->setSpacing(0);
    _groupBox = new QtToolbox::CollapsibleWidget(this, "Translate effect");
    _del = new QPushButton(QIcon(":/icons/smallClearBW.png"), "", this);
    _constSpeedX = new QtToolbox::SingleSlidingHDR(this, "Const X", true);
    _constSpeedY = new QtToolbox::SingleSlidingHDR(this, "Const Y", true);
    _constSpeedZ = new QtToolbox::SingleSlidingHDR(this, "Const Z", true);
    _randSpeedX = new QtToolbox::SingleSlidingHDR(this, "Rand X", true);
    _randSpeedY = new QtToolbox::SingleSlidingHDR(this, "Rand Y", true);
    _randSpeedZ = new QtToolbox::SingleSlidingHDR(this, "Rand Z", true);
    _groupBox->addWidgetToTitle(_del);
    _groupBox->getLayout()->addWidget(_constSpeedX);
    _groupBox->getLayout()->addWidget(_constSpeedY);
    _groupBox->getLayout()->addWidget(_constSpeedZ);
    _groupBox->getLayout()->addWidget(_randSpeedX);
    _groupBox->getLayout()->addWidget(_randSpeedY);
    _groupBox->getLayout()->addWidget(_randSpeedZ);
    _layout->addWidget(_groupBox);
    setLayout(_layout);
    // Registering the sliders lets the base class snapshot them for undo/redo.
    getWidgetsForUndoRedo().push_back(_constSpeedX);
    getWidgetsForUndoRedo().push_back(_constSpeedY);
    getWidgetsForUndoRedo().push_back(_constSpeedZ);
    getWidgetsForUndoRedo().push_back(_randSpeedX);
    getWidgetsForUndoRedo().push_back(_randSpeedY);
    getWidgetsForUndoRedo().push_back(_randSpeedZ);
    PropertyWidget::setupUi();
    connect(_del, SIGNAL(clicked()), this, SLOT(deleteWidget()));
}
//-----------------------------------------------------------------------------
// Slot for the title-bar delete button: asks the owner to remove this effect.
void PtyWidgetPartEffectTranslate::deleteWidget()
{
    emit deletePtyWidgetEffect(this);
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
}//namespace EPI
|
Java
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Quality Control based on fuzzy logic.
"""
import logging
import numpy as np
from .core import QCCheckVar
from .gradient import gradient
from .spike import spike
from .woa_normbias import woa_normbias
from cotede.fuzzy import fuzzy_uncertainty
module_logger = logging.getLogger(__name__)
def fuzzylogic(features, cfg, require="all"):
    """Estimate the fuzzy-logic uncertainty for the given features.

    Parameters
    ----------
    features : dict
        Mapping of feature name to feature values.
    cfg : dict
        Fuzzy-logic configuration; must contain "features" and "output" and
        may override ``require`` with a "require" key.
    require : str, optional
        If "all" (the default), every feature listed in cfg["features"] must
        be present in ``features``, otherwise KeyError is raised.

    FIXME: Think about, should I return 0, or have an assert, and at qc.py
           all qc tests are applied with a try, and in case it fails it flag
           0s.
    """
    require = cfg.get("require", require)
    if (require == "all") and not all(f in features for f in cfg["features"]):
        # Bug fix: the original mixed a %-style placeholder with str.format(),
        # so the feature list was never interpolated into the message. Use
        # logging's lazy %-argument form instead. Also avoid .keys(), which
        # breaks when cfg["features"] is a list rather than a dict.
        module_logger.warning(
            "Not all features (%s) required by fuzzy logic are available",
            list(cfg["features"]),
        )
        raise KeyError("Missing features required by fuzzy logic")
    uncertainty = fuzzy_uncertainty(
        data=features, features=cfg["features"], output=cfg["output"], require=require
    )
    return uncertainty
class FuzzyLogic(QCCheckVar):
    """Fuzzy-logic quality-control check for a single variable.

    Builds the configured features (spike, gradient, WOA comparisons, ...),
    combines them through fuzzy_uncertainty, and maps the resulting
    uncertainty onto flags 1-4.
    """

    def set_features(self):
        # Compute each requested feature on demand; WOA-based features share
        # the same woa_normbias call but extract different outputs.
        self.features = {}
        for v in [f for f in self.cfg["features"] if f not in self.features]:
            if v == "woa_bias":
                woa_comparison = woa_normbias(self.data, self.varname, self.attrs)
                self.features[v] = woa_comparison["woa_bias"]
            elif v == "woa_normbias":
                woa_comparison = woa_normbias(self.data, self.varname, self.attrs)
                self.features[v] = woa_comparison["woa_normbias"]
            elif v == "spike":
                self.features[v] = spike(self.data[self.varname])
            elif v == "gradient":
                self.features[v] = gradient(self.data[self.varname])
        # Final combined uncertainty, stored as a feature itself.
        self.features["fuzzylogic"] = fuzzylogic(self.features, self.cfg)

    def test(self):
        # Map the combined uncertainty onto IOC-style flags:
        # <=0.29 good (1), <=0.34 probably good (2), <=0.72 probably bad (3),
        # >0.72 bad (4).
        self.flags = {}
        cfg = self.cfg
        flag = np.zeros(np.shape(self.data[self.varname]), dtype="i1")
        uncertainty = self.features["fuzzylogic"]
        # FIXME: As it is now, it will have no zero flag value. Think about cases
        #        where some values in a profile would not be estimated, hence flag=0
        # I needed to use np.nonzeros because now uncertainty is a masked array,
        # to accept when a feature is masked.
        flag[np.nonzero(uncertainty <= 0.29)] = 1
        flag[np.nonzero((uncertainty > 0.29) & (uncertainty <= 0.34))] = 2
        flag[np.nonzero((uncertainty > 0.34) & (uncertainty <= 0.72))] = 3
        flag[np.nonzero(uncertainty > 0.72)] = 4
        self.flags["fuzzylogic"] = flag
|
Java
|
# Spree PayPal Express
This is a "re-do" of the official [spree_paypal_express][4] extension. The old extension is extremely hard to maintain and complex.
Behind-the-scenes, this extension uses [PayPal's Merchant Ruby SDK](https://github.com/paypal/merchant-sdk-ruby).
## Installation
1. Add this extension to your Gemfile with this line:
gem 'spree_paypal_express', :github => "spree-contrib/better_spree_paypal_express", :branch => "2-2-stable"
2. Install the gem using Bundler:
bundle install
3. Copy & run migrations
bundle exec rails g spree_paypal_express:install
4. Restart your server
If your server was running, restart it so that it can find the assets properly.
### Sandbox Setup
#### PayPal
Go to [PayPal's Developer Website](https://developer.paypal.com/), sign in with your PayPal account, click "Applications" then "Sandbox Accounts" and create a new "Business" account. Once the account is created, click on the triangle next to its email address, then "Profile". The "API Credentials" tab will provide your API credentials. If this tab is blank, try refreshing the page.
You will also need a "Personal" account to test the transactions on your site. Create this in the same way, finding the account information under "Profile" as well. You may need to set a password in order to be able to log in to PayPal's sandbox for this user.
#### Spree Setup
In Spree, go to the admin backend, click "Configuration" and then "Payment Methods" and create a new payment method. Select "Spree::Gateway::PayPalExpress" as the provider, and click "Create". Enter the email address, password and signature from the "API Credentials" tab for the **Business** account on PayPal.
### Production setup
#### PayPal
Sign in to PayPal, then click "Profile" and then (under "Account Information" on the left), click "API Access". On this page, select "Option 2" and click "View API Signature". The username, password and signature will be displayed on this screen.
If you are unable to find it, then follow [PayPal's own documentation](https://developer.paypal.com/webapps/developer/docs/classic/api/apiCredentials/).
#### Spree Setup
Same as sandbox setup, but change "Server" from "sandbox" to "live".
## Configuration
The PayPal Express Checkout has [no less than 4.5 billion configuration options](https://github.com/paypal/merchant-sdk-ruby/blob/1d65e598d2f9f200f85c6b3338d4293dbed576d8/lib/paypal-sdk/merchant/data_types.rb#L830-L959).
This Spree extension supports *some* of those. If your favourite is not here, then please submit an issue about it, or better still a patch to add it in.
### Solution Type
Determines whether or not a user needs a PayPal account to check out.
```ruby
payment_method.preferred_solution_type = "Mark"
# or
payment_method.preferred_solution_type = "Sole"
```
"Mark" if you do want users to have a paypal account, "Sole" otherwise.
### Landing Page
Determines which page to show users once they're redirected to PayPal.
```ruby
payment_method.preferred_landing_page = "Login"
# or
payment_method.preferred_landing_page = "Billing"
```
"Login" will show the users the login form for PayPal, and "Billing" will show them a form where they can enter their credit card data and possibly sign up for a PayPal account (depending on the Solution Type setting above).
### Logo
Determines what logo, if any, to display at the top left of the PayPal express checkout:
```ruby
payment_method.preferred_logourl = 'http://yoursite.com/images/checkout.jpg'
```
**Must** be an absolute path to the image.
## Caveats
*Caveat venditor*
Paypal will refuse any order with a zero cost item.
Any such item will be skipped and not displayed.
PayPal will also refuse any order where item total (before taxes and shipping costs) is zero.
In this case the PayPal checkout page will simply display "Current order".
## Contributing
In the spirit of [free software][1], **everyone** is encouraged to help improve this project.
Here are some ways *you* can contribute:
* by using prerelease versions
* by reporting [bugs][2]
* by suggesting new features
* by writing or editing documentation
* by writing specifications
* by writing code (*no patch is too small*: fix typos, add comments, clean up inconsistent whitespace)
* by refactoring code
* by resolving [issues][2]
* by reviewing patches
Starting point:
* Fork the repo
* Clone your repo
* Run `bundle install`
* Run `bundle exec rake test_app` to create the test application in `spec/dummy`
* Make your changes
* Ensure specs pass by running `bundle exec rspec spec`
* Submit your pull request
Copyright (c) 2014 Spree Commerce and contributors, released under the [New BSD License][3]
[1]: http://www.fsf.org/licensing/essays/free-sw.html
[2]: https://github.com/spree/better_spree_paypal_express/issues
[3]: https://github.com/spree/better_spree_paypal_express/tree/master/LICENSE.md
[4]: https://github.com/spree/spree_paypal_express
|
Java
|
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.ui.widget;
import android.content.Context;
import android.util.AttributeSet;
import android.util.SparseArray;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.IdRes;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import org.chromium.base.BuildConfig;
import java.lang.ref.WeakReference;
/**
* An {@link OptimizedFrameLayout} that increases the speed of frequent view lookup by ID by caching
* the result of the lookup. Adding or removing a view with the same ID as a cached version will
* cause the cache to be invalidated for that view and cause a re-lookup the next time it is
* queried. The goal of this view type is to be used in cases where child views are frequently
* accessed or reused, for example as part of a {@link androidx.recyclerview.widget.RecyclerView}.
* The logic in the {@link #fastFindViewById(int)} method would be in {@link #findViewById(int)} if
* it weren't final on the {@link View} class.
*
* {@link android.view.ViewGroup.OnHierarchyChangeListener}s cannot be used on ViewGroups that are
* children of this group since they would overwrite the listeners that are critical to this class'
* functionality.
*
* Usage:
* Use the same way that you would use a normal {@link android.widget.FrameLayout}, but instead
* of using {@link #findViewById(int)} to access views, use {@link #fastFindViewById(int)}.
*/
public class ViewLookupCachingFrameLayout extends OptimizedFrameLayout {
    /** A map containing views that have had lookup performed on them for quicker access. */
    private final SparseArray<WeakReference<View>> mCachedViews = new SparseArray<>();

    /** The hierarchy listener responsible for notifying the cache that the tree has changed. */
    @VisibleForTesting
    final OnHierarchyChangeListener mListener = new OnHierarchyChangeListener() {
        @Override
        public void onChildViewAdded(View parent, View child) {
            // A newly added view with a cached ID may shadow the cached entry,
            // so drop it and force a re-lookup on next access.
            mCachedViews.remove(child.getId());
            // Also watch the added subtree so nested changes invalidate the cache.
            setHierarchyListenerOnTree(child, this);
        }

        @Override
        public void onChildViewRemoved(View parent, View child) {
            mCachedViews.remove(child.getId());
            // Detach listeners from the removed subtree; it is no longer ours to watch.
            setHierarchyListenerOnTree(child, null);
        }
    };

    /** Default constructor for use in XML. */
    public ViewLookupCachingFrameLayout(Context context, AttributeSet atts) {
        super(context, atts);
        setOnHierarchyChangeListener(mListener);
    }

    @Override
    public void setOnHierarchyChangeListener(OnHierarchyChangeListener listener) {
        // External listeners would replace the cache-invalidation listener this
        // class depends on, so only the internal one is permitted.
        assert listener == mListener : "Hierarchy change listeners cannot be set for this group!";
        super.setOnHierarchyChangeListener(listener);
    }

    /**
     * Set the hierarchy listener that invalidates relevant parts of the cache when subtrees change.
     * @param view The root of the tree to attach listeners to.
     * @param listener The listener to attach (null to unset).
     */
    private void setHierarchyListenerOnTree(View view, OnHierarchyChangeListener listener) {
        // Leaf views cannot have children, so there is nothing to observe.
        if (!(view instanceof ViewGroup)) return;

        ViewGroup group = (ViewGroup) view;
        group.setOnHierarchyChangeListener(listener);
        for (int i = 0; i < group.getChildCount(); i++) {
            setHierarchyListenerOnTree(group.getChildAt(i), listener);
        }
    }

    /**
     * Does the same thing as {@link #findViewById(int)} but caches the result if not null.
     * Subsequent lookups are cheaper as a result. Adding or removing a child view invalidates
     * the cache for the ID of the view removed and causes a re-lookup.
     * @param id The ID of the view to lookup.
     * @return The view if it exists.
     */
    @Nullable
    public View fastFindViewById(@IdRes int id) {
        WeakReference<View> ref = mCachedViews.get(id);
        View view = null;
        if (ref != null) view = ref.get();
        // Cache miss (or cleared reference): fall back to a full tree traversal.
        if (view == null) view = findViewById(id);
        if (BuildConfig.DCHECK_IS_ON) {
            // The cached view must always agree with a fresh lookup.
            assert view == findViewById(id) : "View caching logic is broken!";
            // A cached reference should not have been collected — presumably
            // attached views are strongly held by their parents (NOTE(review):
            // confirm this invariant holds for detached-but-cached views).
            assert ref == null
                    || ref.get() != null : "Cache held reference to garbage collected view!";
        }
        if (view != null) mCachedViews.put(id, new WeakReference<>(view));
        return view;
    }

    /** Exposes the raw cache for tests only. */
    @VisibleForTesting
    SparseArray<WeakReference<View>> getCache() {
        return mCachedViews;
    }
}
|
Java
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_91) on Tue Dec 29 12:44:17 AEDT 2015 -->
<title>org.bouncycastle.math.raw (Bouncy Castle Library 1.54 API Specification)</title>
<meta name="date" content="2015-12-29">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="org.bouncycastle.math.raw (Bouncy Castle Library 1.54 API Specification)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li class="navBarCell1Rev">Package</li>
<li>Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage"><em><b>Bouncy Castle Cryptography Library 1.54</b></em></div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/bouncycastle/math/field/package-summary.html">Prev Package</a></li>
<li><a href="../../../../org/bouncycastle/math/raw/test/package-summary.html">Next Package</a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/bouncycastle/math/raw/package-summary.html" target="_top">Frames</a></li>
<li><a href="package-summary.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 title="Package" class="title">Package org.bouncycastle.math.raw</h1>
<div class="docSummary">
<div class="block">Math support for raw multi-precision calculations.</div>
</div>
<p>See: <a href="#package_description">Description</a></p>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation">
<caption><span>Class Summary</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Class</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Interleave.html" title="class in org.bouncycastle.math.raw">Interleave</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Mod.html" title="class in org.bouncycastle.math.raw">Mod</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Mont256.html" title="class in org.bouncycastle.math.raw">Mont256</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Nat.html" title="class in org.bouncycastle.math.raw">Nat</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Nat128.html" title="class in org.bouncycastle.math.raw">Nat128</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Nat160.html" title="class in org.bouncycastle.math.raw">Nat160</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Nat192.html" title="class in org.bouncycastle.math.raw">Nat192</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Nat224.html" title="class in org.bouncycastle.math.raw">Nat224</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Nat256.html" title="class in org.bouncycastle.math.raw">Nat256</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Nat320.html" title="class in org.bouncycastle.math.raw">Nat320</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Nat384.html" title="class in org.bouncycastle.math.raw">Nat384</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Nat448.html" title="class in org.bouncycastle.math.raw">Nat448</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Nat512.html" title="class in org.bouncycastle.math.raw">Nat512</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../../org/bouncycastle/math/raw/Nat576.html" title="class in org.bouncycastle.math.raw">Nat576</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
</ul>
<a name="package_description">
<!-- -->
</a>
<h2 title="Package org.bouncycastle.math.raw Description">Package org.bouncycastle.math.raw Description</h2>
<div class="block">Math support for raw multi-precision calculations.</div>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li class="navBarCell1Rev">Package</li>
<li>Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage"><em><b>Bouncy Castle Cryptography Library 1.54</b></em></div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../org/bouncycastle/math/field/package-summary.html">Prev Package</a></li>
<li><a href="../../../../org/bouncycastle/math/raw/test/package-summary.html">Next Package</a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?org/bouncycastle/math/raw/package-summary.html" target="_top">Frames</a></li>
<li><a href="package-summary.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
|
Java
|
from setuptools import find_packages, setup

# Packaging metadata for the gelato.models namespace-package distribution.
setup(
    name='gelato.models',
    version='0.1.2',
    description='Gelato models',
    long_description='',
    author='',
    author_email='',
    url='',
    license='',
    namespace_packages=['gelato'],
    packages=find_packages(exclude=['tests']),
    include_package_data=True,
    install_requires=['django', 'tower'],
)
|
Java
|
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, TypeFamilies #-}
-- | Quarter-turn rotations for Rubik's-cube style moves.
module Rubik.Turn where

import Data.Array

import Rubik.Negate as N
import Rubik.Key

-- | A rotation amount in quarter turns (clockwise positive).
data Turn = NoTurn | Clock | OneEighty | CounterClock
    deriving (Eq,Ord,Show,Enum,Ix)

-- | Negating a turn reverses its direction; 'NoTurn' and 'OneEighty'
-- are their own inverses.
instance Negate Turn where
    negate NoTurn       = NoTurn
    negate Clock        = CounterClock
    negate OneEighty    = OneEighty
    negate CounterClock = Clock

-- | Enumerates all four turns.
instance Key Turn where
    universe = [ NoTurn, Clock, OneEighty, CounterClock ]

-- | Things that can be rotated about one of their sides.
class Rotate a where
    type SideOf a
    rotate :: SideOf a -> a -> a

-- never used
--instance (Negate a, Rotate a b) => Rotate a (b -> c) where
--    rotate t f a = f (rotate (N.negate t) a)

{-
-- Split into its own module

class Rotate a where
  type SideOf a
  rotate :: SideOf a -> a -> a

  -- can complete either
  turn :: a -> a
  turn = rotateBy Clock

  rotateBy :: Turn -> a -> a
  rotateBy Clock = turn
  rotateBy OneEighty = turn . turn
  rotateBy CounterClock = turn . turn . turn

-- We invert the rotate because it is the co-varient position
instance Rotate a => Rotate (a -> b) where
  type SideOf (a -> b) = SideOf a
  rotateBy t f a = f (rotateBy (N.negate t) a)

instance (Rotate a,Rotate b) => Rotate (a,b) where
  rotateBy t (a,b) = (rotateBy t a, rotateBy t b)

data Apply a b = Apply (a -> b) a

apply :: Apply a b -> b
apply (Apply f a) = f a

instance Rotate a => Rotate (Apply a b) where
  turn (Apply f a) = Apply f (turn a)
-}
|
Java
|
/* -*- mode: c++; fill-column: 132; c-basic-offset: 4; indent-tabs-mode: nil -*- */
#include "irods_auth_object.hpp"
namespace irods {

    // Base auth object; holds the client error stack plus the request
    // result and plugin context strings. Construction is a stub for now.
    auth_object::auth_object(
        rError_t* _r_error ) : r_error_( _r_error ) {
        // TODO - stub
    }

    auth_object::~auth_object() {
        // TODO - stub
    }

    auth_object::auth_object(
        const auth_object& _rhs ) {
        r_error_         = _rhs.r_error();
        request_result_  = _rhs.request_result();
        context_         = _rhs.context();
    }

    auth_object& auth_object::operator=(
        const auth_object& _rhs ) {
        // Guard against self-assignment (previously missing; harmless for
        // these members today, but cheap insurance for derived state).
        if ( this != &_rhs ) {
            r_error_         = _rhs.r_error();
            request_result_  = _rhs.request_result();
            context_         = _rhs.context();
        }
        return *this;
    }

    // Member-wise equality on the base-class state. (The previous comment
    // claimed this "always returns true", contradicting the code below.)
    bool auth_object::operator==(
        const auth_object& _rhs ) const {
        return ( r_error_ == _rhs.r_error() &&
                 request_result_ == _rhs.request_result() &&
                 context_ == _rhs.context() );
    }

}; // namespace irods
|
Java
|
package operation
import (
"fmt"
"os"
"github.com/runabove/sail/internal"
"github.com/spf13/cobra"
)
// cmdOperationAttach is the cobra command that streams the output of an
// ongoing operation, optionally defaulting the application name.
var cmdOperationAttach = &cobra.Command{
	Use:   "attach",
	Short: "Attach to an ongoing operation output: sail operation attach [applicationName] <operationId>",
	Long: `Attach to an ongoing operation output: sail operation attach [applicationName] <operationId>
Example: sail operation attach devel/redis fa853ede-6c05-4823-8b20-46a5389fe0de
If the applicationName is not passed, the default application name will be used (the user's username).
`,
	Run: func(cmd *cobra.Command, args []string) {
		if len(args) == 1 {
			// applicationName was not passed. Using default one.
			operationAttach(internal.GetUserName(), args[0])
		} else if len(args) == 2 {
			operationAttach(args[0], args[1])
		} else {
			fmt.Fprintln(os.Stderr, "Invalid usage. sail operation attach [applicationName] <operationId>. Please see sail operation attach --help")
		}
	},
}
// operationAttach streams the output of operation operationID belonging to
// application app, then waits until the stream ends or the user hits Ctrl-C.
// (The previous "Split namespace and service" comment was stale: nothing is
// split here.)
func operationAttach(app, operationID string) {
	internal.StreamPrint("GET", fmt.Sprintf("/applications/%s/operation/%s/attach", app, operationID), nil)
	internal.ExitAfterCtrlC()
}
|
Java
|
/*-------------------------------------------------------------------------
* OpenGL Conformance Test Suite
* -----------------------------
*
* Copyright (c) 2016 Google Inc.
* Copyright (c) 2016 The Khronos Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/ /*!
* \file
* \brief CTS runner main().
*/ /*-------------------------------------------------------------------*/
#include "deString.h"
#include "deUniquePtr.hpp"
#include "glcTestRunner.hpp"
#include "tcuPlatform.hpp"
#include "tcuResource.hpp"
#include <cstdio>
// See tcuMain.cpp
tcu::Platform* createPlatform(void);
// Parsed command-line options for the CTS runner.
struct CommandLine
{
	// Defaults to an OpenGL ES 2.0 run with no extra flags.
	CommandLine(void) : runType(glu::ApiType::es(2, 0)), flags(0)
	{
	}

	glu::ApiType runType;   // API/version to run conformance against (--type=)
	std::string  dstLogDir; // destination directory for log files (--logdir=)
	deUint32     flags;     // glcts::TestRunner flag bits (--summary, --verbose)
};
// Parses the runner's command line into cmdLine.
// Returns false on the first unrecognized option or unknown --type= value.
static bool parseCommandLine(CommandLine& cmdLine, int argc, const char* const* argv)
{
	for (int argNdx = 1; argNdx < argc; argNdx++)
	{
		const char* arg = argv[argNdx];

		if (deStringBeginsWith(arg, "--type="))
		{
			// Map the --type= value onto a glu::ApiType.
			static const struct
			{
				const char*  name;
				glu::ApiType runType;
			} runTypes[] = { { "es2", glu::ApiType::es(2, 0) },   { "es3", glu::ApiType::es(3, 0) },
							 { "es31", glu::ApiType::es(3, 1) },  { "es32", glu::ApiType::es(3, 2) },
							 { "gl30", glu::ApiType::core(3, 0) }, { "gl31", glu::ApiType::core(3, 1) },
							 { "gl32", glu::ApiType::core(3, 2) }, { "gl33", glu::ApiType::core(3, 3) },
							 { "gl40", glu::ApiType::core(4, 0) }, { "gl41", glu::ApiType::core(4, 1) },
							 { "gl42", glu::ApiType::core(4, 2) }, { "gl43", glu::ApiType::core(4, 3) },
							 { "gl44", glu::ApiType::core(4, 4) }, { "gl45", glu::ApiType::core(4, 5) },
							 { "gl46", glu::ApiType::core(4, 6) } };

			const char* value = arg + 7; // skip "--type="
			int			ndx   = 0;
			for (; ndx < DE_LENGTH_OF_ARRAY(runTypes); ndx++)
			{
				if (deStringEqual(runTypes[ndx].name, value))
				{
					cmdLine.runType = runTypes[ndx].runType;
					break;
				}
			}
			if (ndx >= DE_LENGTH_OF_ARRAY(runTypes))
				return false; // unknown run type
		}
		else if (deStringBeginsWith(arg, "--logdir="))
		{
			const char* value = arg + 9; // skip "--logdir="
			cmdLine.dstLogDir = value;
		}
		else if (deStringBeginsWith(arg, "--summary"))
		{
			// OR into the flag mask so --summary and --verbose can be combined;
			// plain assignment previously made the last option win.
			cmdLine.flags |= glcts::TestRunner::PRINT_SUMMARY;
		}
		else if (deStringEqual(arg, "--verbose"))
			cmdLine.flags |= glcts::TestRunner::VERBOSE_ALL;
		else
			return false;
	}

	return true;
}
// Prints usage information for the runner binary to stdout.
// Option strings here must stay in sync with parseCommandLine() above.
static void printHelp(const char* binName)
{
	printf("%s:\n", binName);
	printf("  --type=[esN[M]|glNM] Conformance test run type. Choose from\n");
	printf("                       ES: es2, es3, es31, es32\n");
	printf("                       GL: gl30, gl31, gl32, gl33, gl40, gl41, gl42, gl43, gl44, gl45, gl46\n");
	printf("  --logdir=[path]      Destination directory for log files\n");
	printf("  --summary            Print summary without running the tests\n");
	printf("  --verbose            Print out and log more information\n");
}
// Entry point: parse arguments, construct the platform, then drive the
// conformance runner to completion. Returns EXIT_FAILURE on a
// non-conformant run, -1 on bad arguments or an exception.
int main(int argc, char** argv)
{
	CommandLine cmdLine;

	if (!parseCommandLine(cmdLine, argc, argv))
	{
		printHelp(argv[0]);
		return -1;
	}

	int exitStatus = EXIT_SUCCESS;
	try
	{
		de::UniquePtr<tcu::Platform> platform(createPlatform());
		tcu::DirArchive				 archive(".");
		glcts::TestRunner runner(static_cast<tcu::Platform&>(*platform.get()), archive, cmdLine.dstLogDir.c_str(),
								 cmdLine.runType, cmdLine.flags);

		// Iterate until the runner reports completion.
		while (runner.iterate())
			;

		if (!runner.isConformant())
			exitStatus = EXIT_FAILURE;
	}
	catch (const std::exception& e)
	{
		printf("ERROR: %s\n", e.what());
		return -1;
	}

	return exitStatus;
}
|
Java
|
/*
* Copyright (c) 2016, Groupon, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of GROUPON nor the names of its contributors may be
* used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.groupon.lex.metrics;
import org.hamcrest.Matchers;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import org.junit.Test;
/**
*
* @author ariane
*/
public class SimpleMetricTest {
    /** Builds a SimpleMetric from a name string and a MetricValue. */
    private static Metric metricOf(String name, MetricValue value) {
        return new SimpleMetric(MetricName.valueOf(name), value);
    }

    @Test
    public void constructor_number() {
        final Metric metric = new SimpleMetric(MetricName.valueOf("foobar"), (short)7);

        assertEquals(MetricName.valueOf("foobar"), metric.getName());
        assertNotNull(metric.getValue());
        MetricValueTest.validateNumber(true, 7, metric.getValue());
    }

    @Test
    public void constructor_string() {
        final Metric metric = new SimpleMetric(MetricName.valueOf("foobar"), "chocoladevla");

        assertEquals(MetricName.valueOf("foobar"), metric.getName());
        assertNotNull(metric.getValue());
        MetricValueTest.validateString("chocoladevla", metric.getValue());
    }

    @Test
    public void constructor_bool() {
        final Metric metric = new SimpleMetric(MetricName.valueOf("foobar"), true);

        assertEquals(MetricName.valueOf("foobar"), metric.getName());
        assertNotNull(metric.getValue());
        MetricValueTest.validateBoolean(true, metric.getValue());
    }

    @Test
    public void constructor_metric() {
        final Metric metric = metricOf("foobar", MetricValue.fromNumberValue(9000));

        assertEquals(MetricName.valueOf("foobar"), metric.getName());
        assertNotNull(metric.getValue());
        MetricValueTest.validateNumber(true, 9000, metric.getValue());
    }

    @Test
    public void constructor_empty() {
        final Metric metric = metricOf("foobar", MetricValue.EMPTY);

        assertEquals(MetricName.valueOf("foobar"), metric.getName());
        assertNotNull(metric.getValue());
        MetricValueTest.validateEmpty(metric.getValue());
    }

    @Test
    public void to_string() {
        final Metric metric = metricOf("foobar", MetricValue.fromIntValue(19));

        assertThat(metric.toString(), Matchers.allOf(containsString("foobar"), containsString("19")));
    }

    @Test
    public void equality() {
        final Metric first = metricOf("foobar", MetricValue.fromIntValue(19));
        final Metric second = metricOf("foobar", MetricValue.fromIntValue(19));

        assertEquals(first, second);
        assertEquals(first.hashCode(), second.hashCode());
    }

    @Test
    public void inequality() {
        final Metric first = metricOf("foobar", MetricValue.fromIntValue(17));
        final Metric second = metricOf("foobar", MetricValue.fromIntValue(19));
        final Metric third = metricOf("fizzbuzz", MetricValue.fromIntValue(19));

        assertNotEquals(first, second);
        assertNotEquals(first, third);
        assertNotEquals(second, first);
        assertNotEquals(second, third);
        assertNotEquals(third, first);
        assertNotEquals(third, second);
    }

    @Test
    public void equal_across_types() {
        final Metric metric = metricOf("foobar", MetricValue.fromIntValue(19));

        assertFalse(metric.equals(null));
        assertFalse(metric.equals(new Object()));
    }
}
|
Java
|
import sys
import warnings
try:
import itertools.izip as zip
except ImportError:
pass
from itertools import product
import numpy as np
from .. import util
from ..dimension import dimension_name
from ..element import Element
from ..ndmapping import NdMapping, item_check, sorted_context
from .interface import DataError, Interface
from .pandas import PandasInterface
from .util import finite_range
class cuDFInterface(PandasInterface):
    """
    The cuDFInterface allows a Dataset objects to wrap a cuDF
    DataFrame object. Using cuDF allows working with columnar
    data on a GPU. Most operations leave the data in GPU memory,
    however to plot the data it has to be loaded into memory.

    The cuDFInterface covers almost the complete API exposed
    by the PandasInterface with two notable exceptions:

    1) Aggregation and groupby do not have a consistent sort order
       (see https://github.com/rapidsai/cudf/issues/4237)
    2) Not all functions can be easily applied to a cuDF so
       some functions applied with aggregate and reduce will not work.
    """

    # Registered datatype name used to select this interface.
    datatype = 'cuDF'

    # Populated lazily; cudf types are only importable when cudf is installed.
    types = ()
@classmethod
def loaded(cls):
return 'cudf' in sys.modules
@classmethod
def applies(cls, obj):
if not cls.loaded():
return False
import cudf
return isinstance(obj, (cudf.DataFrame, cudf.Series))
    @classmethod
    def init(cls, eltype, data, kdims, vdims):
        """Coerce ``data`` to a cudf.DataFrame and resolve key/value dimensions.

        Returns the (data, dimensions-dict, extra-dict) triple expected by the
        interface machinery.
        """
        import cudf
        import pandas as pd

        element_params = eltype.param.objects()
        kdim_param = element_params['kdims']
        vdim_param = element_params['vdims']

        # Promote a Series (pandas or cudf) to a single-column frame.
        if isinstance(data, (cudf.Series, pd.Series)):
            data = data.to_frame()

        # Anything that is not already a cudf.DataFrame is normalized through
        # the pandas interface first and then transferred to the GPU.
        if not isinstance(data, cudf.DataFrame):
            data, _, _ = PandasInterface.init(eltype, data, kdims, vdims)
            data = cudf.from_pandas(data)

        columns = list(data.columns)
        ncols = len(columns)
        index_names = [data.index.name]
        if index_names == [None]:
            index_names = ['index']
        # Single-column auto-indexable elements use the index as the key dim.
        if eltype._auto_indexable_1d and ncols == 1 and kdims is None:
            kdims = list(index_names)

        # Upper bounds on how many key/value dimensions this element allows.
        if isinstance(kdim_param.bounds[1], int):
            ndim = min([kdim_param.bounds[1], len(kdim_param.default)])
        else:
            ndim = None
        nvdim = vdim_param.bounds[1] if isinstance(vdim_param.bounds[1], int) else None

        # Infer whichever of kdims/vdims was not explicitly supplied from the
        # remaining DataFrame columns.
        if kdims and vdims is None:
            vdims = [c for c in columns if c not in kdims]
        elif vdims and kdims is None:
            kdims = [c for c in columns if c not in vdims][:ndim]
        elif kdims is None:
            kdims = list(columns[:ndim])
            if vdims is None:
                vdims = [d for d in columns[ndim:((ndim+nvdim) if nvdim else None)]
                         if d not in kdims]
        elif kdims == [] and vdims is None:
            vdims = list(columns[:nvdim if nvdim else None])

        # Handle reset of index if kdims reference index by name
        for kd in kdims:
            kd = dimension_name(kd)
            if kd in columns:
                continue
            if any(kd == ('index' if name is None else name)
                   for name in index_names):
                data = data.reset_index()
                break
        # Integer dimension labels are ambiguous with positional indexing.
        if any(isinstance(d, (np.int64, int)) for d in kdims+vdims):
            raise DataError("cudf DataFrame column names used as dimensions "
                            "must be strings not integers.", cls)

        if kdims:
            kdim = dimension_name(kdims[0])
            # Auto-indexable 1D data: synthesize a monotonic index column so
            # the key dimension has backing data.
            if eltype._auto_indexable_1d and ncols == 1 and kdim not in columns:
                data = data.copy()
                data.insert(0, kdim, np.arange(len(data)))

        for d in kdims+vdims:
            d = dimension_name(d)
            if len([c for c in columns if c == d]) > 1:
                raise DataError('Dimensions may not reference duplicated DataFrame '
                                'columns (found duplicate %r columns). If you want to plot '
                                'a column against itself simply declare two dimensions '
                                'with the same name. '% d, cls)

        return data, {'kdims':kdims, 'vdims':vdims}, {}
@classmethod
def range(cls, dataset, dimension):
    """Return the (min, max) extent of a dimension's column.

    Object-dtype columns have no meaningful numeric range, so
    (NaN, NaN) is returned for them. Declared nodata sentinels are
    replaced before the extent is computed.
    """
    dim = dataset.get_dimension(dimension, strict=True)
    col = dataset.data[dim.name]
    if dim.nodata is not None:
        col = cls.replace_value(col, dim.nodata)
    if col.dtype.kind != 'O':
        return finite_range(col, col.min(), col.max())
    return np.NaN, np.NaN
@classmethod
def values(cls, dataset, dim, expanded=True, flat=True, compute=True,
           keep_index=False):
    """Return the values along a dimension.

    With expanded=False duplicates are dropped; with keep_index=True
    the raw cudf Series is returned; compute=True copies device data
    to host memory (``values_host``).
    """
    dimension = dataset.get_dimension(dim, strict=True)
    column = dataset.data[dimension.name]
    if not expanded:
        uniques = column.unique()
        return uniques.values_host if compute else uniques.values
    if keep_index:
        return column
    if compute:
        return column.values_host
    # Device values may not be accessible for every dtype; fall back
    # to the host copy if .values raises.
    try:
        return column.values
    except Exception:
        return column.values_host
@classmethod
def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs):
    """Group the dataset along the supplied dimensions.

    Returns a container_type (e.g. an NdMapping subtype) holding one
    group_type element per unique key combination found along the
    grouped dimensions. Empty groups are skipped.
    """
    # Get dimensions information
    dimensions = [dataset.get_dimension(d).name for d in dimensions]
    kdims = [kdim for kdim in dataset.kdims if kdim not in dimensions]

    # Update the kwargs appropriately for Element group types
    group_kwargs = {}
    group_type = dict if group_type == 'raw' else group_type
    if issubclass(group_type, Element):
        group_kwargs.update(util.get_param_values(dataset))
        group_kwargs['kdims'] = kdims
    group_kwargs.update(kwargs)

    # Propagate dataset
    group_kwargs['dataset'] = dataset.dataset

    # Find all the keys along supplied dimensions.
    # BUG FIX: previously this read dataset.data[dimensions[0]] for
    # every d, so grouping over multiple dimensions reused the first
    # dimension's unique values for all of them; look up each grouped
    # dimension's own uniques instead.
    keys = product(*(dataset.data[d].unique().values_host for d in dimensions))

    # Iterate over the unique entries applying selection masks
    grouped_data = []
    for unique_key in util.unique_iterator(keys):
        group_data = dataset.select(**dict(zip(dimensions, unique_key)))
        if not len(group_data):
            continue
        group_data = group_type(group_data, **group_kwargs)
        grouped_data.append((unique_key, group_data))

    if issubclass(container_type, NdMapping):
        with item_check(False), sorted_context(False):
            kdims = [dataset.get_dimension(d) for d in dimensions]
            return container_type(grouped_data, kdims=kdims)
    else:
        return container_type(grouped_data)
@classmethod
def select_mask(cls, dataset, selection):
    """
    Given a Dataset object and a dictionary with dimension keys and
    selection keys (i.e. tuple ranges, slices, sets, lists, or literals)
    return a boolean mask over the rows in the Dataset object that
    have been selected.
    """
    mask = None
    for dim, sel in selection.items():
        # Tuple selections are treated as (start, stop) ranges.
        if isinstance(sel, tuple):
            sel = slice(*sel)
        arr = cls.values(dataset, dim, keep_index=True)
        if util.isdatetime(arr) and util.pd:
            try:
                sel = util.parse_datetime_selection(sel)
            # FIX: narrowed from a bare except, which would also swallow
            # KeyboardInterrupt/SystemExit. Parsing remains best-effort.
            except Exception:
                pass

        new_masks = []
        if isinstance(sel, slice):
            # Range selections are lower-inclusive, upper-exclusive.
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore', r'invalid value encountered')
                if sel.start is not None:
                    new_masks.append(sel.start <= arr)
                if sel.stop is not None:
                    new_masks.append(arr < sel.stop)
            if not new_masks:
                continue
            new_mask = new_masks[0]
            for imask in new_masks[1:]:
                new_mask &= imask
        elif isinstance(sel, (set, list)):
            # Membership selections are ORed together.
            for v in sel:
                new_masks.append(arr==v)
            if not new_masks:
                continue
            new_mask = new_masks[0]
            for imask in new_masks[1:]:
                new_mask |= imask
        elif callable(sel):
            new_mask = sel(arr)
        else:
            new_mask = arr == sel

        # Masks for individual dimensions are ANDed together.
        if mask is None:
            mask = new_mask
        else:
            mask &= new_mask
    return mask
@classmethod
def select(cls, dataset, selection_mask=None, **selection):
    """Apply a selection to the cudf DataFrame.

    Builds a boolean row mask from the selection when one is not
    supplied. When the selection indexes down to a single row of a
    single value dimension, the scalar value is returned instead of
    a DataFrame.
    """
    data = dataset.data
    if selection_mask is None:
        selection_mask = cls.select_mask(dataset, selection)
    indexed = cls.indexed(dataset, selection)
    if selection_mask is not None:
        data = data.loc[selection_mask]
    if indexed and len(data) == 1 and len(dataset.vdims) == 1:
        return data[dataset.vdims[0].name].iloc[0]
    return data
@classmethod
def concat_fn(cls, dataframes, **kwargs):
    """Concatenate a sequence of cudf DataFrames via ``cudf.concat``."""
    import cudf
    concatenated = cudf.concat(dataframes, **kwargs)
    return concatenated
@classmethod
def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
    """Return a copy of the data with a column added for ``dimension``.

    The original DataFrame is left untouched; existing columns are
    never overwritten. Note ``dim_pos`` and ``vdim`` are accepted for
    interface compatibility but not used by this backend.
    """
    frame = dataset.data.copy()
    if dimension.name not in frame:
        frame[dimension.name] = values
    return frame
@classmethod
def aggregate(cls, dataset, dimensions, function, **kwargs):
    # Aggregate the value dimensions over the supplied key dimensions
    # using the named reduction (``function.__name__``). Returns the
    # aggregated frame and the list of value dimensions missing from it.
    data = dataset.data
    cols = [d.name for d in dataset.kdims if d in dimensions]
    vdims = dataset.dimensions('value', label='name')
    reindexed = data[cols+vdims]
    agg = function.__name__
    if len(dimensions):
        # Grouped path: translate numpy reduction names (amin/amax) to
        # the method names cudf's groupby object exposes.
        agg_map = {'amin': 'min', 'amax': 'max'}
        agg = agg_map.get(agg, agg)
        grouped = reindexed.groupby(cols, sort=False)
        if not hasattr(grouped, agg):
            raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
        df = getattr(grouped, agg)().reset_index()
    else:
        # Ungrouped path: reduce every column to a scalar and assemble a
        # single-row pandas DataFrame from the resulting Series.
        agg_map = {'amin': 'min', 'amax': 'max', 'size': 'count'}
        agg = agg_map.get(agg, agg)
        if not hasattr(reindexed, agg):
            raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
        agg = getattr(reindexed, agg)()
        data = dict(((col, [v]) for col, v in zip(agg.index.values_host, agg.to_array())))
        df = util.pd.DataFrame(data, columns=list(agg.index.values_host))
    dropped = []
    # Report value dimensions absent from the aggregated result so the
    # caller can adjust the element's dimensions accordingly.
    for vd in vdims:
        if vd not in df.columns:
            dropped.append(vd)
    return df, dropped
@classmethod
def iloc(cls, dataset, index):
    # Positional (row, column) indexing into the cudf DataFrame.
    # ``index`` is a (rows, cols) pair; a scalar row AND scalar column
    # returns a scalar value, otherwise a DataFrame is returned.
    import cudf
    rows, cols = index
    scalar = False
    columns = list(dataset.data.columns)
    # Normalize the column selection to a list of column names.
    if isinstance(cols, slice):
        cols = [d.name for d in dataset.dimensions()][cols]
    elif np.isscalar(cols):
        scalar = np.isscalar(rows)
        cols = [dataset.get_dimension(cols).name]
    else:
        cols = [dataset.get_dimension(d).name for d in index[1]]
    col_index = [columns.index(c) for c in cols]
    if np.isscalar(rows):
        rows = [rows]

    if scalar:
        return dataset.data[cols[0]].iloc[rows[0]]
    result = dataset.data.iloc[rows, col_index]

    # cuDF does not handle single rows and cols indexing correctly
    # as of cudf=0.10.0 so we have to convert Series back to DataFrame
    if isinstance(result, cudf.Series):
        if len(cols) == 1:
            result = result.to_frame(cols[0])
        else:
            result = result.to_frame().T
    return result
@classmethod
def sort(cls, dataset, by=[], reverse=False):
    """Sort the data along the dimensions listed in ``by``.

    ``reverse=True`` produces a descending sort.
    """
    columns = []
    for d in by:
        columns.append(dataset.get_dimension(d, strict=True).name)
    return dataset.data.sort_values(by=columns, ascending=not reverse)
@classmethod
def dframe(cls, dataset, dimensions):
    """Convert the (optionally column-subsetted) cudf DataFrame to
    a pandas DataFrame on the host."""
    frame = dataset.data
    if dimensions:
        frame = frame[dimensions]
    return frame.to_pandas()
# Register the interface so Dataset construction can dispatch to it.
Interface.register(cuDFInterface)
|
Java
|
/*
* Copyright (c) Contributors, http://openviewer.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenViewer Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
using System;
using OpenViewer.Model;
namespace OpenViewer.UI
{
/// <summary>
/// Common base class for user-interface implementations of IUI.
/// Concrete UIs supply a name, an initialization step and a run loop.
/// </summary>
public abstract class UIBase : IUI
{
    /// <summary>The session model shared with the concrete UI.</summary>
    protected MetaverseSession m_model;

    /// <summary>Returns the display name of this UI implementation.</summary>
    public abstract string GetName();

    /// <summary>Initializes the UI with the session model, rendering engine
    /// and login credentials before <see cref="Run"/> is called.</summary>
    public abstract void Initialize(MetaverseSession model, string renderingEngine, string loginURI, string username, string password);

    /// <summary>Starts the UI.</summary>
    public abstract void Run();
}
}
|
Java
|
<?php
/**
* Zend Framework (http://framework.zend.com/)
*
* @link http://github.com/zendframework/ZendSkeletonApplication for the canonical source repository
* @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
return array(
    'router' => array(
        'routes' => array(
            // Hostname-based route: requests whose Host header matches
            // api.vuongquocbalo.com are dispatched through this route.
            // (The previous comment here was skeleton boilerplate about a
            // /api/:controller/:action segment route and did not describe
            // this configuration.)
            'api' => array(
                'type' => 'Hostname',
                'options' => array(
                    'route' => 'api.vuongquocbalo.com',
                ),
            ),
        ),
    ),
);
|
Java
|
<?php
namespace Jazzee\Element;
/**
* Phonenumber Element
*
* @author Jon Johnson <jon.johnson@ucsf.edu>
* @license http://jazzee.org/license BSD-3-Clause
*/
class Phonenumber extends TextInput
{

  const PAGEBUILDER_SCRIPT = 'resource/scripts/element_types/JazzeeElementPhonenumber.js';

  /**
   * Add this element to a form field as a text input with phone-number
   * validation and filtering.
   *
   * A NotEmpty validator is attached first when the element is required;
   * the Phonenumber validator and filter are always attached.
   *
   * @param \Foundation\Form\Field $field the field to add the element to
   * @return \Foundation\Form\Element the configured form element
   */
  public function addToField(\Foundation\Form\Field $field)
  {
    $element = $field->newElement('TextInput', 'el' . $this->_element->getId());
    $element->setLabel($this->_element->getTitle());
    $element->setInstructions($this->_element->getInstructions());
    $element->setFormat($this->_element->getFormat());
    $element->setDefaultValue($this->_element->getDefaultValue());
    if ($this->_element->isRequired()) {
      $validator = new \Foundation\Form\Validator\NotEmpty($element);
      $element->addValidator($validator);
    }
    $validator = new \Foundation\Form\Validator\Phonenumber($element);
    $element->addValidator($validator);
    $filter = new \Foundation\Form\Filter\Phonenumber($element);
    $element->addFilter($filter);

    return $element;
  }
}
|
Java
|
# Print the integers 1 through 10, one per line.
1.upto(10) do |i|
  puts i
end
|
Java
|
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/media/webrtc/permission_bubble_media_access_handler.h"
#include <memory>
#include <utility>
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/metrics/field_trial.h"
#include "base/task/post_task.h"
#include "build/build_config.h"
#include "chrome/browser/media/webrtc/media_capture_devices_dispatcher.h"
#include "chrome/browser/media/webrtc/media_stream_capture_indicator.h"
#include "chrome/browser/media/webrtc/media_stream_device_permissions.h"
#include "chrome/browser/permissions/permission_manager_factory.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/common/pref_names.h"
#include "components/content_settings/browser/tab_specific_content_settings.h"
#include "components/content_settings/core/browser/host_content_settings_map.h"
#include "components/permissions/permission_manager.h"
#include "components/permissions/permission_result.h"
#include "components/pref_registry/pref_registry_syncable.h"
#include "components/prefs/pref_service.h"
#include "components/webrtc/media_stream_devices_controller.h"
#include "content/public/browser/browser_task_traits.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/notification_service.h"
#include "content/public/browser/notification_types.h"
#include "content/public/browser/web_contents.h"
#if defined(OS_ANDROID)
#include <vector>
#include "chrome/browser/flags/android/chrome_feature_list.h"
#include "chrome/browser/media/webrtc/screen_capture_infobar_delegate_android.h"
#include "components/permissions/permission_uma_util.h"
#include "components/permissions/permission_util.h"
#endif // defined(OS_ANDROID)
#if defined(OS_MACOSX)
#include "base/metrics/histogram_macros.h"
#include "chrome/browser/content_settings/chrome_content_settings_utils.h"
#include "chrome/browser/media/webrtc/system_media_capture_permissions_mac.h"
#include "chrome/browser/media/webrtc/system_media_capture_permissions_stats_mac.h"
#endif
using content::BrowserThread;
using RepeatingMediaResponseCallback =
base::RepeatingCallback<void(const blink::MediaStreamDevices& devices,
blink::mojom::MediaStreamRequestResult result,
std::unique_ptr<content::MediaStreamUI> ui)>;
#if defined(OS_MACOSX)
using system_media_permissions::SystemPermission;
#endif
namespace {
// Records the outcome of a media request on the tab's
// TabSpecificContentSettings so per-tab mic/camera access state is kept
// up to date. A CONTENT_SETTING_DEFAULT value means the corresponding
// stream type was not part of the request and is left untouched.
void UpdateTabSpecificContentSettings(
    content::WebContents* web_contents,
    const content::MediaStreamRequest& request,
    ContentSetting audio_setting,
    ContentSetting video_setting) {
  if (!web_contents)
    return;

  auto* content_settings =
      content_settings::TabSpecificContentSettings::FromWebContents(
          web_contents);
  if (!content_settings)
    return;

  content_settings::TabSpecificContentSettings::MicrophoneCameraState
      microphone_camera_state = content_settings::TabSpecificContentSettings::
          MICROPHONE_CAMERA_NOT_ACCESSED;
  std::string selected_audio_device;
  std::string selected_video_device;
  std::string requested_audio_device = request.requested_audio_device_id;
  std::string requested_video_device = request.requested_video_device_id;

  // TODO(raymes): Why do we use the defaults here for the selected devices?
  // Shouldn't we just use the devices that were actually selected?
  Profile* profile =
      Profile::FromBrowserContext(web_contents->GetBrowserContext());
  if (audio_setting != CONTENT_SETTING_DEFAULT) {
    // Fall back to the profile's default capture device when the request
    // did not name a specific one.
    selected_audio_device =
        requested_audio_device.empty()
            ? profile->GetPrefs()->GetString(prefs::kDefaultAudioCaptureDevice)
            : requested_audio_device;
    microphone_camera_state |=
        content_settings::TabSpecificContentSettings::MICROPHONE_ACCESSED |
        (audio_setting == CONTENT_SETTING_ALLOW
             ? 0
             : content_settings::TabSpecificContentSettings::
                   MICROPHONE_BLOCKED);
  }

  if (video_setting != CONTENT_SETTING_DEFAULT) {
    selected_video_device =
        requested_video_device.empty()
            ? profile->GetPrefs()->GetString(prefs::kDefaultVideoCaptureDevice)
            : requested_video_device;
    microphone_camera_state |=
        content_settings::TabSpecificContentSettings::CAMERA_ACCESSED |
        (video_setting == CONTENT_SETTING_ALLOW
             ? 0
             : content_settings::TabSpecificContentSettings::CAMERA_BLOCKED);
  }

  content_settings->OnMediaStreamPermissionSet(
      PermissionManagerFactory::GetForProfile(profile)->GetCanonicalOrigin(
          ContentSettingsType::MEDIASTREAM_CAMERA, request.security_origin,
          web_contents->GetLastCommittedURL()),
      microphone_camera_state, selected_audio_device, selected_video_device,
      requested_audio_device, requested_video_device);
}
} // namespace
// A queued media-access request together with the callback used to
// deliver its response once the permission flow completes.
struct PermissionBubbleMediaAccessHandler::PendingAccessRequest {
  PendingAccessRequest(const content::MediaStreamRequest& request,
                       RepeatingMediaResponseCallback callback)
      : request(request), callback(callback) {}
  ~PendingAccessRequest() {}

  // TODO(gbillock): make the MediaStreamDevicesController owned by
  // this object when we're using bubbles.
  content::MediaStreamRequest request;
  RepeatingMediaResponseCallback callback;
};
PermissionBubbleMediaAccessHandler::PermissionBubbleMediaAccessHandler() {
  // PermissionBubbleMediaAccessHandler should be created on UI thread.
  // Otherwise, it will not receive
  // content::NOTIFICATION_WEB_CONTENTS_DESTROYED, and that will result in
  // possible use after free.
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  // Watch for tab destruction so Observe() can drop pending requests for
  // destroyed WebContents.
  notifications_registrar_.Add(this,
                               content::NOTIFICATION_WEB_CONTENTS_DESTROYED,
                               content::NotificationService::AllSources());
}
// Trivial destructor; no explicit cleanup is performed here.
PermissionBubbleMediaAccessHandler::~PermissionBubbleMediaAccessHandler() {}
// Returns true if |type| is a media stream type this handler services.
// Desktop/display capture types are only accepted on Android, where this
// handler routes them to the screen-capture infobar flow.
bool PermissionBubbleMediaAccessHandler::SupportsStreamType(
    content::WebContents* web_contents,
    const blink::mojom::MediaStreamType type,
    const extensions::Extension* extension) {
#if defined(OS_ANDROID)
  return type == blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE ||
         type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE ||
         type == blink::mojom::MediaStreamType::GUM_DESKTOP_VIDEO_CAPTURE ||
         type == blink::mojom::MediaStreamType::DISPLAY_VIDEO_CAPTURE;
#else
  return type == blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE ||
         type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE;
#endif
}
// Synchronously checks whether |security_origin| already holds the mic or
// camera permission for |render_frame_host|, without prompting the user.
bool PermissionBubbleMediaAccessHandler::CheckMediaAccessPermission(
    content::RenderFrameHost* render_frame_host,
    const GURL& security_origin,
    blink::mojom::MediaStreamType type,
    const extensions::Extension* extension) {
  content::WebContents* web_contents =
      content::WebContents::FromRenderFrameHost(render_frame_host);
  Profile* profile =
      Profile::FromBrowserContext(web_contents->GetBrowserContext());
  // Map the stream type onto the corresponding content-settings type.
  ContentSettingsType content_settings_type =
      type == blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE
          ? ContentSettingsType::MEDIASTREAM_MIC
          : ContentSettingsType::MEDIASTREAM_CAMERA;

  DCHECK(!security_origin.is_empty());
  GURL embedding_origin = web_contents->GetLastCommittedURL().GetOrigin();
  permissions::PermissionManager* permission_manager =
      PermissionManagerFactory::GetForProfile(profile);
  return permission_manager
             ->GetPermissionStatusForFrame(content_settings_type,
                                           render_frame_host, security_origin)
             .content_setting == CONTENT_SETTING_ALLOW;
}
// Queues the media request for |web_contents| and starts processing it
// immediately when it is the only pending request for that tab; otherwise
// it is handled after the requests ahead of it complete.
void PermissionBubbleMediaAccessHandler::HandleRequest(
    content::WebContents* web_contents,
    const content::MediaStreamRequest& request,
    content::MediaResponseCallback callback,
    const extensions::Extension* extension) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);

#if defined(OS_ANDROID)
  if (blink::IsScreenCaptureMediaType(request.video_type) &&
      !base::FeatureList::IsEnabled(
          chrome::android::kUserMediaScreenCapturing)) {
    // If screen capturing isn't enabled on Android, we'll use "invalid state"
    // as result, same as on desktop.
    std::move(callback).Run(
        blink::MediaStreamDevices(),
        blink::mojom::MediaStreamRequestResult::INVALID_STATE, nullptr);
    return;
  }
#endif  // defined(OS_ANDROID)

  RequestsMap& requests_map = pending_requests_[web_contents];
  requests_map.emplace(
      next_request_id_++,
      PendingAccessRequest(
          request, base::AdaptCallbackForRepeating(std::move(callback))));

  // If this is the only request then show the infobar.
  if (requests_map.size() == 1)
    ProcessQueuedAccessRequest(web_contents);
}
// Forwards the oldest pending request for |web_contents| to the permission
// flow: the screen-capture infobar on Android, otherwise the
// MediaStreamDevicesController prompt.
void PermissionBubbleMediaAccessHandler::ProcessQueuedAccessRequest(
    content::WebContents* web_contents) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);

  auto it = pending_requests_.find(web_contents);

  if (it == pending_requests_.end() || it->second.empty()) {
    // Don't do anything if the tab was closed.
    return;
  }

  DCHECK(!it->second.empty());

  const int request_id = it->second.begin()->first;
  const content::MediaStreamRequest& request =
      it->second.begin()->second.request;
#if defined(OS_ANDROID)
  if (blink::IsScreenCaptureMediaType(request.video_type)) {
    ScreenCaptureInfoBarDelegateAndroid::Create(
        web_contents, request,
        base::BindOnce(
            &PermissionBubbleMediaAccessHandler::OnAccessRequestResponse,
            base::Unretained(this), web_contents, request_id));
    return;
  }
#endif

  webrtc::MediaStreamDevicesController::RequestPermissions(
      request, MediaCaptureDevicesDispatcher::GetInstance(),
      base::BindOnce(
          &PermissionBubbleMediaAccessHandler::OnMediaStreamRequestResponse,
          base::Unretained(this), web_contents, request_id, request));
}
// Removes the bookkeeping entry for a request once it reaches the CLOSING
// state; other state transitions are ignored.
void PermissionBubbleMediaAccessHandler::UpdateMediaRequestState(
    int render_process_id,
    int render_frame_id,
    int page_request_id,
    blink::mojom::MediaStreamType stream_type,
    content::MediaRequestState state) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  if (state != content::MEDIA_REQUEST_STATE_CLOSING)
    return;

  // Locate the matching request by its (process, frame, page-request) ids.
  bool found = false;
  for (auto requests_it = pending_requests_.begin();
       requests_it != pending_requests_.end(); ++requests_it) {
    RequestsMap& requests_map = requests_it->second;
    for (RequestsMap::iterator it = requests_map.begin();
         it != requests_map.end(); ++it) {
      if (it->second.request.render_process_id == render_process_id &&
          it->second.request.render_frame_id == render_frame_id &&
          it->second.request.page_request_id == page_request_id) {
        requests_map.erase(it);
        found = true;
        break;
      }
    }
    if (found)
      break;
  }
}
// static
// Registers the audio/video capture policy preferences; capture is
// allowed by default.
void PermissionBubbleMediaAccessHandler::RegisterProfilePrefs(
    user_prefs::PrefRegistrySyncable* prefs) {
  prefs->RegisterBooleanPref(prefs::kVideoCaptureAllowed, true);
  prefs->RegisterBooleanPref(prefs::kAudioCaptureAllowed, true);
  prefs->RegisterListPref(prefs::kVideoCaptureAllowedUrls);
  prefs->RegisterListPref(prefs::kAudioCaptureAllowedUrls);
}
// Receives the outcome of the MediaStreamDevicesController prompt, records
// it in the tab's content settings, registers any granted streams with the
// capture indicator, and forwards the result to the common response path.
void PermissionBubbleMediaAccessHandler::OnMediaStreamRequestResponse(
    content::WebContents* web_contents,
    int request_id,
    content::MediaStreamRequest request,
    const blink::MediaStreamDevices& devices,
    blink::mojom::MediaStreamRequestResult result,
    bool blocked_by_feature_policy,
    ContentSetting audio_setting,
    ContentSetting video_setting) {
  if (pending_requests_.find(web_contents) == pending_requests_.end()) {
    // WebContents has been destroyed. Don't need to do anything.
    return;
  }

  // If the kill switch is on, or the request was blocked because of feature
  // policy, we don't update the tab context.
  if (result != blink::mojom::MediaStreamRequestResult::KILL_SWITCH_ON &&
      !blocked_by_feature_policy) {
    UpdateTabSpecificContentSettings(web_contents, request, audio_setting,
                                     video_setting);
  }

  std::unique_ptr<content::MediaStreamUI> ui;
  if (!devices.empty()) {
    // Surface the active capture via the media stream capture indicator.
    ui = MediaCaptureDevicesDispatcher::GetInstance()
             ->GetMediaStreamCaptureIndicator()
             ->RegisterMediaStream(web_contents, devices);
  }
  OnAccessRequestResponse(web_contents, request_id, devices, result,
                          std::move(ui));
}
// Common response path for all permission flows. On macOS a Chrome-level
// approval additionally requires the OS-level mic/camera permission; if the
// system permission is undetermined this method re-enters itself from the
// system-permission callback, and if it is denied/restricted the result is
// downgraded to SYSTEM_PERMISSION_DENIED. Finally the request is dequeued,
// the next queued request (if any) is scheduled, and the callback is run.
void PermissionBubbleMediaAccessHandler::OnAccessRequestResponse(
    content::WebContents* web_contents,
    int request_id,
    const blink::MediaStreamDevices& devices,
    blink::mojom::MediaStreamRequestResult result,
    std::unique_ptr<content::MediaStreamUI> ui) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);

  auto request_maps_it = pending_requests_.find(web_contents);
  if (request_maps_it == pending_requests_.end()) {
    // WebContents has been destroyed. Don't need to do anything.
    return;
  }

  RequestsMap& requests_map(request_maps_it->second);
  if (requests_map.empty())
    return;

  auto request_it = requests_map.find(request_id);
  DCHECK(request_it != requests_map.end());
  if (request_it == requests_map.end())
    return;

  blink::mojom::MediaStreamRequestResult final_result = result;

#if defined(OS_MACOSX)
  // If the request was approved, ask for system permissions if needed, and run
  // this function again when done.
  if (result == blink::mojom::MediaStreamRequestResult::OK) {
    const content::MediaStreamRequest& request = request_it->second.request;
    if (request.audio_type ==
        blink::mojom::MediaStreamType::DEVICE_AUDIO_CAPTURE) {
      const SystemPermission system_audio_permission =
          system_media_permissions::CheckSystemAudioCapturePermission();
      UMA_HISTOGRAM_ENUMERATION(
          "Media.Audio.Capture.Mac.MicSystemPermission.UserMedia",
          system_audio_permission);
      if (system_audio_permission == SystemPermission::kNotDetermined) {
        // Using WeakPtr since callback can come at any time and we might be
        // destroyed.
        system_media_permissions::RequestSystemAudioCapturePermisson(
            base::BindOnce(
                &PermissionBubbleMediaAccessHandler::OnAccessRequestResponse,
                weak_factory_.GetWeakPtr(), web_contents, request_id, devices,
                result, std::move(ui)),
            {content::BrowserThread::UI});
        return;
      } else if (system_audio_permission == SystemPermission::kRestricted ||
                 system_audio_permission == SystemPermission::kDenied) {
        content_settings::UpdateLocationBarUiForWebContents(web_contents);
        final_result =
            blink::mojom::MediaStreamRequestResult::SYSTEM_PERMISSION_DENIED;
        system_media_permissions::SystemAudioCapturePermissionBlocked();
      } else {
        DCHECK_EQ(system_audio_permission, SystemPermission::kAllowed);
        content_settings::UpdateLocationBarUiForWebContents(web_contents);
      }
    }
    if (request.video_type ==
        blink::mojom::MediaStreamType::DEVICE_VIDEO_CAPTURE) {
      const SystemPermission system_video_permission =
          system_media_permissions::CheckSystemVideoCapturePermission();
      UMA_HISTOGRAM_ENUMERATION(
          "Media.Video.Capture.Mac.CameraSystemPermission.UserMedia",
          system_video_permission);
      if (system_video_permission == SystemPermission::kNotDetermined) {
        // Using WeakPtr since callback can come at any time and we might be
        // destroyed.
        system_media_permissions::RequestSystemVideoCapturePermisson(
            base::BindOnce(
                &PermissionBubbleMediaAccessHandler::OnAccessRequestResponse,
                weak_factory_.GetWeakPtr(), web_contents, request_id, devices,
                result, std::move(ui)),
            {content::BrowserThread::UI});
        return;
      } else if (system_video_permission == SystemPermission::kRestricted ||
                 system_video_permission == SystemPermission::kDenied) {
        content_settings::UpdateLocationBarUiForWebContents(web_contents);
        final_result =
            blink::mojom::MediaStreamRequestResult::SYSTEM_PERMISSION_DENIED;
        system_media_permissions::SystemVideoCapturePermissionBlocked();
      } else {
        DCHECK_EQ(system_video_permission, SystemPermission::kAllowed);
        content_settings::UpdateLocationBarUiForWebContents(web_contents);
      }
    }
  }
#endif  // defined(OS_MACOSX)

  RepeatingMediaResponseCallback callback =
      std::move(request_it->second.callback);
  requests_map.erase(request_it);

  if (!requests_map.empty()) {
    // Post a task to process next queued request. It has to be done
    // asynchronously to make sure that calling infobar is not destroyed until
    // after this function returns.
    base::PostTask(
        FROM_HERE, {BrowserThread::UI},
        base::BindOnce(
            &PermissionBubbleMediaAccessHandler::ProcessQueuedAccessRequest,
            base::Unretained(this), web_contents));
  }

  std::move(callback).Run(devices, final_result, std::move(ui));
}
// Drops all pending requests for a WebContents that is being destroyed,
// ensuring their callbacks are never run against a dead tab.
void PermissionBubbleMediaAccessHandler::Observe(
    int type,
    const content::NotificationSource& source,
    const content::NotificationDetails& details) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);

  DCHECK_EQ(content::NOTIFICATION_WEB_CONTENTS_DESTROYED, type);
  pending_requests_.erase(content::Source<content::WebContents>(source).ptr());
}
|
Java
|
# Copyright (c) 2006-2009 The Trustees of Indiana University.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# - Neither the Indiana University nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from corepy.spre.spe import Instruction, DispatchInstruction, Register
from spu_insts import *
__doc__="""
ISA for the Cell Broadband Engine's SPU.
"""
class lqx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':452}
cycles = (1, 6, 0)
class stqx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':324}
cycles = (1, 6, 0)
class cbx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':468}
cycles = (1, 4, 0)
class chx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':469}
cycles = (1, 4, 0)
class cwx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':470}
cycles = (1, 4, 0)
class cdx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':471}
cycles = (1, 4, 0)
class ah(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':200}
cycles = (0, 2, 0)
class a(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':192}
cycles = (0, 2, 0)
class sfh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':72}
cycles = (0, 2, 0)
class sf(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':64}
cycles = (0, 2, 0)
class addx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':832}
cycles = (0, 2, 0)
class cg(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':194}
cycles = (0, 2, 0)
class cgx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':834}
cycles = (0, 2, 0)
class sfx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':833}
cycles = (0, 2, 0)
class bg(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':66}
cycles = (0, 2, 0)
class bgx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':835}
cycles = (0, 2, 0)
class mpy(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':964}
cycles = (0, 7, 0)
class mpyu(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':972}
cycles = (0, 7, 0)
class mpyh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':965}
cycles = (0, 7, 0)
class mpys(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':967}
cycles = (0, 7, 0)
class mpyhh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':966}
cycles = (0, 7, 0)
class mpyhha(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':838}
cycles = (0, 7, 0)
class mpyhhu(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':974}
cycles = (0, 7, 0)
class mpyhhau(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':846}
cycles = (0, 7, 0)
class clz(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':677}
cycles = (0, 2, 0)
class cntb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':692}
cycles = (0, 4, 0)
class fsmb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':438}
cycles = (1, 4, 0)
class fsmh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':437}
cycles = (1, 4, 0)
class fsm(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':436}
cycles = (1, 4, 0)
class gbb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':434}
cycles = (1, 4, 0)
class gbh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':433}
cycles = (1, 4, 0)
class gb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':432}
cycles = (1, 4, 0)
class avgb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':211}
cycles = (0, 4, 0)
class absdb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':83}
cycles = (0, 4, 0)
class sumb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':595}
cycles = (0, 4, 0)
class xsbh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':694}
cycles = (0, 2, 0)
class xshw(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':686}
cycles = (0, 2, 0)
class xswd(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':678}
cycles = (0, 2, 0)
class and_(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':193}
cycles = (0, 2, 0)
class andc(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':705}
cycles = (0, 2, 0)
class or_(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':65}
cycles = (0, 2, 0)
class orc(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':713}
cycles = (0, 2, 0)
class orx(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':496}
cycles = (1, 4, 0)
class xor(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':577}
cycles = (0, 2, 0)
class nand(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':201}
cycles = (0, 2, 0)
class nor(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':73}
cycles = (0, 2, 0)
class eqv(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':585}
cycles = (0, 2, 0)
class shlh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':95}
cycles = (0, 4, 0)
class shl(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':91}
cycles = (0, 4, 0)
class shlqbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':475}
cycles = (1, 4, 0)
class shlqby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':479}
cycles = (1, 4, 0)
class shlqbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':463}
cycles = (1, 4, 0)
class roth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':92}
cycles = (0, 4, 0)
class rot(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':88}
cycles = (0, 4, 0)
class rotqby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':476}
cycles = (1, 4, 0)
class rotqbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':460}
cycles = (1, 4, 0)
class rotqbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':472}
cycles = (1, 4, 0)
class rothm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':93}
cycles = (0, 4, 0)
class rotm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':89}
cycles = (0, 4, 0)
class rotqmby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':477}
cycles = (1, 4, 0)
class rotqmbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':461}
cycles = (1, 4, 0)
class rotqmbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':473}
cycles = (1, 4, 0)
class rotmah(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':94}
cycles = (0, 4, 0)
class rotma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':90}
cycles = (0, 4, 0)
class heq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':984}
cycles = (0, 2, 0)
class hgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':600}
cycles = (0, 2, 0)
class hlgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':728}
cycles = (0, 2, 0)
class ceqb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':976}
cycles = (0, 2, 0)
class ceqh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':968}
cycles = (0, 2, 0)
class ceq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':960}
cycles = (0, 2, 0)
class cgtb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':592}
cycles = (0, 2, 0)
class cgth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':584}
cycles = (0, 2, 0)
class cgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':576}
cycles = (0, 2, 0)
class clgtb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':720}
cycles = (0, 2, 0)
class clgth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':712}
cycles = (0, 2, 0)
class clgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':704}
cycles = (0, 2, 0)
class bi(Instruction):
machine_inst = OPCD_A_D_E
params = {'OPCD':424}
cycles = (1, 4, 0)
class iret(Instruction):
machine_inst = OPCD_A_D_E
params = {'OPCD':426}
cycles = (1, 4, 0)
class bisled(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':427}
cycles = (1, 4, 0)
class bisl(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':425}
cycles = (1, 4, 0)
class biz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':296}
cycles = (1, 4, 0)
class binz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':297}
cycles = (1, 4, 0)
class bihz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':294}
cycles = (1, 4, 0)
class bihnz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':299}
cycles = (1, 4, 0)
# TODO - can we check that if P is set then RO is zero as required?
class hbr(DispatchInstruction):
    """hbr - hint for branch (register form): hint that the branch at the
    RO-relative address will go to the address in register A.
    Dispatches on either a raw 9-bit offset or a label operand.
    The P (inline-prefetch) flag is part of both formats."""
    cycles = (1, 15, 0)
    dispatch = (
        (OPCD_RO_A_P, {'OPCD':428}),
        (OPCD_LBL9_A_P, {'OPCD':428}))
class fa(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':708}
cycles = (0, 6, 0)
class dfa(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':716}
cycles = (0, 13, 6)
class fs(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':709}
cycles = (0, 6, 0)
class dfs(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':717}
cycles = (0, 13, 6)
class fm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':710}
cycles = (0, 6, 0)
class dfm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':718}
cycles = (0, 13, 6)
class dfma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':860}
cycles = (0, 13, 6)
class dfnms(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':862}
cycles = (0, 13, 6)
class dfms(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':861}
cycles = (0, 13, 6)
class dfnma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':863}
cycles = (0, 13, 6)
class frest(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':440}
cycles = (1, 4, 0)
class frsqest(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':441}
cycles = (1, 4, 0)
class fi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':980}
cycles = (0, 7, 0)
class frds(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':953}
cycles = (0, 13, 6)
class fesd(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':952}
cycles = (0, 13, 6)
class fceq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':962}
cycles = (0, 2, 0)
class fcmeq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':970}
cycles = (0, 2, 0)
class fcgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':706}
cycles = (0, 2, 0)
class fcmgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':714}
cycles = (0, 2, 0)
class fscrwr(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':954}
cycles = (0, 7, 0)
class fscrrd(Instruction):
machine_inst = OPCD_T
params = {'OPCD':920}
cycles = (0, 13, 6)
class stop(Instruction):
machine_inst = OPCD_STOP_SIG
params = {'OPCD':0}
cycles = (1, 4, 0)
class stopd(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':320}
cycles = (1, 4, 0)
class lnop(Instruction):
machine_inst = OPCD
params = {'OPCD':1}
cycles = (1, 0, 0)
class nop(Instruction):
machine_inst = OPCD_T
params = {'OPCD':513}
cycles = (0, 0, 0)
class sync(Instruction):
machine_inst = OPCD_CF
params = {'OPCD':2}
cycles = (1, 4, 0)
class dsync(Instruction):
machine_inst = OPCD
params = {'OPCD':3}
cycles = (1, 4, 0)
class mfspr(Instruction):
machine_inst = OPCD_SA_T
params = {'OPCD':12}
cycles = (1, 6, 0)
class mtspr(Instruction):
machine_inst = OPCD_SA_T
params = {'OPCD':268}
cycles = (1, 6, 0)
class rdch(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':13}
cycles = (1, 6, 0)
class rchcnt(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':15}
cycles = (1, 6, 0)
class wrch(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':269}
cycles = (1, 6, 0)
class mpya(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':12}
cycles = (0, 7, 0)
class selb(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':8}
cycles = (0, 2, 0)
class shufb(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':11}
cycles = (1, 4, 0)
class fma(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':14}
cycles = (0, 6, 0)
class fnms(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':13}
cycles = (0, 6, 0)
class fms(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':15}
cycles = (0, 6, 0)
class cbd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':500}
cycles = (1, 4, 0)
class chd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':501}
cycles = (1, 4, 0)
class cwd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':502}
cycles = (1, 4, 0)
class cdd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':503}
cycles = (1, 4, 0)
class shlhi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':127}
cycles = (0, 4, 0)
class shli(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':123}
cycles = (0, 4, 0)
class shlqbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':507}
cycles = (1, 4, 0)
class shlqbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':511}
cycles = (1, 4, 0)
class rothi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':124}
cycles = (0, 4, 0)
class roti(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':120}
cycles = (0, 4, 0)
class rotqbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':508}
cycles = (1, 4, 0)
class rotqbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':504}
cycles = (1, 4, 0)
class rothmi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':125}
cycles = (0, 4, 0)
class rotmi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':121}
cycles = (0, 4, 0)
class rotqmbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':509}
cycles = (1, 4, 0)
class rotqmbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':505}
cycles = (1, 4, 0)
class rotmahi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':126}
cycles = (0, 4, 0)
class rotmai(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':122}
cycles = (0, 4, 0)
class csflt(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':474}
cycles = (0, 7, 0)
class cflts(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':472}
cycles = (0, 7, 0)
class cuflt(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':475}
cycles = (0, 7, 0)
class cfltu(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':473}
cycles = (0, 7, 0)
class lqd(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':52}
cycles = (1, 6, 0)
class stqd(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':36}
cycles = (1, 6, 0)
class ahi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':29}
cycles = (0, 2, 0)
class ai(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':28}
cycles = (0, 2, 0)
class sfhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':13}
cycles = (0, 2, 0)
class sfi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':12}
cycles = (0, 2, 0)
class mpyi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':116}
cycles = (0, 7, 0)
class mpyui(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':117}
cycles = (0, 7, 0)
class andbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':22}
cycles = (0, 2, 0)
class andhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':21}
cycles = (0, 2, 0)
class andi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':20}
cycles = (0, 2, 0)
class orbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':6}
cycles = (0, 2, 0)
class orhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':5}
cycles = (0, 2, 0)
class ori(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':4}
cycles = (0, 2, 0)
class xorbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':70}
cycles = (0, 2, 0)
class xorhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':69}
cycles = (0, 2, 0)
class xori(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':68}
cycles = (0, 2, 0)
class heqi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':127}
cycles = (0, 2, 0)
class hgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':79}
cycles = (0, 2, 0)
class hlgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':95}
cycles = (0, 2, 0)
class ceqbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':126}
cycles = (0, 2, 0)
class ceqhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':125}
cycles = (0, 2, 0)
class ceqi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':124}
cycles = (0, 2, 0)
class cgtbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':78}
cycles = (0, 2, 0)
class cgthi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':77}
cycles = (0, 2, 0)
class cgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':76}
cycles = (0, 2, 0)
class clgtbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':94}
cycles = (0, 2, 0)
class clgthi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':93}
cycles = (0, 2, 0)
class clgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':92}
cycles = (0, 2, 0)
class lqa(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':97}
cycles = (1, 6, 0)
class lqr(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':103}
cycles = (1, 6, 0)
class stqa(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':65}
cycles = (1, 6, 0)
class stqr(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':71}
cycles = (1, 6, 0)
class ilh(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':131}
cycles = (0, 2, 0)
class ilhu(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':130}
cycles = (0, 2, 0)
class il(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':129}
cycles = (0, 2, 0)
class iohl(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':193}
cycles = (0, 2, 0)
class fsmbi(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':101}
cycles = (1, 4, 0)
class br(DispatchInstruction):
    """br - branch relative (unconditional).
    Dispatches on either a raw 16-bit immediate offset or a label operand."""
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16, {'OPCD':100}),
        (OPCD_LBL16, {'OPCD':100}))
# TODO - bra takes an absolute target address; determine how to support
# absolute branch targets here (e.g. a label form that emits the absolute
# address rather than a PC-relative offset).
class bra(Instruction):
    """bra - branch absolute (unconditional); target comes from the 16-bit
    immediate.  Unlike br, there is no label form here (see TODO above)."""
    machine_inst = OPCD_I16
    params = {'OPCD':96}
    cycles = (1, 4, 0)
# TODO - I16 has two zero bits appended, do I handle this correctly?
# What is the correct way, anyway?
class brsl(DispatchInstruction):
    """brsl - branch relative and set link: branch to the I16/label target
    and store the return address in register T."""
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16_T, {'OPCD':102}),
        (OPCD_LBL16_T, {'OPCD':102}))
class brasl(Instruction):
    """brasl - branch absolute and set link: branch to the absolute I16
    target and store the return address in register T."""
    machine_inst = OPCD_I16_T
    params = {'OPCD':98}
    cycles = (1, 4, 0)
class brnz(DispatchInstruction):
    """brnz - branch relative if the word in register T is not zero.
    Dispatches on either a raw 16-bit offset or a label operand."""
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16_T, {'OPCD':66}),
        (OPCD_LBL16_T, {'OPCD':66}))
class brz(DispatchInstruction):
    """brz - branch relative if the word in register T is zero.
    Dispatches on either a raw 16-bit offset or a label operand."""
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16_T, {'OPCD':64}),
        (OPCD_LBL16_T, {'OPCD':64}))
class brhnz(DispatchInstruction):
    """brhnz - branch relative if the halfword in register T is not zero.

    NOTE(review): this dispatches on OPCD_I16/OPCD_LBL16 (no T field),
    whereas brnz/brz use OPCD_I16_T; the SPU ISA encodes an RT operand for
    brhnz as well - confirm the format constants account for the register.
    """
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16, {'OPCD':70}),
        (OPCD_LBL16, {'OPCD':70}))
class brhz(DispatchInstruction):
    """brhz - branch relative if the halfword in register T is zero.

    NOTE(review): this dispatches on OPCD_I16/OPCD_LBL16 (no T field),
    whereas brz/brnz use OPCD_I16_T; the SPU ISA encodes an RT operand for
    brhz as well - confirm the format constants account for the register.
    """
    cycles = (1, 4, 0)
    dispatch = (
        (OPCD_I16, {'OPCD':68}),
        (OPCD_LBL16, {'OPCD':68}))
class hbra(Instruction):
    """hbra - hint for branch (absolute form): hint that the branch located
    at the 9-bit label offset will go to the absolute I16 address."""
    machine_inst = OPCD_LBL9_I16
    params = {'OPCD':8}
    cycles = (1, 15, 0)
class hbrr(DispatchInstruction):
    """hbrr - hint for branch (relative form): hint that the branch located
    at the 9-bit offset will go to the I16-relative target.
    Dispatches on raw offsets or label operands for both fields."""
    cycles = (1, 15, 0)
    dispatch = (
        (OPCD_ROA_I16, {'OPCD':9}),
        (OPCD_LBL9_LBL16, {'OPCD':9}))
class ila(Instruction):
    """ila - immediate load address: load the 18-bit immediate (zero-extended)
    into each word slot of register T."""
    machine_inst = OPCD_I18_T
    params = {'OPCD':33}
    cycles = (0, 2, 0)
|
Java
|
/*
* Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.patientdiscovery.inbound.deferred.request;
import gov.hhs.fha.nhinc.aspect.InboundProcessingEvent;
import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType;
import gov.hhs.fha.nhinc.nhinclib.NhincConstants;
import gov.hhs.fha.nhinc.patientdiscovery.PatientDiscoveryAuditor;
import gov.hhs.fha.nhinc.patientdiscovery.adapter.deferred.request.proxy.AdapterPatientDiscoveryDeferredReqProxy;
import gov.hhs.fha.nhinc.patientdiscovery.adapter.deferred.request.proxy.AdapterPatientDiscoveryDeferredReqProxyObjectFactory;
import gov.hhs.fha.nhinc.patientdiscovery.aspect.MCCIIN000002UV01EventDescriptionBuilder;
import gov.hhs.fha.nhinc.patientdiscovery.aspect.PRPAIN201305UV02EventDescriptionBuilder;
import org.hl7.v3.MCCIIN000002UV01;
import org.hl7.v3.PRPAIN201305UV02;
/**
 * Template base class for handling inbound deferred Patient Discovery requests.
 * Concrete subclasses supply the actual processing step ({@link #process}) and
 * the auditor instance ({@link #getAuditLogger()}); this class wires together
 * the audit-request / process / audit-response sequence and provides the
 * adapter invocation helper.
 */
public abstract class AbstractInboundPatientDiscoveryDeferredRequest implements InboundPatientDiscoveryDeferredRequest {

    /** Factory producing the adapter-side deferred-request proxy. */
    private final AdapterPatientDiscoveryDeferredReqProxyObjectFactory proxyFactory;

    /**
     * @param factory factory used by {@link #sendToAdapter} to obtain the adapter proxy
     */
    public AbstractInboundPatientDiscoveryDeferredRequest(AdapterPatientDiscoveryDeferredReqProxyObjectFactory factory) {
        this.proxyFactory = factory;
    }

    /** Subclass hook: perform the actual deferred-request processing. */
    abstract MCCIIN000002UV01 process(PRPAIN201305UV02 request, AssertionType assertion);

    /** Subclass hook: supply the auditor used for all audit logging. */
    abstract PatientDiscoveryAuditor getAuditLogger();

    /**
     * Processes the PD Deferred request message. The inbound request is audited,
     * handed to {@link #process}, and the resulting acknowledgement is audited
     * before being returned.
     *
     * @param request the deferred Patient Discovery request
     * @param assertion assertion information for the message
     * @return MCCIIN000002UV01 acknowledgement
     */
    @InboundProcessingEvent(beforeBuilder = PRPAIN201305UV02EventDescriptionBuilder.class,
            afterReturningBuilder = MCCIIN000002UV01EventDescriptionBuilder.class,
            serviceType = "Patient Discovery Deferred Request",
            version = "1.0")
    public MCCIIN000002UV01 respondingGatewayPRPAIN201305UV02(PRPAIN201305UV02 request, AssertionType assertion) {
        // Audit the inbound request, process it, then audit the outbound ack.
        getAuditLogger().auditNhinDeferred201305(request, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION);
        MCCIIN000002UV01 ack = process(request, assertion);
        getAuditLogger().auditAck(ack, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION,
                NhincConstants.AUDIT_LOG_NHIN_INTERFACE);
        return ack;
    }

    /**
     * Forwards the deferred request to the adapter via a freshly obtained proxy.
     *
     * @param request the deferred Patient Discovery request
     * @param assertion assertion information for the message
     * @return the adapter's acknowledgement
     */
    protected MCCIIN000002UV01 sendToAdapter(PRPAIN201305UV02 request, AssertionType assertion) {
        return proxyFactory.getAdapterPatientDiscoveryDeferredReqProxy()
                .processPatientDiscoveryAsyncReq(request, assertion);
    }

    /** Audits a deferred request on its way out to the adapter. */
    protected void auditRequestToAdapter(PRPAIN201305UV02 request, AssertionType assertion) {
        getAuditLogger().auditAdapterDeferred201305(request, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION);
    }

    /** Audits an acknowledgement received back from the adapter. */
    protected void auditResponseFromAdapter(MCCIIN000002UV01 response, AssertionType assertion) {
        getAuditLogger().auditAck(response, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION,
                NhincConstants.AUDIT_LOG_ADAPTER_INTERFACE);
    }
}
|
Java
|
/*
-- MAGMA (version 2.1.0) --
Univ. of Tennessee, Knoxville
Univ. of California, Berkeley
Univ. of Colorado, Denver
@date August 2016
@author Stan Tomov
@author Hartwig Anzt
@precisions normal z -> s d c
*/
#include "magmasparse_internal.h"
#define PRECISION_z
#define COMPLEX
#define RTOLERANCE lapackf77_dlamch( "E" )
#define ATOLERANCE lapackf77_dlamch( "E" )
/**
Purpose
-------
Solves an eigenvalue problem
A * X = evalues X
where A is a complex sparse matrix stored in the GPU memory.
X and B are complex vectors stored on the GPU memory.
This is a GPU implementation of the LOBPCG method.
This method allocates all required memory space inside the routine.
Also, the memory is not allocated as one big chunk, but separately for
the different blocks. This allows the use of textures even for large matrices.
Arguments
---------
@param[in]
A magma_z_matrix
input matrix A
@param[in,out]
solver_par magma_z_solver_par*
solver parameters
@param[in,out]
precond_par magma_z_precond_par*
preconditioner parameters
@param[in]
queue magma_queue_t
Queue to execute in.
@ingroup magmasparse_zheev
********************************************************************/
extern "C" magma_int_t
magma_zlobpcg(
magma_z_matrix A,
magma_z_solver_par *solver_par,
magma_z_preconditioner *precond_par,
magma_queue_t queue )
{
magma_int_t info = 0;
#define residualNorms(i,iter) ( residualNorms + (i) + (iter)*n )
#define SWAP(x, y) { pointer = x; x = y; y = pointer; }
#define hresidualNorms(i,iter) (hresidualNorms + (i) + (iter)*n )
#define gramA( m, n) (gramA + (m) + (n)*ldgram)
#define gramB( m, n) (gramB + (m) + (n)*ldgram)
#define gevectors(m, n) (gevectors + (m) + (n)*ldgram)
#define h_gramB( m, n) (h_gramB + (m) + (n)*ldgram)
#define magma_z_bspmv_tuned(m, n, alpha, A, X, beta, AX, queue) { \
magma_z_matrix x={Magma_CSR}, ax={Magma_CSR}; \
x.memory_location = Magma_DEV; x.num_rows = m; x.num_cols = n; x.major = MagmaColMajor; x.nnz = m*n; x.dval = X; x.storage_type = Magma_DENSE; \
ax.memory_location= Magma_DEV; ax.num_rows = m; ax.num_cols = n; ax.major = MagmaColMajor; ax.nnz = m*n; ax.dval = AX; ax.storage_type = Magma_DENSE; \
CHECK( magma_z_spmv(alpha, A, x, beta, ax, queue )); \
}
//**************************************************************
// %Memory allocation for the eigenvectors, eigenvalues, and workspace
solver_par->solver = Magma_LOBPCG;
magma_int_t m = A.num_rows;
magma_int_t n = (solver_par->num_eigenvalues);
magmaDoubleComplex *blockX = solver_par->eigenvectors;
double *evalues = solver_par->eigenvalues;
solver_par->numiter = 0;
solver_par->spmv_count = 0;
magmaDoubleComplex *dwork=NULL, *hwork=NULL;
magmaDoubleComplex *blockP=NULL, *blockAP=NULL, *blockR=NULL, *blockAR=NULL, *blockAX=NULL, *blockW=NULL;
magmaDoubleComplex *gramA=NULL, *gramB=NULL, *gramM=NULL;
magmaDoubleComplex *gevectors=NULL, *h_gramB=NULL;
dwork = NULL;
hwork = NULL;
blockP = NULL;
blockR = NULL;
blockAP = NULL;
blockAR = NULL;
blockAX = NULL;
blockW = NULL;
gramA = NULL;
gramB = NULL;
gramM = NULL;
gevectors = NULL;
h_gramB = NULL;
magmaDoubleComplex *pointer, *origX = blockX;
double *eval_gpu=NULL;
magma_int_t iterationNumber, cBlockSize, restart = 1, iter;
//Chronometry
real_Double_t tempo1, tempo2;
magma_int_t lwork = max( 2*n+n*magma_get_dsytrd_nb(n),
1 + 6*3*n + 2* 3*n* 3*n);
magma_int_t *iwork={0}, liwork = 15*n+9;
magma_int_t gramDim, ldgram = 3*n, ikind = 3;
magmaDoubleComplex *hW={0};
// === Set solver parameters ===
double residualTolerance = solver_par->rtol;
magma_int_t maxIterations = solver_par->maxiter;
double tmp;
double r0=0; // set in 1st iteration
// === Set some constants & defaults ===
magmaDoubleComplex c_zero = MAGMA_Z_ZERO;
magmaDoubleComplex c_one = MAGMA_Z_ONE;
magmaDoubleComplex c_neg_one = MAGMA_Z_NEG_ONE;
double *residualNorms={0}, *condestGhistory={0}, condestG={0};
double *gevalues={0};
magma_int_t *activeMask={0};
double *hresidualNorms={0};
#ifdef COMPLEX
double *rwork={0};
magma_int_t lrwork = 1 + 5*(3*n) + 2*(3*n)*(3*n);
CHECK( magma_dmalloc_cpu(&rwork, lrwork));
#endif
CHECK( magma_zmalloc_pinned( &hwork , lwork ));
CHECK( magma_zmalloc( &blockAX , m*n ));
CHECK( magma_zmalloc( &blockAR , m*n ));
CHECK( magma_zmalloc( &blockAP , m*n ));
CHECK( magma_zmalloc( &blockR , m*n ));
CHECK( magma_zmalloc( &blockP , m*n ));
CHECK( magma_zmalloc( &blockW , m*n ));
CHECK( magma_zmalloc( &dwork , m*n ));
CHECK( magma_dmalloc( &eval_gpu , 3*n ));
//**********************************************************+
// === Check some parameters for possible quick exit ===
solver_par->info = MAGMA_SUCCESS;
if (m < 2)
info = MAGMA_DIVERGENCE;
else if (n > m)
info = MAGMA_SLOW_CONVERGENCE;
if (solver_par->info != 0) {
magma_xerbla( __func__, -(info) );
goto cleanup;
}
solver_par->info = info; // local info variable;
// === Allocate GPU memory for the residual norms' history ===
CHECK( magma_dmalloc(&residualNorms, (maxIterations+1) * n));
CHECK( magma_malloc( (void **)&activeMask, (n+1) * sizeof(magma_int_t) ));
// === Allocate CPU work space ===
CHECK( magma_dmalloc_cpu(&condestGhistory, maxIterations+1));
CHECK( magma_dmalloc_cpu(&gevalues, 3 * n));
CHECK( magma_malloc_cpu((void **)&iwork, liwork * sizeof(magma_int_t)));
CHECK( magma_zmalloc_pinned(&hW, n*n));
CHECK( magma_zmalloc_pinned(&gevectors, 9*n*n));
CHECK( magma_zmalloc_pinned(&h_gramB , 9*n*n));
// === Allocate GPU workspace ===
CHECK( magma_zmalloc(&gramM, n * n));
CHECK( magma_zmalloc(&gramA, 9 * n * n));
CHECK( magma_zmalloc(&gramB, 9 * n * n));
// === Set activemask to one ===
for(magma_int_t k =0; k<n; k++){
iwork[k]=1;
}
magma_setmatrix(n, 1, sizeof(magma_int_t), iwork, n , activeMask, n, queue);
#if defined(PRECISION_s)
ikind = 3;
#endif
// === Make the initial vectors orthonormal ===
magma_zgegqr_gpu(ikind, m, n, blockX, m, dwork, hwork, &info );
//magma_zorthomgs( m, n, blockX, queue );
magma_z_bspmv_tuned(m, n, c_one, A, blockX, c_zero, blockAX, queue );
solver_par->spmv_count++;
// === Compute the Gram matrix = (X, AX) & its eigenstates ===
magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, n, m,
c_one, blockX, m, blockAX, m, c_zero, gramM, n, queue );
magma_zheevd_gpu( MagmaVec, MagmaUpper,
n, gramM, n, evalues, hW, n, hwork, lwork,
#ifdef COMPLEX
rwork, lrwork,
#endif
iwork, liwork, &info );
// === Update X = X * evectors ===
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, blockX, m, gramM, n, c_zero, blockW, m, queue );
SWAP(blockW, blockX);
// === Update AX = AX * evectors ===
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, blockAX, m, gramM, n, c_zero, blockW, m, queue );
SWAP(blockW, blockAX);
condestGhistory[1] = 7.82;
tempo1 = magma_sync_wtime( queue );
// === Main LOBPCG loop ============================================================
for(iterationNumber = 1; iterationNumber < maxIterations; iterationNumber++)
{
// === compute the residuals (R = Ax - x evalues )
magmablas_zlacpy( MagmaFull, m, n, blockAX, m, blockR, m, queue );
/*
for(magma_int_t i=0; i<n; i++) {
magma_zaxpy( m, MAGMA_Z_MAKE(-evalues[i],0), blockX+i*m, 1, blockR+i*m, 1, queue );
}
*/
magma_dsetmatrix( 3*n, 1, evalues, 3*n, eval_gpu, 3*n, queue );
CHECK( magma_zlobpcg_res( m, n, eval_gpu, blockX, blockR, eval_gpu, queue ));
magmablas_dznrm2_cols( m, n, blockR, m, residualNorms(0, iterationNumber), queue );
// === remove the residuals corresponding to already converged evectors
CHECK( magma_zcompact(m, n, blockR, m,
residualNorms(0, iterationNumber), residualTolerance,
activeMask, &cBlockSize, queue ));
if (cBlockSize == 0)
break;
// === apply a preconditioner P to the active residuals: R_new = P R_old
// === for now set P to be identity (no preconditioner => nothing to be done )
//magmablas_zlacpy( MagmaFull, m, cBlockSize, blockR, m, blockW, m, queue );
//SWAP(blockW, blockR);
// preconditioner
magma_z_matrix bWv={Magma_CSR}, bRv={Magma_CSR};
bWv.memory_location = Magma_DEV; bWv.num_rows = m; bWv.num_cols = cBlockSize; bWv.major = MagmaColMajor; bWv.nnz = m*cBlockSize; bWv.dval = blockW;
bRv.memory_location = Magma_DEV; bRv.num_rows = m; bRv.num_cols = cBlockSize; bRv.major = MagmaColMajor; bRv.nnz = m*cBlockSize; bRv.dval = blockR;
CHECK( magma_z_applyprecond_left( MagmaNoTrans, A, bRv, &bWv, precond_par, queue ));
CHECK( magma_z_applyprecond_right( MagmaNoTrans, A, bWv, &bRv, precond_par, queue ));
// === make the preconditioned residuals orthogonal to X
if( precond_par->solver != Magma_NONE){
magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, cBlockSize, m,
c_one, blockX, m, blockR, m, c_zero, gramB(0,0), ldgram, queue );
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, cBlockSize, n,
c_neg_one, blockX, m, gramB(0,0), ldgram, c_one, blockR, m, queue );
}
// === make the active preconditioned residuals orthonormal
magma_zgegqr_gpu(ikind, m, cBlockSize, blockR, m, dwork, hwork, &info );
#if defined(PRECISION_s)
// re-orthogonalization
SWAP(blockX, dwork);
magma_zgegqr_gpu(ikind, m, cBlockSize, blockR, m, dwork, hwork, &info );
#endif
//magma_zorthomgs( m, cBlockSize, blockR, queue );
// === compute AR
magma_z_bspmv_tuned(m, cBlockSize, c_one, A, blockR, c_zero, blockAR, queue );
solver_par->spmv_count++;
if (!restart) {
// === compact P & AP as well
CHECK( magma_zcompactActive(m, n, blockP, m, activeMask, queue ));
CHECK( magma_zcompactActive(m, n, blockAP, m, activeMask, queue ));
/*
// === make P orthogonal to X ?
magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, cBlockSize, m,
c_one, blockX, m, blockP, m, c_zero, gramB(0,0), ldgram, queue );
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, cBlockSize, n,
c_neg_one, blockX, m, gramB(0,0), ldgram, c_one, blockP, m, queue );
// === make P orthogonal to R ?
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m,
c_one, blockR, m, blockP, m, c_zero, gramB(0,0), ldgram, queue );
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, cBlockSize, cBlockSize,
c_neg_one, blockR, m, gramB(0,0), ldgram, c_one, blockP, m, queue );
*/
// === Make P orthonormal & properly change AP (without multiplication by A)
magma_zgegqr_gpu(ikind, m, cBlockSize, blockP, m, dwork, hwork, &info );
#if defined(PRECISION_s)
// re-orthogonalization
SWAP(blockX, dwork);
magma_zgegqr_gpu(ikind, m, cBlockSize, blockP, m, dwork, hwork, &info );
#endif
//magma_zorthomgs( m, cBlockSize, blockP, queue );
//magma_z_bspmv_tuned(m, cBlockSize, c_one, A, blockP, c_zero, blockAP, queue );
magma_zsetmatrix( cBlockSize, cBlockSize, hwork, cBlockSize, dwork, cBlockSize, queue );
// replacement according to Stan
#if defined(PRECISION_s) || defined(PRECISION_d)
magmablas_ztrsm( MagmaRight, MagmaUpper, MagmaNoTrans, MagmaNonUnit,
m, cBlockSize, c_one, dwork, cBlockSize, blockAP, m, queue );
#else
magma_ztrsm( MagmaRight, MagmaUpper, MagmaNoTrans, MagmaNonUnit,
m, cBlockSize, c_one, dwork, cBlockSize, blockAP, m, queue );
#endif
}
iter = max( 1, iterationNumber - 10 - int(log(1.*cBlockSize)) );
double condestGmean = 0.;
for(magma_int_t i = 0; i<iterationNumber-iter+1; i++){
condestGmean += condestGhistory[i];
}
condestGmean = condestGmean / (iterationNumber-iter+1);
if (restart)
gramDim = n+cBlockSize;
else
gramDim = n+2*cBlockSize;
/* --- The Rayleigh-Ritz method for [X R P] -----------------------
[ X R P ]' [AX AR AP] y = evalues [ X R P ]' [ X R P ], i.e.,
GramA GramB
/ X'AX X'AR X'AP \ / X'X X'R X'P \
| R'AX R'AR R'AP | y = evalues | R'X R'R R'P |
\ P'AX P'AR P'AP / \ P'X P'R P'P /
----------------------------------------------------------------- */
// === assemble GramB; first, set it to I
magmablas_zlaset( MagmaFull, ldgram, ldgram, c_zero, c_one, gramB, ldgram, queue ); // identity
if (!restart) {
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m,
c_one, blockP, m, blockX, m, c_zero, gramB(n+cBlockSize,0), ldgram, queue );
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m,
c_one, blockP, m, blockR, m, c_zero, gramB(n+cBlockSize,n), ldgram, queue );
}
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m,
c_one, blockR, m, blockX, m, c_zero, gramB(n,0), ldgram, queue );
// === get GramB from the GPU to the CPU and compute its eigenvalues only
magma_zgetmatrix( gramDim, gramDim, gramB, ldgram, h_gramB, ldgram, queue );
lapackf77_zheev("N", "L", &gramDim, h_gramB, &ldgram, gevalues,
hwork, &lwork,
#ifdef COMPLEX
rwork,
#endif
&info);
// === check stability criteria if we need to restart
condestG = log10( gevalues[gramDim-1]/gevalues[0] ) + 1.;
if ((condestG/condestGmean>2 && condestG>2) || condestG>8) {
// Steepest descent restart for stability
restart=1;
printf("restart at step #%d\n", int(iterationNumber));
}
// === assemble GramA; first, set it to I
magmablas_zlaset( MagmaFull, ldgram, ldgram, c_zero, c_one, gramA, ldgram, queue ); // identity
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m,
c_one, blockR, m, blockAX, m, c_zero, gramA(n,0), ldgram, queue );
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m,
c_one, blockR, m, blockAR, m, c_zero, gramA(n,n), ldgram, queue );
if (!restart) {
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, n, m,
c_one, blockP, m, blockAX, m, c_zero,
gramA(n+cBlockSize,0), ldgram, queue );
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m,
c_one, blockP, m, blockAR, m, c_zero,
gramA(n+cBlockSize,n), ldgram, queue );
magma_zgemm( MagmaConjTrans, MagmaNoTrans, cBlockSize, cBlockSize, m,
c_one, blockP, m, blockAP, m, c_zero,
gramA(n+cBlockSize,n+cBlockSize), ldgram, queue );
}
/*
// === Compute X' AX or just use the eigenvalues below ?
magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, n, m,
c_one, blockX, m, blockAX, m, c_zero,
gramA(0,0), ldgram, queue );
*/
if (restart==0) {
magma_zgetmatrix( gramDim, gramDim, gramA, ldgram, gevectors, ldgram, queue );
}
else {
gramDim = n+cBlockSize;
magma_zgetmatrix( gramDim, gramDim, gramA, ldgram, gevectors, ldgram, queue );
}
for(magma_int_t k=0; k<n; k++)
*gevectors(k,k) = MAGMA_Z_MAKE(evalues[k], 0);
// === the previous eigensolver destroyed what is in h_gramB => must copy it again
magma_zgetmatrix( gramDim, gramDim, gramB, ldgram, h_gramB, ldgram, queue );
magma_int_t itype = 1;
lapackf77_zhegvd(&itype, "V", "L", &gramDim,
gevectors, &ldgram, h_gramB, &ldgram,
gevalues, hwork, &lwork,
#ifdef COMPLEX
rwork, &lrwork,
#endif
iwork, &liwork, &info);
for(magma_int_t k =0; k<n; k++)
evalues[k] = gevalues[k];
// === copy back the result to gramA on the GPU and use it for the updates
magma_zsetmatrix( gramDim, gramDim, gevectors, ldgram, gramA, ldgram, queue );
if (restart == 0) {
// === contribution from P to the new X (in new search direction P)
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize,
c_one, blockP, m, gramA(n+cBlockSize,0), ldgram, c_zero, dwork, m, queue );
SWAP(dwork, blockP);
// === contribution from R to the new X (in new search direction P)
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize,
c_one, blockR, m, gramA(n,0), ldgram, c_one, blockP, m, queue );
// === corresponding contribution from AP to the new AX (in AP)
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize,
c_one, blockAP, m, gramA(n+cBlockSize,0), ldgram, c_zero, dwork, m, queue );
SWAP(dwork, blockAP);
// === corresponding contribution from AR to the new AX (in AP)
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize,
c_one, blockAR, m, gramA(n,0), ldgram, c_one, blockAP, m, queue );
}
else {
// === contribution from R (only) to the new X
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, cBlockSize,
c_one, blockR, m, gramA(n,0), ldgram, c_zero, blockP, m, queue );
// === corresponding contribution from AR (only) to the new AX
magma_zgemm( MagmaNoTrans, MagmaNoTrans,m, n, cBlockSize,
c_one, blockAR, m, gramA(n,0), ldgram, c_zero, blockAP, m, queue );
}
// === contribution from old X to the new X + the new search direction P
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, blockX, m, gramA, ldgram, c_zero, dwork, m, queue );
SWAP(dwork, blockX);
//magma_zaxpy( m*n, c_one, blockP, 1, blockX, 1, queue );
CHECK( magma_zlobpcg_maxpy( m, n, blockP, blockX, queue ));
// === corresponding contribution from old AX to new AX + AP
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, blockAX, m, gramA, ldgram, c_zero, dwork, m, queue );
SWAP(dwork, blockAX);
//magma_zaxpy( m*n, c_one, blockAP, 1, blockAX, 1, queue );
CHECK( magma_zlobpcg_maxpy( m, n, blockAP, blockAX, queue ));
condestGhistory[iterationNumber+1]=condestG;
magma_dgetmatrix( 1, 1, residualNorms(0, iterationNumber), 1, &tmp, 1, queue );
if ( iterationNumber == 1 ) {
solver_par->init_res = tmp;
r0 = tmp * solver_par->rtol;
if ( r0 < ATOLERANCE )
r0 = ATOLERANCE;
}
solver_par->final_res = tmp;
if ( tmp < r0 ) {
break;
}
if (cBlockSize == 0) {
break;
}
if ( solver_par->verbose!=0 ) {
if ( iterationNumber%solver_par->verbose == 0 ) {
// double res;
// magma_zgetmatrix( 1, 1,
// (magmaDoubleComplex*)residualNorms(0, iterationNumber), 1,
// (magmaDoubleComplex*)&res, 1, queue );
//
// printf("Iteration %4d, CBS %4d, Residual: %10.7f\n",
// iterationNumber, cBlockSize, res);
printf("%4d-%2d ", int(iterationNumber), int(cBlockSize));
magma_dprint_gpu(1, n, residualNorms(0, iterationNumber), 1);
}
}
restart = 0;
} // === end for iterationNumber = 1,maxIterations =======================
// fill solver info
tempo2 = magma_sync_wtime( queue );
solver_par->runtime = (real_Double_t) tempo2-tempo1;
solver_par->numiter = iterationNumber;
if ( solver_par->numiter < solver_par->maxiter) {
info = MAGMA_SUCCESS;
} else if ( solver_par->init_res > solver_par->final_res )
info = MAGMA_SLOW_CONVERGENCE;
else
info = MAGMA_DIVERGENCE;
// =============================================================================
// === postprocessing;
// =============================================================================
// === compute the real AX and corresponding eigenvalues
magma_z_bspmv_tuned(m, n, c_one, A, blockX, c_zero, blockAX, queue );
magma_zgemm( MagmaConjTrans, MagmaNoTrans, n, n, m,
c_one, blockX, m, blockAX, m, c_zero, gramM, n, queue );
magma_zheevd_gpu( MagmaVec, MagmaUpper,
n, gramM, n, gevalues, dwork, n, hwork, lwork,
#ifdef COMPLEX
rwork, lrwork,
#endif
iwork, liwork, &info );
for(magma_int_t k =0; k<n; k++)
evalues[k] = gevalues[k];
// === update X = X * evectors
SWAP(blockX, dwork);
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, dwork, m, gramM, n, c_zero, blockX, m, queue );
// === update AX = AX * evectors to compute the final residual
SWAP(blockAX, dwork);
magma_zgemm( MagmaNoTrans, MagmaNoTrans, m, n, n,
c_one, dwork, m, gramM, n, c_zero, blockAX, m, queue );
// === compute R = AX - evalues X
magmablas_zlacpy( MagmaFull, m, n, blockAX, m, blockR, m, queue );
for(magma_int_t i=0; i<n; i++)
magma_zaxpy( m, MAGMA_Z_MAKE(-evalues[i], 0), blockX+i*m, 1, blockR+i*m, 1, queue );
// === residualNorms[iterationNumber] = || R ||
magmablas_dznrm2_cols( m, n, blockR, m, residualNorms(0, iterationNumber), queue );
// === restore blockX if needed
if (blockX != origX)
magmablas_zlacpy( MagmaFull, m, n, blockX, m, origX, m, queue );
printf("Eigenvalues:\n");
for(magma_int_t i =0; i<n; i++)
printf("%e ", evalues[i]);
printf("\n\n");
printf("Final residuals:\n");
magma_dprint_gpu(1, n, residualNorms(0, iterationNumber), 1);
printf("\n\n");
//=== Prmagma_int_t residual history in a file for plotting ====
CHECK( magma_dmalloc_cpu(&hresidualNorms, (iterationNumber+1) * n));
magma_dgetmatrix( n, iterationNumber,
residualNorms, n,
hresidualNorms, n, queue );
solver_par->iter_res = *hresidualNorms(0, iterationNumber-1);
printf("Residuals are stored in file residualNorms\n");
printf("Plot the residuals using: myplot \n");
FILE *residuals_file;
residuals_file = fopen("residualNorms", "w");
for(magma_int_t i =1; i<iterationNumber; i++) {
for(magma_int_t j = 0; j<n; j++)
fprintf(residuals_file, "%f ", *hresidualNorms(j,i));
fprintf(residuals_file, "\n");
}
fclose(residuals_file);
cleanup:
magma_free_cpu(hresidualNorms);
// === free work space
magma_free( residualNorms );
magma_free_cpu( condestGhistory );
magma_free_cpu( gevalues );
magma_free_cpu( iwork );
magma_free_pinned( hW );
magma_free_pinned( gevectors );
magma_free_pinned( h_gramB );
magma_free( gramM );
magma_free( gramA );
magma_free( gramB );
magma_free( activeMask );
if (blockX != (solver_par->eigenvectors))
magma_free( blockX );
if (blockAX != (solver_par->eigenvectors))
magma_free( blockAX );
if (blockAR != (solver_par->eigenvectors))
magma_free( blockAR );
if (blockAP != (solver_par->eigenvectors))
magma_free( blockAP );
if (blockR != (solver_par->eigenvectors))
magma_free( blockR );
if (blockP != (solver_par->eigenvectors))
magma_free( blockP );
if (blockW != (solver_par->eigenvectors))
magma_free( blockW );
if (dwork != (solver_par->eigenvectors))
magma_free( dwork );
magma_free( eval_gpu );
magma_free_pinned( hwork );
#ifdef COMPLEX
magma_free_cpu( rwork );
rwork = NULL;
#endif
return info;
}
|
Java
|
import * as fs from "fs"
import * as path from "path"
import * as ts from "typescript"
const coffee = require("coffeescript")
const less = require("less")
import {argv} from "yargs"
import {collect_deps} from "./dependencies"
const mkCoffeescriptError = (error: any, file?: string) => {
  // Normalize a CoffeeScript compiler error into a plain, serializable object.
  const message = error.message
  const source = file || "<string>"
  const loc = error.location
  if (loc == null) {
    // No position information available: report just "<file>:<message>".
    return {message, text: `${source}:${message}`}
  }
  // CoffeeScript locations are zero-based; report one-based line/column.
  const line = loc.first_line + 1
  const column = loc.first_column + 1
  const text = `${source}:${line}:${column}:${message}`
  // The caret marker spans the offending token when it stays on one line.
  let markerLen = 2
  if (loc.first_line === loc.last_line)
    markerLen += loc.last_column - loc.first_column
  const extract = error.code.split("\n")[line - 1]
  const annotated = [
    text,
    " " + extract,
    " " + " ".repeat(column - 1) + "^".repeat(markerLen - 1),
  ].join("\n")
  return {message, line, column, text, extract, annotated}
}
const mkLessError = (error: any, file?: string) => {
  // Normalize a LESS renderer error into a plain, serializable object.
  const {message, line} = error
  // less reports zero-based columns; report one-based.
  const column = error.column + 1
  const text = `${file || "<string>"}:${line}:${column}:${message}`
  // `error.extract` holds source lines indexed by line number.
  const extract = error.extract[line]
  const annotated = `${text}\n ${extract}`
  return {message, line, column, text, extract, annotated}
}
// Write a single JSON-encoded message to stdout, terminated by a newline.
// This is the only communication channel back to the parent process.
const reply = (data: any) => {
  process.stdout.write(JSON.stringify(data))
  process.stdout.write("\n")
}
type Files = {[name: string]: string}
// Compile the given in-memory TypeScript modules against bokehjs' bundled
// sources and type definitions. Returns every emitted JavaScript file keyed
// by output path; when diagnostics are produced, `error` additionally
// carries the formatted messages (emit still happens: noEmitOnError=false).
function compile_typescript(inputs: Files, bokehjs_dir: string): {outputs: Files, error?: string} {
  const options: ts.CompilerOptions = {
    noImplicitAny: true,
    noImplicitThis: true,
    noImplicitReturns: true,
    noUnusedLocals: true,
    noUnusedParameters: true,
    strictNullChecks: true,
    strictBindCallApply: false,
    strictFunctionTypes: false,
    strictPropertyInitialization: false,
    alwaysStrict: true,
    noErrorTruncation: true,
    noEmitOnError: false,
    declaration: false,
    sourceMap: false,
    importHelpers: false,
    experimentalDecorators: true,
    module: ts.ModuleKind.CommonJS,
    moduleResolution: ts.ModuleResolutionKind.NodeJs,
    target: ts.ScriptTarget.ES5,
    lib: [
      "lib.es5.d.ts",
      "lib.dom.d.ts",
      "lib.es2015.core.d.ts",
      "lib.es2015.promise.d.ts",
      "lib.es2015.symbol.d.ts",
      "lib.es2015.iterable.d.ts",
    ],
    types: [],
    baseUrl: ".",
    // Resolve bare module imports against bokehjs' shipped js/lib and
    // js/types directories.
    paths: {
      "*": [
        path.join(bokehjs_dir, "js/lib/*"),
        path.join(bokehjs_dir, "js/types/*"),
      ],
    },
  }
  // Custom compiler host: reads resolve from the in-memory `inputs` map
  // first and fall back to the real file system for everything else.
  const host: ts.CompilerHost = {
    getDefaultLibFileName: () => "lib.d.ts",
    getDefaultLibLocation: () => {
      // bokeh/server/static or bokehjs/build
      if (path.basename(bokehjs_dir) == "static")
        return path.join(bokehjs_dir, "lib")
      else
        return path.join(path.dirname(bokehjs_dir), "node_modules/typescript/lib")
    },
    getCurrentDirectory: () => ts.sys.getCurrentDirectory(),
    getDirectories: (path) => ts.sys.getDirectories(path),
    getCanonicalFileName: (name) => ts.sys.useCaseSensitiveFileNames ? name : name.toLowerCase(),
    useCaseSensitiveFileNames: () => ts.sys.useCaseSensitiveFileNames,
    getNewLine: () => ts.sys.newLine,
    fileExists(name: string): boolean {
      return inputs[name] != null || ts.sys.fileExists(name)
    },
    readFile(name: string): string | undefined {
      return inputs[name] != null ? inputs[name] : ts.sys.readFile(name)
    },
    writeFile(name, content): void {
      ts.sys.writeFile(name, content)
    },
    getSourceFile(name: string, target: ts.ScriptTarget, _onError?: (message: string) => void) {
      const source = inputs[name] != null ? inputs[name] : ts.sys.readFile(name)
      return source !== undefined ? ts.createSourceFile(name, source, target) : undefined
    },
  }
  const program = ts.createProgram(Object.keys(inputs), options, host)
  // Capture emitted files in memory instead of letting them hit the disk.
  const outputs: Files = {}
  const emitted = program.emit(undefined, (name, output) => outputs[name] = output)
  const diagnostics = ts.getPreEmitDiagnostics(program).concat(emitted.diagnostics)
  if (diagnostics.length == 0)
    return {outputs}
  else {
    const format_host: ts.FormatDiagnosticsHost = {
      getCanonicalFileName: (path) => path,
      getCurrentDirectory: ts.sys.getCurrentDirectory,
      getNewLine: () => ts.sys.newLine,
    }
    const error = ts.formatDiagnosticsWithColorAndContext(
      ts.sortAndDeduplicateDiagnostics(diagnostics), format_host)
    return {outputs, error}
  }
}
// Transpile a single JavaScript module to ES5/CommonJS (no type checking).
// On diagnostics, returns the (still emitted) output plus a formatted error.
function compile_javascript(file: string, code: string): {output: string, error?: string} {
  const transpiled = ts.transpileModule(code, {
    fileName: file,
    reportDiagnostics: true,
    compilerOptions: {
      target: ts.ScriptTarget.ES5,
      module: ts.ModuleKind.CommonJS,
    },
  })
  const {outputText: output, diagnostics} = transpiled
  if (diagnostics == null || diagnostics.length == 0)
    return {output}
  const format_host: ts.FormatDiagnosticsHost = {
    getCanonicalFileName: (path) => path,
    getCurrentDirectory: ts.sys.getCurrentDirectory,
    getNewLine: () => ts.sys.newLine,
  }
  const error = ts.formatDiagnosticsWithColorAndContext(
    ts.sortAndDeduplicateDiagnostics(diagnostics), format_host)
  return {output, error}
}
// Rebuild path `p`, substituting its directory and/or extension when the
// corresponding option is provided.
function rename(p: string, options: {dir?: string, ext?: string}): string {
  const parsed = path.parse(p)
  const dir = options.dir != null ? options.dir : parsed.dir
  const ext = options.ext != null ? options.ext : parsed.ext
  return path.format({dir, name: parsed.name, ext})
}
// Use forward slashes uniformly, so Windows-style paths compare equal to
// the POSIX-style keys used elsewhere.
function normalize(path: string): string {
  return path.split("\\").join("/")
}
// Compile `input.code` according to `input.lang`, collect its module
// dependencies, and reply on stdout with either {code, deps} or {error}.
// For "less" the reply happens asynchronously inside the render callback.
const compile_and_resolve_deps = (input: {code: string, lang: string, file: string, bokehjs_dir: string}) => {
  const {file, lang, bokehjs_dir} = input
  let {code} = input
  let output: string
  switch (lang) {
    case "typescript":
      const inputs = {[normalize(file)]: code}
      const result = compile_typescript(inputs, bokehjs_dir)
      if (result.error == null)
        output = result.outputs[normalize(rename(file, {ext: ".js"}))]
      else
        return reply({error: result.error})
      break
    case "coffeescript":
      try {
        code = coffee.compile(code, {bare: true, shiftLine: true})
      } catch (error) {
        return reply({error: mkCoffeescriptError(error, file)})
      }
      // NOTE: intentional fall-through — the CoffeeScript output is plain
      // JavaScript and is then transpiled by the "javascript" case below.
    case "javascript": {
      const result = compile_javascript(file, code)
      if (result.error == null)
        output = result.output
      else
        return reply({error: result.error})
      break
    }
    case "less":
      const options = {
        paths: [path.dirname(file)],
        compress: true,
        ieCompat: false,
      }
      // Asynchronous: the reply is issued from the callback, and no
      // dependency collection is performed for stylesheets.
      less.render(code, options, (error: any, output: any) => {
        if (error != null)
          reply({error: mkLessError(error, file)})
        else
          reply({code: output.css})
      })
      return
    default:
      throw new Error(`unsupported input type: ${lang}`)
  }
  // Parse the compiled JavaScript to discover its require()/import deps.
  const source = ts.createSourceFile(file, output, ts.ScriptTarget.ES5, true, ts.ScriptKind.JS)
  const deps = collect_deps(source)
  return reply({code: output, deps})
}
// Entry point. Two modes:
//  * --file (plus optional --lang/--bokehjs-dir): compile a file from disk;
//  * otherwise: read a JSON request {code, lang, file, bokehjs_dir} on stdin.
if (argv.file != null) {
  const input = {
    code: fs.readFileSync(argv.file as string, "utf-8"),
    lang: (argv.lang as string | undefined) || "coffeescript",
    file: argv.file as string,
    bokehjs_dir: (argv.bokehjsDir as string | undefined) || "./build", // this is what bokeh.settings defaults to
  }
  compile_and_resolve_deps(input)
} else {
  // Accumulate all of stdin before parsing; the parent closes the pipe
  // to signal the request is complete.
  const stdin = process.stdin
  stdin.resume()
  stdin.setEncoding("utf-8")
  let data = ""
  stdin.on("data", (chunk: string) => data += chunk)
  stdin.on("end", () => compile_and_resolve_deps(JSON.parse(data)))
}
|
Java
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUVMResync(NURESTObject):
    """ Represents a VMResync in the VSD.

        Notes:
            Provide information about the state of a VM resync request.
    """

    # REST resource naming used by bambou when building request URLs.
    __rest_name__ = "resync"
    __resource_name__ = "resync"

    ## Constants — allowed values for `status` and `entity_scope`.
    CONST_STATUS_IN_PROGRESS = "IN_PROGRESS"
    CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
    CONST_STATUS_SUCCESS = "SUCCESS"
    CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"

    def __init__(self, **kwargs):
        """ Initializes a VMResync instance

            Notes:
                You can specify all parameters while calling this methods.
                A special argument named `data` will enable you to load the
                object from a Python dictionary

            Examples:
                >>> vmresync = NUVMResync(id=u'xxxx-xxx-xxx-xxx', name=u'VMResync')
                >>> vmresync = NUVMResync(data=my_dict)
        """

        super(NUVMResync, self).__init__()

        # Read/Write Attributes — backing storage for the exposed properties.
        self._last_request_timestamp = None
        self._last_time_resync_initiated = None
        self._last_updated_by = None
        self._last_updated_date = None
        self._embedded_metadata = None
        self._entity_scope = None
        self._creation_date = None
        self._status = None
        self._owner = None
        self._external_id = None

        # Map local snake_case attributes onto the VSD API's camelCase names.
        self.expose_attribute(local_name="last_request_timestamp", remote_name="lastRequestTimestamp", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_time_resync_initiated", remote_name="lastTimeResyncInitiated", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
        self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
        self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="status", remote_name="status", attribute_type=str, is_required=False, is_unique=False, choices=[u'IN_PROGRESS', u'SUCCESS'])
        self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)

        # Fetchers — lazy accessors for child collections.
        self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
        self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
        self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self._compute_args(**kwargs)

    # Properties

    @property
    def last_request_timestamp(self):
        """Time of the last timestamp received (`lastRequestTimestamp` in the VSD API)."""
        return self._last_request_timestamp

    @last_request_timestamp.setter
    def last_request_timestamp(self, value):
        """Set last_request_timestamp value."""
        self._last_request_timestamp = value

    @property
    def last_time_resync_initiated(self):
        """Time that the resync was initiated (`lastTimeResyncInitiated` in the VSD API)."""
        return self._last_time_resync_initiated

    @last_time_resync_initiated.setter
    def last_time_resync_initiated(self, value):
        """Set last_time_resync_initiated value."""
        self._last_time_resync_initiated = value

    @property
    def last_updated_by(self):
        """ID of the user who last updated the object (`lastUpdatedBy` in the VSD API)."""
        return self._last_updated_by

    @last_updated_by.setter
    def last_updated_by(self, value):
        """Set last_updated_by value."""
        self._last_updated_by = value

    @property
    def last_updated_date(self):
        """Time stamp when this object was last updated (`lastUpdatedDate` in the VSD API)."""
        return self._last_updated_date

    @last_updated_date.setter
    def last_updated_date(self, value):
        """Set last_updated_date value."""
        self._last_updated_date = value

    @property
    def embedded_metadata(self):
        """Metadata objects associated with this entity (`embeddedMetadata` in the VSD API).

        Populated only when the API request enables the embedded Metadata
        feature; capped by a system-configuration limit.
        """
        return self._embedded_metadata

    @embedded_metadata.setter
    def embedded_metadata(self, value):
        """Set embedded_metadata value."""
        self._embedded_metadata = value

    @property
    def entity_scope(self):
        """Whether the entity's scope is Data center or Enterprise level (`entityScope` in the VSD API)."""
        return self._entity_scope

    @entity_scope.setter
    def entity_scope(self, value):
        """Set entity_scope value."""
        self._entity_scope = value

    @property
    def creation_date(self):
        """Time stamp when this object was created (`creationDate` in the VSD API)."""
        return self._creation_date

    @creation_date.setter
    def creation_date(self, value):
        """Set creation_date value."""
        self._creation_date = value

    @property
    def status(self):
        """Status of the resync (IN_PROGRESS or SUCCESS)."""
        return self._status

    @status.setter
    def status(self, value):
        """Set status value."""
        self._status = value

    @property
    def owner(self):
        """Identifies the user that has created this object."""
        return self._owner

    @owner.setter
    def owner(self, value):
        """Set owner value."""
        self._owner = value

    @property
    def external_id(self):
        """External object ID for third-party integration (`externalID` in the VSD API)."""
        return self._external_id

    @external_id.setter
    def external_id(self, value):
        """Set external_id value."""
        self._external_id = value
|
Java
|
package glasskey.spray.model
import glasskey.config.OAuthConfig
import spray.http.HttpHeader
// Mixin providing spray-client pipelines that attach OAuth access-token
// (and optionally id-token) headers to outgoing requests.
trait OAuthAction {
  import akka.actor.ActorSystem
  import spray.client.pipelining._
  import spray.http.HttpHeaders.RawHeader
  import spray.http.HttpRequest
  import spray.http.parser.HttpParser
  import spray.httpx.encoding.{Deflate, Gzip}
  import spray.httpx.unmarshalling.FromResponseUnmarshaller
  import scala.concurrent.{ExecutionContext, Future}
  // NOTE(review): every concrete instance mixing in this trait creates its
  // own ActorSystem, which is heavyweight and never shut down here —
  // consider injecting a shared system; verify against the app lifecycle.
  implicit val system = ActorSystem()
  implicit def executor: ExecutionContext = system.dispatcher
  // Pipeline: add auth headers -> gzip the request -> send -> decode the
  // response (deflate then gzip) -> unmarshal to `T`.
  def getHeaderedPipeline[T](token: String,
                             id_token: Option[String] = None, addlHdrs : Option[List[HttpHeader]] = None)
                            (implicit evidence: FromResponseUnmarshaller[T]): HttpRequest => Future[T] =
    (getHeaders(token, id_token, addlHdrs)
      ~> encode(Gzip)
      ~> sendReceive
      ~> decode(Deflate) ~> decode(Gzip)
      ~> unmarshal[T])
  // Overload using header names/prefixes from the configured OAuth provider.
  def getHeaders(accessToken: String, id_token: Option[String] = None,
                 addlHdrs : Option[List[HttpHeader]] = None): RequestTransformer =
    getHeaders(accessToken, id_token, OAuthConfig.providerConfig.authHeaderName,
      OAuthConfig.providerConfig.authHeaderPrefix,
      OAuthConfig.providerConfig.idHeaderName,
      OAuthConfig.providerConfig.idHeaderPrefix, addlHdrs)
  def getHeaders(accessToken: String, id_token: Option[String], authHdrName: String, authHdrPrefix: String,
                 idHdrName: String, idHdrPrefix: String, addlHdrs : Option[List[HttpHeader]]): RequestTransformer =
    addHeaders(getHttpHeaders(accessToken, id_token, authHdrName, authHdrPrefix,
      idHdrName, idHdrPrefix, addlHdrs))
  // Builds the header list. Each RawHeader is run through HttpParser so
  // well-known headers become typed; unparseable ones keep the raw form.
  def getHttpHeaders(accessToken: String, id_token: Option[String], authHdrName: String, authHdrPrefix: String,
                     idHdrName: String, idHdrPrefix: String, addlHdrs : Option[List[HttpHeader]]): List[HttpHeader] = {
    val hdrs = id_token match {
      case Some(idTokenStr) =>
        // NOTE(review): with an id token the auth value is "<prefix> <token>"
        // (space-separated), without one it is "<prefix><token>" (no space) —
        // confirm this asymmetry is intended.
        val authHeader = RawHeader(s"${authHdrName}", s"${authHdrPrefix} $accessToken")
        val idTokenHeader = RawHeader(s"${idHdrName}", s"${idHdrPrefix} $idTokenStr")
        List(
          HttpParser.parseHeader(authHeader).left.flatMap(_ ⇒ Right(authHeader)).right.get,
          HttpParser.parseHeader(idTokenHeader).left.flatMap(_ ⇒ Right(idTokenHeader)).right.get)
      case None => val rawHeader = RawHeader(authHdrName, s"${authHdrPrefix}$accessToken")
        List(HttpParser.parseHeader(rawHeader).left.flatMap(_ ⇒ Right(rawHeader)).right.get)
    }
    hdrs ++ addlHdrs.toList.flatten
  }
}
|
Java
|
from mock import patch
from nose.tools import eq_
from helper import TestCase
import appvalidator.submain as submain
class TestSubmainPackage(TestCase):
    """Tests for appvalidator.submain.test_package."""

    @patch("appvalidator.submain.test_inner_package",
           lambda x, z: "success")
    def test_package_pass(self):
        "Tests the test_package function with simple data"
        self.setup_err()
        name = "tests/resources/submain/install_rdf.xpi"
        with open(name) as pack:
            result = submain.test_package(self.err, pack, name)
        self.assert_silent()
        eq_(result, "success")

    @patch("appvalidator.submain.test_inner_package",
           lambda x, z: "success")
    def test_package_not_a_zip(self):
        "Tests the test_package function fails with a non-zip"
        # NOTE: this method used to share the name `test_package_corrupt`
        # with the test below, so Python's class-body binding silently
        # discarded it and it never ran. Renamed so both tests execute.
        self.setup_err()
        name = "tests/resources/junk.xpi"
        with open(name) as pack:
            result = submain.test_package(self.err, pack, name)
        self.assert_failed()

    def test_package_corrupt(self):
        "Tests the test_package function fails with a corrupt file"
        self.setup_err()
        name = "tests/resources/corrupt.xpi"
        result = submain.test_package(self.err, name, name)
        self.assert_failed(with_errors=True, with_warnings=True)
|
Java
|
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="scaffolded-by" content="https://github.com/google/stagehand">
<title>futures_api</title>
<link rel="stylesheet" href="styles.css">
<link rel="icon" href="favicon.ico">
<script defer src="main.dart.js"></script>
</head>
<body>
<div id="output"></div>
</body>
</html>
|
Java
|
Directory for interactive development of c2po plots.
Install deps via:
bundle install
then run
bundle exec guard
Then execute some of the code in `scratch.rb` and view your plots at `http://localhost:3000/`.
|
Java
|
package com.mistraltech.smogen.codegenerator.javabuilder;
/**
 * Builds the source text of an interface method declaration: a method
 * signature terminated by a semicolon instead of a body.
 */
public class InterfaceMethodBuilder extends MethodSignatureBuilder<InterfaceMethodBuilder> {
    // Instances are created via the anInterfaceMethod() factory method.
    private InterfaceMethodBuilder() {
    }

    /** Creates a new builder for an interface method. */
    public static InterfaceMethodBuilder anInterfaceMethod() {
        return new InterfaceMethodBuilder();
    }

    @Override
    public String build(JavaBuilderContext context) {
        // The signature from the superclass, closed with ";".
        return super.build(context) + ";";
    }
}
|
Java
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_CLOUD_DEVICES_COMMON_PRINTER_DESCRIPTION_H_
#define COMPONENTS_CLOUD_DEVICES_COMMON_PRINTER_DESCRIPTION_H_
#include <stdint.h>
#include <string>
#include <vector>
#include "base/logging.h"
#include "build/build_config.h"
#include "components/cloud_devices/common/description_items.h"
// Defines printer options, CDD and CJT items.
// https://developers.google.com/cloud-print/docs/cdd
namespace cloud_devices {
namespace printer {
// Forward declarations for the "select"-type vendor capability, a
// SelectionCapability specialization (see description_items.h).
struct SelectVendorCapabilityOption;
class SelectVendorCapabilityTraits;
typedef SelectionCapability<SelectVendorCapabilityOption,
                            SelectVendorCapabilityTraits>
    SelectVendorCapability;
// Content type string as used by the CDD (e.g. a MIME type — see CDD docs).
typedef std::string ContentType;
// "copies" capability: default and maximum number of copies.
struct Copies {
  // Default requested number of copies.
  int32_t default_value = 1;
  // Maximum number of copies supported, sourced from
  // PrinterSemanticCapsAndDefaults.copies_max.
  int32_t max_value = 1;
};
// Orientation of the back side of a sheet for two-sided PWG raster output.
enum class DocumentSheetBack {
  NORMAL,
  ROTATED,
  MANUAL_TUMBLE,
  FLIPPED,
};
// PWG raster document color/depth types. The explicit values are presumably
// part of the serialized/wire format — do not renumber.
enum class PwgDocumentTypeSupported {
  SGRAY_8 = 22,
  SRGB_8 = 23,
};
// "pwg_raster_config" capability: PWG raster transform options.
struct PwgRasterConfig {
  PwgRasterConfig();
  ~PwgRasterConfig();
  std::vector<PwgDocumentTypeSupported> document_types_supported;
  DocumentSheetBack document_sheet_back;
  bool reverse_order_streaming;
  bool rotate_all_pages;
};
// Vendor-defined capability expressed as a numeric [min, max] range with an
// optional default. Move-only (copying is disallowed).
class RangeVendorCapability {
 public:
  enum class ValueType {
    FLOAT,
    INTEGER,
  };
  RangeVendorCapability();
  RangeVendorCapability(ValueType value_type,
                        const std::string& min_value,
                        const std::string& max_value);
  RangeVendorCapability(ValueType value_type,
                        const std::string& min_value,
                        const std::string& max_value,
                        const std::string& default_value);
  RangeVendorCapability(RangeVendorCapability&& other);
  ~RangeVendorCapability();
  RangeVendorCapability& operator=(RangeVendorCapability&& other);
  bool operator==(const RangeVendorCapability& other) const;
  bool operator!=(const RangeVendorCapability& other) const {
    return !(*this == other);
  }
  bool IsValid() const;
  // Deserializes from / serializes to the CDD dictionary representation.
  bool LoadFrom(const base::Value& dict);
  void SaveTo(base::Value* dict) const;
 private:
  ValueType value_type_;
  // Bounds and default kept as strings, as they appear in the CDD.
  std::string min_value_;
  std::string max_value_;
  std::string default_value_;
  DISALLOW_COPY_AND_ASSIGN(RangeVendorCapability);
};
// One selectable option of a "select"-type vendor capability: a value plus
// its human-readable display name.
struct SelectVendorCapabilityOption {
  SelectVendorCapabilityOption();
  SelectVendorCapabilityOption(const std::string& value,
                               const std::string& display_name);
  ~SelectVendorCapabilityOption();
  bool IsValid() const;
  bool operator==(const SelectVendorCapabilityOption& other) const;
  bool operator!=(const SelectVendorCapabilityOption& other) const {
    return !(*this == other);
  }
  std::string value;
  std::string display_name;
};
// Vendor-defined capability holding a single typed value with an optional
// default. Move-only (copying is disallowed).
class TypedValueVendorCapability {
 public:
  enum class ValueType {
    BOOLEAN,
    FLOAT,
    INTEGER,
    STRING,
  };
  TypedValueVendorCapability();
  explicit TypedValueVendorCapability(ValueType value_type);
  TypedValueVendorCapability(ValueType value_type,
                             const std::string& default_value);
  TypedValueVendorCapability(TypedValueVendorCapability&& other);
  ~TypedValueVendorCapability();
  TypedValueVendorCapability& operator=(TypedValueVendorCapability&& other);
  bool operator==(const TypedValueVendorCapability& other) const;
  bool operator!=(const TypedValueVendorCapability& other) const {
    return !(*this == other);
  }
  bool IsValid() const;
  // Deserializes from / serializes to the CDD dictionary representation.
  bool LoadFrom(const base::Value& dict);
  void SaveTo(base::Value* dict) const;
 private:
  ValueType value_type_;
  // Default kept as a string, as it appears in the CDD.
  std::string default_value_;
  DISALLOW_COPY_AND_ASSIGN(TypedValueVendorCapability);
};
// A single vendor-defined capability: a tagged union over the range, select
// and typed-value capability kinds, identified by `id_` and `display_name_`.
// Move-only (copying is disallowed).
class VendorCapability {
 public:
  enum class Type {
    NONE,
    RANGE,
    SELECT,
    TYPED_VALUE,
  };
  VendorCapability();
  VendorCapability(const std::string& id,
                   const std::string& display_name,
                   RangeVendorCapability range_capability);
  VendorCapability(const std::string& id,
                   const std::string& display_name,
                   SelectVendorCapability select_capability);
  VendorCapability(const std::string& id,
                   const std::string& display_name,
                   TypedValueVendorCapability typed_value_capability);
  VendorCapability(VendorCapability&& other);
  ~VendorCapability();
  bool operator==(const VendorCapability& other) const;
  bool operator!=(const VendorCapability& other) const {
    return !(*this == other);
  }
  bool IsValid() const;
  // Deserializes from / serializes to the CDD dictionary representation.
  bool LoadFrom(const base::Value& dict);
  void SaveTo(base::Value* dict) const;
 private:
  // Destroys the active union member (required for a non-trivial union).
  void InternalCleanup();
  Type type_;
  std::string id_;
  std::string display_name_;
  // If the CDD is valid, exactly one of the capabilities has a value;
  // `type_` records which union member is active.
  union {
    RangeVendorCapability range_capability_;
    SelectVendorCapability select_capability_;
    TypedValueVendorCapability typed_value_capability_;
  };
  DISALLOW_COPY_AND_ASSIGN(VendorCapability);
};
enum class ColorType {
STANDARD_COLOR,
STANDARD_MONOCHROME,
CUSTOM_COLOR,
CUSTOM_MONOCHROME,
AUTO_COLOR,
};
// One entry of a printer's color capability. NOTE(review): |vendor_id| and
// |custom_display_name| appear to be meaningful only for the CUSTOM_* types —
// confirm against IsValid() in the .cc.
struct Color {
  Color();
  explicit Color(ColorType type);
  bool IsValid() const;
  bool operator==(const Color& other) const;
  bool operator!=(const Color& other) const { return !(*this == other); }
  ColorType type;
  std::string vendor_id;
  std::string custom_display_name;
};
enum class DuplexType {
NO_DUPLEX,
LONG_EDGE,
SHORT_EDGE,
};
enum class OrientationType {
PORTRAIT,
LANDSCAPE,
AUTO_ORIENTATION,
};
enum class MarginsType {
NO_MARGINS,
STANDARD_MARGINS,
CUSTOM_MARGINS,
};
// Page margins, one distance per edge. All distances are in microns
// (the "_um" suffix).
struct Margins {
  Margins();
  Margins(MarginsType type,
          int32_t top_um,
          int32_t right_um,
          int32_t bottom_um,
          int32_t left_um);
  bool operator==(const Margins& other) const;
  bool operator!=(const Margins& other) const { return !(*this == other); }
  MarginsType type;
  int32_t top_um;
  int32_t right_um;
  int32_t bottom_um;
  int32_t left_um;
};
// Print resolution in dots per inch, split into horizontal and vertical
// components.
struct Dpi {
  Dpi();
  Dpi(int32_t horizontal, int32_t vertical);
  // Returns true when both components are acceptable (bounds live in the .cc).
  bool IsValid() const;
  bool operator==(const Dpi& other) const;
  bool operator!=(const Dpi& other) const { return !(*this == other); }
  int32_t horizontal;
  int32_t vertical;
};
enum class FitToPageType {
NO_FITTING,
FIT_TO_PAGE,
GROW_TO_PAGE,
SHRINK_TO_PAGE,
FILL_PAGE,
};
enum class MediaType {
CUSTOM_MEDIA,
// North American standard sheet media names.
NA_INDEX_3X5,
NA_PERSONAL,
NA_MONARCH,
NA_NUMBER_9,
NA_INDEX_4X6,
NA_NUMBER_10,
NA_A2,
NA_NUMBER_11,
NA_NUMBER_12,
NA_5X7,
NA_INDEX_5X8,
NA_NUMBER_14,
NA_INVOICE,
NA_INDEX_4X6_EXT,
NA_6X9,
NA_C5,
NA_7X9,
NA_EXECUTIVE,
NA_GOVT_LETTER,
NA_GOVT_LEGAL,
NA_QUARTO,
NA_LETTER,
NA_FANFOLD_EUR,
NA_LETTER_PLUS,
NA_FOOLSCAP,
NA_LEGAL,
NA_SUPER_A,
NA_9X11,
NA_ARCH_A,
NA_LETTER_EXTRA,
NA_LEGAL_EXTRA,
NA_10X11,
NA_10X13,
NA_10X14,
NA_10X15,
NA_11X12,
NA_EDP,
NA_FANFOLD_US,
NA_11X15,
NA_LEDGER,
NA_EUR_EDP,
NA_ARCH_B,
NA_12X19,
NA_B_PLUS,
NA_SUPER_B,
NA_C,
NA_ARCH_C,
NA_D,
NA_ARCH_D,
NA_ASME_F,
NA_WIDE_FORMAT,
NA_E,
NA_ARCH_E,
NA_F,
// Chinese standard sheet media size names.
ROC_16K,
ROC_8K,
PRC_32K,
PRC_1,
PRC_2,
PRC_4,
PRC_5,
PRC_8,
PRC_6,
PRC_3,
PRC_16K,
PRC_7,
OM_JUURO_KU_KAI,
OM_PA_KAI,
OM_DAI_PA_KAI,
PRC_10,
// ISO standard sheet media size names.
ISO_A10,
ISO_A9,
ISO_A8,
ISO_A7,
ISO_A6,
ISO_A5,
ISO_A5_EXTRA,
ISO_A4,
ISO_A4_TAB,
ISO_A4_EXTRA,
ISO_A3,
ISO_A4X3,
ISO_A4X4,
ISO_A4X5,
ISO_A4X6,
ISO_A4X7,
ISO_A4X8,
ISO_A4X9,
ISO_A3_EXTRA,
ISO_A2,
ISO_A3X3,
ISO_A3X4,
ISO_A3X5,
ISO_A3X6,
ISO_A3X7,
ISO_A1,
ISO_A2X3,
ISO_A2X4,
ISO_A2X5,
ISO_A0,
ISO_A1X3,
ISO_A1X4,
ISO_2A0,
ISO_A0X3,
ISO_B10,
ISO_B9,
ISO_B8,
ISO_B7,
ISO_B6,
ISO_B6C4,
ISO_B5,
ISO_B5_EXTRA,
ISO_B4,
ISO_B3,
ISO_B2,
ISO_B1,
ISO_B0,
ISO_C10,
ISO_C9,
ISO_C8,
ISO_C7,
ISO_C7C6,
ISO_C6,
ISO_C6C5,
ISO_C5,
ISO_C4,
ISO_C3,
ISO_C2,
ISO_C1,
ISO_C0,
ISO_DL,
ISO_RA2,
ISO_SRA2,
ISO_RA1,
ISO_SRA1,
ISO_RA0,
ISO_SRA0,
// Japanese standard sheet media size names.
JIS_B10,
JIS_B9,
JIS_B8,
JIS_B7,
JIS_B6,
JIS_B5,
JIS_B4,
JIS_B3,
JIS_B2,
JIS_B1,
JIS_B0,
JIS_EXEC,
JPN_CHOU4,
JPN_HAGAKI,
JPN_YOU4,
JPN_CHOU2,
JPN_CHOU3,
JPN_OUFUKU,
JPN_KAHU,
JPN_KAKU2,
// Other metric standard sheet media size names.
OM_SMALL_PHOTO,
OM_ITALIAN,
OM_POSTFIX,
OM_LARGE_PHOTO,
OM_FOLIO,
OM_FOLIO_SP,
OM_INVITE,
};
// A media (paper) size entry. Dimensions are in microns ("_um").
struct Media {
  Media();
  explicit Media(MediaType type);
  Media(MediaType type, int32_t width_um, int32_t height_um);
  // Custom-size constructor: the display name and vendor id describe the
  // non-standard media.
  Media(const std::string& custom_display_name,
        const std::string& vendor_id,
        int32_t width_um,
        int32_t height_um);
  Media(const Media& other);
  // NOTE(review): non-const — appears to rewrite |type| when the dimensions
  // match a standard size; confirm in the .cc before relying on it.
  bool MatchBySize();
  bool IsValid() const;
  bool operator==(const Media& other) const;
  bool operator!=(const Media& other) const { return !(*this == other); }
  MediaType type;
  int32_t width_um;
  int32_t height_um;
  // NOTE(review): presumably roll-fed media with no fixed length — confirm.
  bool is_continuous_feed;
  std::string custom_display_name;
  std::string vendor_id;
};
// A contiguous page interval; a PageRange (below) is a vector of these.
struct Interval {
  Interval();
  Interval(int32_t start, int32_t end);
  // NOTE(review): the single-argument form presumably means "from |start| to
  // the end of the document" — confirm against the .cc.
  explicit Interval(int32_t start);
  bool operator==(const Interval& other) const;
  bool operator!=(const Interval& other) const { return !(*this == other); }
  int32_t start;
  int32_t end;
};
typedef std::vector<Interval> PageRange;
class ContentTypeTraits;
class PwgRasterConfigTraits;
class VendorCapabilityTraits;
class ColorTraits;
class DuplexTraits;
class OrientationTraits;
class MarginsTraits;
class DpiTraits;
class FitToPageTraits;
class MediaTraits;
class PageRangeTraits;
class CollateTraits;
class CopiesCapabilityTraits;
class CopiesTicketItemTraits;
typedef ListCapability<ContentType, ContentTypeTraits> ContentTypesCapability;
typedef ValueCapability<PwgRasterConfig, PwgRasterConfigTraits>
PwgRasterConfigCapability;
typedef ListCapability<VendorCapability, VendorCapabilityTraits>
VendorCapabilities;
typedef SelectionCapability<Color, ColorTraits> ColorCapability;
typedef SelectionCapability<DuplexType, DuplexTraits> DuplexCapability;
typedef SelectionCapability<OrientationType, OrientationTraits>
OrientationCapability;
typedef SelectionCapability<Margins, MarginsTraits> MarginsCapability;
typedef SelectionCapability<Dpi, DpiTraits> DpiCapability;
typedef SelectionCapability<FitToPageType, FitToPageTraits> FitToPageCapability;
typedef SelectionCapability<Media, MediaTraits> MediaCapability;
typedef ValueCapability<Copies, class CopiesCapabilityTraits> CopiesCapability;
typedef EmptyCapability<class PageRangeTraits> PageRangeCapability;
typedef BooleanCapability<class CollateTraits> CollateCapability;
typedef BooleanCapability<class ReverseTraits> ReverseCapability;
#if defined(OS_CHROMEOS)
// This capability is not a part of standard CDD description. It's used for
// providing PIN printing opportunity in Chrome OS native printing.
typedef ValueCapability<bool, class PinTraits> PinCapability;
#endif // defined(OS_CHROMEOS)
typedef TicketItem<PwgRasterConfig, PwgRasterConfigTraits>
PwgRasterConfigTicketItem;
typedef TicketItem<Color, ColorTraits> ColorTicketItem;
typedef TicketItem<DuplexType, DuplexTraits> DuplexTicketItem;
typedef TicketItem<OrientationType, OrientationTraits> OrientationTicketItem;
typedef TicketItem<Margins, MarginsTraits> MarginsTicketItem;
typedef TicketItem<Dpi, DpiTraits> DpiTicketItem;
typedef TicketItem<FitToPageType, FitToPageTraits> FitToPageTicketItem;
typedef TicketItem<Media, MediaTraits> MediaTicketItem;
typedef TicketItem<int32_t, CopiesTicketItemTraits> CopiesTicketItem;
typedef TicketItem<PageRange, PageRangeTraits> PageRangeTicketItem;
typedef TicketItem<bool, CollateTraits> CollateTicketItem;
typedef TicketItem<bool, ReverseTraits> ReverseTicketItem;
} // namespace printer
} // namespace cloud_devices
#endif // COMPONENTS_CLOUD_DEVICES_COMMON_PRINTER_DESCRIPTION_H_
|
Java
|
# A tag that can be applied to products, carrying a unique name and an
# icon image attachment.
class ProductTag < ActiveRecord::Base
  has_many :products

  # Paperclip attachment for the tag's icon, served from public/assets.
  has_attached_file :icon,
                    url: "/assets/product_tags/:id/:basename.:extension",
                    path: ":rails_root/public/assets/product_tags/:id/:basename.:extension"

  # Every tag must have a name, and no two tags may share one.
  validates :name, presence: true, uniqueness: true
end
|
Java
|
"use strict"
function checkEnvironmentForConfig(config:Object) : Object {
let mentionBotEnvConfig;
try {
mentionBotEnvConfig = JSON.parse(process.env.MENTION_BOT_CONFIG);
} catch(e) {
mentionBotEnvConfig = {};
}
return Object.keys(config).reduce((previousValue, key) => {
let defaultConfigValue = config[key];
let environmentVariable = mentionBotEnvConfig[key];
let configElement = {};
configElement[key] = environmentVariable === undefined ? defaultConfigValue
: environmentVariable;
return {...previousValue, ...configElement};
}, {});
}
module.exports = {
checkEnvironmentForConfig
}
|
Java
|
// Copyright 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "cc/resources/content_layer_updater.h"
#include "base/debug/trace_event.h"
#include "base/time.h"
#include "cc/debug/rendering_stats_instrumentation.h"
#include "cc/resources/layer_painter.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "third_party/skia/include/core/SkPaint.h"
#include "third_party/skia/include/core/SkRect.h"
#include "third_party/skia/include/core/SkScalar.h"
#include "ui/gfx/rect_conversions.h"
#include "ui/gfx/rect_f.h"
namespace cc {
// Takes ownership of |painter|. |stats_instrumentation| is borrowed and must
// outlive this updater.
ContentLayerUpdater::ContentLayerUpdater(
    scoped_ptr<LayerPainter> painter,
    RenderingStatsInstrumentation* stats_instrumentation)
    : rendering_stats_instrumentation_(stats_instrumentation),
      painter_(painter.Pass()) {}

ContentLayerUpdater::~ContentLayerUpdater() {}
// Paints |content_rect| of the layer into |canvas| via |painter_|, returning
// (through |resulting_opaque_rect|) the sub-rect known to be fully opaque.
// When the contents scale differs from 1, painting happens in layer space and
// results are mapped back to content space. If |stats| is non-null, paint
// time and pixel counts are accumulated into it.
void ContentLayerUpdater::PaintContents(SkCanvas* canvas,
                                        gfx::Rect content_rect,
                                        float contents_width_scale,
                                        float contents_height_scale,
                                        gfx::Rect* resulting_opaque_rect,
                                        RenderingStats* stats) {
  TRACE_EVENT0("cc", "ContentLayerUpdater::PaintContents");
  // Translate so painting happens in coordinates local to |content_rect|.
  canvas->save();
  canvas->translate(SkFloatToScalar(-content_rect.x()),
                    SkFloatToScalar(-content_rect.y()));
  gfx::Rect layer_rect = content_rect;
  // Scaled contents: scale the canvas and map the content rect back into
  // (enclosing) layer coordinates so the painter works in layer space.
  if (contents_width_scale != 1.f || contents_height_scale != 1.f) {
    canvas->scale(SkFloatToScalar(contents_width_scale),
                  SkFloatToScalar(contents_height_scale));
    gfx::RectF rect = gfx::ScaleRect(
        content_rect, 1.f / contents_width_scale, 1.f / contents_height_scale);
    layer_rect = gfx::ToEnclosingRect(rect);
  }
  // Clear the target area first (kClear_Mode writes transparent black), then
  // clip so the painter cannot draw outside it.
  SkPaint paint;
  paint.setAntiAlias(false);
  paint.setXfermodeMode(SkXfermode::kClear_Mode);
  SkRect layer_sk_rect = SkRect::MakeXYWH(
      layer_rect.x(), layer_rect.y(), layer_rect.width(), layer_rect.height());
  canvas->drawRect(layer_sk_rect, paint);
  canvas->clipRect(layer_sk_rect);
  gfx::RectF opaque_layer_rect;
  base::TimeTicks paint_begin_time;
  if (stats)
    paint_begin_time = base::TimeTicks::Now();
  painter_->Paint(canvas, layer_rect, &opaque_layer_rect);
  if (stats) {
    stats->total_paint_time += base::TimeTicks::Now() - paint_begin_time;
    stats->total_pixels_painted += content_rect.width() * content_rect.height();
  }
  canvas->restore();
  // The painter reports opacity in layer space; convert back to content
  // space, shrinking to the enclosed rect so we never over-claim opacity.
  gfx::RectF opaque_content_rect = gfx::ScaleRect(
      opaque_layer_rect, contents_width_scale, contents_height_scale);
  *resulting_opaque_rect = gfx::ToEnclosedRect(opaque_content_rect);
  content_rect_ = content_rect;
}
} // namespace cc
|
Java
|
# Copyright (C) 2010 CENATIC: Centro Nacional de Referencia de
# Aplicacion de las TIC basadas en Fuentes Abiertas, Spain.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# Neither the name of the CENATIC nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# You may contact the copyright holder at: Fundacion CENATIC, Edificio
# de Servicios Sociales: C/ Vistahermosa, 1, 3ra planta, 06200
# Almendralejo (Badajoz), Spain
from DBSlayer import Query
def get_type_name(type_id):
    """Return the name of the asset type with id `type_id`, or None if the
    id is unknown."""
    type_info = get_type(type_id)
    return type_info['name'] if type_info else None
def get_type(type_id):
    """Fetch a single asset type by id.

    Returns {'id': type_id, 'name': <type name>} or None when the id does
    not match exactly one row (or is not numeric).
    """
    # DBSlayer queries are built by string interpolation, so an unvalidated
    # type_id coming from the caller is a SQL injection vector. Coerce it to
    # an integer before it reaches the query text.
    try:
        safe_id = int(type_id)
    except (TypeError, ValueError):
        return None
    q = "SELECT id, type "\
        "FROM asset_types WHERE id=%d;" % safe_id
    query = Query(q)
    if len(query) != 1:
        return None
    # Keep the caller's original id value in the result, as before.
    ret = {'id': type_id,
           'name': query['type'][0]}
    return ret
def get_types():
    """Return every asset type as a list of {'id', 'name'} dicts, or None
    when the table is empty."""
    # This query takes no parameters, so no interpolation is needed. The
    # previous "% locals()" was a no-op that would have raised if the SQL
    # ever contained a literal '%'.
    q = "SELECT id, type "\
        "FROM asset_types;"
    query = Query(q)
    if not len(query):
        return None
    ret = []
    for x in query:
        d = {'id': query[x]['id'],
             'name': query[x]['type']}
        ret.append(d)
    return ret
def test ():
    # Command-line smoke test: `python <module> <type_id>` prints the full
    # type list, then the lookups for the given id.
    import sys
    try:
        type_id = sys.argv[1]
    except IndexError:
        print 'Required test parameters: type_id'
        sys.exit(1)
    print 'Types:', get_types()
    print 'type_id %s, type_name %s' % (type_id, get_type_name(type_id))
    # Trailing comma suppresses the final newline (Python 2 print statement).
    print get_type(type_id),
if __name__ == '__main__':
    test()
|
Java
|
# -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use mailgun to send emails
- Use redis
'''
from __future__ import absolute_import, unicode_literals
from django.utils import six
from .common import * # noqa
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Raises ImproperlyConfigured exception if DJANO_SECRET_KEY not in os.environ
SECRET_KEY = env("DJANGO_SECRET_KEY")
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# django-secure
# ------------------------------------------------------------------------------
INSTALLED_APPS += ("djangosecure", )
MIDDLEWARE_CLASSES = (
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
'djangosecure.middleware.SecurityMiddleware',
) + MIDDLEWARE_CLASSES
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool("DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True)
SECURE_FRAME_DENY = env.bool("DJANGO_SECURE_FRAME_DENY", default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool("DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True)
SECURE_BROWSER_XSS_FILTER = True
# This is the production settings module and HTTPS is already enforced
# (SECURE_SSL_REDIRECT defaults to True below), so the session cookie must be
# marked Secure — otherwise it is sent over plain HTTP and can be stolen.
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# END SITE CONFIGURATION
# EMAIL
# ------------------------------------------------------------------------------
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL',
default='{{cookiecutter.project_name}} <noreply@{{cookiecutter.domain_name}}>')
EMAIL_BACKEND = 'django_mailgun.MailgunBackend'
MAILGUN_ACCESS_KEY = env('DJANGO_MAILGUN_API_KEY')
MAILGUN_SERVER_NAME = env('DJANGO_MAILGUN_SERVER_NAME')
EMAIL_SUBJECT_PREFIX = env("DJANGO_EMAIL_SUBJECT_PREFIX", default='[{{cookiecutter.project_name}}] ')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]),
]
# CACHE CONFIGURATION
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': [
'redis:6379',
],
'OPTIONS': {
'DB': 1,
'PARSER_CLASS': 'redis.connection.HiredisParser',
'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 50,
'timeout': 20,
},
'MAX_CONNECTIONS': 1000,
'PICKLE_VERSION': -1,
},
},
}
# ASSET CONFIGURATION
# ------------------------------------------------------------------------------
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATIC_ROOT = '/static'
MEDIA_ROOT = '/media'
STATICFILES_DIRS = (
unicode(APPS_DIR.path("static")),
)
{% if cookiecutter.use_celery %}
# CELERY BROKER CONFIGURATION
# ------------------------------------------------------------------------------
BROKER_URL = "amqp://guest:guest@rabbitmq:5672//"
{% endif %}
{% if cookiecutter.use_sentry %}
# SENTRY CONFIGURATION
# ------------------------------------------------------------------------------
RAVEN_CONFIG = {
'dsn': env("SENTRY_URL"),
}
INSTALLED_APPS = INSTALLED_APPS + (
'raven.contrib.django.raven_compat',
)
{% endif %}
# Your production stuff: Below this line define 3rd party library settings
|
Java
|
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.sync;
import android.test.FlakyTest;
import android.test.suitebuilder.annotation.LargeTest;
import android.util.Pair;
import org.chromium.base.ThreadUtils;
import org.chromium.base.test.util.Feature;
import org.chromium.chrome.browser.ChromeApplication;
import org.chromium.chrome.browser.tabmodel.TabModelSelector;
import org.chromium.chrome.browser.tabmodel.TabModelUtils;
import org.chromium.chrome.browser.util.FeatureUtilities;
import org.chromium.chrome.test.util.browser.sync.SyncTestUtil;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.sync.protocol.EntitySpecifics;
import org.chromium.sync.protocol.SessionHeader;
import org.chromium.sync.protocol.SessionSpecifics;
import org.chromium.sync.protocol.SessionTab;
import org.chromium.sync.protocol.SessionWindow;
import org.chromium.sync.protocol.SyncEnums;
import org.chromium.sync.protocol.TabNavigation;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
/**
* Test suite for the open tabs (sessions) sync data type.
*/
public class OpenTabsTest extends SyncTestBase {
private static final String TAG = "OpenTabsTest";
private static final String OPEN_TABS_TYPE = "Sessions";
// EmbeddedTestServer is preferred here but it can't be used. The test server
// serves pages on localhost and Chrome doesn't sync localhost URLs as typed URLs.
// This type of URL requires no external data connection or resources.
private static final String URL = "data:text,OpenTabsTestURL";
private static final String URL2 = "data:text,OpenTabsTestURL2";
private static final String URL3 = "data:text,OpenTabsTestURL3";
private static final String SESSION_TAG_PREFIX = "FakeSessionTag";
private static final String FAKE_CLIENT = "FakeClient";
// The client name for tabs generated locally will vary based on the device the test is
// running on, so it is determined once in the setUp() method and cached here.
private String mClientName;
// A counter used for generating unique session tags. Resets to 0 in setUp().
private int mSessionTagCounter;
// A container to store OpenTabs information for data verification.
    private static class OpenTabs {
        // Sync entity id of the session header entity.
        public final String headerId;
        // Sync entity ids of the tab entities, in window order.
        public final List<String> tabIds;
        // Tab URLs, parallel to tabIds.
        public final List<String> urls;

        private OpenTabs(String headerId, List<String> tabIds, List<String> urls) {
            this.headerId = headerId;
            this.tabIds = tabIds;
            this.urls = urls;
        }
    }
@Override
protected void setUp() throws Exception {
super.setUp();
setUpTestAccountAndSignInToSync();
mClientName = getClientName();
mSessionTagCounter = 0;
}
// Test syncing an open tab from client to server.
@LargeTest
@Feature({"Sync"})
public void testUploadOpenTab() throws Exception {
loadUrl(URL);
waitForLocalTabsForClient(mClientName, URL);
waitForServerTabs(URL);
}
/*
// Test syncing multiple open tabs from client to server.
@LargeTest
@Feature({"Sync"})
https://crbug.com/592437
*/
@FlakyTest
public void testUploadMultipleOpenTabs() throws Exception {
loadUrl(URL);
loadUrlInNewTab(URL2);
loadUrlInNewTab(URL3);
waitForLocalTabsForClient(mClientName, URL, URL2, URL3);
waitForServerTabs(URL, URL2, URL3);
}
/*
// Test syncing an open tab from client to server.
@LargeTest
@Feature({"Sync"})
https://crbug.com/592437
*/
@FlakyTest
public void testUploadAndCloseOpenTab() throws Exception {
loadUrl(URL);
// Can't have zero tabs, so we have to open two to test closing one.
loadUrlInNewTab(URL2);
waitForLocalTabsForClient(mClientName, URL, URL2);
waitForServerTabs(URL, URL2);
ThreadUtils.runOnUiThreadBlocking(new Runnable() {
@Override
public void run() {
TabModelSelector selector = FeatureUtilities.isDocumentMode(getActivity())
? ChromeApplication.getDocumentTabModelSelector()
: getActivity().getTabModelSelector();
assertTrue(TabModelUtils.closeCurrentTab(selector.getCurrentModel()));
}
});
waitForLocalTabsForClient(mClientName, URL);
waitForServerTabs(URL);
}
// Test syncing an open tab from server to client.
@LargeTest
@Feature({"Sync"})
public void testDownloadOpenTab() throws Exception {
addFakeServerTabs(FAKE_CLIENT, URL);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT, URL);
}
// Test syncing multiple open tabs from server to client.
@LargeTest
@Feature({"Sync"})
public void testDownloadMultipleOpenTabs() throws Exception {
addFakeServerTabs(FAKE_CLIENT, URL, URL2, URL3);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT, URL, URL2, URL3);
}
// Test syncing a tab deletion from server to client.
@LargeTest
@Feature({"Sync"})
public void testDownloadDeletedOpenTab() throws Exception {
// Add the entity to test deleting.
addFakeServerTabs(FAKE_CLIENT, URL);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT, URL);
// Delete on server, sync, and verify deleted locally.
deleteServerTabsForClient(FAKE_CLIENT);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT);
}
// Test syncing multiple tab deletions from server to client.
@LargeTest
@Feature({"Sync"})
public void testDownloadMultipleDeletedOpenTabs() throws Exception {
// Add the entity to test deleting.
addFakeServerTabs(FAKE_CLIENT, URL, URL2, URL3);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT, URL, URL2, URL3);
// Delete on server, sync, and verify deleted locally.
deleteServerTabsForClient(FAKE_CLIENT);
SyncTestUtil.triggerSync();
waitForLocalTabsForClient(FAKE_CLIENT);
}
private String makeSessionTag() {
return SESSION_TAG_PREFIX + (mSessionTagCounter++);
}
private void addFakeServerTabs(String clientName, String... urls)
throws InterruptedException {
String tag = makeSessionTag();
EntitySpecifics header = makeSessionEntity(tag, clientName, urls.length);
mFakeServerHelper.injectUniqueClientEntity(tag, header);
for (int i = 0; i < urls.length; i++) {
EntitySpecifics tab = makeTabEntity(tag, urls[i], i);
// It is critical that the name here is "<tag> <tabNodeId>", otherwise sync crashes
// when it tries to sync due to the use of TabIdToTag in sessions_sync_manager.cc.
mFakeServerHelper.injectUniqueClientEntity(tag + " " + i, tab);
}
}
private EntitySpecifics makeSessionEntity(String tag, String clientName, int numTabs) {
EntitySpecifics specifics = new EntitySpecifics();
specifics.session = new SessionSpecifics();
specifics.session.sessionTag = tag;
specifics.session.header = new SessionHeader();
specifics.session.header.clientName = clientName;
specifics.session.header.deviceType = SyncEnums.TYPE_PHONE;
SessionWindow window = new SessionWindow();
window.windowId = 0;
window.selectedTabIndex = 0;
window.tab = new int[numTabs];
for (int i = 0; i < numTabs; i++) {
window.tab[i] = i;
}
specifics.session.header.window = new SessionWindow[] { window };
return specifics;
}
private EntitySpecifics makeTabEntity(String tag, String url, int id) {
EntitySpecifics specifics = new EntitySpecifics();
specifics.session = new SessionSpecifics();
specifics.session.sessionTag = tag;
specifics.session.tabNodeId = id;
SessionTab tab = new SessionTab();
tab.tabId = id;
tab.currentNavigationIndex = 0;
TabNavigation nav = new TabNavigation();
nav.virtualUrl = url;
tab.navigation = new TabNavigation[] { nav };
specifics.session.tab = tab;
return specifics;
}
private void deleteServerTabsForClient(String clientName) throws JSONException {
OpenTabs openTabs = getLocalTabsForClient(clientName);
mFakeServerHelper.deleteEntity(openTabs.headerId);
for (String tabId : openTabs.tabIds) {
mFakeServerHelper.deleteEntity(tabId);
}
}
private void waitForLocalTabsForClient(final String clientName, String... urls)
throws InterruptedException {
final List<String> urlList = new ArrayList<String>(urls.length);
for (String url : urls) urlList.add(url);
pollInstrumentationThread(Criteria.equals(urlList, new Callable<List<String>>() {
@Override
public List<String> call() throws Exception {
return getLocalTabsForClient(clientName).urls;
}
}));
}
private void waitForServerTabs(final String... urls)
throws InterruptedException {
pollInstrumentationThread(
new Criteria("Expected server open tabs: " + Arrays.toString(urls)) {
@Override
public boolean isSatisfied() {
try {
return mFakeServerHelper.verifySessions(urls);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
});
}
private String getClientName() throws Exception {
pollInstrumentationThread(Criteria.equals(2, new Callable<Integer>() {
@Override
public Integer call() throws Exception {
return SyncTestUtil.getLocalData(mContext, OPEN_TABS_TYPE).size();
}
}));
List<Pair<String, JSONObject>> tabEntities = SyncTestUtil.getLocalData(
mContext, OPEN_TABS_TYPE);
for (Pair<String, JSONObject> tabEntity : tabEntities) {
if (tabEntity.second.has("header")) {
return tabEntity.second.getJSONObject("header").getString("client_name");
}
}
throw new IllegalStateException("No client name found.");
}
    // The pieces of a session header entity needed to locate its tabs: the
    // session tag, the header's own sync entity id, and the tab ids listed in
    // the header's (single) window.
    private static class HeaderInfo {
        public final String sessionTag;
        public final String headerId;
        public final List<String> tabIds;

        public HeaderInfo(String sessionTag, String headerId, List<String> tabIds) {
            this.sessionTag = sessionTag;
            this.headerId = headerId;
            this.tabIds = tabIds;
        }
    }
// Distills the local session data into a simple data object for the given client.
private OpenTabs getLocalTabsForClient(String clientName) throws JSONException {
List<Pair<String, JSONObject>> tabEntities = SyncTestUtil.getLocalData(
mContext, OPEN_TABS_TYPE);
// Output lists.
List<String> urls = new ArrayList<String>();
List<String> tabEntityIds = new ArrayList<String>();
HeaderInfo info = findHeaderInfoForClient(clientName, tabEntities);
if (info.sessionTag == null) {
// No client was found. Here we still want to return an empty list of urls.
return new OpenTabs("", tabEntityIds, urls);
}
Map<String, String> tabIdsToUrls = new HashMap<String, String>();
Map<String, String> tabIdsToEntityIds = new HashMap<String, String>();
findTabMappings(info.sessionTag, tabEntities, tabIdsToUrls, tabIdsToEntityIds);
// Convert the tabId list to the url list.
for (String tabId : info.tabIds) {
urls.add(tabIdsToUrls.get(tabId));
tabEntityIds.add(tabIdsToEntityIds.get(tabId));
}
return new OpenTabs(info.headerId, tabEntityIds, urls);
}
// Find the header entity for clientName and extract its sessionTag and tabId list.
private HeaderInfo findHeaderInfoForClient(
String clientName, List<Pair<String, JSONObject>> tabEntities) throws JSONException {
String sessionTag = null;
String headerId = null;
List<String> tabIds = new ArrayList<String>();
for (Pair<String, JSONObject> tabEntity : tabEntities) {
JSONObject header = tabEntity.second.optJSONObject("header");
if (header != null && header.getString("client_name").equals(clientName)) {
sessionTag = tabEntity.second.getString("session_tag");
headerId = tabEntity.first;
JSONArray windows = header.getJSONArray("window");
if (windows.length() == 0) {
// The client was found but there are no tabs.
break;
}
assertEquals("Only single windows are supported.", 1, windows.length());
JSONArray tabs = windows.getJSONObject(0).getJSONArray("tab");
for (int i = 0; i < tabs.length(); i++) {
tabIds.add(tabs.getString(i));
}
break;
}
}
return new HeaderInfo(sessionTag, headerId, tabIds);
}
// Find the associated tabs and record their tabId -> url and entityId mappings.
private void findTabMappings(String sessionTag, List<Pair<String, JSONObject>> tabEntities,
// Populating these maps is the output of this function.
Map<String, String> tabIdsToUrls, Map<String, String> tabIdsToEntityIds)
throws JSONException {
for (Pair<String, JSONObject> tabEntity : tabEntities) {
JSONObject json = tabEntity.second;
if (json.has("tab") && json.getString("session_tag").equals(sessionTag)) {
JSONObject tab = json.getJSONObject("tab");
int i = tab.getInt("current_navigation_index");
String tabId = tab.getString("tab_id");
String url = tab.getJSONArray("navigation")
.getJSONObject(i).getString("virtual_url");
tabIdsToUrls.put(tabId, url);
tabIdsToEntityIds.put(tabId, tabEntity.first);
}
}
}
}
|
Java
|
/*
* Copyright 2016 Facebook, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package io.reactivesocket.tck
import java.io.{File, PrintWriter}
/**
 * DSL for writing requester-side TCK tests. Each call appends a line to a
 * script file (named after the concrete subclass) that the test driver later
 * replays against a responder. Lines use "%%" as a field separator.
 */
class RequesterDSL {
  // Script file name derives from the subclass name ("FooDSL" -> "FooDSL.txt"
  // via the reverse/substring dance that strips the trailing '$' Scala adds).
  val filename = this.getClass.getSimpleName.reverse.substring(1).reverse + ".txt"
  if (!filename.equals("RequesterReflection.txt")) println("writing to " + filename)
  var writer: PrintWriter = new PrintWriter(new File(filename))

  def requestResponse(data: String, metadata: String) : DSLTestSubscriber =
    new DSLTestSubscriber(writer, data, metadata, "rr")

  def requestStream(data: String, metadata: String) : DSLTestSubscriber =
    new DSLTestSubscriber(writer, data, metadata, "rs")

  /** Correctly spelled entry point for fire-and-forget interactions. */
  def fireAndForget(data: String, metadata: String) : DSLTestSubscriber =
    firenForget(data, metadata)

  /** Misspelled historical name, kept so existing tests keep compiling.
    * Prefer [[fireAndForget]]. */
  def firenForget(data: String, metadata: String) : DSLTestSubscriber =
    new DSLTestSubscriber(writer, data, metadata, "fnf")

  def requestSubscription(data: String, metadata: String) : DSLTestSubscriber =
    new DSLTestSubscriber(writer, data, metadata, "sub")

  /** Terminates the script and flushes it to disk. */
  def end() : Unit = {
    writer.write("EOF\n")
    writer.close()
  }

  /** Marks the beginning of a single test case. */
  def begintest() : Unit = {
    writer.write("!\n")
  }

  /** Records the test's display name. */
  def nametest(name: String) : Unit = writer.write("name%%" + name + "\n")

  /** Builder for bidirectional channel interactions. */
  trait ChannelHandler {
    def using(data: String, meta: String) : ChannelHandler
    def asFollows(f: () => Unit): Unit
  }

  object requestChannel extends ChannelHandler {
    override def using(data: String, meta: String) : ChannelHandler = {
      writer.write("channel%%" + data + "%%" + meta + "%%")
      this
    }
    override def asFollows(f: () => Unit) = {
      // Channel behavior is written between braces so the driver can scope it.
      writer.write("{\n")
      f()
      writer.write("}\n")
    }
  }

  object createEchoChannel {
    def using(data: String, meta: String) : Unit = writer.write("echochannel%%" + data + "%%" + meta + "\n")
  }

  def channelSubscriber() : DSLTestSubscriber = {
    // we create a trivial subscriber because we don't need a "real" one, because we will already pass in a test
    // subscriber in the driver, as one should have already been created to get the initial payload from the client
    return new DSLTestSubscriber(writer, "", "", "");
  }

  def respond(marble : String) : Unit = {
    writer.write("respond%%" + marble + "\n")
  }

  def pass() : Unit = writer.write("pass\n")
  def fail() : Unit = writer.write("fail\n")
}
|
Java
|
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/password_manager/password_store_mac.h"
#include "base/basictypes.h"
#include "base/files/scoped_temp_dir.h"
#include "base/scoped_observer.h"
#include "base/stl_util.h"
#include "base/strings/string_util.h"
#include "base/strings/utf_string_conversions.h"
#include "base/synchronization/waitable_event.h"
#include "base/test/histogram_tester.h"
#include "base/thread_task_runner_handle.h"
#include "chrome/browser/password_manager/password_store_mac_internal.h"
#include "chrome/common/chrome_paths.h"
#include "components/os_crypt/os_crypt.h"
#include "components/password_manager/core/browser/login_database.h"
#include "components/password_manager/core/browser/password_manager_test_utils.h"
#include "components/password_manager/core/browser/password_store_consumer.h"
#include "content/public/test/test_browser_thread.h"
#include "content/public/test/test_utils.h"
#include "crypto/mock_apple_keychain.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using autofill::PasswordForm;
using base::ASCIIToUTF16;
using base::WideToUTF16;
using content::BrowserThread;
using crypto::MockAppleKeychain;
using internal_keychain_helpers::FormsMatchForMerge;
using internal_keychain_helpers::STRICT_FORM_MATCH;
using password_manager::CreatePasswordFormFromDataForTesting;
using password_manager::LoginDatabase;
using password_manager::PasswordFormData;
using password_manager::PasswordStore;
using password_manager::PasswordStoreChange;
using password_manager::PasswordStoreChangeList;
using password_manager::PasswordStoreConsumer;
using testing::_;
using testing::DoAll;
using testing::Invoke;
using testing::IsEmpty;
using testing::SizeIs;
using testing::WithArg;
namespace {
// GMock action that quits the current UI-thread message loop. Used to end a
// MessageLoop::Run() wait once an asynchronous password-store callback fires.
ACTION(QuitUIMessageLoop) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  base::MessageLoop::current()->Quit();
}
// From the mock's argument #0 of type const std::vector<PasswordForm*>& takes
// the first form and copies it to the form pointed to by |target_form_ptr|.
// Fails the test (via ASSERT) if the result vector is empty.
ACTION_P(SaveACopyOfFirstForm, target_form_ptr) {
  ASSERT_FALSE(arg0.empty());
  *target_form_ptr = *arg0[0];
}
// Do-nothing callback, for APIs that require a closure when the test has
// nothing to run.
void Noop() {
}
// PasswordStoreConsumer that forwards results to a mockable const-ref method
// so tests can set gmock expectations on the retrieved forms.
class MockPasswordStoreConsumer : public PasswordStoreConsumer {
 public:
  MOCK_METHOD1(OnGetPasswordStoreResultsConstRef,
               void(const std::vector<PasswordForm*>&));

  // GMock cannot mock methods with move-only args, so the real override
  // unwraps the ScopedVector and delegates to the mockable method above.
  void OnGetPasswordStoreResults(ScopedVector<PasswordForm> results) override {
    OnGetPasswordStoreResultsConstRef(results.get());
  }
};
// PasswordStore::Observer with a mockable OnLoginsChanged, so tests can
// verify which change notifications the store broadcasts.
class MockPasswordStoreObserver : public PasswordStore::Observer {
 public:
  MOCK_METHOD1(OnLoginsChanged,
               void(const password_manager::PasswordStoreChangeList& changes));
};
// A LoginDatabase that simulates an Init() method that takes a long time.
class SlowToInitLoginDatabase : public password_manager::LoginDatabase {
 public:
  // Creates an instance whose Init() method will block until |event| is
  // signaled. |event| must outlive |this|.
  SlowToInitLoginDatabase(const base::FilePath& db_path,
                          base::WaitableEvent* event)
      : password_manager::LoginDatabase(db_path), event_(event) {}
  ~SlowToInitLoginDatabase() override {}

  // LoginDatabase:
  bool Init() override {
    // Block until the test signals, then run the real initialization.
    event_->Wait();
    return password_manager::LoginDatabase::Init();
  }

 private:
  base::WaitableEvent* event_;  // Unowned; must outlive this object.

  DISALLOW_COPY_AND_ASSIGN(SlowToInitLoginDatabase);
};
#pragma mark -
// Macro to simplify calling CheckFormsAgainstExpectations with a useful label
// (the stringified name of the |forms| argument).
#define CHECK_FORMS(forms, expectations, i) \
    CheckFormsAgainstExpectations(forms, expectations, #forms, i)
// Ensures that the data in |forms| match |expectations|, causing test failures
// for any discrepancies. |forms_label| and |test_number| are included in
// failure messages to identify the offending data set.
// TODO(stuartmorgan): This is currently order-dependent; ideally it shouldn't
// matter if |forms| and |expectations| are scrambled.
void CheckFormsAgainstExpectations(
    const std::vector<PasswordForm*>& forms,
    const std::vector<PasswordFormData*>& expectations,
    const char* forms_label, unsigned int test_number) {
  // A size mismatch makes the element-wise comparison below meaningless.
  EXPECT_EQ(expectations.size(), forms.size()) << forms_label << " in test "
                                               << test_number;
  if (expectations.size() != forms.size())
    return;

  for (unsigned int i = 0; i < expectations.size(); ++i) {
    SCOPED_TRACE(testing::Message() << forms_label << " in test " << test_number
                                    << ", item " << i);
    PasswordForm* form = forms[i];
    PasswordFormData* expectation = expectations[i];
    EXPECT_EQ(expectation->scheme, form->scheme);
    EXPECT_EQ(std::string(expectation->signon_realm), form->signon_realm);
    EXPECT_EQ(GURL(expectation->origin), form->origin);
    EXPECT_EQ(GURL(expectation->action), form->action);
    EXPECT_EQ(WideToUTF16(expectation->submit_element), form->submit_element);
    EXPECT_EQ(WideToUTF16(expectation->username_element),
              form->username_element);
    EXPECT_EQ(WideToUTF16(expectation->password_element),
              form->password_element);
    if (expectation->username_value) {
      EXPECT_EQ(WideToUTF16(expectation->username_value), form->username_value);
      EXPECT_EQ(WideToUTF16(expectation->username_value), form->display_name);
      EXPECT_TRUE(form->skip_zero_click);
      // An expected password equal to the federated-login marker denotes a
      // federated credential: no stored password, but a federation URL.
      if (expectation->password_value &&
          wcscmp(expectation->password_value,
                 password_manager::kTestingFederatedLoginMarker) == 0) {
        EXPECT_TRUE(form->password_value.empty());
        EXPECT_EQ(GURL(password_manager::kTestingFederationUrlSpec),
                  form->federation_url);
      } else {
        EXPECT_EQ(WideToUTF16(expectation->password_value),
                  form->password_value);
        EXPECT_TRUE(form->federation_url.is_empty());
      }
    } else {
      // A null expected username marks a blacklist ("never save") entry.
      EXPECT_TRUE(form->blacklisted_by_user);
    }
    EXPECT_EQ(expectation->preferred, form->preferred);
    EXPECT_EQ(expectation->ssl_valid, form->ssl_valid);
    EXPECT_DOUBLE_EQ(expectation->creation_time,
                     form->date_created.ToDoubleT());
    base::Time created = base::Time::FromDoubleT(expectation->creation_time);
    EXPECT_EQ(
        created + base::TimeDelta::FromDays(
                      password_manager::kTestingDaysAfterPasswordsAreSynced),
        form->date_synced);
    EXPECT_EQ(GURL(password_manager::kTestingIconUrlSpec), form->icon_url);
  }
}
// Builds a one-element change list describing the addition of |form|.
PasswordStoreChangeList AddChangeForForm(const PasswordForm& form) {
  PasswordStoreChange add_change(PasswordStoreChange::ADD, form);
  return PasswordStoreChangeList(1, add_change);
}
} // namespace
#pragma mark -
// Test fixture that seeds a MockAppleKeychain with a representative set of
// items — HTML forms (plain, with path, secure), blacklist/negative entries,
// HTTP basic and digest auth, an FTP item with an invalid date, and an
// Android credential — for the internal-helper tests below.
class PasswordStoreMacInternalsTest : public testing::Test {
 public:
  void SetUp() override {
    // Initializer order appears to follow
    // MockAppleKeychain::KeychainTestData: auth type, server, protocol,
    // path, port, security domain, creation date, username, password,
    // negative-item flag. TODO(review): confirm against
    // crypto/mock_apple_keychain.h.
    MockAppleKeychain::KeychainTestData test_data[] = {
        // Basic HTML form.
        {kSecAuthenticationTypeHTMLForm,
         "some.domain.com",
         kSecProtocolTypeHTTP,
         NULL,
         0,
         NULL,
         "20020601171500Z",
         "joe_user",
         "sekrit",
         false},
        // HTML form with path.
        {kSecAuthenticationTypeHTMLForm,
         "some.domain.com",
         kSecProtocolTypeHTTP,
         "/insecure.html",
         0,
         NULL,
         "19991231235959Z",
         "joe_user",
         "sekrit",
         false},
        // Secure HTML form with path.
        {kSecAuthenticationTypeHTMLForm,
         "some.domain.com",
         kSecProtocolTypeHTTPS,
         "/secure.html",
         0,
         NULL,
         "20100908070605Z",
         "secure_user",
         "password",
         false},
        // True negative item.
        {kSecAuthenticationTypeHTMLForm,
         "dont.remember.com",
         kSecProtocolTypeHTTP,
         NULL,
         0,
         NULL,
         "20000101000000Z",
         "",
         "",
         true},
        // De-facto negative item, type one.
        {kSecAuthenticationTypeHTMLForm,
         "dont.remember.com",
         kSecProtocolTypeHTTP,
         NULL,
         0,
         NULL,
         "20000101000000Z",
         "Password Not Stored",
         "",
         false},
        // De-facto negative item, type two.
        {kSecAuthenticationTypeHTMLForm,
         "dont.remember.com",
         kSecProtocolTypeHTTPS,
         NULL,
         0,
         NULL,
         "20000101000000Z",
         "Password Not Stored",
         " ",
         false},
        // HTTP auth basic, with port and path.
        {kSecAuthenticationTypeHTTPBasic,
         "some.domain.com",
         kSecProtocolTypeHTTP,
         "/insecure.html",
         4567,
         "low_security",
         "19980330100000Z",
         "basic_auth_user",
         "basic",
         false},
        // HTTP auth digest, secure.
        {kSecAuthenticationTypeHTTPDigest,
         "some.domain.com",
         kSecProtocolTypeHTTPS,
         NULL,
         0,
         "high_security",
         "19980330100000Z",
         "digest_auth_user",
         "digest",
         false},
        // An FTP password with an invalid date, for edge-case testing.
        {kSecAuthenticationTypeDefault,
         "a.server.com",
         kSecProtocolTypeFTP,
         NULL,
         0,
         NULL,
         "20010203040",
         "abc",
         "123",
         false},
        // Password for an Android application.
        {kSecAuthenticationTypeHTMLForm,
         "android://hash@com.domain.some/",
         kSecProtocolTypeHTTPS,
         "",
         0,
         NULL,
         "20150515141312Z",
         "joe_user",
         "secret",
         false},
    };

    keychain_ = new MockAppleKeychain();

    for (unsigned int i = 0; i < arraysize(test_data); ++i) {
      keychain_->AddTestItem(test_data[i]);
    }
  }

  void TearDown() override {
    // Fail the test if the body leaked keychain resources or added items
    // without setting their creator code.
    ExpectCreatesAndFreesBalanced();
    ExpectCreatorCodesSet();
    delete keychain_;
  }

 protected:
  // Causes a test failure unless everything returned from keychain_'s
  // ItemCopyAttributesAndData, SearchCreateFromAttributes, and SearchCopyNext
  // was correctly freed.
  void ExpectCreatesAndFreesBalanced() {
    EXPECT_EQ(0, keychain_->UnfreedSearchCount());
    EXPECT_EQ(0, keychain_->UnfreedKeychainItemCount());
    EXPECT_EQ(0, keychain_->UnfreedAttributeDataCount());
  }

  // Causes a test failure unless any Keychain items added during the test have
  // their creator code set.
  void ExpectCreatorCodesSet() {
    EXPECT_TRUE(keychain_->CreatorCodesSetForAddedItems());
  }

  MockAppleKeychain* keychain_;  // Owned; deleted in TearDown().
};
#pragma mark -
// Verifies FillPasswordFormFromKeychainItem()'s translation of each seeded
// keychain item into a PasswordForm: scheme, signon realm, origin, the
// username/password (or blacklist detection), SSL flag, and creation-date
// parsing (including the invalid-date fallback to 1601).
TEST_F(PasswordStoreMacInternalsTest, TestKeychainToFormTranslation) {
  typedef struct {
    const PasswordForm::Scheme scheme;
    const char* signon_realm;
    const char* origin;
    const wchar_t* username;  // Set to NULL to check for a blacklist entry.
    const wchar_t* password;
    const bool ssl_valid;
    const int creation_year;
    const int creation_month;
    const int creation_day;
    const int creation_hour;
    const int creation_minute;
    const int creation_second;
  } TestExpectations;

  // One entry per item seeded in SetUp(), in the same order.
  TestExpectations expected[] = {
    { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
      "http://some.domain.com/", L"joe_user", L"sekrit", false,
      2002,  6,  1, 17, 15,  0 },
    { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
      "http://some.domain.com/insecure.html", L"joe_user", L"sekrit", false,
      1999, 12, 31, 23, 59, 59 },
    { PasswordForm::SCHEME_HTML, "https://some.domain.com/",
      "https://some.domain.com/secure.html", L"secure_user", L"password", true,
      2010,  9,  8,  7,  6,  5 },
    { PasswordForm::SCHEME_HTML, "http://dont.remember.com/",
      "http://dont.remember.com/", NULL, NULL, false,
      2000,  1,  1,  0,  0,  0 },
    { PasswordForm::SCHEME_HTML, "http://dont.remember.com/",
      "http://dont.remember.com/", NULL, NULL, false,
      2000,  1,  1,  0,  0,  0 },
    { PasswordForm::SCHEME_HTML, "https://dont.remember.com/",
      "https://dont.remember.com/", NULL, NULL, true,
      2000,  1,  1,  0,  0,  0 },
    { PasswordForm::SCHEME_BASIC, "http://some.domain.com:4567/low_security",
      "http://some.domain.com:4567/insecure.html", L"basic_auth_user", L"basic",
      false, 1998, 03, 30, 10, 00, 00 },
    { PasswordForm::SCHEME_DIGEST, "https://some.domain.com/high_security",
      "https://some.domain.com/", L"digest_auth_user", L"digest", true,
      1998,  3, 30, 10,  0,  0 },
    // This one gives us an invalid date, which we will treat as a "NULL" date
    // which is 1601.
    { PasswordForm::SCHEME_OTHER, "http://a.server.com/",
      "http://a.server.com/", L"abc", L"123", false,
      1601,  1,  1,  0,  0,  0 },
    { PasswordForm::SCHEME_HTML, "android://hash@com.domain.some/",
      "", L"joe_user", L"secret", true,
      2015,  5, 15, 14, 13, 12 },
  };

  for (unsigned int i = 0; i < arraysize(expected); ++i) {
    // Create our fake KeychainItemRef; see MockAppleKeychain docs.
    SecKeychainItemRef keychain_item =
        reinterpret_cast<SecKeychainItemRef>(i + 1);
    PasswordForm form;
    bool parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
        *keychain_, keychain_item, &form, true);

    EXPECT_TRUE(parsed) << "In iteration " << i;

    EXPECT_EQ(expected[i].scheme, form.scheme) << "In iteration " << i;
    EXPECT_EQ(GURL(expected[i].origin), form.origin) << "In iteration " << i;
    EXPECT_EQ(expected[i].ssl_valid, form.ssl_valid) << "In iteration " << i;
    EXPECT_EQ(std::string(expected[i].signon_realm), form.signon_realm)
        << "In iteration " << i;
    if (expected[i].username) {
      EXPECT_EQ(WideToUTF16(expected[i].username), form.username_value)
          << "In iteration " << i;
      EXPECT_EQ(WideToUTF16(expected[i].password), form.password_value)
          << "In iteration " << i;
      EXPECT_FALSE(form.blacklisted_by_user) << "In iteration " << i;
    } else {
      EXPECT_TRUE(form.blacklisted_by_user) << "In iteration " << i;
    }
    base::Time::Exploded exploded_time;
    form.date_created.UTCExplode(&exploded_time);
    EXPECT_EQ(expected[i].creation_year, exploded_time.year)
        << "In iteration " << i;
    EXPECT_EQ(expected[i].creation_month, exploded_time.month)
        << "In iteration " << i;
    EXPECT_EQ(expected[i].creation_day, exploded_time.day_of_month)
        << "In iteration " << i;
    EXPECT_EQ(expected[i].creation_hour, exploded_time.hour)
        << "In iteration " << i;
    EXPECT_EQ(expected[i].creation_minute, exploded_time.minute)
        << "In iteration " << i;
    EXPECT_EQ(expected[i].creation_second, exploded_time.second)
        << "In iteration " << i;
  }

  {
    // Use an invalid ref, to make sure errors are reported.
    SecKeychainItemRef keychain_item = reinterpret_cast<SecKeychainItemRef>(99);
    PasswordForm form;
    bool parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
        *keychain_, keychain_item, &form, true);
    EXPECT_FALSE(parsed);
  }
}
// Verifies keychain searching against the seeded items, both when treating a
// query form as a fill target (PasswordsFillingForm) and as a merge target
// (HasPasswordsMergeableWithForm / ExtractPasswordsMergeableWithForm), and
// that an owned-items-only adapter never matches items it did not create.
TEST_F(PasswordStoreMacInternalsTest, TestKeychainSearch) {
  struct TestDataAndExpectation {
    const PasswordFormData data;
    const size_t expected_fill_matches;
    const size_t expected_merge_matches;
  };
  // Most fields are left blank because we don't care about them for searching.
  /* clang-format off */
  TestDataAndExpectation test_data[] = {
    // An HTML form we've seen.
    { { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
        NULL, NULL, NULL, NULL, NULL, L"joe_user", NULL, false, false, 0 },
      2, 2 },
    { { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
        NULL, NULL, NULL, NULL, NULL, L"wrong_user", NULL, false, false, 0 },
      2, 0 },
    // An HTML form we haven't seen
    { { PasswordForm::SCHEME_HTML, "http://www.unseendomain.com/",
        NULL, NULL, NULL, NULL, NULL, L"joe_user", NULL, false, false, 0 },
      0, 0 },
    // Basic auth that should match.
    { { PasswordForm::SCHEME_BASIC, "http://some.domain.com:4567/low_security",
        NULL, NULL, NULL, NULL, NULL, L"basic_auth_user", NULL, false, false,
        0 },
      1, 1 },
    // Basic auth with the wrong port.
    { { PasswordForm::SCHEME_BASIC, "http://some.domain.com:1111/low_security",
        NULL, NULL, NULL, NULL, NULL, L"basic_auth_user", NULL, false, false,
        0 },
      0, 0 },
    // Digest auth we've saved under https, visited with http.
    { { PasswordForm::SCHEME_DIGEST, "http://some.domain.com/high_security",
        NULL, NULL, NULL, NULL, NULL, L"digest_auth_user", NULL, false, false,
        0 },
      0, 0 },
    // Digest auth that should match.
    { { PasswordForm::SCHEME_DIGEST, "https://some.domain.com/high_security",
        NULL, NULL, NULL, NULL, NULL, L"wrong_user", NULL, false, true, 0 },
      1, 0 },
    // Digest auth with the wrong domain.
    { { PasswordForm::SCHEME_DIGEST, "https://some.domain.com/other_domain",
        NULL, NULL, NULL, NULL, NULL, L"digest_auth_user", NULL, false, true,
        0 },
      0, 0 },
    // Android credentials (both legacy ones with origin, and without).
    { { PasswordForm::SCHEME_HTML, "android://hash@com.domain.some/",
        "android://hash@com.domain.some/", NULL, NULL, NULL, NULL, L"joe_user",
        NULL, false, true, 0 },
      1, 1 },
    { { PasswordForm::SCHEME_HTML, "android://hash@com.domain.some/",
        NULL, NULL, NULL, NULL, NULL, L"joe_user", NULL, false, true, 0 },
      1, 1 },
    // Federated logins do not have a corresponding Keychain entry, and should
    // not match the username/password stored for the same application. Note
    // that it will match for filling, however, because that part does not know
    // that it is a federated login.
    { { PasswordForm::SCHEME_HTML, "android://hash@com.domain.some/",
        NULL, NULL, NULL, NULL, NULL, L"joe_user",
        password_manager::kTestingFederatedLoginMarker, false, true, 0 },
      1, 0 },
    // Garbage forms should have no matches.
    { { PasswordForm::SCHEME_HTML, "foo/bar/baz",
        NULL, NULL, NULL, NULL, NULL, NULL, NULL, false, false, 0 }, 0, 0 },
  };
  /* clang-format on */

  MacKeychainPasswordFormAdapter keychain_adapter(keychain_);
  MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain_);
  owned_keychain_adapter.SetFindsOnlyOwnedItems(true);
  for (unsigned int i = 0; i < arraysize(test_data); ++i) {
    scoped_ptr<PasswordForm> query_form =
        CreatePasswordFormFromDataForTesting(test_data[i].data);

    // Check matches treating the form as a fill target.
    ScopedVector<autofill::PasswordForm> matching_items =
        keychain_adapter.PasswordsFillingForm(query_form->signon_realm,
                                              query_form->scheme);
    EXPECT_EQ(test_data[i].expected_fill_matches, matching_items.size());

    // Check matches treating the form as a merging target.
    EXPECT_EQ(test_data[i].expected_merge_matches > 0,
              keychain_adapter.HasPasswordsMergeableWithForm(*query_form));
    std::vector<SecKeychainItemRef> keychain_items;
    std::vector<internal_keychain_helpers::ItemFormPair> item_form_pairs =
        internal_keychain_helpers::
            ExtractAllKeychainItemAttributesIntoPasswordForms(&keychain_items,
                                                              *keychain_);
    matching_items =
        internal_keychain_helpers::ExtractPasswordsMergeableWithForm(
            *keychain_, item_form_pairs, *query_form);
    EXPECT_EQ(test_data[i].expected_merge_matches, matching_items.size());
    STLDeleteContainerPairSecondPointers(item_form_pairs.begin(),
                                         item_form_pairs.end());
    // FIX: the cleanup iterator was previously also named |i|, shadowing the
    // outer test-case index; renamed to |it| for clarity.
    for (std::vector<SecKeychainItemRef>::iterator it = keychain_items.begin();
         it != keychain_items.end(); ++it) {
      keychain_->Free(*it);
    }

    // None of the pre-seeded items are owned by us, so none should match an
    // owned-passwords-only search.
    matching_items = owned_keychain_adapter.PasswordsFillingForm(
        query_form->signon_realm, query_form->scheme);
    EXPECT_EQ(0U, matching_items.size());
  }
}
// Replaces only the path component of |form|'s origin URL with |path|.
static void SetPasswordFormPath(PasswordForm* form, const char* path) {
  std::string path_value(path);
  GURL::Replacements replacements;
  replacements.SetPathStr(path_value);
  form->origin = form->origin.ReplaceComponents(replacements);
}
// Replaces only the port component of |form|'s signon_realm with |port|.
static void SetPasswordFormPort(PasswordForm* form, const char* port) {
  std::string port_value(port);
  GURL::Replacements replacements;
  replacements.SetPortStr(port_value);
  GURL realm_url(form->signon_realm);
  form->signon_realm = realm_url.ReplaceComponents(replacements).spec();
}
// Replaces just the auth realm of |form|'s signon_realm with |realm|. The
// realm is the path component of the signon_realm URL, hence SetPathStr.
static void SetPasswordFormRealm(PasswordForm* form, const char* realm) {
  std::string realm_value(realm);
  GURL::Replacements replacements;
  replacements.SetPathStr(realm_value);
  GURL realm_url(form->signon_realm);
  form->signon_realm = realm_url.ReplaceComponents(replacements).spec();
}
// Verifies HasPasswordExactlyMatchingForm(): each seeded base form must match
// exactly, while any single-field perturbation (username, path, scheme, port,
// blacklist flag, or auth realm) must not.
TEST_F(PasswordStoreMacInternalsTest, TestKeychainExactSearch) {
  MacKeychainPasswordFormAdapter keychain_adapter(keychain_);

  // One base form per auth scheme, mirroring items seeded in SetUp().
  PasswordFormData base_form_data[] = {
    { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
      "http://some.domain.com/insecure.html",
      NULL, NULL, NULL, NULL, L"joe_user", NULL, true, false, 0 },
    { PasswordForm::SCHEME_BASIC, "http://some.domain.com:4567/low_security",
      "http://some.domain.com:4567/insecure.html",
      NULL, NULL, NULL, NULL, L"basic_auth_user", NULL, true, false, 0 },
    { PasswordForm::SCHEME_DIGEST, "https://some.domain.com/high_security",
      "https://some.domain.com",
      NULL, NULL, NULL, NULL, L"digest_auth_user", NULL, true, true, 0 },
  };

  for (unsigned int i = 0; i < arraysize(base_form_data); ++i) {
    // Create a base form and make sure we find a match.
    scoped_ptr<PasswordForm> base_form =
        CreatePasswordFormFromDataForTesting(base_form_data[i]);
    EXPECT_TRUE(keychain_adapter.HasPasswordsMergeableWithForm(*base_form));
    EXPECT_TRUE(keychain_adapter.HasPasswordExactlyMatchingForm(*base_form));

    // Make sure that the matching isn't looser than it should be by checking
    // that slightly altered forms don't match.
    ScopedVector<autofill::PasswordForm> modified_forms;

    modified_forms.push_back(new PasswordForm(*base_form));
    modified_forms.back()->username_value = ASCIIToUTF16("wrong_user");

    modified_forms.push_back(new PasswordForm(*base_form));
    SetPasswordFormPath(modified_forms.back(), "elsewhere.html");

    modified_forms.push_back(new PasswordForm(*base_form));
    modified_forms.back()->scheme = PasswordForm::SCHEME_OTHER;

    modified_forms.push_back(new PasswordForm(*base_form));
    SetPasswordFormPort(modified_forms.back(), "1234");

    modified_forms.push_back(new PasswordForm(*base_form));
    modified_forms.back()->blacklisted_by_user = true;

    // Auth realms only exist for the HTTP-auth schemes.
    if (base_form->scheme == PasswordForm::SCHEME_BASIC ||
        base_form->scheme == PasswordForm::SCHEME_DIGEST) {
      modified_forms.push_back(new PasswordForm(*base_form));
      SetPasswordFormRealm(modified_forms.back(), "incorrect");
    }

    for (unsigned int j = 0; j < modified_forms.size(); ++j) {
      bool match = keychain_adapter.HasPasswordExactlyMatchingForm(
          *modified_forms[j]);
      EXPECT_FALSE(match) << "In modified version " << j
                          << " of base form " << i;
    }
  }
}
// Verifies MacKeychainPasswordFormAdapter::AddPassword(): valid forms of each
// scheme (including Android credentials) are stored and findable, garbage
// forms are rejected, and a failed duplicate-update is reported, while a
// successful duplicate add updates the existing keychain item in place.
TEST_F(PasswordStoreMacInternalsTest, TestKeychainAdd) {
  struct TestDataAndExpectation {
    PasswordFormData data;
    bool should_succeed;
  };
  /* clang-format off */
  TestDataAndExpectation test_data[] = {
    // Test a variety of scheme/port/protocol/path variations.
    { { PasswordForm::SCHEME_HTML, "http://web.site.com/",
        "http://web.site.com/path/to/page.html", NULL, NULL, NULL, NULL,
        L"anonymous", L"knock-knock", false, false, 0 }, true },
    { { PasswordForm::SCHEME_HTML, "https://web.site.com/",
        "https://web.site.com/", NULL, NULL, NULL, NULL,
        L"admin", L"p4ssw0rd", false, false, 0 }, true },
    { { PasswordForm::SCHEME_BASIC, "http://a.site.com:2222/therealm",
        "http://a.site.com:2222/", NULL, NULL, NULL, NULL,
        L"username", L"password", false, false, 0 }, true },
    { { PasswordForm::SCHEME_DIGEST, "https://digest.site.com/differentrealm",
        "https://digest.site.com/secure.html", NULL, NULL, NULL, NULL,
        L"testname", L"testpass", false, false, 0 }, true },
    // Test that Android credentials can be stored. Also check the legacy form
    // when |origin| was still filled with the Android URI (and not left empty).
    { { PasswordForm::SCHEME_HTML, "android://hash@com.example.alpha/",
        "", NULL, NULL, NULL, NULL,
        L"joe_user", L"password", false, true, 0 }, true },
    { { PasswordForm::SCHEME_HTML, "android://hash@com.example.beta/",
        "android://hash@com.example.beta/", NULL, NULL, NULL, NULL,
        L"jane_user", L"password2", false, true, 0 }, true },
    // Make sure that garbage forms are rejected.
    { { PasswordForm::SCHEME_HTML, "gobbledygook",
        "gobbledygook", NULL, NULL, NULL, NULL,
        L"anonymous", L"knock-knock", false, false, 0 }, false },
    // Test that failing to update a duplicate (forced using the magic failure
    // password; see MockAppleKeychain::ItemModifyAttributesAndData) is
    // reported.
    { { PasswordForm::SCHEME_HTML, "http://some.domain.com",
        "http://some.domain.com/insecure.html", NULL, NULL, NULL, NULL,
        L"joe_user", L"fail_me", false, false, 0 }, false },
  };
  /* clang-format on */

  MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain_);
  owned_keychain_adapter.SetFindsOnlyOwnedItems(true);

  for (unsigned int i = 0; i < arraysize(test_data); ++i) {
    scoped_ptr<PasswordForm> in_form =
        CreatePasswordFormFromDataForTesting(test_data[i].data);
    bool add_succeeded = owned_keychain_adapter.AddPassword(*in_form);
    EXPECT_EQ(test_data[i].should_succeed, add_succeeded);
    if (add_succeeded) {
      // Every successfully added form must be findable both for merging and
      // by exact match.
      EXPECT_TRUE(owned_keychain_adapter.HasPasswordsMergeableWithForm(
          *in_form));
      EXPECT_TRUE(owned_keychain_adapter.HasPasswordExactlyMatchingForm(
          *in_form));
    }
  }

  // Test that adding duplicate item updates the existing item.
  // TODO(engedy): Add a test to verify that updating Android credentials work.
  // See: https://crbug.com/476851.
  {
    PasswordFormData data = {
      PasswordForm::SCHEME_HTML, "http://some.domain.com",
      "http://some.domain.com/insecure.html", NULL,
      NULL, NULL, NULL, L"joe_user", L"updated_password", false, false, 0
    };
    scoped_ptr<PasswordForm> update_form =
        CreatePasswordFormFromDataForTesting(data);
    MacKeychainPasswordFormAdapter keychain_adapter(keychain_);
    EXPECT_TRUE(keychain_adapter.AddPassword(*update_form));
    // Item ref 2 is the second item seeded in SetUp(): the joe_user
    // "HTML form with path" entry, which the add above should have updated.
    SecKeychainItemRef keychain_item = reinterpret_cast<SecKeychainItemRef>(2);
    PasswordForm stored_form;
    internal_keychain_helpers::FillPasswordFormFromKeychainItem(*keychain_,
                                                                keychain_item,
                                                                &stored_form,
                                                                true);
    EXPECT_EQ(update_form->password_value, stored_form.password_value);
  }
}
// Verifies MacKeychainPasswordFormAdapter::RemovePassword(): items added by
// this adapter (including Android credentials) can be removed, while items
// owned by others (the pre-seeded ones) are left untouched.
TEST_F(PasswordStoreMacInternalsTest, TestKeychainRemove) {
  struct TestDataAndExpectation {
    PasswordFormData data;
    bool should_succeed;
  };
  /* clang-format off */
  TestDataAndExpectation test_data[] = {
    // Test deletion of an item that we add.
    { { PasswordForm::SCHEME_HTML, "http://web.site.com/",
        "http://web.site.com/path/to/page.html", NULL, NULL, NULL, NULL,
        L"anonymous", L"knock-knock", false, false, 0 }, true },
    // Test that Android credentials can be removed. Also check the legacy case
    // when |origin| was still filled with the Android URI (and not left empty).
    { { PasswordForm::SCHEME_HTML, "android://hash@com.example.alpha/",
        "", NULL, NULL, NULL, NULL,
        L"joe_user", L"secret", false, true, 0 }, true },
    { { PasswordForm::SCHEME_HTML, "android://hash@com.example.beta/",
        "android://hash@com.example.beta/", NULL, NULL, NULL, NULL,
        L"jane_user", L"secret", false, true, 0 }, true },
    // Make sure we don't delete items we don't own.
    { { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
        "http://some.domain.com/insecure.html", NULL, NULL, NULL, NULL,
        L"joe_user", NULL, true, false, 0 }, false },
  };
  /* clang-format on */

  MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain_);
  owned_keychain_adapter.SetFindsOnlyOwnedItems(true);

  // Add our test items (except the last one) so that we can delete them.
  for (unsigned int i = 0; i + 1 < arraysize(test_data); ++i) {
    scoped_ptr<PasswordForm> add_form =
        CreatePasswordFormFromDataForTesting(test_data[i].data);
    EXPECT_TRUE(owned_keychain_adapter.AddPassword(*add_form));
  }

  for (unsigned int i = 0; i < arraysize(test_data); ++i) {
    scoped_ptr<PasswordForm> form =
        CreatePasswordFormFromDataForTesting(test_data[i].data);
    EXPECT_EQ(test_data[i].should_succeed,
              owned_keychain_adapter.RemovePassword(*form));

    // Verify the result with an adapter that sees all items (not just owned
    // ones): removed items must be gone, unowned items must remain.
    MacKeychainPasswordFormAdapter keychain_adapter(keychain_);
    bool match = keychain_adapter.HasPasswordExactlyMatchingForm(*form);
    EXPECT_EQ(test_data[i].should_succeed, !match);
  }
}
// Verifies FormsMatchForMerge() under STRICT_FORM_MATCH: only the
// primary-key fields (scheme, signon_realm, username_value, blacklist flag)
// affect the match, and blacklisted or federated forms never match at all.
TEST_F(PasswordStoreMacInternalsTest, TestFormMatch) {
  PasswordForm base_form;
  base_form.signon_realm = std::string("http://some.domain.com/");
  base_form.origin = GURL("http://some.domain.com/page.html");
  base_form.username_value = ASCIIToUTF16("joe_user");

  {
    // Check that everything unimportant can be changed.
    PasswordForm different_form(base_form);
    different_form.username_element = ASCIIToUTF16("username");
    different_form.submit_element = ASCIIToUTF16("submit");
    // FIX: this line previously reassigned |username_element| a second time;
    // the intent of the block (vary each non-key field once) makes clear
    // |password_element| was meant.
    different_form.password_element = ASCIIToUTF16("password");
    different_form.password_value = ASCIIToUTF16("sekrit");
    different_form.action = GURL("http://some.domain.com/action.cgi");
    different_form.ssl_valid = true;
    different_form.preferred = true;
    different_form.date_created = base::Time::Now();
    EXPECT_TRUE(
        FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));

    // Check that path differences don't prevent a match.
    base_form.origin = GURL("http://some.domain.com/other_page.html");
    EXPECT_TRUE(
        FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));
  }

  // Check that any one primary key changing is enough to prevent matching.
  {
    PasswordForm different_form(base_form);
    different_form.scheme = PasswordForm::SCHEME_DIGEST;
    EXPECT_FALSE(
        FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));
  }
  {
    PasswordForm different_form(base_form);
    different_form.signon_realm = std::string("http://some.domain.com:8080/");
    EXPECT_FALSE(
        FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));
  }
  {
    PasswordForm different_form(base_form);
    different_form.username_value = ASCIIToUTF16("john.doe");
    EXPECT_FALSE(
        FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));
  }
  {
    PasswordForm different_form(base_form);
    different_form.blacklisted_by_user = true;
    EXPECT_FALSE(
        FormsMatchForMerge(base_form, different_form, STRICT_FORM_MATCH));
  }

  // Blacklist forms should *never* match for merging, even when identical
  // (and certainly not when only one is a blacklist entry).
  {
    PasswordForm form_a(base_form);
    form_a.blacklisted_by_user = true;
    PasswordForm form_b(form_a);
    EXPECT_FALSE(FormsMatchForMerge(form_a, form_b, STRICT_FORM_MATCH));
  }

  // Federated login forms should never match for merging either.
  {
    PasswordForm form_b(base_form);
    form_b.federation_url = GURL(password_manager::kTestingFederationUrlSpec);
    EXPECT_FALSE(FormsMatchForMerge(base_form, form_b, STRICT_FORM_MATCH));
    EXPECT_FALSE(FormsMatchForMerge(form_b, base_form, STRICT_FORM_MATCH));
    EXPECT_FALSE(FormsMatchForMerge(form_b, form_b, STRICT_FORM_MATCH));
  }
}
TEST_F(PasswordStoreMacInternalsTest, TestFormMerge) {
// Set up a bunch of test data to use in varying combinations.
/* clang-format off */
PasswordFormData keychain_user_1 =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", "", L"", L"", L"", L"joe_user", L"sekrit",
false, false, 1010101010 };
PasswordFormData keychain_user_1_with_path =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"", L"", L"", L"", L"joe_user", L"otherpassword",
false, false, 1010101010 };
PasswordFormData keychain_user_2 =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", "", L"", L"", L"", L"john.doe", L"sesame",
false, false, 958739876 };
PasswordFormData keychain_blacklist =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", "", L"", L"", L"", NULL, NULL,
false, false, 1010101010 };
PasswordFormData keychain_android =
{ PasswordForm::SCHEME_HTML, "android://hash@com.domain.some/",
"", "", L"", L"", L"", L"joe_user", L"secret",
false, true, 1234567890 };
PasswordFormData db_user_1 =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", "http://some.domain.com/action.cgi",
L"submit", L"username", L"password", L"joe_user", L"",
true, false, 1212121212 };
PasswordFormData db_user_1_with_path =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"http://some.domain.com/handlepage.cgi",
L"submit", L"username", L"password", L"joe_user", L"",
true, false, 1234567890 };
PasswordFormData db_user_3_with_path =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"http://some.domain.com/handlepage.cgi",
L"submit", L"username", L"password", L"second-account", L"",
true, false, 1240000000 };
PasswordFormData database_blacklist_with_path =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/path.html", "http://some.domain.com/action.cgi",
L"submit", L"username", L"password", NULL, NULL,
true, false, 1212121212 };
PasswordFormData db_android =
{ PasswordForm::SCHEME_HTML, "android://hash@com.domain.some/",
"android://hash@com.domain.some/", "", L"", L"", L"", L"joe_user", L"",
false, true, 1234567890 };
PasswordFormData db_federated =
{ PasswordForm::SCHEME_HTML, "android://hash@com.domain.some/",
"android://hash@com.domain.some/", "", L"", L"", L"", L"joe_user",
password_manager::kTestingFederatedLoginMarker,
false, true, 3434343434 };
PasswordFormData merged_user_1 =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/", "http://some.domain.com/action.cgi",
L"submit", L"username", L"password", L"joe_user", L"sekrit",
true, false, 1212121212 };
PasswordFormData merged_user_1_with_db_path =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"http://some.domain.com/handlepage.cgi",
L"submit", L"username", L"password", L"joe_user", L"sekrit",
true, false, 1234567890 };
PasswordFormData merged_user_1_with_both_paths =
{ PasswordForm::SCHEME_HTML, "http://some.domain.com/",
"http://some.domain.com/page.html",
"http://some.domain.com/handlepage.cgi",
L"submit", L"username", L"password", L"joe_user", L"otherpassword",
true, false, 1234567890 };
PasswordFormData merged_android =
{ PasswordForm::SCHEME_HTML, "android://hash@com.domain.some/",
"android://hash@com.domain.some/", "", L"", L"", L"", L"joe_user",
L"secret", false, true, 1234567890 };
/* clang-format on */
// Build up the big multi-dimensional array of data sets that will actually
// drive the test. Use vectors rather than arrays so that initialization is
// simple.
enum {
KEYCHAIN_INPUT = 0,
DATABASE_INPUT,
MERGE_OUTPUT,
KEYCHAIN_OUTPUT,
DATABASE_OUTPUT,
MERGE_IO_ARRAY_COUNT // termination marker
};
const unsigned int kTestCount = 5;
std::vector< std::vector< std::vector<PasswordFormData*> > > test_data(
MERGE_IO_ARRAY_COUNT, std::vector< std::vector<PasswordFormData*> >(
kTestCount, std::vector<PasswordFormData*>()));
unsigned int current_test = 0;
// Test a merge with a few accounts in both systems, with partial overlap.
CHECK(current_test < kTestCount);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_1);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_2);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_1);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_1_with_path);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_3_with_path);
test_data[MERGE_OUTPUT][current_test].push_back(&merged_user_1);
test_data[MERGE_OUTPUT][current_test].push_back(&merged_user_1_with_db_path);
test_data[KEYCHAIN_OUTPUT][current_test].push_back(&keychain_user_2);
test_data[DATABASE_OUTPUT][current_test].push_back(&db_user_3_with_path);
// Test a merge where Chrome has a blacklist entry, and the keychain has
// a stored account.
++current_test;
CHECK(current_test < kTestCount);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_1);
test_data[DATABASE_INPUT][current_test].push_back(
&database_blacklist_with_path);
// We expect both to be present because a blacklist could be specific to a
// subpath, and we want access to the password on other paths.
test_data[MERGE_OUTPUT][current_test].push_back(
&database_blacklist_with_path);
test_data[KEYCHAIN_OUTPUT][current_test].push_back(&keychain_user_1);
// Test a merge where Chrome has an account, and Keychain has a blacklist
// (from another browser) and the Chrome password data.
++current_test;
CHECK(current_test < kTestCount);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_blacklist);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_1);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_1);
test_data[MERGE_OUTPUT][current_test].push_back(&merged_user_1);
test_data[KEYCHAIN_OUTPUT][current_test].push_back(&keychain_blacklist);
// Test that matches are done using exact path when possible.
++current_test;
CHECK(current_test < kTestCount);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_1);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_user_1_with_path);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_1);
test_data[DATABASE_INPUT][current_test].push_back(&db_user_1_with_path);
test_data[MERGE_OUTPUT][current_test].push_back(&merged_user_1);
test_data[MERGE_OUTPUT][current_test].push_back(
&merged_user_1_with_both_paths);
// Test that Android credentails are matched correctly and that federated
// credentials are not tried to be matched with a Keychain item.
++current_test;
CHECK(current_test < kTestCount);
test_data[KEYCHAIN_INPUT][current_test].push_back(&keychain_android);
test_data[DATABASE_INPUT][current_test].push_back(&db_federated);
test_data[DATABASE_INPUT][current_test].push_back(&db_android);
test_data[MERGE_OUTPUT][current_test].push_back(&db_federated);
test_data[MERGE_OUTPUT][current_test].push_back(&merged_android);
for (unsigned int test_case = 0; test_case <= current_test; ++test_case) {
ScopedVector<autofill::PasswordForm> keychain_forms;
for (std::vector<PasswordFormData*>::iterator i =
test_data[KEYCHAIN_INPUT][test_case].begin();
i != test_data[KEYCHAIN_INPUT][test_case].end(); ++i) {
keychain_forms.push_back(
CreatePasswordFormFromDataForTesting(*(*i)).release());
}
ScopedVector<autofill::PasswordForm> database_forms;
for (std::vector<PasswordFormData*>::iterator i =
test_data[DATABASE_INPUT][test_case].begin();
i != test_data[DATABASE_INPUT][test_case].end(); ++i) {
database_forms.push_back(
CreatePasswordFormFromDataForTesting(*(*i)).release());
}
ScopedVector<autofill::PasswordForm> merged_forms;
internal_keychain_helpers::MergePasswordForms(&keychain_forms,
&database_forms,
&merged_forms);
CHECK_FORMS(keychain_forms.get(), test_data[KEYCHAIN_OUTPUT][test_case],
test_case);
CHECK_FORMS(database_forms.get(), test_data[DATABASE_OUTPUT][test_case],
test_case);
CHECK_FORMS(merged_forms.get(), test_data[MERGE_OUTPUT][test_case],
test_case);
}
}
// Checks that GetPasswordsForForms() resolves database entries against the
// keychain in a single pass: resolved entries (including blacklist entries)
// move to the output vector, while unresolved entries stay behind in the
// database vector.
TEST_F(PasswordStoreMacInternalsTest, TestPasswordBulkLookup) {
  PasswordFormData db_data[] = {
    { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
      "http://some.domain.com/", "http://some.domain.com/action.cgi",
      L"submit", L"username", L"password", L"joe_user", L"",
      true, false, 1212121212 },
    { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
      "http://some.domain.com/page.html",
      "http://some.domain.com/handlepage.cgi",
      L"submit", L"username", L"password", L"joe_user", L"",
      true, false, 1234567890 },
    { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
      "http://some.domain.com/page.html",
      "http://some.domain.com/handlepage.cgi",
      L"submit", L"username", L"password", L"second-account", L"",
      true, false, 1240000000 },
    { PasswordForm::SCHEME_HTML, "http://dont.remember.com/",
      "http://dont.remember.com/",
      "http://dont.remember.com/handlepage.cgi",
      L"submit", L"username", L"password", L"joe_user", L"",
      true, false, 1240000000 },
    { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
      "http://some.domain.com/path.html", "http://some.domain.com/action.cgi",
      L"submit", L"username", L"password", NULL, NULL,
      true, false, 1212121212 },
  };

  ScopedVector<autofill::PasswordForm> db_forms;
  for (size_t form_index = 0; form_index < arraysize(db_data); ++form_index) {
    db_forms.push_back(
        CreatePasswordFormFromDataForTesting(db_data[form_index]).release());
  }

  ScopedVector<autofill::PasswordForm> filled_forms;
  internal_keychain_helpers::GetPasswordsForForms(*keychain_, &db_forms,
                                                  &filled_forms);

  // Two entries remain unresolved -- presumably those with no matching
  // keychain item in the mock fixture data.
  EXPECT_EQ(2U, db_forms.size());
  // Three entries were resolved: two with the keychain password filled in,
  // plus one blacklist entry.
  ASSERT_EQ(3U, filled_forms.size());
  EXPECT_EQ(ASCIIToUTF16("sekrit"), filled_forms[0]->password_value);
  EXPECT_EQ(ASCIIToUTF16("sekrit"), filled_forms[1]->password_value);
  EXPECT_TRUE(filled_forms[2]->blacklisted_by_user);
}
// Verifies that database entries which cannot be resolved against the
// keychain produce no merged output: both inputs stay in the database vector
// and the output vector remains empty.
TEST_F(PasswordStoreMacInternalsTest, TestBlacklistedFiltering) {
  PasswordFormData db_data[] = {
    { PasswordForm::SCHEME_HTML, "http://dont.remember.com/",
      "http://dont.remember.com/",
      "http://dont.remember.com/handlepage.cgi",
      L"submit", L"username", L"password", L"joe_user", L"non_empty_password",
      true, false, 1240000000 },
    { PasswordForm::SCHEME_HTML, "https://dont.remember.com/",
      "https://dont.remember.com/",
      "https://dont.remember.com/handlepage_secure.cgi",
      L"submit", L"username", L"password", L"joe_user", L"non_empty_password",
      true, false, 1240000000 },
  };

  ScopedVector<autofill::PasswordForm> db_forms;
  for (size_t form_index = 0; form_index < arraysize(db_data); ++form_index) {
    db_forms.push_back(
        CreatePasswordFormFromDataForTesting(db_data[form_index]).release());
  }

  ScopedVector<autofill::PasswordForm> filled_forms;
  internal_keychain_helpers::GetPasswordsForForms(*keychain_, &db_forms,
                                                  &filled_forms);

  EXPECT_EQ(2U, db_forms.size());
  ASSERT_EQ(0U, filled_forms.size());
}
TEST_F(PasswordStoreMacInternalsTest, TestFillPasswordFormFromKeychainItem) {
  // NOTE(review): the integers cast to SecKeychainItemRef below index into the
  // mock keychain's canned test items -- presumably item 1 holds password
  // "sekrit", item 4 an empty username and password, item 5 an empty password,
  // and item 6 a single-space password; confirm against MockAppleKeychain's
  // fixture data.

  // When |extract_password_data| is false, the password field must be empty,
  // and |blacklisted_by_user| must be false.
  SecKeychainItemRef keychain_item = reinterpret_cast<SecKeychainItemRef>(1);
  PasswordForm form_without_extracted_password;
  bool parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
      *keychain_,
      keychain_item,
      &form_without_extracted_password,
      false);  // Do not extract password.
  EXPECT_TRUE(parsed);
  ASSERT_TRUE(form_without_extracted_password.password_value.empty());
  ASSERT_FALSE(form_without_extracted_password.blacklisted_by_user);

  // When |extract_password_data| is true and the keychain entry has a non-empty
  // password, the password field must be non-empty, and the value of
  // |blacklisted_by_user| must be false.
  keychain_item = reinterpret_cast<SecKeychainItemRef>(1);
  PasswordForm form_with_extracted_password;
  parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
      *keychain_,
      keychain_item,
      &form_with_extracted_password,
      true);  // Extract password.
  EXPECT_TRUE(parsed);
  ASSERT_EQ(ASCIIToUTF16("sekrit"),
            form_with_extracted_password.password_value);
  ASSERT_FALSE(form_with_extracted_password.blacklisted_by_user);

  // When |extract_password_data| is true and the keychain entry has an empty
  // username and password (""), the password field must be empty, and the value
  // of |blacklisted_by_user| must be true.
  keychain_item = reinterpret_cast<SecKeychainItemRef>(4);
  PasswordForm negative_form;
  parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
      *keychain_,
      keychain_item,
      &negative_form,
      true);  // Extract password.
  EXPECT_TRUE(parsed);
  ASSERT_TRUE(negative_form.username_value.empty());
  ASSERT_TRUE(negative_form.password_value.empty());
  ASSERT_TRUE(negative_form.blacklisted_by_user);

  // When |extract_password_data| is true and the keychain entry has an empty
  // password (""), the password field must be empty (""), and the value of
  // |blacklisted_by_user| must be true.
  keychain_item = reinterpret_cast<SecKeychainItemRef>(5);
  PasswordForm form_with_empty_password_a;
  parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
      *keychain_,
      keychain_item,
      &form_with_empty_password_a,
      true);  // Extract password.
  EXPECT_TRUE(parsed);
  ASSERT_TRUE(form_with_empty_password_a.password_value.empty());
  ASSERT_TRUE(form_with_empty_password_a.blacklisted_by_user);

  // When |extract_password_data| is true and the keychain entry has a single
  // space password (" "), the password field must be a single space (" "), and
  // the value of |blacklisted_by_user| must be true.
  keychain_item = reinterpret_cast<SecKeychainItemRef>(6);
  PasswordForm form_with_empty_password_b;
  parsed = internal_keychain_helpers::FillPasswordFormFromKeychainItem(
      *keychain_,
      keychain_item,
      &form_with_empty_password_b,
      true);  // Extract password.
  EXPECT_TRUE(parsed);
  ASSERT_EQ(ASCIIToUTF16(" "),
            form_with_empty_password_b.password_value);
  ASSERT_TRUE(form_with_empty_password_b.blacklisted_by_user);
}
// Exercises GetAllPasswordFormPasswords() through both an unrestricted
// adapter and one restricted to Chrome-owned keychain items.
TEST_F(PasswordStoreMacInternalsTest, TestPasswordGetAll) {
  MacKeychainPasswordFormAdapter keychain_adapter(keychain_);
  MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain_);
  owned_keychain_adapter.SetFindsOnlyOwnedItems(true);

  // Add a few passwords of various schemes so that some items are owned.
  PasswordFormData owned_password_data[] = {
    { PasswordForm::SCHEME_HTML, "http://web.site.com/",
      "http://web.site.com/path/to/page.html", NULL, NULL, NULL, NULL,
      L"anonymous", L"knock-knock", false, false, 0 },
    { PasswordForm::SCHEME_BASIC, "http://a.site.com:2222/therealm",
      "http://a.site.com:2222/", NULL, NULL, NULL, NULL,
      L"username", L"password", false, false, 0 },
    { PasswordForm::SCHEME_DIGEST, "https://digest.site.com/differentrealm",
      "https://digest.site.com/secure.html", NULL, NULL, NULL, NULL,
      L"testname", L"testpass", false, false, 0 },
  };
  for (size_t i = 0; i < arraysize(owned_password_data); ++i) {
    owned_keychain_adapter.AddPassword(
        *CreatePasswordFormFromDataForTesting(owned_password_data[i]));
  }

  // The unrestricted adapter sees the pre-existing mock keychain items (9,
  // presumably matching the mock's fixture data) plus everything just added.
  ScopedVector<autofill::PasswordForm> all_passwords =
      keychain_adapter.GetAllPasswordFormPasswords();
  EXPECT_EQ(9 + arraysize(owned_password_data), all_passwords.size());

  // The owned-only adapter sees just the items added above.
  ScopedVector<autofill::PasswordForm> owned_passwords =
      owned_keychain_adapter.GetAllPasswordFormPasswords();
  EXPECT_EQ(arraysize(owned_password_data), owned_passwords.size());
}
#pragma mark -
// Fixture for tests exercising PasswordStoreMac end to end: a real
// LoginDatabase backed by a temp directory plus a MockAppleKeychain, with the
// store's synchronous work running on a dedicated background thread.
class PasswordStoreMacTest : public testing::Test {
 public:
  PasswordStoreMacTest() : ui_thread_(BrowserThread::UI, &message_loop_) {}

  void SetUp() override {
    ASSERT_TRUE(db_dir_.CreateUniqueTempDir());
    histogram_tester_.reset(new base::HistogramTester);

    // Ensure that LoginDatabase will use the mock keychain if it needs to
    // encrypt/decrypt a password.
    OSCrypt::UseMockKeychain(true);
    login_db_.reset(
        new password_manager::LoginDatabase(test_login_db_file_path()));
    thread_.reset(new base::Thread("Chrome_PasswordStore_Thread"));
    ASSERT_TRUE(thread_->Start());
    // Initialize the database on the background thread, where all other store
    // operations will also run.
    ASSERT_TRUE(thread_->task_runner()->PostTask(
        FROM_HERE, base::Bind(&PasswordStoreMacTest::InitLoginDatabase,
                              base::Unretained(login_db_.get()))));
    CreateAndInitPasswordStore(login_db_.get());
    // Make sure deferred initialization is performed before some tests start
    // accessing the |login_db| directly.
    FinishAsyncProcessing();
  }

  void TearDown() override {
    ClosePasswordStore();
    thread_.reset();
    login_db_.reset();
    // Whatever a test did, PasswordStoreMac stores only empty password values
    // in LoginDatabase. The empty values do not require encryption and
    // therefore OSCrypt shouldn't call the Keychain. The histogram doesn't
    // cover the internet passwords.
    if (histogram_tester_) {
      scoped_ptr<base::HistogramSamples> samples =
          histogram_tester_->GetHistogramSamplesSinceCreation(
              "OSX.Keychain.Access");
      EXPECT_TRUE(!samples || samples->TotalCount() == 0);
    }
  }

  // Initializes |login_db| in place; posted to the store's background thread.
  static void InitLoginDatabase(password_manager::LoginDatabase* login_db) {
    ASSERT_TRUE(login_db->Init());
  }

  // Creates the store with a fresh MockAppleKeychain and attaches |login_db|
  // as its metadata database on the background thread.
  void CreateAndInitPasswordStore(password_manager::LoginDatabase* login_db) {
    store_ = new PasswordStoreMac(
        base::ThreadTaskRunnerHandle::Get(), nullptr,
        make_scoped_ptr<AppleKeychain>(new MockAppleKeychain));
    ASSERT_TRUE(thread_->task_runner()->PostTask(
        FROM_HERE, base::Bind(&PasswordStoreMac::InitWithTaskRunner, store_,
                              thread_->task_runner())));
    ASSERT_TRUE(thread_->task_runner()->PostTask(
        FROM_HERE, base::Bind(&PasswordStoreMac::set_login_metadata_db, store_,
                              base::Unretained(login_db))));
  }

  // Shuts the store down; safe to call when the store is already closed.
  void ClosePasswordStore() {
    if (!store_)
      return;
    store_->Shutdown();
    store_ = nullptr;
  }

  // Verifies that the given |form| can be properly stored so that it can be
  // retrieved by FillMatchingLogins() and GetAutofillableLogins(), and then it
  // can be properly removed.
  void VerifyCredentialLifecycle(const PasswordForm& form) {
    // Run everything twice to make sure no garbage is left behind that would
    // prevent storing the form a second time.
    for (size_t iteration = 0; iteration < 2; ++iteration) {
      SCOPED_TRACE(testing::Message("Iteration: ") << iteration);

      // The store must start out empty.
      MockPasswordStoreConsumer mock_consumer;
      EXPECT_CALL(mock_consumer, OnGetPasswordStoreResultsConstRef(IsEmpty()))
          .WillOnce(QuitUIMessageLoop());
      store()->GetAutofillableLogins(&mock_consumer);
      base::MessageLoop::current()->Run();
      ::testing::Mock::VerifyAndClearExpectations(&mock_consumer);

      store()->AddLogin(form);
      FinishAsyncProcessing();

      PasswordForm returned_form;
      EXPECT_CALL(mock_consumer, OnGetPasswordStoreResultsConstRef(SizeIs(1u)))
          .WillOnce(
              DoAll(SaveACopyOfFirstForm(&returned_form), QuitUIMessageLoop()));

      // The query operations will also do some housekeeping: they will remove
      // dangling credentials in the LoginDatabase without a matching Keychain
      // item when one is expected. If the logic that stores the Keychain item
      // is incorrect, this will wipe the newly added form before the second
      // query.
      store()->GetAutofillableLogins(&mock_consumer);
      base::MessageLoop::current()->Run();
      ::testing::Mock::VerifyAndClearExpectations(&mock_consumer);
      EXPECT_EQ(form, returned_form);

      // Query again with the username/password cleared from the query form.
      PasswordForm query_form = form;
      query_form.password_value.clear();
      query_form.username_value.clear();
      EXPECT_CALL(mock_consumer, OnGetPasswordStoreResultsConstRef(SizeIs(1u)))
          .WillOnce(
              DoAll(SaveACopyOfFirstForm(&returned_form), QuitUIMessageLoop()));
      store()->GetLogins(query_form, PasswordStore::ALLOW_PROMPT,
                         &mock_consumer);
      base::MessageLoop::current()->Run();
      ::testing::Mock::VerifyAndClearExpectations(&mock_consumer);
      EXPECT_EQ(form, returned_form);

      store()->RemoveLogin(form);
    }
  }

  base::FilePath test_login_db_file_path() const {
    return db_dir_.path().Append(FILE_PATH_LITERAL("login.db"));
  }

  // The LoginDatabase attached to the store (owned by |login_db_|).
  password_manager::LoginDatabase* login_db() const {
    return store_->login_metadata_db();
  }

  MockAppleKeychain* keychain() {
    return static_cast<MockAppleKeychain*>(store_->keychain());
  }

  // Blocks until all tasks posted to the background thread so far have run.
  void FinishAsyncProcessing() {
    scoped_refptr<content::MessageLoopRunner> runner =
        new content::MessageLoopRunner;
    ASSERT_TRUE(thread_->task_runner()->PostTaskAndReply(
        FROM_HERE, base::Bind(&Noop), runner->QuitClosure()));
    runner->Run();
  }

  PasswordStoreMac* store() { return store_.get(); }

 protected:
  base::MessageLoopForUI message_loop_;
  content::TestBrowserThread ui_thread_;
  // Thread that the synchronous methods are run on.
  scoped_ptr<base::Thread> thread_;
  base::ScopedTempDir db_dir_;
  scoped_ptr<password_manager::LoginDatabase> login_db_;
  scoped_refptr<PasswordStoreMac> store_;
  scoped_ptr<base::HistogramTester> histogram_tester_;
};
// Verifies UpdateLogin() against three states: present in both database and
// keychain, present in the keychain only, and absent from both.
TEST_F(PasswordStoreMacTest, TestStoreUpdate) {
  // Insert a password into both the database and the keychain.
  // This is done manually, rather than through store_->AddLogin, because the
  // Mock Keychain isn't smart enough to be able to support update generically,
  // so some.domain.com triggers special handling to test it that make inserting
  // fail.
  PasswordFormData joint_data = {
    PasswordForm::SCHEME_HTML, "http://some.domain.com/",
    "http://some.domain.com/insecure.html", "login.cgi",
    L"username", L"password", L"submit", L"joe_user", L"sekrit", true, false, 1
  };
  scoped_ptr<PasswordForm> joint_form =
      CreatePasswordFormFromDataForTesting(joint_data);
  EXPECT_EQ(AddChangeForForm(*joint_form), login_db()->AddLogin(*joint_form));
  MockAppleKeychain::KeychainTestData joint_keychain_data = {
    kSecAuthenticationTypeHTMLForm, "some.domain.com",
    kSecProtocolTypeHTTP, "/insecure.html", 0, NULL, "20020601171500Z",
    "joe_user", "sekrit", false };
  keychain()->AddTestItem(joint_keychain_data);

  // Insert a password into the keychain only.
  MockAppleKeychain::KeychainTestData keychain_only_data = {
    kSecAuthenticationTypeHTMLForm, "keychain.only.com",
    kSecProtocolTypeHTTP, NULL, 0, NULL, "20020601171500Z",
    "keychain", "only", false
  };
  keychain()->AddTestItem(keychain_only_data);

  struct UpdateData {
    PasswordFormData form_data;
    const char* password;  // NULL indicates no entry should be present.
  };

  // Make a series of update calls.
  UpdateData updates[] = {
    // Update the keychain+db passwords (the normal password update case).
    { { PasswordForm::SCHEME_HTML, "http://some.domain.com/",
        "http://some.domain.com/insecure.html", "login.cgi",
        L"username", L"password", L"submit", L"joe_user", L"53krit",
        true, false, 2 },
      "53krit",
    },
    // Update the keychain-only password; this simulates the initial use of a
    // password stored by another browser.
    { { PasswordForm::SCHEME_HTML, "http://keychain.only.com/",
        "http://keychain.only.com/login.html", "login.cgi",
        L"username", L"password", L"submit", L"keychain", L"only",
        true, false, 2 },
      "only",
    },
    // Update a password that doesn't exist in either location. This tests the
    // case where a form is filled, then the stored login is removed, then the
    // form is submitted.
    { { PasswordForm::SCHEME_HTML, "http://different.com/",
        "http://different.com/index.html", "login.cgi",
        L"username", L"password", L"submit", L"abc", L"123",
        true, false, 2 },
      NULL,
    },
  };
  for (unsigned int i = 0; i < arraysize(updates); ++i) {
    scoped_ptr<PasswordForm> form =
        CreatePasswordFormFromDataForTesting(updates[i].form_data);
    store_->UpdateLogin(*form);
  }
  FinishAsyncProcessing();

  MacKeychainPasswordFormAdapter keychain_adapter(keychain());
  for (unsigned int i = 0; i < arraysize(updates); ++i) {
    scoped_ptr<PasswordForm> query_form =
        CreatePasswordFormFromDataForTesting(updates[i].form_data);
    // Check the keychain state against the expected per-case password.
    ScopedVector<autofill::PasswordForm> matching_items =
        keychain_adapter.PasswordsFillingForm(query_form->signon_realm,
                                              query_form->scheme);
    if (updates[i].password) {
      EXPECT_GT(matching_items.size(), 0U) << "iteration " << i;
      if (matching_items.size() >= 1)
        EXPECT_EQ(ASCIIToUTF16(updates[i].password),
                  matching_items[0]->password_value) << "iteration " << i;
    } else {
      EXPECT_EQ(0U, matching_items.size()) << "iteration " << i;
    }
    // The login database entry must exist exactly when a password does.
    EXPECT_TRUE(login_db()->GetLogins(*query_form, &matching_items));
    EXPECT_EQ(updates[i].password ? 1U : 0U, matching_items.size())
        << "iteration " << i;
  }
}
TEST_F(PasswordStoreMacTest, TestDBKeychainAssociation) {
  // Tests that association between the keychain and login database parts of a
  // password added by fuzzy (PSL) matching works.
  // 1. Add a password for www.facebook.com
  // 2. Get a password for m.facebook.com. This fuzzy matches and returns the
  //    www.facebook.com password.
  // 3. Add the returned password for m.facebook.com.
  // 4. Remove both passwords.
  //    -> check: that both are gone from the login DB and the keychain
  // This test should in particular ensure that we don't keep passwords in the
  // keychain just because we think we still have other (fuzzy-)matching
  // entries for them in the login database. (For example, here if we deleted
  // the www.facebook.com password from the login database, we should not be
  // blocked from deleting it from the keystore just because the m.facebook.com
  // password fuzzy-matches the www.facebook.com one.)

  // 1. Add a password for www.facebook.com
  PasswordFormData www_form_data = {
    PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
    "http://www.facebook.com/index.html", "login",
    L"username", L"password", L"submit", L"joe_user", L"sekrit", true, false, 1
  };
  scoped_ptr<PasswordForm> www_form =
      CreatePasswordFormFromDataForTesting(www_form_data);
  EXPECT_EQ(AddChangeForForm(*www_form), login_db()->AddLogin(*www_form));
  MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain());
  owned_keychain_adapter.SetFindsOnlyOwnedItems(true);
  owned_keychain_adapter.AddPassword(*www_form);

  // 2. Get a password for m.facebook.com.
  PasswordForm m_form(*www_form);
  m_form.signon_realm = "http://m.facebook.com";
  m_form.origin = GURL("http://m.facebook.com/index.html");
  MockPasswordStoreConsumer consumer;
  store_->GetLogins(m_form, PasswordStore::ALLOW_PROMPT, &consumer);
  PasswordForm returned_form;
  EXPECT_CALL(consumer, OnGetPasswordStoreResultsConstRef(SizeIs(1u)))
      .WillOnce(
          DoAll(SaveACopyOfFirstForm(&returned_form), QuitUIMessageLoop()));
  base::MessageLoop::current()->Run();

  // 3. Add the returned password for m.facebook.com.
  EXPECT_EQ(AddChangeForForm(returned_form),
            login_db()->AddLogin(returned_form));
  owned_keychain_adapter.AddPassword(m_form);

  // 4. Remove both passwords.
  store_->RemoveLogin(*www_form);
  store_->RemoveLogin(m_form);
  FinishAsyncProcessing();

  // No trace of www.facebook.com.
  ScopedVector<autofill::PasswordForm> matching_items =
      owned_keychain_adapter.PasswordsFillingForm(www_form->signon_realm,
                                                  www_form->scheme);
  EXPECT_EQ(0u, matching_items.size());
  EXPECT_TRUE(login_db()->GetLogins(*www_form, &matching_items));
  EXPECT_EQ(0u, matching_items.size());

  // No trace of m.facebook.com.
  matching_items = owned_keychain_adapter.PasswordsFillingForm(
      m_form.signon_realm, m_form.scheme);
  EXPECT_EQ(0u, matching_items.size());
  EXPECT_TRUE(login_db()->GetLogins(m_form, &matching_items));
  EXPECT_EQ(0u, matching_items.size());
}
namespace {
// Observer that records PasswordStore change notifications so tests can set
// gmock expectations on OnLoginsChanged() and wait for asynchronous store
// processing to complete.
class PasswordsChangeObserver :
    public password_manager::PasswordStore::Observer {
 public:
  // |store| must outlive this observer. The registration added here is
  // removed automatically when |observer_| is destroyed.
  // Marked explicit per style rules for single-argument constructors.
  explicit PasswordsChangeObserver(PasswordStoreMac* store) : observer_(this) {
    observer_.Add(store);
  }

  // Drains the store's background thread via |test|, then verifies and clears
  // any expectations set on this mock.
  void WaitAndVerify(PasswordStoreMacTest* test) {
    test->FinishAsyncProcessing();
    ::testing::Mock::VerifyAndClearExpectations(this);
  }

  // password_manager::PasswordStore::Observer:
  MOCK_METHOD1(OnLoginsChanged,
               void(const password_manager::PasswordStoreChangeList& changes));

 private:
  ScopedObserver<password_manager::PasswordStore,
                 PasswordsChangeObserver> observer_;
};
// Builds a change list containing exactly one ADD change for |form|.
password_manager::PasswordStoreChangeList GetAddChangeList(
    const PasswordForm& form) {
  return password_manager::PasswordStoreChangeList(
      1, password_manager::PasswordStoreChange(
             password_manager::PasswordStoreChange::ADD, form));
}
// Tests RemoveLoginsCreatedBetween or RemoveLoginsSyncedBetween depending on
// |check_created|.
void CheckRemoveLoginsBetween(PasswordStoreMacTest* test, bool check_created) {
  PasswordFormData www_form_data_facebook = {
    PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
    "http://www.facebook.com/index.html", "login", L"submit", L"username",
    L"password", L"joe_user", L"sekrit", true, false, 0 };
  // The old form doesn't have elements names.
  PasswordFormData www_form_data_facebook_old = {
    PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
    "http://www.facebook.com/index.html", "login", L"", L"",
    L"", L"joe_user", L"oldsekrit", true, false, 0 };
  PasswordFormData www_form_data_other = {
    PasswordForm::SCHEME_HTML, "http://different.com/",
    "http://different.com/index.html", "login", L"submit", L"username",
    L"password", L"different_joe_user", L"sekrit", true, false, 0 };
  scoped_ptr<PasswordForm> form_facebook =
      CreatePasswordFormFromDataForTesting(www_form_data_facebook);
  scoped_ptr<PasswordForm> form_facebook_old =
      CreatePasswordFormFromDataForTesting(www_form_data_facebook_old);
  scoped_ptr<PasswordForm> form_other =
      CreatePasswordFormFromDataForTesting(www_form_data_other);
  base::Time now = base::Time::Now();
  // TODO(vasilii): remove the next line once crbug/374132 is fixed.
  now = base::Time::FromTimeT(now.ToTimeT());
  base::Time next_day = now + base::TimeDelta::FromDays(1);
  // Stamp the old facebook form with |now| and the other two with |next_day|,
  // on whichever timestamp field this test variant removes by.
  if (check_created) {
    form_facebook_old->date_created = now;
    form_facebook->date_created = next_day;
    form_other->date_created = next_day;
  } else {
    form_facebook_old->date_synced = now;
    form_facebook->date_synced = next_day;
    form_other->date_synced = next_day;
  }

  PasswordsChangeObserver observer(test->store());
  test->store()->AddLogin(*form_facebook_old);
  test->store()->AddLogin(*form_facebook);
  test->store()->AddLogin(*form_other);
  EXPECT_CALL(observer, OnLoginsChanged(GetAddChangeList(*form_facebook_old)));
  EXPECT_CALL(observer, OnLoginsChanged(GetAddChangeList(*form_facebook)));
  EXPECT_CALL(observer, OnLoginsChanged(GetAddChangeList(*form_other)));
  observer.WaitAndVerify(test);

  // Check the keychain content.
  MacKeychainPasswordFormAdapter owned_keychain_adapter(test->keychain());
  owned_keychain_adapter.SetFindsOnlyOwnedItems(false);
  ScopedVector<PasswordForm> matching_items(
      owned_keychain_adapter.PasswordsFillingForm(form_facebook->signon_realm,
                                                  form_facebook->scheme));
  EXPECT_EQ(1u, matching_items.size());
  matching_items = owned_keychain_adapter.PasswordsFillingForm(
      form_other->signon_realm, form_other->scheme);
  EXPECT_EQ(1u, matching_items.size());

  // Remove facebook. Both facebook forms are expected to go, while
  // |form_other| survives despite carrying the same timestamp as
  // |form_facebook| -- presumably because removing the shared keychain item
  // orphans |form_facebook| as well.
  if (check_created) {
    test->store()->RemoveLoginsCreatedBetween(base::Time(), next_day,
                                              base::Closure());
  } else {
    test->store()->RemoveLoginsSyncedBetween(base::Time(), next_day);
  }
  password_manager::PasswordStoreChangeList list;
  // The expected REMOVE notifications carry forms with cleared passwords.
  form_facebook_old->password_value.clear();
  form_facebook->password_value.clear();
  list.push_back(password_manager::PasswordStoreChange(
      password_manager::PasswordStoreChange::REMOVE, *form_facebook_old));
  list.push_back(password_manager::PasswordStoreChange(
      password_manager::PasswordStoreChange::REMOVE, *form_facebook));
  EXPECT_CALL(observer, OnLoginsChanged(list));
  list.clear();
  observer.WaitAndVerify(test);
  matching_items = owned_keychain_adapter.PasswordsFillingForm(
      form_facebook->signon_realm, form_facebook->scheme);
  EXPECT_EQ(0u, matching_items.size());
  matching_items = owned_keychain_adapter.PasswordsFillingForm(
      form_other->signon_realm, form_other->scheme);
  EXPECT_EQ(1u, matching_items.size());

  // Remove form_other with an open-ended (null) upper bound.
  if (check_created) {
    test->store()->RemoveLoginsCreatedBetween(next_day, base::Time(),
                                              base::Closure());
  } else {
    test->store()->RemoveLoginsSyncedBetween(next_day, base::Time());
  }
  form_other->password_value.clear();
  list.push_back(password_manager::PasswordStoreChange(
      password_manager::PasswordStoreChange::REMOVE, *form_other));
  EXPECT_CALL(observer, OnLoginsChanged(list));
  observer.WaitAndVerify(test);
  matching_items = owned_keychain_adapter.PasswordsFillingForm(
      form_other->signon_realm, form_other->scheme);
  EXPECT_EQ(0u, matching_items.size());
}
} // namespace
// Removal keyed on creation date; see CheckRemoveLoginsBetween() for the
// shared scenario.
TEST_F(PasswordStoreMacTest, TestRemoveLoginsCreatedBetween) {
  CheckRemoveLoginsBetween(this, true);
}
// Removal keyed on sync date; see CheckRemoveLoginsBetween() for the shared
// scenario.
TEST_F(PasswordStoreMacTest, TestRemoveLoginsSyncedBetween) {
  CheckRemoveLoginsBetween(this, false);
}
TEST_F(PasswordStoreMacTest, TestRemoveLoginsMultiProfile) {
  // Make sure that RemoveLoginsCreatedBetween does affect only the correct
  // profile.

  // Add a third-party password (no Chrome creator tag).
  MockAppleKeychain::KeychainTestData keychain_data = {
    kSecAuthenticationTypeHTMLForm, "some.domain.com",
    kSecProtocolTypeHTTP, "/insecure.html", 0, NULL, "20020601171500Z",
    "joe_user", "sekrit", false };
  keychain()->AddTestItem(keychain_data);

  // Add a password through the adapter. It has the "Chrome" creator tag.
  // However, it's not referenced by the password database.
  MacKeychainPasswordFormAdapter owned_keychain_adapter(keychain());
  owned_keychain_adapter.SetFindsOnlyOwnedItems(true);
  PasswordFormData www_form_data1 = {
    PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
    "http://www.facebook.com/index.html", "login", L"username", L"password",
    L"submit", L"joe_user", L"sekrit", true, false, 1 };
  scoped_ptr<PasswordForm> www_form =
      CreatePasswordFormFromDataForTesting(www_form_data1);
  EXPECT_TRUE(owned_keychain_adapter.AddPassword(*www_form));

  // Add a password from the current profile.
  PasswordFormData www_form_data2 = {
    PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
    "http://www.facebook.com/index.html", "login", L"username", L"password",
    L"submit", L"not_joe_user", L"12345", true, false, 1 };
  www_form = CreatePasswordFormFromDataForTesting(www_form_data2);
  store_->AddLogin(*www_form);
  FinishAsyncProcessing();
  ScopedVector<PasswordForm> matching_items;
  EXPECT_TRUE(login_db()->GetLogins(*www_form, &matching_items));
  EXPECT_EQ(1u, matching_items.size());

  // Null times mean an unbounded range: remove everything from this profile.
  store_->RemoveLoginsCreatedBetween(base::Time(), base::Time(),
                                     base::Closure());
  FinishAsyncProcessing();

  // Check the second facebook form is gone.
  EXPECT_TRUE(login_db()->GetLogins(*www_form, &matching_items));
  EXPECT_EQ(0u, matching_items.size());

  // Check the first facebook form is still there.
  matching_items = owned_keychain_adapter.PasswordsFillingForm(
      www_form->signon_realm, www_form->scheme);
  ASSERT_EQ(1u, matching_items.size());
  EXPECT_EQ(ASCIIToUTF16("joe_user"), matching_items[0]->username_value);

  // Check the third-party password is still there.
  owned_keychain_adapter.SetFindsOnlyOwnedItems(false);
  matching_items = owned_keychain_adapter.PasswordsFillingForm(
      "http://some.domain.com/insecure.html", PasswordForm::SCHEME_HTML);
  ASSERT_EQ(1u, matching_items.size());
}
// Add a facebook form to the store but not to the keychain. The form is to be
// implicitly deleted. However, the observers shouldn't get notified about
// deletion of non-existent forms like m.facebook.com.
TEST_F(PasswordStoreMacTest, SilentlyRemoveOrphanedForm) {
  // StrictMock: any OnLoginsChanged() call without an explicit expectation
  // fails the test.
  testing::StrictMock<MockPasswordStoreObserver> mock_observer;
  store()->AddObserver(&mock_observer);

  // 1. Add a password for www.facebook.com to the LoginDatabase.
  PasswordFormData www_form_data = {
    PasswordForm::SCHEME_HTML, "http://www.facebook.com/",
    "http://www.facebook.com/index.html", "login",
    L"username", L"password", L"submit", L"joe_user", L"", true, false, 1
  };
  scoped_ptr<PasswordForm> www_form(
      CreatePasswordFormFromDataForTesting(www_form_data));
  EXPECT_EQ(AddChangeForForm(*www_form), login_db()->AddLogin(*www_form));

  // 2. Get a PSL-matched password for m.facebook.com. The observer isn't
  // notified because the form isn't in the database.
  PasswordForm m_form(*www_form);
  m_form.signon_realm = "http://m.facebook.com";
  m_form.origin = GURL("http://m.facebook.com/index.html");

  MockPasswordStoreConsumer consumer;
  ON_CALL(consumer, OnGetPasswordStoreResultsConstRef(_))
      .WillByDefault(QuitUIMessageLoop());
  EXPECT_CALL(mock_observer, OnLoginsChanged(_)).Times(0);
  // The PSL-matched form isn't returned because there is no actual password in
  // the keychain.
  EXPECT_CALL(consumer, OnGetPasswordStoreResultsConstRef(IsEmpty()));
  store_->GetLogins(m_form, PasswordStore::ALLOW_PROMPT, &consumer);
  base::MessageLoop::current()->Run();
  ScopedVector<autofill::PasswordForm> all_forms;
  EXPECT_TRUE(login_db()->GetAutofillableLogins(&all_forms));
  EXPECT_EQ(1u, all_forms.size());
  ::testing::Mock::VerifyAndClearExpectations(&mock_observer);

  // 3. Get a password for www.facebook.com. The form is implicitly removed and
  // the observer is notified.
  password_manager::PasswordStoreChangeList list;
  list.push_back(password_manager::PasswordStoreChange(
      password_manager::PasswordStoreChange::REMOVE, *www_form));
  EXPECT_CALL(mock_observer, OnLoginsChanged(list));
  EXPECT_CALL(consumer, OnGetPasswordStoreResultsConstRef(IsEmpty()));
  store_->GetLogins(*www_form, PasswordStore::ALLOW_PROMPT, &consumer);
  base::MessageLoop::current()->Run();
  EXPECT_TRUE(login_db()->GetAutofillableLogins(&all_forms));
  EXPECT_EQ(0u, all_forms.size());
}
// An Android app credential ("android://" signon realm, no web origin) must
// survive the full store/fetch/delete cycle.
// Regression test for http://crbug.com/455551
TEST_F(PasswordStoreMacTest, StoringAndRetrievingAndroidCredentials) {
  PasswordForm android_credential;
  android_credential.signon_realm =
      "android://7x7IDboo8u9YKraUsbmVkuf1@net.rateflix.app/";
  android_credential.password_value = base::UTF8ToUTF16("password");
  android_credential.username_value = base::UTF8ToUTF16("randomusername");
  VerifyCredentialLifecycle(android_credential);
}
// A federated credential (federation URL set, empty password value) must
// survive the full store/fetch/delete cycle.
TEST_F(PasswordStoreMacTest, StoringAndRetrievingFederatedCredentials) {
  PasswordForm federated_credential;
  federated_credential.signon_realm =
      "android://7x7IDboo8u9YKraUsbmVkuf1@net.rateflix.app/";
  federated_credential.federation_url =
      GURL(password_manager::kTestingFederationUrlSpec);
  federated_credential.password_value = base::UTF8ToUTF16("");  // No password.
  federated_credential.username_value = base::UTF8ToUTF16("randomusername");
  VerifyCredentialLifecycle(federated_credential);
}
// Reply callback for async migration tasks: asserts the migration finished
// with the expected result code, then unblocks the test's message loop.
void CheckMigrationResult(PasswordStoreMac::MigrationResult expected_result,
                          PasswordStoreMac::MigrationResult result) {
  EXPECT_EQ(expected_result, result);
  QuitUIMessageLoop();
}
// Import the passwords from the Keychain to LoginDatabase.
TEST_F(PasswordStoreMacTest, ImportFromKeychain) {
  PasswordForm form1;
  form1.origin = GURL("http://accounts.google.com/LoginAuth");
  form1.signon_realm = "http://accounts.google.com/";
  form1.username_value = ASCIIToUTF16("my_username");
  form1.password_value = ASCIIToUTF16("my_password");
  PasswordForm form2;
  form2.origin = GURL("http://facebook.com/Login");
  form2.signon_realm = "http://facebook.com/";
  form2.username_value = ASCIIToUTF16("my_username");
  form2.password_value = ASCIIToUTF16("my_password");
  // Blacklist entries carry no password; they must be imported too.
  PasswordForm blacklisted_form;
  blacklisted_form.origin = GURL("http://badsite.com/Login");
  blacklisted_form.signon_realm = "http://badsite.com/";
  blacklisted_form.blacklisted_by_user = true;
  store()->AddLogin(form1);
  store()->AddLogin(form2);
  store()->AddLogin(blacklisted_form);
  FinishAsyncProcessing();
  // Run the migration on the background thread; CheckMigrationResult verifies
  // the result code and quits the loop.
  ASSERT_TRUE(base::PostTaskAndReplyWithResult(
      thread_->task_runner().get(), FROM_HERE,
      base::Bind(&PasswordStoreMac::ImportFromKeychain, store()),
      base::Bind(&CheckMigrationResult, PasswordStoreMac::MIGRATION_OK)));
  FinishAsyncProcessing();
  // The password should be stored in the database by now.
  ScopedVector<PasswordForm> matching_items;
  EXPECT_TRUE(login_db()->GetLogins(form1, &matching_items));
  ASSERT_EQ(1u, matching_items.size());
  EXPECT_EQ(form1, *matching_items[0]);
  EXPECT_TRUE(login_db()->GetLogins(form2, &matching_items));
  ASSERT_EQ(1u, matching_items.size());
  EXPECT_EQ(form2, *matching_items[0]);
  EXPECT_TRUE(login_db()->GetLogins(blacklisted_form, &matching_items));
  ASSERT_EQ(1u, matching_items.size());
  EXPECT_EQ(blacklisted_form, *matching_items[0]);
  // The passwords are encrypted using a key from the Keychain.
  EXPECT_TRUE(histogram_tester_->GetHistogramSamplesSinceCreation(
      "OSX.Keychain.Access")->TotalCount());
  // Drop the tester; this test makes no further histogram checks.
  histogram_tester_.reset();
}
// Import a federated credential while the Keychain is locked.
TEST_F(PasswordStoreMacTest, ImportFederatedFromLockedKeychain) {
  keychain()->set_locked(true);
  // Federated credential: carries no password value, so the import should
  // succeed even though the (locked) Keychain cannot be read.
  PasswordForm form1;
  form1.origin = GURL("http://example.com/Login");
  form1.signon_realm = "http://example.com/";
  form1.username_value = ASCIIToUTF16("my_username");
  form1.federation_url = GURL("https://accounts.google.com/");
  store()->AddLogin(form1);
  FinishAsyncProcessing();
  // The migration still reports MIGRATION_OK despite the locked Keychain.
  ASSERT_TRUE(base::PostTaskAndReplyWithResult(
      thread_->task_runner().get(), FROM_HERE,
      base::Bind(&PasswordStoreMac::ImportFromKeychain, store()),
      base::Bind(&CheckMigrationResult, PasswordStoreMac::MIGRATION_OK)));
  FinishAsyncProcessing();
  // The credential made it into the LoginDatabase intact.
  ScopedVector<PasswordForm> matching_items;
  EXPECT_TRUE(login_db()->GetLogins(form1, &matching_items));
  ASSERT_EQ(1u, matching_items.size());
  EXPECT_EQ(form1, *matching_items[0]);
}
// Try to import while the Keychain is locked but the encryption key had been
// read earlier.
TEST_F(PasswordStoreMacTest, ImportFromLockedKeychainError) {
  PasswordForm form1;
  form1.origin = GURL("http://accounts.google.com/LoginAuth");
  form1.signon_realm = "http://accounts.google.com/";
  form1.username_value = ASCIIToUTF16("my_username");
  form1.password_value = ASCIIToUTF16("my_password");
  store()->AddLogin(form1);
  FinishAsyncProcessing();
  // Add a second keychain item matching the Database entry.
  PasswordForm form2 = form1;
  form2.origin = GURL("http://accounts.google.com/Login");
  form2.password_value = ASCIIToUTF16("1234");
  MacKeychainPasswordFormAdapter adapter(keychain());
  EXPECT_TRUE(adapter.AddPassword(form2));
  // Lock the Keychain only now, after the items exist, so the migration
  // fails at read time.
  keychain()->set_locked(true);
  ASSERT_TRUE(base::PostTaskAndReplyWithResult(
      thread_->task_runner().get(), FROM_HERE,
      base::Bind(&PasswordStoreMac::ImportFromKeychain, store()),
      base::Bind(&CheckMigrationResult, PasswordStoreMac::KEYCHAIN_BLOCKED)));
  FinishAsyncProcessing();
  // The database row survives but its password could not be migrated, so it
  // is left empty.
  ScopedVector<PasswordForm> matching_items;
  EXPECT_TRUE(login_db()->GetLogins(form1, &matching_items));
  ASSERT_EQ(1u, matching_items.size());
  EXPECT_EQ(base::string16(), matching_items[0]->password_value);
  // One database password failed to migrate; both Chrome-owned keychain items
  // (form1's and form2's) were inaccessible behind the lock.
  histogram_tester_->ExpectUniqueSample(
      "PasswordManager.KeychainMigration.NumPasswordsOnFailure", 1, 1);
  histogram_tester_->ExpectUniqueSample(
      "PasswordManager.KeychainMigration.NumFailedPasswords", 1, 1);
  histogram_tester_->ExpectUniqueSample(
      "PasswordManager.KeychainMigration.NumChromeOwnedInaccessiblePasswords",
      2, 1);
  // Don't test the encryption key access.
  histogram_tester_.reset();
}
|
Java
|
Markdown to presentation
========================
A simple tool for creating presentations from markdown files.
License
=======
md2p is licensed under BSD3 license. This refers to `src/Main.hs` and `output/js/present.js` files.
**Note:** any other content is not a part of md2p and is not licensed by it.
Example
-------
- [Source](https://github.com/soupi/markdown-to-presentation/blob/gh-pages/input/Example.md)
- [Result](http://soupi.github.io/markdown-to-presentation/)
How To Use:
------
1. Create a Markdown file and separate slides with `---` (HTML's `<hr>` tag)
2. Use _md2p_ tool (haskell compiler and cabal needed to compile md2p) to create an HTML file from your Markdown and place it in the output folder
You can also use any other md2html converter. Just add the following to the HTML:
```html
<link rel="stylesheet" type="text/css" href="css/style.css">
<link rel="stylesheet" type="text/css" href="css/github.css">
<link rel="stylesheet" type="text/css" href="highlight/styles/solarized_light.css"> <!-- Or your preferable syntax highlight theme -->
<script src="js/jquery-1.11.0.min.js"></script>
<script src="highlight/highlight.pack.js"></script>
<script>hljs.initHighlightingOnLoad();</script>
<script src="js/present.js"></script>
```
Actually, you can omit everything except:
```html
<script src="js/jquery-1.11.0.min.js"></script>
<script src="js/present.js"></script>
```
but then styling is up to you.
How to Install md2p:
-------------------
use cabal to download dependencies and install md2p
```
cabal update && cabal install
```
Packages and Libraries used to create md2p:
------------------------------------------
- [markdown](http://hackage.haskell.org/package/markdown) - for markdown to html conversion
- [highlight.js](https://highlightjs.org/) - for syntax highlight
- [a slightly modified github.css](https://gist.github.com/andyferra/2554919) - for styling
|
Java
|
/*
-- MAGMA (version 1.5.0-beta3) --
Univ. of Tennessee, Knoxville
Univ. of California, Berkeley
Univ. of Colorado, Denver
@date July 2014
@generated from magma_z_init.cpp normal z -> s, Fri Jul 18 17:34:30 2014
@author Hartwig Anzt
*/
#include <fstream>
#include <stdlib.h>
#include <string>
#include <sstream>
#include <iostream>
#include <ostream>
#include <assert.h>
#include <stdio.h>
#include "../include/magmasparse_s.h"
#include "../../include/magma.h"
#include "../include/mmio.h"
using namespace std;
/**
Purpose
-------
Initialize a magma_s_vector.
Arguments
---------
@param
x magma_s_vector
vector to initialize
@param
mem_loc magma_location_t
memory for vector
@param
num_rows magma_int_t
desired length of vector
@param
values float
entries in vector
@ingroup magmasparse_saux
********************************************************************/
magma_int_t
magma_s_vinit( magma_s_vector *x,
               magma_location_t mem_loc,
               magma_int_t num_rows,
               float values ){
    // Provisional default; overwritten below once mem_loc is inspected.
    x->memory_location = Magma_CPU;
    x->num_rows = num_rows;
    // Dense vector: every entry is stored, so nnz equals the length.
    x->nnz = num_rows;
    if( mem_loc == Magma_CPU ){
        x->memory_location = Magma_CPU;
        magma_smalloc_cpu( &x->val, num_rows );
        if ( x->val == NULL )
            return MAGMA_ERR_HOST_ALLOC;
        // Fill every entry with the same scalar.
        for( magma_int_t i=0; i<num_rows; i++)
            x->val[i] = values;
        return MAGMA_SUCCESS;
    }
    else if( mem_loc == Magma_DEV ){
        x->memory_location = Magma_DEV;
        // Build the constant vector on the host first, then copy to device.
        float *tmp;
        magma_smalloc_cpu( &tmp, num_rows );
        if ( tmp == NULL )
            return MAGMA_ERR_HOST_ALLOC;
        for( magma_int_t i=0; i<num_rows; i++)
            tmp[i] = values;
        if (MAGMA_SUCCESS != magma_smalloc( &x->val, x->num_rows))
            return MAGMA_ERR_DEVICE_ALLOC;
        // data transfer
        magma_ssetvector( x->num_rows, tmp, 1, x->val, 1 );
        magma_free_cpu(tmp);
        return MAGMA_SUCCESS;
    }
    // NOTE(review): an unrecognized mem_loc falls through here and reports
    // success without allocating x->val -- confirm this is intended.
    return MAGMA_SUCCESS;
}
|
Java
|
//To Test:http://localhost:8080/nbia-auth/services/v3/getProtectionGrpList?format=html
package gov.nih.nci.nbia.restAPI;
import gov.nih.nci.nbia.dao.TrialDataProvenanceDAO;
import gov.nih.nci.nbia.util.SpringApplicationContext;
import gov.nih.nci.security.SecurityServiceProvider;
import gov.nih.nci.security.UserProvisioningManager;
import gov.nih.nci.security.authorization.domainobjects.ProtectionGroup;
import gov.nih.nci.security.authorization.domainobjects.ProtectionElement;
import gov.nih.nci.security.authorization.domainobjects.Role;
import gov.nih.nci.security.dao.RoleSearchCriteria;
import gov.nih.nci.security.dao.SearchCriteria;
import gov.nih.nci.security.exceptions.CSConfigurationException;
import gov.nih.nci.security.exceptions.CSException;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Path;
import javax.ws.rs.GET;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.springframework.dao.DataAccessException;
@Path("/v3/getProtectionGrpList")
public class V3_getProtectionGrpList extends getData{
    // Column headers for the tabular response, in row-element order.
    private static final String[] columns={"pgName", "description", "dataSetName"};
    public final static String TEXT_CSV = "text/csv";
    @Context private HttpServletRequest httpRequest;

    /**
     * This method gets a list of protection groups, one row per
     * (protection group, protection element) pair known to the CSM
     * UserProvisioningManager.
     *
     * @param format desired response format (xml, json, html, or csv)
     * @return Response - formatted list of protection groups
     */
    @GET
    @Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON, MediaType.TEXT_HTML, TEXT_CSV})
    public Response constructResponse(@QueryParam("format") String format) {
        // Initialized eagerly: the "no protection groups" branch below used to
        // call data.add() while data was still null, throwing a
        // NullPointerException. A CSM failure before any rows are added now
        // yields an empty table instead of a null payload.
        List<Object[]> data = new ArrayList<Object[]>();
        try {
            UserProvisioningManager upm = getUpm();
            java.util.List<ProtectionGroup> protectionGrpLst = upm.getProtectionGroups();
            if ( protectionGrpLst != null) {
                for(ProtectionGroup pg : protectionGrpLst) {
                    // Each protection element of the group becomes its own row.
                    List<ProtectionElement> pes = new ArrayList<ProtectionElement>(
                            upm.getProtectionElements(pg.getProtectionGroupId().toString()));
                    for (ProtectionElement pe : pes) {
                        Object [] objs = {pg.getProtectionGroupName(),
                                pg.getProtectionGroupDescription(),
                                pe.getProtectionElementName()};
                        data.add(objs);
                    }
                }
            }
            else {
                Object [] objs = {"Warning: No Protection Group has defined yet!", "NA", "NA"};
                data.add(objs);
            }
        } catch (CSConfigurationException e) {
            // TODO(review): consider returning a 5xx instead of swallowing.
            e.printStackTrace();
        } catch (CSException e) {
            e.printStackTrace();
        }
        return formatResponse(format, data, columns);
    }
}
|
Java
|
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_BROWSER_RENDERER_HOST_CLIPBOARD_MESSAGE_FILTER_H_
#define CONTENT_BROWSER_RENDERER_HOST_CLIPBOARD_MESSAGE_FILTER_H_
#include <string>
#include <vector>
#include "base/basictypes.h"
#include "content/public/browser/browser_message_filter.h"
#include "ui/base/clipboard/clipboard.h"
class GURL;
namespace content {
// Handles clipboard-related IPC messages sent by renderer processes,
// forwarding reads and writes to the ui::Clipboard.
class ClipboardMessageFilter : public BrowserMessageFilter {
 public:
  ClipboardMessageFilter();

  // BrowserMessageFilter overrides: thread selection and message dispatch.
  virtual void OverrideThreadForMessage(
      const IPC::Message& message,
      BrowserThread::ID* thread) OVERRIDE;
  virtual bool OnMessageReceived(const IPC::Message& message,
                                 bool* message_was_ok) OVERRIDE;

 private:
  virtual ~ClipboardMessageFilter();

  // Write handlers: async fire-and-forget vs. sync (the sync variant carries
  // shared-memory bitmap data from the renderer).
  void OnWriteObjectsAsync(const ui::Clipboard::ObjectMap& objects);
  void OnWriteObjectsSync(const ui::Clipboard::ObjectMap& objects,
                          base::SharedMemoryHandle bitmap_handle);

  // Query handlers; results are returned through the out-parameters.
  void OnGetSequenceNumber(const ui::Clipboard::Buffer buffer,
                           uint64* sequence_number);
  void OnIsFormatAvailable(const ui::Clipboard::FormatType& format,
                           ui::Clipboard::Buffer buffer,
                           bool* result);
  void OnClear(ui::Clipboard::Buffer buffer);
  void OnReadAvailableTypes(ui::Clipboard::Buffer buffer,
                            std::vector<string16>* types,
                            bool* contains_filenames);

  // Read handlers for the individual clipboard formats.
  void OnReadText(ui::Clipboard::Buffer buffer, string16* result);
  void OnReadAsciiText(ui::Clipboard::Buffer buffer, std::string* result);
  void OnReadHTML(ui::Clipboard::Buffer buffer, string16* markup, GURL* url,
                  uint32* fragment_start, uint32* fragment_end);
  void OnReadRTF(ui::Clipboard::Buffer buffer, std::string* result);
  // Image reads reply asynchronously via OnReadImageReply.
  void OnReadImage(ui::Clipboard::Buffer buffer, IPC::Message* reply_msg);
  void OnReadImageReply(const SkBitmap& bitmap, IPC::Message* reply_msg);
  void OnReadCustomData(ui::Clipboard::Buffer buffer,
                        const string16& type,
                        string16* result);

#if defined(OS_MACOSX)
  // Mac-only: mirrors the text into the system Find pasteboard.
  void OnFindPboardWriteString(const string16& text);
#endif

  // We have our own clipboard because we want to access the clipboard on the
  // IO thread instead of forwarding (possibly synchronous) messages to the UI
  // thread. This instance of the clipboard should be accessed only on the IO
  // thread.
  static ui::Clipboard* GetClipboard();

  DISALLOW_COPY_AND_ASSIGN(ClipboardMessageFilter);
};
} // namespace content
#endif // CONTENT_BROWSER_RENDERER_HOST_CLIPBOARD_MESSAGE_FILTER_H_
|
Java
|
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package containing the different outputs.
Each output type is defined inside a module.
"""
|
Java
|
/*
* Copyright (c) 2013-2013, KNOPFLERFISH project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* - Neither the name of the KNOPFLERFISH project nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.knopflerfish.service.repositorymanager;
import org.osgi.framework.Constants;
import org.osgi.framework.ServiceReference;
import org.osgi.service.repository.Repository;
/**
 * Immutable snapshot of a registered {@link Repository} service: its service
 * id, its service ranking, and the originating service reference. Instances
 * order by descending rank, then ascending id (see {@link #compareTo}).
 */
public class RepositoryInfo implements Comparable<RepositoryInfo> {

  final private long id;
  final private int rank;
  final ServiceReference<Repository> sr;

  /**
   * Captures id and ranking from the service reference. A missing or
   * non-Integer SERVICE_RANKING property defaults to rank 0, matching the
   * OSGi default ranking.
   */
  public RepositoryInfo(ServiceReference<Repository> sr) {
    this.id = ((Long)sr.getProperty(Constants.SERVICE_ID)).longValue();
    Object r = sr.getProperty(Constants.SERVICE_RANKING);
    if (r != null && r instanceof Integer) {
      this.rank = ((Integer)r).intValue();
    } else {
      this.rank = 0;
    }
    this.sr = sr;
  }

  /** Copy constructor that overrides the rank but keeps id and reference. */
  public RepositoryInfo(RepositoryInfo old, int rank) {
    this.id = old.id;
    this.rank = rank;
    this.sr = old.sr;
  }

  public long getId() {
    return id;
  }

  public int getRank() {
    return rank;
  }

  /** Reads a raw property from the underlying service reference. */
  public Object getProperty(String prop) {
    return sr.getProperty(prop);
  }

  // Hash on id only; equals() additionally compares rank, which still
  // satisfies the contract (equal objects share an id, hence a hash).
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + (int) (id ^ (id >>> 32));
    return result;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o)
      return true;
    if (o == null)
      return false;
    if (getClass() != o.getClass())
      return false;
    RepositoryInfo rio = (RepositoryInfo) o;
    if (id != rio.id || rank != rio.rank)
      return false;
    return true;
  }

  /**
   * Orders by descending rank, then ascending id. Uses explicit comparisons
   * instead of the previous {@code o.rank - rank} subtraction, which could
   * overflow for ranks near Integer.MIN_VALUE/MAX_VALUE and invert the order.
   */
  @Override
  public int compareTo(RepositoryInfo o) {
    if (equals(o)) {
      return 0;
    }
    if (rank != o.rank) {
      // Higher rank sorts first.
      return rank > o.rank ? -1 : 1;
    } else {
      return id < o.id ? -1 : 1;
    }
  }

  public ServiceReference<Repository> getServiceReference() {
    return sr;
  }

  @Override
  public String toString() {
    return "RepositoryInfo [id=" + id + ", rank=" + rank + "]";
  }
}
|
Java
|
//------------------------------------------------------------------------------
// GB_Descriptor_get: get the status of a descriptor
//------------------------------------------------------------------------------
// SuiteSparse:GraphBLAS, Timothy A. Davis, (c) 2017-2018, All Rights Reserved.
// http://suitesparse.com See GraphBLAS/Doc/License.txt for license.
//------------------------------------------------------------------------------
// A descriptor modifies how the behavoir of a GraphBLAS operation. In the
// current GraphBLAS spec, the following descriptor fields may be set.
// Descriptor field: Descriptor value:
// desc->out GxB_DEFAULT or GrB_REPLACE
// GrB_REPLACE means that the output matrix C is cleared just
// prior to writing results back into it, via C<Mask> = results. This
// descriptor does not affect how C is used to compute the results. If
// GxB_DEFAULT, then C is not cleared before doing C<Mask>=results.
// desc->mask GxB_DEFAULT or GrB_SCMP
// An optional 'write mask' defines how the results are to be written back
// into C. The boolean Mask matrix has the same size as C (Mask is
// typecasted to boolean if it has another type). If the Mask input to
// the GraphBLAS method is NULL, then implicitly Mask(i,j)=1 for all i and
// j. Let Z be the results to be written into C (the same dimension as
// C). If desc->mask is GxB_DEFAULT, and Mask(i,j)=1, then C(i,j) is
// over-written with Z(i,j). Otherwise, if Mask(i,j)=0 C(i,j) is left
// unmodified (it remains an implicit zero if it is so, or its value is
// unchanged if it has one). If desc->mask is GrB_SCMP, then the use of
// Mask is negated: Mask(i,j)=0 means that C(i,j) is overwritten with
// Z(i,j), and Mask(i,j)=1 means that C(i,j) is left unchanged.
// Writing results Z into C via the Mask is written as C<Mask>=Z in
// GraphBLAS notation.
// Note that it is the value of Mask(i,j) that determines how C(i,j) is
// overwritten. If the (i,j) entry is present in the Mask matrix data
// structure but has a numerical value of zero, then it is the same as if
// (i,j) is not present and thus implicitly zero. Both mean 'Mask(i,j)=0'
// in the description above of how the Mask works.
// desc->in0 and desc->in1 GxB_DEFAULT or GrB_TRAN
// A GrB_Matrix passed as an input parameter to GraphBLAS methods can
// optionally transpose them prior to using them. desc->in0 always refers
// to the first input to the method, and desc->in1 always refers to the
// second one.
// If the value of this descriptor is GxB_DEFAULT, then the matrix is used
// as-is. Otherwise, it is transposed first. That is, the results are
// the same as if the transpose of the matrix was passed to the method.
// desc->axb see GraphBLAS.h; can be:
// GrB_DEFAULT automatic selection
// GxB_AxB_GUSTAVSON gather-scatter saxpy method
// GxB_AxB_HEAP heap-based saxpy method
// GxB_AxB_DOT dot product
#include "GB.h"
GrB_Info GB_Descriptor_get      // get the contents of a descriptor
(
    const GrB_Descriptor desc,  // descriptor to query, may be NULL
    bool *C_replace,            // if true replace C before C<Mask>=Z
    bool *Mask_comp,            // if true use logical negation of Mask
    bool *In0_transpose,        // if true transpose first input
    bool *In1_transpose,        // if true transpose second input
    GrB_Desc_Value *AxB_method, // method for C=A*B
    GB_Context Context
)
{

    //--------------------------------------------------------------------------
    // check inputs
    //--------------------------------------------------------------------------

    // desc may be null, but if not NULL it must be initialized
    GB_RETURN_IF_FAULTY (desc) ;

    //--------------------------------------------------------------------------
    // get the contents of the descriptor
    //--------------------------------------------------------------------------

    // default values if descriptor is NULL
    GrB_Desc_Value C_desc    = GxB_DEFAULT ;
    GrB_Desc_Value Mask_desc = GxB_DEFAULT ;
    GrB_Desc_Value In0_desc  = GxB_DEFAULT ;
    GrB_Desc_Value In1_desc  = GxB_DEFAULT ;
    GrB_Desc_Value AxB_desc  = GxB_DEFAULT ;

    // non-defaults descriptors
    if (desc != NULL)
    {
        // get the contents
        C_desc    = desc->out ;   // DEFAULT or REPLACE
        Mask_desc = desc->mask ;  // DEFAULT or SCMP
        In0_desc  = desc->in0 ;   // DEFAULT or TRAN
        In1_desc  = desc->in1 ;   // DEFAULT or TRAN
        AxB_desc  = desc->axb ;   // DEFAULT, GUSTAVSON, HEAP, or DOT
    }

    // check for valid values of each descriptor field; validation happens
    // before any out-parameter is written, so nothing is modified on error
    if (!(C_desc    == GxB_DEFAULT || C_desc    == GrB_REPLACE) ||
        !(Mask_desc == GxB_DEFAULT || Mask_desc == GrB_SCMP) ||
        !(In0_desc  == GxB_DEFAULT || In0_desc  == GrB_TRAN) ||
        !(In1_desc  == GxB_DEFAULT || In1_desc  == GrB_TRAN) ||
        !(AxB_desc  == GxB_DEFAULT || AxB_desc  == GxB_AxB_GUSTAVSON ||
          AxB_desc  == GxB_AxB_DOT || AxB_desc  == GxB_AxB_HEAP))
    {
        return (GB_ERROR (GrB_INVALID_OBJECT, (GB_LOG, "Descriptor invalid"))) ;
    }

    // each out-parameter is optional: a NULL pointer means "don't care"
    if (C_replace != NULL)
    {
        *C_replace = (C_desc == GrB_REPLACE) ;
    }
    if (Mask_comp != NULL)
    {
        *Mask_comp = (Mask_desc == GrB_SCMP) ;
    }
    if (In0_transpose != NULL)
    {
        *In0_transpose = (In0_desc == GrB_TRAN) ;
    }
    if (In1_transpose != NULL)
    {
        *In1_transpose = (In1_desc == GrB_TRAN) ;
    }
    if (AxB_method != NULL)
    {
        *AxB_method = AxB_desc ;
    }
    return (GrB_SUCCESS) ;
}
|
Java
|
/* this file has been autogenerated by vtkNodeJsWrap */
/* editing this might proof futile */
#define VTK_WRAPPING_CXX
#define VTK_STREAMS_FWD_ONLY
#include <nan.h>
#include "vtkObjectWrap.h"
#include "vtkAbstractContextItemWrap.h"
#include "vtkObjectBaseWrap.h"
#include "vtkContext2DWrap.h"
#include "vtkContextSceneWrap.h"
#include "../../plus/plus.h"
using namespace v8;
extern Nan::Persistent<v8::Object> vtkNodeJsNoWrap;
// Cached constructor template, populated lazily by InitPtpl().
Nan::Persistent<v8::FunctionTemplate> VtkAbstractContextItemWrap::ptpl;

// Default wrapper with no native object attached.
VtkAbstractContextItemWrap::VtkAbstractContextItemWrap()
{ }

// Wraps an existing native vtkAbstractContextItem.
VtkAbstractContextItemWrap::VtkAbstractContextItemWrap(vtkSmartPointer<vtkAbstractContextItem> _native)
{ native = _native; }

VtkAbstractContextItemWrap::~VtkAbstractContextItemWrap()
{ }
// Registers the class on the exports object under both the VTK name and the
// short alias; the constructor is materialized lazily via ConstructorGetter.
void VtkAbstractContextItemWrap::Init(v8::Local<v8::Object> exports)
{
  Nan::SetAccessor(exports, Nan::New("vtkAbstractContextItem").ToLocalChecked(), ConstructorGetter);
  Nan::SetAccessor(exports, Nan::New("AbstractContextItem").ToLocalChecked(), ConstructorGetter);
}
// Accessor callback: builds the function template on first use and returns
// the JS constructor function.
void VtkAbstractContextItemWrap::ConstructorGetter(
  v8::Local<v8::String> property,
  const Nan::PropertyCallbackInfo<v8::Value>& info)
{
  InitPtpl();
  info.GetReturnValue().Set(Nan::New(ptpl)->GetFunction());
}
// Builds the persistent function template once: inherits from the
// vtkObject wrapper and registers every prototype method under both its
// VTK-style (UpperCamel) and JS-style (lowerCamel) name.
void VtkAbstractContextItemWrap::InitPtpl()
{
  if (!ptpl.IsEmpty()) return;  // already initialized
  v8::Local<v8::FunctionTemplate> tpl = Nan::New<v8::FunctionTemplate>(New);
  // Ensure the base class template exists before inheriting from it.
  VtkObjectWrap::InitPtpl( );
  tpl->Inherit(Nan::New<FunctionTemplate>(VtkObjectWrap::ptpl));
  tpl->SetClassName(Nan::New("VtkAbstractContextItemWrap").ToLocalChecked());
  // One internal field to hold the C++ wrapper pointer.
  tpl->InstanceTemplate()->SetInternalFieldCount(1);
  Nan::SetPrototypeMethod(tpl, "ClearItems", ClearItems);
  Nan::SetPrototypeMethod(tpl, "clearItems", ClearItems);

  Nan::SetPrototypeMethod(tpl, "GetInteractive", GetInteractive);
  Nan::SetPrototypeMethod(tpl, "getInteractive", GetInteractive);

  Nan::SetPrototypeMethod(tpl, "GetParent", GetParent);
  Nan::SetPrototypeMethod(tpl, "getParent", GetParent);

  Nan::SetPrototypeMethod(tpl, "GetScene", GetScene);
  Nan::SetPrototypeMethod(tpl, "getScene", GetScene);

  Nan::SetPrototypeMethod(tpl, "GetVisible", GetVisible);
  Nan::SetPrototypeMethod(tpl, "getVisible", GetVisible);

  Nan::SetPrototypeMethod(tpl, "NewInstance", NewInstance);
  Nan::SetPrototypeMethod(tpl, "newInstance", NewInstance);

  Nan::SetPrototypeMethod(tpl, "Paint", Paint);
  Nan::SetPrototypeMethod(tpl, "paint", Paint);

  Nan::SetPrototypeMethod(tpl, "PaintChildren", PaintChildren);
  Nan::SetPrototypeMethod(tpl, "paintChildren", PaintChildren);

  Nan::SetPrototypeMethod(tpl, "ReleaseGraphicsResources", ReleaseGraphicsResources);
  Nan::SetPrototypeMethod(tpl, "releaseGraphicsResources", ReleaseGraphicsResources);

  Nan::SetPrototypeMethod(tpl, "RemoveItem", RemoveItem);
  Nan::SetPrototypeMethod(tpl, "removeItem", RemoveItem);

  Nan::SetPrototypeMethod(tpl, "SafeDownCast", SafeDownCast);
  Nan::SetPrototypeMethod(tpl, "safeDownCast", SafeDownCast);

  Nan::SetPrototypeMethod(tpl, "SetInteractive", SetInteractive);
  Nan::SetPrototypeMethod(tpl, "setInteractive", SetInteractive);

  Nan::SetPrototypeMethod(tpl, "SetParent", SetParent);
  Nan::SetPrototypeMethod(tpl, "setParent", SetParent);

  Nan::SetPrototypeMethod(tpl, "SetScene", SetScene);
  Nan::SetPrototypeMethod(tpl, "setScene", SetScene);

  Nan::SetPrototypeMethod(tpl, "SetVisible", SetVisible);
  Nan::SetPrototypeMethod(tpl, "setVisible", SetVisible);

  Nan::SetPrototypeMethod(tpl, "Update", Update);
  Nan::SetPrototypeMethod(tpl, "update", Update);

  // Hook for project-specific extension methods.
#ifdef VTK_NODE_PLUS_VTKABSTRACTCONTEXTITEMWRAP_INITPTPL
  VTK_NODE_PLUS_VTKABSTRACTCONTEXTITEMWRAP_INITPTPL
#endif
  ptpl.Reset( tpl );
}
// JS constructor. vtkAbstractContextItem is abstract, so user code may not
// instantiate it directly; only internal wrapping calls (which pass the
// vtkNodeJsNoWrap sentinel as the sole argument) are permitted.
void VtkAbstractContextItemWrap::New(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
  if(!info.IsConstructCall())
  {
    Nan::ThrowError("Constructor not called in a construct call.");
    return;
  }

  if(info.Length() == 0)
  {
    Nan::ThrowError("Cannot create instance of abstract class.");
    return;
  }
  else
  {
    // Anything other than the internal no-wrap sentinel is rejected.
    if(info[0]->ToObject() != vtkNodeJsNoWrap )
    {
      Nan::ThrowError("Parameter Error");
      return;
    }
  }

  info.GetReturnValue().Set(info.This());
}
// JS: item.clearItems() -- forwards to the native method; takes no arguments.
void VtkAbstractContextItemWrap::ClearItems(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
  VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
  vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
  if(info.Length() != 0)
  {
    Nan::ThrowError("Too many parameters.");
    return;
  }
  native->ClearItems();
}

// JS: item.getInteractive() -> boolean.
void VtkAbstractContextItemWrap::GetInteractive(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
  VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
  vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
  bool r;
  if(info.Length() != 0)
  {
    Nan::ThrowError("Too many parameters.");
    return;
  }
  r = native->GetInteractive();
  info.GetReturnValue().Set(Nan::New(r));
}
// JS: item.getParent() -> wrapped vtkAbstractContextItem. The native pointer
// is re-wrapped in a fresh JS object created via the no-wrap sentinel.
void VtkAbstractContextItemWrap::GetParent(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
  VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
  vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
  vtkAbstractContextItem * r;
  if(info.Length() != 0)
  {
    Nan::ThrowError("Too many parameters.");
    return;
  }
  r = native->GetParent();
  VtkAbstractContextItemWrap::InitPtpl();
  v8::Local<v8::Value> argv[1] =
    { Nan::New(vtkNodeJsNoWrap) };
  v8::Local<v8::Function> cons =
    Nan::New<v8::FunctionTemplate>(VtkAbstractContextItemWrap::ptpl)->GetFunction();
  v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
  VtkAbstractContextItemWrap *w = new VtkAbstractContextItemWrap();
  w->native = r;
  w->Wrap(wo);
  info.GetReturnValue().Set(wo);
}

// JS: item.getScene() -> wrapped vtkContextScene.
void VtkAbstractContextItemWrap::GetScene(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
  VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
  vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
  vtkContextScene * r;
  if(info.Length() != 0)
  {
    Nan::ThrowError("Too many parameters.");
    return;
  }
  r = native->GetScene();
  VtkContextSceneWrap::InitPtpl();
  v8::Local<v8::Value> argv[1] =
    { Nan::New(vtkNodeJsNoWrap) };
  v8::Local<v8::Function> cons =
    Nan::New<v8::FunctionTemplate>(VtkContextSceneWrap::ptpl)->GetFunction();
  v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
  VtkContextSceneWrap *w = new VtkContextSceneWrap();
  w->native = r;
  w->Wrap(wo);
  info.GetReturnValue().Set(wo);
}
// JS: item.getVisible() -> boolean.
void VtkAbstractContextItemWrap::GetVisible(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
  VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
  vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
  bool r;
  if(info.Length() != 0)
  {
    Nan::ThrowError("Too many parameters.");
    return;
  }
  r = native->GetVisible();
  info.GetReturnValue().Set(Nan::New(r));
}

// JS: item.newInstance() -> new wrapped vtkAbstractContextItem created by
// the native VTK factory method.
void VtkAbstractContextItemWrap::NewInstance(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
  VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
  vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
  vtkAbstractContextItem * r;
  if(info.Length() != 0)
  {
    Nan::ThrowError("Too many parameters.");
    return;
  }
  r = native->NewInstance();
  VtkAbstractContextItemWrap::InitPtpl();
  v8::Local<v8::Value> argv[1] =
    { Nan::New(vtkNodeJsNoWrap) };
  v8::Local<v8::Function> cons =
    Nan::New<v8::FunctionTemplate>(VtkAbstractContextItemWrap::ptpl)->GetFunction();
  v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
  VtkAbstractContextItemWrap *w = new VtkAbstractContextItemWrap();
  w->native = r;
  w->Wrap(wo);
  info.GetReturnValue().Set(wo);
}
// Binding for vtkAbstractContextItem::Paint(vtkContext2D*): paints the item
// using the supplied 2D drawing context and returns the native bool result.
// Expects exactly one argument, a VtkContext2DWrap instance.
void VtkAbstractContextItemWrap::Paint(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
	VtkAbstractContextItemWrap *self = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
	vtkAbstractContextItem *obj = (vtkAbstractContextItem *)self->native.GetPointer();
	if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkContext2DWrap::ptpl))->HasInstance(info[0]))
	{
		VtkContext2DWrap *ctx = ObjectWrap::Unwrap<VtkContext2DWrap>(info[0]->ToObject());
		// Reject trailing extra arguments after the matched context.
		if(info.Length() != 1)
		{
			Nan::ThrowError("Too many parameters.");
			return;
		}
		bool painted = obj->Paint((vtkContext2D *) ctx->native.GetPointer());
		info.GetReturnValue().Set(Nan::New(painted));
		return;
	}
	Nan::ThrowError("Parameter mismatch");
}
// Binding for vtkAbstractContextItem::PaintChildren(vtkContext2D*): paints
// the item's children with the given 2D context and returns the native bool
// result. Expects exactly one argument, a VtkContext2DWrap instance.
void VtkAbstractContextItemWrap::PaintChildren(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
	VtkAbstractContextItemWrap *self = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
	vtkAbstractContextItem *obj = (vtkAbstractContextItem *)self->native.GetPointer();
	if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkContext2DWrap::ptpl))->HasInstance(info[0]))
	{
		VtkContext2DWrap *ctx = ObjectWrap::Unwrap<VtkContext2DWrap>(info[0]->ToObject());
		// Reject trailing extra arguments after the matched context.
		if(info.Length() != 1)
		{
			Nan::ThrowError("Too many parameters.");
			return;
		}
		bool painted = obj->PaintChildren((vtkContext2D *) ctx->native.GetPointer());
		info.GetReturnValue().Set(Nan::New(painted));
		return;
	}
	Nan::ThrowError("Parameter mismatch");
}
// Binding for vtkAbstractContextItem::ReleaseGraphicsResources(): releases
// graphics resources held by the native item. Accepts no arguments and
// returns nothing.
void VtkAbstractContextItemWrap::ReleaseGraphicsResources(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
	if(info.Length() != 0)
	{
		Nan::ThrowError("Too many parameters.");
		return;
	}
	VtkAbstractContextItemWrap *self = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
	((vtkAbstractContextItem *)self->native.GetPointer())->ReleaseGraphicsResources();
}
// Binding for vtkAbstractContextItem::RemoveItem(vtkAbstractContextItem*):
// removes the given child item and returns the native bool result.
// Expects exactly one argument, a VtkAbstractContextItemWrap instance.
void VtkAbstractContextItemWrap::RemoveItem(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
	VtkAbstractContextItemWrap *self = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
	vtkAbstractContextItem *obj = (vtkAbstractContextItem *)self->native.GetPointer();
	if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkAbstractContextItemWrap::ptpl))->HasInstance(info[0]))
	{
		VtkAbstractContextItemWrap *child = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info[0]->ToObject());
		// Reject trailing extra arguments after the matched item.
		if(info.Length() != 1)
		{
			Nan::ThrowError("Too many parameters.");
			return;
		}
		bool removed = obj->RemoveItem((vtkAbstractContextItem *) child->native.GetPointer());
		info.GetReturnValue().Set(Nan::New(removed));
		return;
	}
	Nan::ThrowError("Parameter mismatch");
}
// Binding for vtkAbstractContextItem::SafeDownCast(vtkObjectBase*).
// Expects exactly one argument, a VtkObjectBaseWrap instance; returns the
// down-cast pointer wrapped in a new JS object.
void VtkAbstractContextItemWrap::SafeDownCast(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkAbstractContextItemWrap *wrapper = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
vtkAbstractContextItem *native = (vtkAbstractContextItem *)wrapper->native.GetPointer();
if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkObjectBaseWrap::ptpl))->HasInstance(info[0]))
{
VtkObjectBaseWrap *a0 = ObjectWrap::Unwrap<VtkObjectBaseWrap>(info[0]->ToObject());
vtkAbstractContextItem * r;
// Reject trailing extra arguments after the matched object.
if(info.Length() != 1)
{
Nan::ThrowError("Too many parameters.");
return;
}
// NOTE(review): SafeDownCast returns null when the cast fails; the wrapper
// below is built unconditionally — presumably downstream code tolerates a
// null native pointer. Verify against the other generated wrappers.
r = native->SafeDownCast(
(vtkObjectBase *) a0->native.GetPointer()
);
VtkAbstractContextItemWrap::InitPtpl();
// vtkNodeJsNoWrap suppresses native allocation in the JS constructor so the
// pointer r can be attached to the wrapper manually.
v8::Local<v8::Value> argv[1] =
{ Nan::New(vtkNodeJsNoWrap) };
v8::Local<v8::Function> cons =
Nan::New<v8::FunctionTemplate>(VtkAbstractContextItemWrap::ptpl)->GetFunction();
v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
VtkAbstractContextItemWrap *w = new VtkAbstractContextItemWrap();
w->native = r;
w->Wrap(wo);
info.GetReturnValue().Set(wo);
return;
}
Nan::ThrowError("Parameter mismatch");
}
// Binding for vtkAbstractContextItem::SetInteractive(bool): sets whether the
// item responds to interaction. Expects exactly one boolean argument.
void VtkAbstractContextItemWrap::SetInteractive(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
	VtkAbstractContextItemWrap *self = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
	vtkAbstractContextItem *obj = (vtkAbstractContextItem *)self->native.GetPointer();
	if(info.Length() > 0 && info[0]->IsBoolean())
	{
		if(info.Length() == 1)
		{
			obj->SetInteractive(info[0]->BooleanValue());
			return;
		}
		// A boolean followed by extra arguments is an arity error.
		Nan::ThrowError("Too many parameters.");
		return;
	}
	Nan::ThrowError("Parameter mismatch");
}
// Binding for vtkAbstractContextItem::SetParent(vtkAbstractContextItem*):
// sets the item's parent. Expects exactly one VtkAbstractContextItemWrap
// argument; returns nothing.
void VtkAbstractContextItemWrap::SetParent(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
	VtkAbstractContextItemWrap *self = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
	vtkAbstractContextItem *obj = (vtkAbstractContextItem *)self->native.GetPointer();
	if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkAbstractContextItemWrap::ptpl))->HasInstance(info[0]))
	{
		VtkAbstractContextItemWrap *parent = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info[0]->ToObject());
		// Reject trailing extra arguments after the matched item.
		if(info.Length() != 1)
		{
			Nan::ThrowError("Too many parameters.");
			return;
		}
		obj->SetParent((vtkAbstractContextItem *) parent->native.GetPointer());
		return;
	}
	Nan::ThrowError("Parameter mismatch");
}
// Binding for vtkAbstractContextItem::SetScene(vtkContextScene*): attaches
// the item to a scene. Expects exactly one VtkContextSceneWrap argument;
// returns nothing.
void VtkAbstractContextItemWrap::SetScene(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
	VtkAbstractContextItemWrap *self = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
	vtkAbstractContextItem *obj = (vtkAbstractContextItem *)self->native.GetPointer();
	if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkContextSceneWrap::ptpl))->HasInstance(info[0]))
	{
		VtkContextSceneWrap *scene = ObjectWrap::Unwrap<VtkContextSceneWrap>(info[0]->ToObject());
		// Reject trailing extra arguments after the matched scene.
		if(info.Length() != 1)
		{
			Nan::ThrowError("Too many parameters.");
			return;
		}
		obj->SetScene((vtkContextScene *) scene->native.GetPointer());
		return;
	}
	Nan::ThrowError("Parameter mismatch");
}
// Binding for vtkAbstractContextItem::SetVisible(bool): sets the item's
// visibility flag. Expects exactly one boolean argument.
void VtkAbstractContextItemWrap::SetVisible(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
	VtkAbstractContextItemWrap *self = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
	vtkAbstractContextItem *obj = (vtkAbstractContextItem *)self->native.GetPointer();
	if(info.Length() > 0 && info[0]->IsBoolean())
	{
		if(info.Length() == 1)
		{
			obj->SetVisible(info[0]->BooleanValue());
			return;
		}
		// A boolean followed by extra arguments is an arity error.
		Nan::ThrowError("Too many parameters.");
		return;
	}
	Nan::ThrowError("Parameter mismatch");
}
// Binding for vtkAbstractContextItem::Update(): asks the native item to
// update itself. Accepts no arguments and returns nothing.
void VtkAbstractContextItemWrap::Update(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
	if(info.Length() != 0)
	{
		Nan::ThrowError("Too many parameters.");
		return;
	}
	VtkAbstractContextItemWrap *self = ObjectWrap::Unwrap<VtkAbstractContextItemWrap>(info.Holder());
	((vtkAbstractContextItem *)self->native.GetPointer())->Update();
}
|
Java
|
<!DOCTYPE html>
<html>
<head>
<title>PKIjs Mocha Test - OCSP Request Complex Example</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="stylesheet" href="https://cdn.rawgit.com/mochajs/mocha/2.2.5/mocha.css" />
</head>
<body>
<div id="mocha"></div>
<script src="https://cdn.rawgit.com/jquery/jquery/2.1.4/dist/jquery.min.js"></script>
<script src="https://cdn.rawgit.com/mochajs/mocha/2.2.5/mocha.js"></script>
<script src="https://cdn.rawgit.com/chaijs/chai/4.0.0-canary.1/chai.js"></script>
<script>mocha.setup('bdd'); window.assert = chai.assert;</script>
<script type="text/javascript" src="ocspRequestComplexExample.js"></script>
<script>
mocha.checkLeaks();
mocha.globals(['jQuery']);
mocha.run();
</script>
</body>
</html>
|
Java
|
package org.chasen.mecab.wrapper;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.junit.Test;
/**
 * Concurrency smoke tests for the MeCab {@code Tagger} node iterator:
 * each test parses several Japanese sentences in parallel, creating an
 * independent Tagger per task, and prints every node's surface form.
 */
public class NodeIteratorTest {

    /** Parses three sentences concurrently using raw threads and joins them all. */
    @Test
    public void threads() throws InterruptedException {
        List<Thread> threads = new ArrayList<Thread>();
        threads.add(new Thread(){
            public void run(){
                Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
                for(MecabNode<Node, Path> node: t.iterator("本日は晴天なり")){
                    System.out.println(node.getSurface());
                }
            }
        });
        threads.add(new Thread(){
            public void run(){
                Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
                for(MecabNode<Node, Path> node: t.iterator("本日は雨です")){
                    System.out.println(node.getSurface());
                }
            }
        });
        threads.add(new Thread(){
            public void run(){
                Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
                for(MecabNode<Node, Path> node: t.iterator("昨日は曇りでした")){
                    System.out.println(node.getSurface());
                }
            }
        });
        for(Thread th: threads){
            th.start();
        }
        for(Thread th: threads){
            th.join();
        }
    }

    /** Same workload submitted as {@link Callable}s to a cached thread pool. */
    @Test
    public void executors() throws InterruptedException, ExecutionException {
        class Hoge {
            public void parse(String str){
                Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
                for(MecabNode<Node, Path> node: t.iterator(str)){
                    System.out.println(node.getSurface());
                }
            }
        }
        final Hoge hoge = new Hoge();
        ExecutorService executors = Executors.newCachedThreadPool();
        try {
            List<Future<?>> futures = new ArrayList<Future<?>>();
            futures.add(executors.submit(new Callable<Void>(){
                public Void call() throws Exception {
                    hoge.parse("本日は晴天なり");
                    return null;
                }
            }));
            futures.add(executors.submit(new Callable<Void>(){
                public Void call() throws Exception {
                    hoge.parse("本日は雨です");
                    return null;
                }
            }));
            futures.add(executors.submit(new Callable<Void>(){
                public Void call() throws Exception {
                    hoge.parse("昨日は曇りでした");
                    return null;
                }
            }));
            for(Future<?> f: futures){
                f.get();
            }
        } finally {
            // Shut the pool down so its worker threads do not outlive the test.
            executors.shutdown();
        }
    }

    /** Same workload submitted as {@link Runnable}s to a cached thread pool. */
    @Test
    public void executors_runnable() throws InterruptedException, ExecutionException {
        class Hoge implements Runnable {
            String str;
            Hoge(String str){
                this.str = str;
            }
            public void run(){
                Tagger t = Tagger.create("-r /opt/local/etc/mecabrc");
                for(MecabNode<Node, Path> node: t.iterator(str)){
                    System.out.println(node.getSurface());
                }
            }
        }
        ExecutorService executors = Executors.newCachedThreadPool();
        try {
            List<Future<?>> futures = new ArrayList<Future<?>>();
            futures.add(executors.submit(new Hoge("本日は晴天なり")));
            futures.add(executors.submit(new Hoge("本日は雨です")));
            futures.add(executors.submit(new Hoge("昨日は曇りでした")));
            for(Future<?> f: futures){
                f.get();
            }
        } finally {
            // Shut the pool down so its worker threads do not outlive the test.
            executors.shutdown();
        }
    }
}
|
Java
|
<html>
<body>
    <p>Registration was requested for {{ email }}. Open the link below to confirm your e-mail address and continue registration.</p>
<a href="{{ url }}">{{ url }}</a>
</body>
</html>
|
Java
|
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <iostream>
#include <string>
#include "parser.h"
#include "CLI/wrapper.h"
#include "Libraries/linenoise.h"
#include "CLI/interface.h"
#define HIST_FILENAME ".polyBobHistory"
/*
 * Interactive REPL entry point: reads lines with linenoise, records them in
 * persistent history, and dispatches each line to either the command parser
 * (lines starting with '/') or the expression parser. Exits on "exit",
 * "quit", "e", "q", or EOF.
 *
 * Fix: linenoise() returns a heap-allocated string owned by the caller;
 * the original loop never freed it, leaking one allocation per line read.
 */
int main(int argc, char **argv)
{
	char* line;
	unsigned int promptNb = 1;
	char promptMsg[100];
	srand(time(NULL));
	printLogo();
	/* Set the completion callback. This will be called every time the
	 * user uses the <tab> key. */
	linenoiseSetCompletionCallback(completion);
	/* Load history from file.*/
	linenoiseHistoryLoad(HIST_FILENAME); /* Load the history at startup */
	snprintf(promptMsg, 100, "%s[%d]: ", "\033[0m", promptNb);
	while((line = linenoise(promptMsg)) != NULL)
	{
		linenoiseHistoryAdd(line); /* Add to the history (copies the string). */
		linenoiseHistorySave(HIST_FILENAME); /* Save the history on disk. */
		/* Do something with the string. */
		rmSuperscript(line);
		if(line[0] == '/')
			parseCommand(&(line[1]));
		else if(!strcmp(line, "exit") || !strcmp(line, "quit") || (line[1] == 0 && (line[0] == 'e' || line[0] == 'q')))
		{
			/* Release the line before leaving the loop. */
			free(line);
			break;
		}
		else if(line[0] != '\0')
		{
			simpleParserAPI(line);
		}
		/* linenoise() mallocs each returned line; the caller must free it. */
		free(line);
		snprintf(promptMsg, 100, "[%d]: ", ++promptNb);
	}
	finalProcessing();
	return 0;
}
|
Java
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.10"/>
<title>BuildmLearn Store: org.buildmlearn.appstore.activities.HomeActivity Class Reference</title>
<link href="../../tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="../../jquery.js"></script>
<script type="text/javascript" src="../../dynsections.js"></script>
<link href="../../navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="../../resize.js"></script>
<script type="text/javascript" src="../../navtreedata.js"></script>
<script type="text/javascript" src="../../navtree.js"></script>
<script type="text/javascript">
$(document).ready(initResizable);
$(window).load(resizeHeight);
</script>
<link href="../../search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="../../search/searchdata.js"></script>
<script type="text/javascript" src="../../search/search.js"></script>
<script type="text/javascript">
$(document).ready(function() { init_search(); });
</script>
<link href="../../doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectlogo"><img alt="Logo" src="../../ic_launcher.png"/></td>
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">BuildmLearn Store
 <span id="projectnumber">1.0.0.0</span>
</div>
<div id="projectbrief">An android app, which is a store for apps built using BuildmLearn ToolKit</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.10 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "../../search",false,'Search');
</script>
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="../../index.html"><span>Main Page</span></a></li>
<li><a href="../../namespaces.html"><span>Packages</span></a></li>
<li class="current"><a href="../../annotated.html"><span>Classes</span></a></li>
<li><a href="../../files.html"><span>Files</span></a></li>
<li>
<div id="MSearchBox" class="MSearchBoxInactive">
<span class="left">
<img id="MSearchSelect" src="../../search/mag_sel.png"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
alt=""/>
<input type="text" id="MSearchField" value="Search" accesskey="S"
onfocus="searchBox.OnSearchFieldFocus(true)"
onblur="searchBox.OnSearchFieldFocus(false)"
onkeyup="searchBox.OnSearchFieldChange(event)"/>
</span><span class="right">
<a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="../../search/close.png" alt=""/></a>
</span>
</div>
</li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="../../annotated.html"><span>Class List</span></a></li>
<li><a href="../../classes.html"><span>Class Index</span></a></li>
<li><a href="../../inherits.html"><span>Class Hierarchy</span></a></li>
<li><a href="../../functions.html"><span>Class Members</span></a></li>
</ul>
</div>
</div><!-- top -->
<div id="side-nav" class="ui-resizable side-nav-resizable">
<div id="nav-tree">
<div id="nav-tree-contents">
<div id="nav-sync" class="sync"></div>
</div>
</div>
<div id="splitbar" style="-moz-user-select:none;"
class="ui-resizable-handle">
</div>
</div>
<script type="text/javascript">
$(document).ready(function(){initNavTree('d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html','../../');});
</script>
<div id="doc-content">
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div class="header">
<div class="summary">
<a href="#pub-methods">Public Member Functions</a> |
<a href="#pub-static-methods">Static Public Member Functions</a> |
<a href="#pro-methods">Protected Member Functions</a> |
<a href="#pri-attribs">Private Attributes</a> |
<a href="#pri-static-attribs">Static Private Attributes</a> |
<a href="../../d9/dbd/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity-members.html">List of all members</a> </div>
<div class="headertitle">
<div class="title">org.buildmlearn.appstore.activities.HomeActivity Class Reference</div> </div>
</div><!--header-->
<div class="contents">
<p>This class is the Home Page, which has a viewpager to display tabs for Store section and My-Apps section.
<a href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#details">More...</a></p>
<div id="dynsection-0" onclick="return toggleVisibility(this)" class="dynheader closed" style="cursor:pointer;">
<img id="dynsection-0-trigger" src="../../closed.png" alt="+"/> Inheritance diagram for org.buildmlearn.appstore.activities.HomeActivity:</div>
<div id="dynsection-0-summary" class="dynsummary" style="display:block;">
</div>
<div id="dynsection-0-content" class="dyncontent" style="display:none;">
<div class="center"><img src="../../da/d6b/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity__inherit__graph.png" border="0" usemap="#org_8buildmlearn_8appstore_8activities_8_home_activity_inherit__map" alt="Inheritance graph"/></div>
<map name="org_8buildmlearn_8appstore_8activities_8_home_activity_inherit__map" id="org_8buildmlearn_8appstore_8activities_8_home_activity_inherit__map">
<area shape="rect" id="node2" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html" title="This class is the parent class to which almost all the activities extends to. " alt="" coords="5,5,228,361"/>
</map>
</div>
<div id="dynsection-1" onclick="return toggleVisibility(this)" class="dynheader closed" style="cursor:pointer;">
<img id="dynsection-1-trigger" src="../../closed.png" alt="+"/> Collaboration diagram for org.buildmlearn.appstore.activities.HomeActivity:</div>
<div id="dynsection-1-summary" class="dynsummary" style="display:block;">
</div>
<div id="dynsection-1-content" class="dyncontent" style="display:none;">
<div class="center"><img src="../../de/d49/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity__coll__graph.png" border="0" usemap="#org_8buildmlearn_8appstore_8activities_8_home_activity_coll__map" alt="Collaboration graph"/></div>
<map name="org_8buildmlearn_8appstore_8activities_8_home_activity_coll__map" id="org_8buildmlearn_8appstore_8activities_8_home_activity_coll__map">
<area shape="rect" id="node2" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html" title="This class is the parent class to which almost all the activities extends to. " alt="" coords="5,5,228,361"/>
</map>
</div>
<table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-methods"></a>
Public Member Functions</h2></td></tr>
<tr class="memitem:a7c7ce22b50eb6f7d92223be23f285191"><td class="memItemLeft" align="right" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#a7c7ce22b50eb6f7d92223be23f285191">onBackPressed</a> ()</td></tr>
<tr class="memdesc:a7c7ce22b50eb6f7d92223be23f285191"><td class="mdescLeft"> </td><td class="mdescRight">This method is automatically called when the user presses the back button on his mobile. <a href="#a7c7ce22b50eb6f7d92223be23f285191">More...</a><br /></td></tr>
<tr class="separator:a7c7ce22b50eb6f7d92223be23f285191"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="inherit_header pub_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td colspan="2" onclick="javascript:toggleInherit('pub_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity')"><img src="../../closed.png" alt="-"/> Public Member Functions inherited from <a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html">org.buildmlearn.appstore.activities.NavigationActivity</a></td></tr>
<tr class="memitem:a0d03e67c759108fd119b9e5ad2e48014 inherit pub_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">boolean </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a0d03e67c759108fd119b9e5ad2e48014">onCreateOptionsMenu</a> (Menu menu)</td></tr>
<tr class="memdesc:a0d03e67c759108fd119b9e5ad2e48014 inherit pub_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="mdescLeft"> </td><td class="mdescRight">This method creates menu items to be shown on the Action Bar. <a href="#a0d03e67c759108fd119b9e5ad2e48014">More...</a><br /></td></tr>
<tr class="separator:a0d03e67c759108fd119b9e5ad2e48014 inherit pub_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pub-static-methods"></a>
Static Public Member Functions</h2></td></tr>
<tr class="memitem:a104177f795c32e9b28838760f994dc5f"><td class="memItemLeft" align="right" valign="top">static void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#a104177f795c32e9b28838760f994dc5f">MyAppsView</a> ()</td></tr>
<tr class="memdesc:a104177f795c32e9b28838760f994dc5f"><td class="mdescLeft"> </td><td class="mdescRight">Set the current view to My-Apps section. <a href="#a104177f795c32e9b28838760f994dc5f">More...</a><br /></td></tr>
<tr class="separator:a104177f795c32e9b28838760f994dc5f"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="inherit_header pub_static_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td colspan="2" onclick="javascript:toggleInherit('pub_static_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity')"><img src="../../closed.png" alt="-"/> Static Public Member Functions inherited from <a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html">org.buildmlearn.appstore.activities.NavigationActivity</a></td></tr>
<tr class="memitem:a3819b15bd7eeb6b579ffc4ae12bb289b inherit pub_static_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a3819b15bd7eeb6b579ffc4ae12bb289b">clearSearch</a> ()</td></tr>
<tr class="memdesc:a3819b15bd7eeb6b579ffc4ae12bb289b inherit pub_static_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="mdescLeft"> </td><td class="mdescRight">It clears the text in the search tool and collapses the search tool. <a href="#a3819b15bd7eeb6b579ffc4ae12bb289b">More...</a><br /></td></tr>
<tr class="separator:a3819b15bd7eeb6b579ffc4ae12bb289b inherit pub_static_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pro-methods"></a>
Protected Member Functions</h2></td></tr>
<tr class="memitem:a1a3c72988991108cb3f2f70b345a1a0d"><td class="memItemLeft" align="right" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#a1a3c72988991108cb3f2f70b345a1a0d">onCreate</a> (Bundle savedInstanceState)</td></tr>
<tr class="memdesc:a1a3c72988991108cb3f2f70b345a1a0d"><td class="mdescLeft"> </td><td class="mdescRight">The method is executed first when the activity is created. <a href="#a1a3c72988991108cb3f2f70b345a1a0d">More...</a><br /></td></tr>
<tr class="separator:a1a3c72988991108cb3f2f70b345a1a0d"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="inherit_header pro_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td colspan="2" onclick="javascript:toggleInherit('pro_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity')"><img src="../../closed.png" alt="-"/> Protected Member Functions inherited from <a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html">org.buildmlearn.appstore.activities.NavigationActivity</a></td></tr>
<tr class="memitem:ac91b6839b81f831ca5accce5fb956f16 inherit pro_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">void </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#ac91b6839b81f831ca5accce5fb956f16">onCreate</a> (Bundle savedInstanceState)</td></tr>
<tr class="memdesc:ac91b6839b81f831ca5accce5fb956f16 inherit pro_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="mdescLeft"> </td><td class="mdescRight">The method is executed first when the activity is created. <a href="#ac91b6839b81f831ca5accce5fb956f16">More...</a><br /></td></tr>
<tr class="separator:ac91b6839b81f831ca5accce5fb956f16 inherit pro_methods_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pri-attribs"></a>
Private Attributes</h2></td></tr>
<tr class="memitem:a53f19cef5fb3efb3ee0a9f85888e669e"><td class="memItemLeft" align="right" valign="top">final CharSequence[] </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#a53f19cef5fb3efb3ee0a9f85888e669e">TITLES</a> ={"Store","My <a class="el" href="../../d7/d09/classorg_1_1buildmlearn_1_1appstore_1_1models_1_1_apps.html">Apps</a>"}</td></tr>
<tr class="separator:a53f19cef5fb3efb3ee0a9f85888e669e"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:aef6edc2f21ae7abe6c149f93db9a2ffa"><td class="memItemLeft" align="right" valign="top">MaterialDialog </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#aef6edc2f21ae7abe6c149f93db9a2ffa">mAlertDialog</a></td></tr>
<tr class="separator:aef6edc2f21ae7abe6c149f93db9a2ffa"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="pri-static-attribs"></a>
Static Private Attributes</h2></td></tr>
<tr class="memitem:ab762b13301ac55e4a33fbd31ec3a77da"><td class="memItemLeft" align="right" valign="top">static ViewPager </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html#ab762b13301ac55e4a33fbd31ec3a77da">mPager</a></td></tr>
<tr class="separator:ab762b13301ac55e4a33fbd31ec3a77da"><td class="memSeparator" colspan="2"> </td></tr>
</table><table class="memberdecls">
<tr class="heading"><td colspan="2"><h2 class="groupheader"><a name="inherited"></a>
Additional Inherited Members</h2></td></tr>
<tr class="inherit_header pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td colspan="2" onclick="javascript:toggleInherit('pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity')"><img src="../../closed.png" alt="-"/> Static Public Attributes inherited from <a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html">org.buildmlearn.appstore.activities.NavigationActivity</a></td></tr>
<tr class="memitem:aa63ef9aa194cb5b06d2a8b22d32b03fc inherit pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static FrameLayout </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#aa63ef9aa194cb5b06d2a8b22d32b03fc">frameLayout</a></td></tr>
<tr class="separator:aa63ef9aa194cb5b06d2a8b22d32b03fc inherit pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a92e76cddf17afa5981f148028c476ee7 inherit pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static final List< <a class="el" href="../../d7/d09/classorg_1_1buildmlearn_1_1appstore_1_1models_1_1_apps.html">Apps</a> > </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a92e76cddf17afa5981f148028c476ee7">appList</a> = new ArrayList<>()</td></tr>
<tr class="separator:a92e76cddf17afa5981f148028c476ee7 inherit pub_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="inherit_header pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td colspan="2" onclick="javascript:toggleInherit('pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity')"><img src="../../closed.png" alt="-"/> Static Package Attributes inherited from <a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html">org.buildmlearn.appstore.activities.NavigationActivity</a></td></tr>
<tr class="memitem:a1b782826a8dd79a6b9a28232af4d4e1f inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static DrawerLayout </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a1b782826a8dd79a6b9a28232af4d4e1f">mDrawer</a></td></tr>
<tr class="separator:a1b782826a8dd79a6b9a28232af4d4e1f inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a8cd9a3fefc0d4e9b943c0ca4ef055f5d inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static int </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a8cd9a3fefc0d4e9b943c0ca4ef055f5d">mActive</a> =1</td></tr>
<tr class="separator:a8cd9a3fefc0d4e9b943c0ca4ef055f5d inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a37acfc524065531d4601db7f69291c63 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static String </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a37acfc524065531d4601db7f69291c63">searchQuery</a> =""</td></tr>
<tr class="separator:a37acfc524065531d4601db7f69291c63 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a5a00a6fabe2745f6e93c4c795a74b558 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static int </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a5a00a6fabe2745f6e93c4c795a74b558">mActiveSearchInterface</a> =0</td></tr>
<tr class="separator:a5a00a6fabe2745f6e93c4c795a74b558 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a3016ec45af2d5da1524d1341d0ac4c94 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static NavigationView </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a3016ec45af2d5da1524d1341d0ac4c94">navigationView</a></td></tr>
<tr class="separator:a3016ec45af2d5da1524d1341d0ac4c94 inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
<tr class="memitem:a6491b85f1ab4a70a783da29fc081956a inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memItemLeft" align="right" valign="top">static boolean </td><td class="memItemRight" valign="bottom"><a class="el" href="../../d0/d60/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity.html#a6491b85f1ab4a70a783da29fc081956a">isDrawerOpened</a> =false</td></tr>
<tr class="separator:a6491b85f1ab4a70a783da29fc081956a inherit pac_static_attribs_classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_navigation_activity"><td class="memSeparator" colspan="2"> </td></tr>
</table>
<a name="details" id="details"></a><h2 class="groupheader">Detailed Description</h2>
<div class="textblock"><p>This class is the Home Page, which has a viewpager to display tabs for Store section and My-Apps section. </p>
</div><h2 class="groupheader">Member Function Documentation</h2>
<a class="anchor" id="a104177f795c32e9b28838760f994dc5f"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">static void org.buildmlearn.appstore.activities.HomeActivity.MyAppsView </td>
<td>(</td>
<td class="paramname"></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">static</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>Set the current view to My-Apps section. </p>
<p>This method is helpful when the user selects to open the app from My-Apps section in the Settings Page. </p>
</div>
</div>
<a class="anchor" id="a7c7ce22b50eb6f7d92223be23f285191"></a>
<div class="memitem">
<div class="memproto">
<table class="memname">
<tr>
<td class="memname">void org.buildmlearn.appstore.activities.HomeActivity.onBackPressed </td>
<td>(</td>
<td class="paramname"></td><td>)</td>
<td></td>
</tr>
</table>
</div><div class="memdoc">
<p>This method is automatically called when the user presses the back button on his mobile. </p>
<p>It closes the Navigation Drawer if it's open. Otherwise, it displays a popup to close the app. </p>
</div>
</div>
<a class="anchor" id="a1a3c72988991108cb3f2f70b345a1a0d"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">void org.buildmlearn.appstore.activities.HomeActivity.onCreate </td>
<td>(</td>
<td class="paramtype">Bundle </td>
<td class="paramname"><em>savedInstanceState</em></td><td>)</td>
<td></td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">protected</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
<p>The method is executed first when the activity is created. </p>
<dl class="params"><dt>Parameters</dt><dd>
<table class="params">
<tr><td class="paramname">savedInstanceState</td><td>The bundle stores all the related parameters, if it has to be used when resuming the app. </td></tr>
</table>
</dd>
</dl>
<p>When the page selection is changed, search view should reset. The app list on the My_Apps section is also refreshed, just in case the user has installed any app from the Store section. </p><dl class="params"><dt>Parameters</dt><dd>
<table class="params">
<tr><td class="paramname">position</td><td>0:Store Section; 1: My-Apps Section</td></tr>
</table>
</dd>
</dl>
</div>
</div>
<h2 class="groupheader">Member Data Documentation</h2>
<a class="anchor" id="aef6edc2f21ae7abe6c149f93db9a2ffa"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">MaterialDialog org.buildmlearn.appstore.activities.HomeActivity.mAlertDialog</td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">private</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
</div>
</div>
<a class="anchor" id="ab762b13301ac55e4a33fbd31ec3a77da"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">ViewPager org.buildmlearn.appstore.activities.HomeActivity.mPager</td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">static</span><span class="mlabel">private</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
</div>
</div>
<a class="anchor" id="a53f19cef5fb3efb3ee0a9f85888e669e"></a>
<div class="memitem">
<div class="memproto">
<table class="mlabels">
<tr>
<td class="mlabels-left">
<table class="memname">
<tr>
<td class="memname">final CharSequence [] org.buildmlearn.appstore.activities.HomeActivity.TITLES ={"Store","My <a class="el" href="../../d7/d09/classorg_1_1buildmlearn_1_1appstore_1_1models_1_1_apps.html">Apps</a>"}</td>
</tr>
</table>
</td>
<td class="mlabels-right">
<span class="mlabels"><span class="mlabel">private</span></span> </td>
</tr>
</table>
</div><div class="memdoc">
</div>
</div>
<hr/>The documentation for this class was generated from the following file:<ul>
<li>C:/Users/Srujan/Documents/GitHub/BuildmLearn-Store/Android/source-code/AppStore/app/src/main/java/org/buildmlearn/appstore/activities/<a class="el" href="../../d1/d8f/_home_activity_8java.html">HomeActivity.java</a></li>
</ul>
</div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
<ul>
<li class="navelem"><a class="el" href="../../db/d96/namespaceorg.html">org</a></li><li class="navelem"><a class="el" href="../../d7/d90/namespaceorg_1_1buildmlearn.html">buildmlearn</a></li><li class="navelem"><a class="el" href="../../d8/dcf/namespaceorg_1_1buildmlearn_1_1appstore.html">appstore</a></li><li class="navelem"><a class="el" href="../../d8/dbc/namespaceorg_1_1buildmlearn_1_1appstore_1_1activities.html">activities</a></li><li class="navelem"><a class="el" href="../../d8/dfa/classorg_1_1buildmlearn_1_1appstore_1_1activities_1_1_home_activity.html">HomeActivity</a></li>
<li class="footer">Generated on Sat Aug 15 2015 21:55:11 for BuildmLearn Store by
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="../../doxygen.png" alt="doxygen"/></a> 1.8.10 </li>
</ul>
</div>
</body>
</html>
|
Java
|
/* this file has been autogenerated by vtkNodeJsWrap */
/* editing this might prove futile */
#define VTK_WRAPPING_CXX
#define VTK_STREAMS_FWD_ONLY
#include <nan.h>
#include "vtkCollectionWrap.h"
#include "vtkRenderWindowCollectionWrap.h"
#include "vtkObjectBaseWrap.h"
#include "vtkRenderWindowWrap.h"
#include "../../plus/plus.h"
using namespace v8;
extern Nan::Persistent<v8::Object> vtkNodeJsNoWrap;
Nan::Persistent<v8::FunctionTemplate> VtkRenderWindowCollectionWrap::ptpl;
// Default constructor; the native smart pointer is left unset until assigned.
VtkRenderWindowCollectionWrap::VtkRenderWindowCollectionWrap()
{ }
// Wraps an already-existing native vtkRenderWindowCollection instance.
VtkRenderWindowCollectionWrap::VtkRenderWindowCollectionWrap(vtkSmartPointer<vtkRenderWindowCollection> _native)
{ native = _native; }
// Destructor; the vtkSmartPointer member releases its reference automatically.
VtkRenderWindowCollectionWrap::~VtkRenderWindowCollectionWrap()
{ }
// Registers this wrapper's constructor on the module exports object under
// both the VTK-prefixed name and the shorter unprefixed alias.
void VtkRenderWindowCollectionWrap::Init(v8::Local<v8::Object> exports)
{
	v8::Local<v8::String> vtkName = Nan::New("vtkRenderWindowCollection").ToLocalChecked();
	v8::Local<v8::String> shortName = Nan::New("RenderWindowCollection").ToLocalChecked();
	Nan::SetAccessor(exports, vtkName, ConstructorGetter);
	Nan::SetAccessor(exports, shortName, ConstructorGetter);
}
// Accessor callback: lazily builds the persistent function template, then
// returns its constructor function as the property value.
void VtkRenderWindowCollectionWrap::ConstructorGetter(
	v8::Local<v8::String> property,
	const Nan::PropertyCallbackInfo<v8::Value>& info)
{
	InitPtpl();
	v8::Local<v8::Function> ctor = Nan::New(ptpl)->GetFunction();
	info.GetReturnValue().Set(ctor);
}
// Builds the persistent v8 function template for this wrapper exactly once;
// later calls return immediately because ptpl is already populated.
void VtkRenderWindowCollectionWrap::InitPtpl()
{
if (!ptpl.IsEmpty()) return;
v8::Local<v8::FunctionTemplate> tpl = Nan::New<v8::FunctionTemplate>(New);
// Make sure the parent wrapper's template exists before inheriting from it.
VtkCollectionWrap::InitPtpl( );
tpl->Inherit(Nan::New<FunctionTemplate>(VtkCollectionWrap::ptpl));
tpl->SetClassName(Nan::New("VtkRenderWindowCollectionWrap").ToLocalChecked());
// One internal field holds the ObjectWrap back-pointer.
tpl->InstanceTemplate()->SetInternalFieldCount(1);
// Each method is exposed under both its VTK name and a camelCase alias.
Nan::SetPrototypeMethod(tpl, "AddItem", AddItem);
Nan::SetPrototypeMethod(tpl, "addItem", AddItem);
Nan::SetPrototypeMethod(tpl, "GetNextItem", GetNextItem);
Nan::SetPrototypeMethod(tpl, "getNextItem", GetNextItem);
Nan::SetPrototypeMethod(tpl, "NewInstance", NewInstance);
Nan::SetPrototypeMethod(tpl, "newInstance", NewInstance);
Nan::SetPrototypeMethod(tpl, "SafeDownCast", SafeDownCast);
Nan::SetPrototypeMethod(tpl, "safeDownCast", SafeDownCast);
// Optional hook for hand-written additions generated via the "plus" mechanism.
#ifdef VTK_NODE_PLUS_VTKRENDERWINDOWCOLLECTIONWRAP_INITPTPL
	VTK_NODE_PLUS_VTKRENDERWINDOWCOLLECTIONWRAP_INITPTPL
#endif
ptpl.Reset( tpl );
}
// JavaScript constructor entry point.
// Called with no arguments it allocates a fresh native vtkRenderWindowCollection
// and wraps it. The only other accepted argument is the internal no-wrap
// sentinel object, used by C++ code that wraps an existing native pointer itself.
void VtkRenderWindowCollectionWrap::New(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
if(!info.IsConstructCall())
{
Nan::ThrowError("Constructor not called in a construct call.");
return;
}
if(info.Length() == 0)
{
// Plain `new vtkRenderWindowCollection()` from JS: create the native object.
vtkSmartPointer<vtkRenderWindowCollection> native = vtkSmartPointer<vtkRenderWindowCollection>::New();
VtkRenderWindowCollectionWrap* obj = new VtkRenderWindowCollectionWrap(native);
obj->Wrap(info.This());
}
else
{
// Only the shared sentinel may be passed; any other argument is rejected.
if(info[0]->ToObject() != vtkNodeJsNoWrap )
{
Nan::ThrowError("Parameter Error");
return;
}
}
info.GetReturnValue().Set(info.This());
}
// Appends a render window to the collection.
// Expects exactly one argument that is a wrapped vtkRenderWindow; any other
// call shape raises a JS error.
void VtkRenderWindowCollectionWrap::AddItem(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
	VtkRenderWindowCollectionWrap *self = ObjectWrap::Unwrap<VtkRenderWindowCollectionWrap>(info.Holder());
	vtkRenderWindowCollection *native = (vtkRenderWindowCollection *)self->native.GetPointer();
	bool firstArgIsRenderWindow = info.Length() > 0 && info[0]->IsObject() &&
		(Nan::New(VtkRenderWindowWrap::ptpl))->HasInstance(info[0]);
	if(!firstArgIsRenderWindow)
	{
		Nan::ThrowError("Parameter mismatch");
		return;
	}
	VtkRenderWindowWrap *windowArg = ObjectWrap::Unwrap<VtkRenderWindowWrap>(info[0]->ToObject());
	if(info.Length() != 1)
	{
		Nan::ThrowError("Too many parameters.");
		return;
	}
	native->AddItem(
		(vtkRenderWindow *) windowArg->native.GetPointer()
	);
}
// Returns the next vtkRenderWindow in the collection's iteration, wrapped for
// JavaScript. Takes no arguments.
void VtkRenderWindowCollectionWrap::GetNextItem(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkRenderWindowCollectionWrap *wrapper = ObjectWrap::Unwrap<VtkRenderWindowCollectionWrap>(info.Holder());
vtkRenderWindowCollection *native = (vtkRenderWindowCollection *)wrapper->native.GetPointer();
vtkRenderWindow * r;
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->GetNextItem();
// Construct a JS wrapper; the sentinel argument tells the wrapper's
// constructor not to allocate a new native object.
VtkRenderWindowWrap::InitPtpl();
v8::Local<v8::Value> argv[1] =
{ Nan::New(vtkNodeJsNoWrap) };
v8::Local<v8::Function> cons =
Nan::New<v8::FunctionTemplate>(VtkRenderWindowWrap::ptpl)->GetFunction();
v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
VtkRenderWindowWrap *w = new VtkRenderWindowWrap();
// NOTE(review): r can be NULL once iteration is exhausted, and the wrapper is
// still created around that null pointer — confirm callers expect this.
w->native = r;
w->Wrap(wo);
info.GetReturnValue().Set(wo);
}
// Creates a new vtkRenderWindowCollection via VTK's virtual-constructor
// NewInstance() and returns it wrapped for JavaScript. Takes no arguments.
void VtkRenderWindowCollectionWrap::NewInstance(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
VtkRenderWindowCollectionWrap *wrapper = ObjectWrap::Unwrap<VtkRenderWindowCollectionWrap>(info.Holder());
vtkRenderWindowCollection *native = (vtkRenderWindowCollection *)wrapper->native.GetPointer();
vtkRenderWindowCollection * r;
if(info.Length() != 0)
{
Nan::ThrowError("Too many parameters.");
return;
}
r = native->NewInstance();
// Construct a JS wrapper; the sentinel argument suppresses native allocation
// inside the wrapper's constructor.
VtkRenderWindowCollectionWrap::InitPtpl();
v8::Local<v8::Value> argv[1] =
{ Nan::New(vtkNodeJsNoWrap) };
v8::Local<v8::Function> cons =
Nan::New<v8::FunctionTemplate>(VtkRenderWindowCollectionWrap::ptpl)->GetFunction();
v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
VtkRenderWindowCollectionWrap *w = new VtkRenderWindowCollectionWrap();
w->native = r;
w->Wrap(wo);
info.GetReturnValue().Set(wo);
}
// JS binding for vtkRenderWindowCollection::SafeDownCast(): attempts a
// checked downcast of the given vtkObjectBase argument and returns the
// result wrapped as a JS object.  Throws if the argument is not a wrapped
// vtkObjectBase.  (Generated binding code.)
void VtkRenderWindowCollectionWrap::SafeDownCast(const Nan::FunctionCallbackInfo<v8::Value>& info)
{
    VtkRenderWindowCollectionWrap *wrapper = ObjectWrap::Unwrap<VtkRenderWindowCollectionWrap>(info.Holder());
    vtkRenderWindowCollection *native = (vtkRenderWindowCollection *)wrapper->native.GetPointer();
    // The single argument must be a JS object created from the
    // VtkObjectBaseWrap template (i.e. a wrapped VTK object).
    if(info.Length() > 0 && info[0]->IsObject() && (Nan::New(VtkObjectBaseWrap::ptpl))->HasInstance(info[0]))
    {
        VtkObjectBaseWrap *a0 = ObjectWrap::Unwrap<VtkObjectBaseWrap>(info[0]->ToObject());
        vtkRenderWindowCollection * r;
        // Exactly one argument is accepted.
        if(info.Length() != 1)
        {
            Nan::ThrowError("Too many parameters.");
            return;
        }
        r = native->SafeDownCast(
            (vtkObjectBase *) a0->native.GetPointer()
        );
        // Wrap the downcast result (NULL if the cast failed — TODO confirm
        // how callers handle a null-backed wrapper).
        VtkRenderWindowCollectionWrap::InitPtpl();
        v8::Local<v8::Value> argv[1] =
            { Nan::New(vtkNodeJsNoWrap) };
        v8::Local<v8::Function> cons =
            Nan::New<v8::FunctionTemplate>(VtkRenderWindowCollectionWrap::ptpl)->GetFunction();
        v8::Local<v8::Object> wo = cons->NewInstance(1, argv);
        VtkRenderWindowCollectionWrap *w = new VtkRenderWindowCollectionWrap();
        w->native = r;
        w->Wrap(wo);
        info.GetReturnValue().Set(wo);
        return;
    }
    Nan::ThrowError("Parameter mismatch");
}
|
Java
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from collections import namedtuple
from blinkpy.common.net.results_fetcher import TestResultsFetcher
# (build, step_name) pair identifying one step of one build; used below as
# the key under which canned results are stored and fetched.
BuilderStep = namedtuple('BuilderStep', ['build', 'step_name'])
# TODO(qyearsley): To be consistent with other fake ("mock") classes, this
# could be changed so it's not a subclass of TestResultsFetcher.
class MockTestResultsFetcher(TestResultsFetcher):
    """In-memory fake of TestResultsFetcher for unit tests.

    Results are "canned" up front via the ``set_*`` methods and handed back
    by the corresponding ``fetch_*`` methods; every fetch is also recorded
    so tests can assert on what was requested.
    """

    def __init__(self):
        super(MockTestResultsFetcher, self).__init__()
        # Canned data, keyed by BuilderStep (or by build_id for ResultDB).
        self._canned_results = {}
        self._canned_retry_summary_json = {}
        self._webdriver_results = {}
        # Records of every fetch made, for test assertions.
        self.fetched_builds = []
        self.fetched_webdriver_builds = []
        self._layout_test_step_name = 'blink_web_tests (with patch)'

    def set_results(self, build, results, step_name=None):
        resolved = step_name or self.get_layout_test_step_name(build)
        key = BuilderStep(build=build, step_name=resolved)
        self._canned_results[key] = results

    def fetch_results(self, build, full=False, step_name=None):
        resolved = step_name or self.get_layout_test_step_name(build)
        key = BuilderStep(build=build, step_name=resolved)
        self.fetched_builds.append(key)
        return self._canned_results.get(key)

    def set_results_to_resultdb(self, build, results):
        # ResultDB-style results are keyed by the numeric build id instead.
        self._canned_results[build.build_id] = results

    def fetch_results_from_resultdb(self, host, builds, predicate):
        # ``host`` and ``predicate`` are accepted for interface parity but
        # ignored by this fake.
        gathered = []
        for one_build in builds:
            canned = self._canned_results.get(one_build.build_id)
            if canned:
                gathered.extend(canned)
        return gathered

    def set_webdriver_test_results(self, build, m, results):
        self._webdriver_results[(build, m)] = results

    def fetch_webdriver_test_results(self, build, m):
        self.fetched_webdriver_builds.append((build, m))
        return self._webdriver_results.get((build, m))

    def set_retry_sumary_json(self, build, content):
        # NOTE: the "sumary" spelling is part of the established API name.
        self._canned_retry_summary_json[build] = content

    def fetch_retry_summary_json(self, build):
        return self._canned_retry_summary_json.get(build)

    def set_layout_test_step_name(self, name):
        self._layout_test_step_name = name

    def get_layout_test_step_name(self, build):
        # The fake returns one fixed step name regardless of the build.
        return self._layout_test_step_name
|
Java
|
# -*- coding: utf-8 -*-
import access
import util
@auth.requires_login()
def index():
    """Produces a list of the feedback obtained for a given venue,
    or for all venues.

    URL arg 0 is either 'all' (submissions across every venue) or a
    specific venue id.
    """
    venue_id = request.args(0)
    # Restrict the grid to the logged-in user's own submissions, and to the
    # requested venue unless 'all' was asked for.
    if venue_id == 'all':
        q = (db.submission.user == get_user_email())
    else:
        q = ((db.submission.user == get_user_email())
             & (db.submission.venue_id == venue_id))
    # The id column doubles as a "View" button linking to the submission page.
    db.submission.id.represent = lambda x, r: A(T('View'), _class='btn', _href=URL('submission', 'view_own_submission', args=['v', r.id]))
    db.submission.id.label = T('Submission')
    db.submission.id.readable = True
    db.submission.venue_id.readable = True
    # Read-only grid: no create/edit/delete/CSV export.
    grid = SQLFORM.grid(q,
        fields=[db.submission.id, db.submission.venue_id,
                db.submission.date_created, db.submission.date_updated, ],
        csv=False, details=False, create=False, editable=False, deletable=False,
        args=request.args[:1],
        maxtextlength=24,
        )
    return dict(grid=grid)
@auth.requires_login()
def view_feedback():
    """Shows detailed feedback for a user in a venue.

    This controller accepts various types of arguments:
    * 's', submission_id
    * 'u', venue_id, username
    * 'v', venue_id (in which case, shows own submission to that venue)

    Returns the dict consumed by the view: submission, links, grades, and a
    grid of the reviews (tasks) for the submission.
    """
    if len(request.args) == 0:
        redirect(URL('default', 'index'))
    # Resolve venue (c), submission (subm) and submission owner (username)
    # from whichever argument form was used.
    if request.args(0) == 's':
        # submission_id
        n_args = 2
        subm = db.submission(request.args(1)) or redirect(URL('default', 'index'))
        c = db.venue(subm.venue_id) or redirect(URL('default', 'index'))
        username = subm.user
    elif request.args(0) == 'v':
        # venue_id; show the logged-in user's own submission.
        n_args = 2
        c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
        username = get_user_email()
        subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
    else:
        # venue_id, username
        n_args = 3
        c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
        username = request.args(2) or redirect(URL('default', 'index'))
        subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
    # Checks permissions.
    props = db(db.user_properties.user == get_user_email()).select().first()
    # FIX: identity comparison with None must use "is", not "==".
    if props is None:
        session.flash = T('Not authorized.')
        redirect(URL('default', 'index'))
    is_author = (username == get_user_email())
    # Authors may always see their own feedback; others need venue permission.
    can_view_feedback = access.can_view_feedback(c, props) or is_author
    if not can_view_feedback:
        session.flash = T('Not authorized.')
        redirect(URL('default', 'index'))
    # Even authorized viewers must wait until the rating period has closed,
    # unless they hold the view-feedback permission.
    if not (access.can_view_feedback(c, props) or datetime.utcnow() > c.rate_close_date):
        session.flash = T('The ratings are not yet available.')
        redirect(URL('feedback', 'index', args=['all']))
    # Produces the link to edit the feedback.
    edit_feedback_link = None
    if subm is not None and access.can_observe(c, props):
        edit_feedback_link = A(T('Edit feedback'), _class='btn',
                               _href=URL('submission', 'edit_feedback', args=[subm.id]))
    # Produces the download link (author and manager use different endpoints).
    download_link = None
    if subm is not None and c.allow_file_upload and subm.content is not None:
        if is_author:
            download_link = A(T('Download'), _class='btn',
                              _href=URL('submission', 'download_author', args=[subm.id, subm.content]))
        else:
            download_link = A(T('Download'), _class='btn',
                              _href=URL('submission', 'download_manager', args=[subm.id, subm.content]))
    venue_link = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
    # Submission link.
    subm_link = None
    if subm is not None and c.allow_link_submission:
        subm_link = A(subm.link, _href=subm.link)
    # Submission content and feedback (stored in the keystore, rendered
    # as MARKMIN only when non-empty).
    subm_comment = None
    subm_feedback = None
    if subm is not None:
        raw_subm_comment = keystore_read(subm.comment)
        if raw_subm_comment is not None and len(raw_subm_comment) > 0:
            subm_comment = MARKMIN(keystore_read(subm.comment))
        raw_feedback = keystore_read(subm.feedback)
        if raw_feedback is not None and len(raw_feedback) > 0:
            subm_feedback = MARKMIN(raw_feedback)
    # Display settings.
    db.submission.percentile.readable = True
    db.submission.comment.readable = True
    db.submission.feedback.readable = True
    if access.can_observe(c, props):
        db.submission.quality.readable = True
        db.submission.error.readable = True
    # Reads the grade information (only once grades are released).
    submission_grade = submission_percentile = None
    review_grade = review_percentile = user_reputation = None
    final_grade = final_percentile = None
    assigned_grade = None
    if c.grades_released:
        grade_info = db((db.grades.user == username) & (db.grades.venue_id == c.id)).select().first()
        if grade_info is not None:
            submission_grade = represent_quality(grade_info.submission_grade, None)
            submission_percentile = represent_percentage(grade_info.submission_percentile, None)
            review_grade = represent_quality_10(grade_info.accuracy, None)
            review_percentile = represent_percentage(grade_info.accuracy_percentile, None)
            user_reputation = represent_01_as_percentage(grade_info.reputation, None)
            final_grade = represent_quality(grade_info.grade, None)
            final_percentile = represent_percentage(grade_info.percentile, None)
            assigned_grade = represent_quality(grade_info.assigned_grade, None)
    # Makes a grid of comments.
    db.task.submission_name.readable = False
    db.task.assigned_date.readable = False
    db.task.completed_date.readable = False
    db.task.rejected.readable = True
    db.task.helpfulness.readable = db.task.helpfulness.writable = True
    # Prevent editing the comments; the only thing editable should be the "is bogus" field.
    db.task.comments.writable = False
    db.task.comments.readable = True
    ranking_link = None
    if access.can_observe(c, props):
        # Observers additionally see who reviewed and when, plus deep links
        # into the ranking details.
        db.task.user.readable = True
        db.task.completed_date.readable = True
        links = [
            dict(header=T('Review details'), body=lambda r:
                 A(T('View'), _class='btn', _href=URL('ranking', 'view_comparison', args=[r.id]))),
        ]
        details = False
        if subm is not None:
            ranking_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_submission', args=[subm.id]))
        reviews_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_user', args=[username, c.id]))
        db.task.user.represent = lambda v, r: A(v, _href=URL('ranking', 'view_comparisons_given_user',
                                                             args=[v, c.id], user_signature=True))
    else:
        # Non-observers only get a button to rate each review's helpfulness.
        user_reputation = None
        links = [
            dict(header=T('Review feedback'), body=lambda r:
                 A(T('Give feedback'), _class='btn',
                   _href=URL('feedback', 'reply_to_review', args=[r.id], user_signature=True))),
        ]
        details = False
        ranking_link = None
        reviews_link = None
    if subm is not None:
        # "== True" is required by the web2py DAL query syntax here.
        q = ((db.task.submission_id == subm.id) & (db.task.is_completed == True))
        # q = (db.task.submission_id == subm.id)
    else:
        # No submission: deliberately empty grid.
        q = (db.task.id == -1)
    grid = SQLFORM.grid(q,
        fields=[db.task.id, db.task.user, db.task.rejected, db.task.comments, db.task.helpfulness, ],
        details=details,
        csv=False, create=False, editable=False, deletable=False, searchable=False,
        links=links,
        args=request.args[:n_args],
        maxtextlength=24,
        )
    return dict(subm=subm, download_link=download_link, subm_link=subm_link, username=username,
                subm_comment=subm_comment, subm_feedback=subm_feedback,
                edit_feedback_link=edit_feedback_link,
                is_admin=is_user_admin(),
                submission_grade=submission_grade, submission_percentile=submission_percentile,
                review_grade=review_grade, review_percentile=review_percentile,
                user_reputation=user_reputation,
                final_grade=final_grade, final_percentile=final_percentile,
                assigned_grade=assigned_grade,
                venue_link=venue_link, grid=grid, ranking_link=ranking_link,
                reviews_link=reviews_link)
@auth.requires_signature()
def reply_to_review():
    """Lets the submission author rate a review's helpfulness and reply
    to it.  URL arg 0 is the task (review) id."""
    t = db.task(request.args(0)) or redirect(URL('default', 'index'))
    # Expose only the helpfulness and feedback fields in the form.
    db.task.submission_name.readable = False
    db.task.assigned_date.readable = False
    db.task.completed_date.readable = False
    db.task.comments.readable = False
    db.task.helpfulness.readable = db.task.helpfulness.writable = True
    db.task.feedback.readable = db.task.feedback.writable = True
    form = SQLFORM(db.task, record=t)
    # Feedback text is stored in the keystore; load it into the form field.
    form.vars.feedback = keystore_read(t.feedback)
    # On accept, validate_review_feedback persists the text back to the
    # keystore (replacing form.vars.feedback with the keystore id).
    if form.process(onvalidation=validate_review_feedback(t)).accepted:
        session.flash = T('Updated.')
        redirect(URL('feedback', 'view_feedback', args=['s', t.submission_id]))
    link_to_submission = A(T('View submission'), _href=URL('submission', 'view_own_submission', args=['v', t.submission_id]))
    review_comments = MARKMIN(keystore_read(t.comments))
    return dict(form=form, link_to_submission=link_to_submission, review_comments=review_comments)
def validate_review_feedback(t):
    """Returns an onvalidation callback for the review-feedback form of
    task ``t``.

    When validation passes, the feedback text is persisted to the keystore
    and the form value is replaced with the resulting keystore id.
    """
    def _on_validation(form):
        if form.errors:
            # Leave the form untouched when validation failed.
            return
        form.vars.feedback = keystore_update(t.feedback, form.vars.feedback)
    return _on_validation
@auth.requires_login()
def view_my_reviews():
    """This controller displays the reviews a user has written for a venue,
    along with the feedback they received.

    URL arg 0 is the venue id.
    """
    c = db.venue(request.args(0)) or redirect(URL('rating', 'review_index'))
    link_to_venue = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
    link_to_eval = A(T('My evaluation in this venue'), _class='btn',
                     _href=URL('feedback', 'view_feedback', args=['v', c.id]))
    # Only this user's review tasks for this venue.
    q = ((db.task.user == get_user_email()) & (db.task.venue_id == c.id))
    db.task.rejected.readable = True
    db.task.helpfulness.readable = True
    db.task.comments.readable = True
    db.task.feedback.readable = True
    # To prevent chopping
    db.task.submission_name.represent = represent_text_field
    # Read-only grid; details view enabled so full comments are reachable.
    grid = SQLFORM.grid(q,
        fields=[db.task.submission_name, db.task.rejected, db.task.helpfulness],
        details=True,
        editable=False, deletable=False, create=False, searchable=False,
        csv=False,
        args=request.args[:1],
        maxtextlength=24,
        )
    return dict(grid=grid, link_to_venue=link_to_venue, link_to_eval=link_to_eval)
|
Java
|
<?php
// User index view: renders the page title, a "Create User" button, and a
// filterable GridView of all users with per-row view/update/delete actions.
use yii\helpers\Html;
use yii\grid\GridView;
/* @var $this yii\web\View */
/* @var $searchModel app\models\search\UserSearch */
/* @var $dataProvider yii\data\ActiveDataProvider */
$this->title = 'Users';
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="user-index">
    <h1><?= Html::encode($this->title) ?></h1>
    <?php // echo $this->render('_search', ['model' => $searchModel]); ?>
    <p>
        <?= Html::a('Create User', ['create'], ['class' => 'btn btn-success']) ?>
    </p>
    <?= GridView::widget([
        'dataProvider' => $dataProvider,
        'filterModel' => $searchModel,
        'columns' => [
            ['class' => 'yii\grid\SerialColumn'],
            'id',
            'username',
            // NOTE(review): rendering the raw password column in an admin
            // grid looks unintentional — confirm whether this is a hash.
            'password',
            'fullname',
            'is_seller',
            // 'lat',
            // 'lng',
            // 'category_id',
            // 'description:ntext',
            ['class' => 'yii\grid\ActionColumn'],
        ],
    ]); ?>
</div>
|
Java
|
//------------------------------------------------------------------------------
// GB_AxB: hard-coded C=A*B and C<M>=A*B
//------------------------------------------------------------------------------
// SuiteSparse:GraphBLAS, Timothy A. Davis, (c) 2017-2018, All Rights Reserved.
// http://suitesparse.com See GraphBLAS/Doc/License.txt for license.
// If this filename has a double underscore in its name ("__") then it has been
// automatically constructed from Generator/GB_AxB.c, via the Source/axb*.m
// scripts, and should not be edited. Edit the original source file instead.
//------------------------------------------------------------------------------
#include "GB.h"
#ifndef GBCOMPACT
#include "GB_heap.h"
#include "GB_AxB__semirings.h"
// The C=A*B semiring is defined by the following types and operators:
// A*B function (Gustavon): GB_AgusB__times_isgt_int32
// A'*B function (dot): GB_AdotB__times_isgt_int32
// A*B function (heap): GB_AheapB__times_isgt_int32
// Z type: int32_t (the type of C)
// X type: int32_t (the type of x for z=mult(x,y))
// Y type: int32_t (the type of y for z=mult(x,y))
// handle flipxy: 0 (0 if mult(x,y) is commutative, 1 otherwise)
// Identity: 1 (where cij *= identity does not change cij)
// Multiply: z = x > y
// Add: cij *= z
#define GB_XTYPE \
int32_t
#define GB_YTYPE \
int32_t
#define GB_HANDLE_FLIPXY \
0
#define GB_MULTOP(z,x,y) \
z = x > y
//------------------------------------------------------------------------------
// C<M>=A*B and C=A*B: gather/scatter saxpy-based method (Gustavson)
//------------------------------------------------------------------------------
#define GB_IDENTITY \
1
// x [i] = y
#define GB_COPY_SCALAR_TO_ARRAY(x,i,y,s) \
x [i] = y ;
// x = y [i]
#define GB_COPY_ARRAY_TO_SCALAR(x,y,i,s) \
GB_btype x = y [i] ;
// x [i] = y [i]
#define GB_COPY_ARRAY_TO_ARRAY(x,i,y,j,s) \
x [i] = y [j] ;
// mult-add operation (no mask)
#define GB_MULTADD_NOMASK \
{ \
/* Sauna_Work [i] += A(i,k) * B(k,j) */ \
GB_atype aik = Ax [pA] ; \
int32_t t ; \
GB_MULTIPLY (t, aik, bkj) ; \
Sauna_Work [i] *= t ; \
}
// mult-add operation (with mask)
#define GB_MULTADD_WITH_MASK \
{ \
/* Sauna_Work [i] += A(i,k) * B(k,j) */ \
GB_atype aik = Ax [pA] ; \
int32_t t ; \
GB_MULTIPLY (t, aik, bkj) ; \
if (mark == hiwater) \
{ \
/* first time C(i,j) seen */ \
Sauna_Mark [i] = hiwater + 1 ; \
Sauna_Work [i] = t ; \
} \
else \
{ \
/* C(i,j) seen before, update it */ \
Sauna_Work [i] *= t ; \
} \
}
// C=A*B or C<M>=A*B via the Gustavson gather/scatter saxpy method, for the
// times-isgt semiring over int32.  Generated code: the semiring-specific
// behavior comes from the GB_* macros defined above.
GrB_Info GB_AgusB__times_isgt_int32
(
    GrB_Matrix C,
    const GrB_Matrix M,
    const GrB_Matrix A,
    const GrB_Matrix B,
    bool flipxy,              // if true, A and B have been swapped
    GB_Sauna Sauna,           // sparse accumulator
    GB_Context Context
)
{
    // Sauna_Work is the gather/scatter workspace, one entry per row of C.
    int32_t *restrict Sauna_Work = Sauna->Sauna_Work ;    // size C->vlen*zsize
    int32_t *restrict Cx = C->x ;
    GrB_Info info = GrB_SUCCESS ;
    // The shared Gustavson kernel body, specialized by the macros above.
    #include "GB_AxB_Gustavson_flipxy.c"
    return (info) ;
}
//------------------------------------------------------------------------------
// C<M>=A'*B or C=A'*B: dot product
//------------------------------------------------------------------------------
// get A(k,i)
#define GB_DOT_GETA(pA) \
GB_atype aki = Ax [pA] ;
// get B(k,j)
#define GB_DOT_GETB(pB) \
GB_btype bkj = Bx [pB] ;
// t = aki*bkj
#define GB_DOT_MULT(bkj) \
int32_t t ; \
GB_MULTIPLY (t, aki, bkj) ;
// cij += t
#define GB_DOT_ADD \
cij *= t ;
// cij = t
#define GB_DOT_COPY \
cij = t ;
// cij is not a pointer but a scalar; nothing to do
#define GB_DOT_REACQUIRE ;
// clear cij
#define GB_DOT_CLEAR \
cij = 1 ;
// save the value of C(i,j)
#define GB_DOT_SAVE \
Cx [cnz] = cij ;
#define GB_DOT_WORK_TYPE \
GB_btype
#define GB_DOT_WORK(k) Work [k]
// Work [k] = Bx [pB]
#define GB_DOT_SCATTER \
Work [k] = Bx [pB] ;
// C=A'*B or C<M>=A'*B via the dot-product method, for the times-isgt
// semiring over int32.  Generated code: behavior is driven by the GB_DOT_*
// macros defined above.
GrB_Info GB_AdotB__times_isgt_int32
(
    GrB_Matrix *Chandle,
    const GrB_Matrix M,
    const GrB_Matrix A,
    const GrB_Matrix B,
    bool flipxy,              // if true, A and B have been swapped
    GB_Context Context
)
{
    GrB_Matrix C = (*Chandle) ;
    int32_t *restrict Cx = C->x ;
    // cij accumulates one dot product at a time (scalar, not a pointer).
    int32_t cij ;
    GrB_Info info = GrB_SUCCESS ;
    size_t bkj_size = B->type->size ;   // no typecasting here
    // The shared dot-product kernel body, specialized by the macros above.
    #include "GB_AxB_dot_flipxy.c"
    return (info) ;
}
//------------------------------------------------------------------------------
// C<M>=A*B and C=A*B: heap saxpy-based method
//------------------------------------------------------------------------------
#define GB_CIJ_GETB(pB) \
GB_btype bkj = Bx [pB] ;
// C(i,j) = A(i,k) * bkj
#define GB_CIJ_MULT(pA) \
{ \
GB_atype aik = Ax [pA] ; \
GB_MULTIPLY (cij, aik, bkj) ; \
}
// C(i,j) += A(i,k) * B(k,j)
#define GB_CIJ_MULTADD(pA,pB) \
{ \
GB_atype aik = Ax [pA] ; \
GB_btype bkj = Bx [pB] ; \
int32_t t ; \
GB_MULTIPLY (t, aik, bkj) ; \
cij *= t ; \
}
// cij is not a pointer but a scalar; nothing to do
#define GB_CIJ_REACQUIRE ;
// cij = identity
#define GB_CIJ_CLEAR \
cij = 1 ;
// save the value of C(i,j)
#define GB_CIJ_SAVE \
Cx [cnz] = cij ;
// C=A*B or C<M>=A*B via the heap-based saxpy method, for the times-isgt
// semiring over int32.  Generated code: behavior is driven by the GB_CIJ_*
// macros defined above.
GrB_Info GB_AheapB__times_isgt_int32
(
    GrB_Matrix *Chandle,
    const GrB_Matrix M,
    const GrB_Matrix A,
    const GrB_Matrix B,
    bool flipxy,                        // if true, A and B have been swapped
    int64_t *restrict List,
    GB_pointer_pair *restrict pA_pair,
    GB_Element *restrict Heap,
    const int64_t bjnz_max,
    GB_Context Context
)
{
    GrB_Matrix C = (*Chandle) ;
    int32_t *restrict Cx = C->x ;
    // cij accumulates one entry of C at a time (scalar, not a pointer).
    int32_t cij ;
    int64_t cvlen = C->vlen ;
    GrB_Info info = GrB_SUCCESS ;
    // Start cij at the multiplicative identity (1) before the kernel runs.
    GB_CIJ_CLEAR ;
    // The shared heap kernel body, specialized by the macros above.
    #include "GB_AxB_heap_flipxy.c"
    return (info) ;
}
//------------------------------------------------------------------------------
// clear macro definitions
//------------------------------------------------------------------------------
#undef GB_XTYPE
#undef GB_YTYPE
#undef GB_HANDLE_FLIPXY
#undef GB_MULTOP
#undef GB_IDENTITY
#undef GB_COPY_SCALAR_TO_ARRAY
#undef GB_COPY_ARRAY_TO_SCALAR
#undef GB_COPY_ARRAY_TO_ARRAY
#undef GB_MULTADD_NOMASK
#undef GB_MULTADD_WITH_MASK
#undef GB_DOT_GETA
#undef GB_DOT_GETB
#undef GB_DOT_MULT
#undef GB_DOT_ADD
#undef GB_DOT_COPY
#undef GB_DOT_REACQUIRE
#undef GB_DOT_CLEAR
#undef GB_DOT_SAVE
#undef GB_DOT_WORK_TYPE
#undef GB_DOT_WORK
#undef GB_DOT_SCATTER
#undef GB_CIJ_GETB
#undef GB_CIJ_MULT
#undef GB_CIJ_MULTADD
#undef GB_CIJ_REACQUIRE
#undef GB_CIJ_CLEAR
#undef GB_CIJ_SAVE
#undef GB_MULTIPLY
#endif
|
Java
|
from __future__ import print_function
import shutil
import os, sys
import time
import logging
from .loaders import PythonLoader, YAMLLoader
from .bundle import get_all_bundle_files
from .exceptions import BuildError
from .updater import TimestampUpdater
from .merge import MemoryHunk
from .version import get_manifest
from .cache import FilesystemCache
from .utils import set, StringIO
# Public API of this module.
__all__ = ('CommandError', 'CommandLineEnvironment', 'main')
# logging has WARNING as default level, for the CLI we want INFO. Set this
# as early as possible, so that user customizations will not be overwritten.
logging.getLogger('webassets.script').setLevel(logging.INFO)
class CommandError(Exception):
    """Raised when a CLI command cannot be carried out as requested."""
class Command(object):
    """Base-class for a command used by :class:`CommandLineEnvironment`.

    Each command being a class opens up certain possibilities with respect
    to subclassing and customizing the default CLI.
    """

    def __init__(self, cmd_env):
        # Keep a handle on the CLI environment driving this command.
        self.cmd = cmd_env

    def __getattr__(self, name):
        # Unknown attributes are delegated to the command environment, so
        # e.g. ``self.log`` resolves to ``self.cmd.log``.
        delegate = self.cmd
        return getattr(delegate, name)

    def __call__(self, *args, **kwargs):
        # Subclasses implement the actual command logic here.
        raise NotImplementedError()
class BuildCommand(Command):
    """Builds bundles to their output files (the ``build`` CLI command)."""

    def __call__(self, bundles=None, output=None, directory=None, no_cache=None,
                 manifest=None, production=None):
        """Build assets.

        ``bundles``
            A list of bundle names. If given, only this list of bundles
            should be built.

        ``output``
            List of (bundle, filename) 2-tuples. If given, only these
            bundles will be built, using the custom output filenames.
            Cannot be used with ``bundles``.

        ``directory``
            Custom output directory to use for the bundles. The original
            basenames defined in the bundle ``output`` attribute will be
            used. If the ``output`` of the bundles are pointing to different
            directories, they will be offset by their common prefix.
            Cannot be used with ``output``.

        ``no_cache``
            If set, a cache (if one is configured) will not be used.

        ``manifest``
            If set, the given manifest instance will be used, instead of
            any that might have been configured in the Environment. The value
            passed will be resolved through ``get_manifest()``. If this fails,
            a file-based manifest will be used using the given value as the
            filename.

        ``production``
            If set to ``True``, then :attr:`Environment.debug`` will forcibly
            be disabled (set to ``False``) during the build.
        """
        # Validate arguments
        if bundles and output:
            raise CommandError(
                'When specifying explicit output filenames you must '
                'do so for all bundles you want to build.')
        if directory and output:
            raise CommandError('A custom output directory cannot be '
                               'combined with explicit output filenames '
                               'for individual bundles.')
        if production:
            # TODO: Reset again (refactor commands to be classes)
            self.environment.debug = False
        # TODO: Oh how nice it would be to use the future options stack.
        if manifest is not None:
            try:
                manifest = get_manifest(manifest, env=self.environment)
            except ValueError:
                manifest = get_manifest(
                    # abspath() is important, or this will be considered
                    # relative to Environment.directory.
                    "file:%s" % os.path.abspath(manifest),
                    env=self.environment)
            self.environment.manifest = manifest
        # Use output as a dict.
        if output:
            output = dict(output)
        # Validate bundle names
        bundle_names = bundles if bundles else (output.keys() if output else [])
        for name in bundle_names:
            if not name in self.environment:
                raise CommandError(
                    'I do not know a bundle name named "%s".' % name)
        # Make a list of (name, bundle) pairs to build.
        if bundle_names:
            # TODO: It's not ok to use an internal property here.
            bundles = [(n,b) for n, b in self.environment._named_bundles.items()
                       if n in bundle_names]
        else:
            # Includes unnamed bundles as well.
            bundles = [(None, b) for b in self.environment]
        # Determine common prefix for use with ``directory`` option.
        if directory:
            prefix = os.path.commonprefix(
                [os.path.normpath(b.resolve_output())
                 for _, b in bundles if b.output])
            # dirname() gives the right value for a single file.
            prefix = os.path.dirname(prefix)
        to_build = []
        for name, bundle in bundles:
            # TODO: We really should support this. This error here
            # is just in place of a less understandable error that would
            # otherwise occur.
            if bundle.is_container and directory:
                raise CommandError(
                    'A custom output directory cannot currently be '
                    'used with container bundles.')
            # Determine which filename to use, if not the default.
            overwrite_filename = None
            if output:
                overwrite_filename = output[name]
            elif directory:
                # Offset the bundle's output path by the common prefix so
                # relative directory structure is preserved under ``directory``.
                offset = os.path.normpath(
                    bundle.resolve_output())[len(prefix)+1:]
                overwrite_filename = os.path.join(directory, offset)
            to_build.append((bundle, overwrite_filename, name,))
        # Build.
        built = []
        for bundle, overwrite_filename, name in to_build:
            if name:
                # A name is not necessarily available if the bundle was
                # registered without one.
                self.log.info("Building bundle: %s (to %s)" % (
                    name, overwrite_filename or bundle.output))
            else:
                self.log.info("Building bundle: %s" % bundle.output)
            try:
                if not overwrite_filename:
                    with bundle.bind(self.environment):
                        bundle.build(force=True, disable_cache=no_cache)
                else:
                    # TODO: Rethink how we deal with container bundles here.
                    # As it currently stands, we write all child bundles
                    # to the target output, merged (which is also why we
                    # create and force writing to a StringIO instead of just
                    # using the ``Hunk`` objects that build() would return
                    # anyway.
                    output = StringIO()
                    with bundle.bind(self.environment):
                        bundle.build(force=True, output=output,
                                     disable_cache=no_cache)
                    if directory:
                        # Only auto-create directories in this mode.
                        output_dir = os.path.dirname(overwrite_filename)
                        if not os.path.exists(output_dir):
                            os.makedirs(output_dir)
                    MemoryHunk(output.getvalue()).save(overwrite_filename)
                built.append(bundle)
            except BuildError as e:
                # A failed bundle is logged but does not abort the rest.
                self.log.error("Failed, error was: %s" % e)
        if len(built):
            self.event_handlers['post_build']()
        # Non-zero return signals partial failure to the CLI entry point.
        if len(built) != len(to_build):
            return 2
class WatchCommand(Command):
    """Rebuilds bundles whenever one of their source files changes
    (the ``watch`` CLI command)."""

    def __call__(self, loop=None):
        """Watch assets for changes.

        ``loop``
            A callback, taking no arguments, to be called once every loop
            iteration. Can be useful to integrate the command with other
            code. If not specified, the loop will call ``time.sleep()``.
        """
        # TODO: This should probably also restart when the code changes.
        mtimes = {}
        try:
            # Before starting to watch for changes, also recognize changes
            # made while we did not run, and apply those immediately.
            for bundle in self.environment:
                print('Bringing up to date: %s' % bundle.output)
                bundle.build(force=False)
            self.log.info("Watching %d bundles for changes..." %
                          len(self.environment))
            while True:
                changed_bundles = self.check_for_changes(mtimes)
                built = []
                for bundle in changed_bundles:
                    print("Building bundle: %s ..." % bundle.output, end=' ')
                    sys.stdout.flush()
                    try:
                        bundle.build(force=True)
                        built.append(bundle)
                    except BuildError as e:
                        print("")
                        print("Failed: %s" % e)
                    else:
                        print("done")
                if len(built):
                    self.event_handlers['post_build']()
                # A truthy return from ``loop`` ends the watch loop.
                do_end = loop() if loop else time.sleep(0.1)
                if do_end:
                    break
        except KeyboardInterrupt:
            # Ctrl-C exits the watch loop quietly.
            pass

    def check_for_changes(self, mtimes):
        """Returns the set of bundles whose watched files changed since the
        mtimes recorded in ``mtimes`` (which is updated in place)."""
        # Do not update original mtimes dict right away, so that we detect
        # all bundle changes if a file is in multiple bundles.
        _new_mtimes = mtimes.copy()
        changed_bundles = set()
        # TODO: An optimization was lost here, skipping a bundle once
        # a single file has been found to have changed. Bring back.
        for filename, bundles_to_update in self.yield_files_to_watch():
            stat = os.stat(filename)
            mtime = stat.st_mtime
            # NOTE(review): presumably works around Windows mtime quirks —
            # confirm before changing.
            if sys.platform == "win32":
                mtime -= stat.st_ctime
            # A file never seen before defaults to its own mtime, so it does
            # not count as changed on the first pass.
            if mtimes.get(filename, mtime) != mtime:
                if callable(bundles_to_update):
                    # Hook for when file has changed
                    try:
                        bundles_to_update = bundles_to_update()
                    except EnvironmentError:
                        # EnvironmentError is what the hooks is allowed to
                        # raise for a temporary problem, like an invalid config
                        import traceback
                        traceback.print_exc()
                        # Don't update anything, wait for another change
                        bundles_to_update = set()
                if bundles_to_update is True:
                    # Indicates all bundles should be rebuilt for the change
                    bundles_to_update = set(self.environment)
                changed_bundles |= bundles_to_update
                _new_mtimes[filename] = mtime
            # Unconditionally record the mtime so first-seen files get a
            # baseline for the next iteration.
            _new_mtimes[filename] = mtime
        mtimes.update(_new_mtimes)
        return changed_bundles

    def yield_files_to_watch(self):
        # Yields (filename, bundles-to-rebuild) pairs for every source file
        # of every bundle in the environment.
        for bundle in self.environment:
            for filename in get_all_bundle_files(bundle):
                yield filename, set([bundle])
class CleanCommand(Command):
    """Deletes built bundle outputs and the cache (the ``clean`` command)."""

    def __call__(self):
        """Delete generated assets.
        """
        self.log.info('Cleaning generated assets...')
        for bundle in self.environment:
            # Bundles without an output file have nothing on disk to delete.
            if not bundle.output:
                continue
            file_path = bundle.resolve_output(self.environment)
            if os.path.exists(file_path):
                os.unlink(file_path)
                self.log.info("Deleted asset: %s" % bundle.output)
        # Also wipe the on-disk cache directory, if a filesystem cache is
        # configured.
        if isinstance(self.environment.cache, FilesystemCache):
            shutil.rmtree(self.environment.cache.directory)
class CheckCommand(Command):
    """Reports whether any bundle is out of date (the ``check`` command)."""

    def __call__(self):
        """Check to see if assets need to be rebuilt.

        A non-zero exit status will be returned if any of the input files
        are newer (based on mtime) than their output file. This is intended
        to be used in pre-commit hooks.
        """
        needsupdate = False
        updater = self.environment.updater
        if not updater:
            # Fall back to mtime comparison when no updater is configured.
            self.log.debug('no updater configured, using TimestampUpdater')
            updater = TimestampUpdater()
        for bundle in self.environment:
            self.log.info('Checking asset: %s', bundle.output)
            if updater.needs_rebuild(bundle, self.environment):
                self.log.info(' needs update')
                needsupdate = True
        # Exit non-zero so shell hooks can detect stale assets.
        if needsupdate:
            sys.exit(-1)
class CommandLineEnvironment(object):
    """Implements the core functionality for a command line frontend to
    ``webassets``, abstracted in a way to allow frameworks to integrate the
    functionality into their own tools, for example, as a Django management
    command, or a command for ``Flask-Script``.
    """

    def __init__(self, env, log, post_build=None, commands=None):
        # ``env`` is the webassets Environment; ``log`` a logger-like object.
        # ``post_build`` is an optional hook run after successful builds;
        # ``commands`` may override/extend DefaultCommands.
        self.environment = env
        self.log = log
        self.event_handlers = dict(post_build=lambda: True)
        if callable(post_build):
            self.event_handlers['post_build'] = post_build
        # Instantiate each command
        command_def = self.DefaultCommands.copy()
        command_def.update(commands or {})
        self.commands = {}
        for name, construct in command_def.items():
            # A falsy entry disables a default command.
            if not construct:
                continue
            # Plain class entries are normalized to (class, args, kwargs).
            if not isinstance(construct, (list, tuple)):
                construct = [construct, (), {}]
            self.commands[name] = construct[0](
                self, *construct[1], **construct[2])

    def __getattr__(self, item):
        # Allow method-like access to commands.
        if item in self.commands:
            return self.commands[item]
        raise AttributeError(item)

    def invoke(self, command, args):
        """Invoke ``command``, or throw a CommandError.

        This is essentially a simple validation mechanism. Feel free
        to call the individual command methods manually.
        """
        try:
            function = self.commands[command]
        except KeyError as e:
            raise CommandError('unknown command: %s' % e)
        else:
            return function(**args)

    # List of commands installed
    DefaultCommands = {
        'build': BuildCommand,
        'watch': WatchCommand,
        'clean': CleanCommand,
        'check': CheckCommand
    }
class GenericArgparseImplementation(object):
    """Generic command line utility to interact with an webassets environment.
    This is effectively a reference implementation of a command line utility
    based on the ``CommandLineEnvironment`` class. Implementers may find it
    feasible to simple base their own command line utility on this, rather than
    implementing something custom on top of ``CommandLineEnvironment``. In
    fact, if that is possible, you are encouraged to do so for greater
    consistency across implementations.
    """
    class WatchCommand(WatchCommand):
        """Extended watch command that also looks at the config file itself."""
        # NOTE: inside these methods the name ``WatchCommand`` resolves to
        # the module-level base class (nested class names are not visible in
        # method scope), so the explicit base-class calls below are correct.
        def __init__(self, cmd_env, argparse_ns):
            WatchCommand.__init__(self, cmd_env)
            self.ns = argparse_ns
        def yield_files_to_watch(self):
            # First, everything the base implementation watches.
            for result in WatchCommand.yield_files_to_watch(self):
                yield result
            # If the config changes, rebuild all bundles
            if getattr(self.ns, 'config', None):
                yield self.ns.config, self.reload_config
        def reload_config(self):
            # Re-read the YAML config and swap the environment in place.
            # NOTE(review): assumes the base class stores the command
            # environment as ``self.cmd`` -- defined outside this chunk;
            # confirm against the base WatchCommand.
            try:
                self.cmd.environment = YAMLLoader(self.ns.config).load_environment()
            except Exception as e:
                raise EnvironmentError(e)
            return True
    def __init__(self, env=None, log=None, prog=None, no_global_options=False):
        # argparse is stdlib on Python >= 2.7 / 3.2; older versions need
        # the backport package installed.
        try:
            import argparse
        except ImportError:
            raise RuntimeError(
                'The webassets command line now requires the '
                '"argparse" library on Python versions <= 2.6.')
        else:
            self.argparse = argparse
        self.env = env
        self.log = log
        self._construct_parser(prog, no_global_options)
    def _construct_parser(self, prog=None, no_global_options=False):
        # Build the argparse parser: global options first, then one
        # subparser per registered default command.
        self.parser = parser = self.argparse.ArgumentParser(
            description="Manage assets.",
            prog=prog)
        if not no_global_options:
            # Start with the base arguments that are valid for any command.
            # XXX: Add those to the subparser?
            parser.add_argument("-v", dest="verbose", action="store_true",
                                help="be verbose")
            parser.add_argument("-q", action="store_true", dest="quiet",
                                help="be quiet")
            if self.env is None:
                # No preconfigured environment: the user must point us at
                # one (YAML file or Python module) -- but not both at once.
                loadenv = parser.add_mutually_exclusive_group()
                loadenv.add_argument("-c", "--config", dest="config",
                                     help="read environment from a YAML file")
                loadenv.add_argument("-m", "--module", dest="module",
                                     help="read environment from a Python module")
        # Add subparsers.
        subparsers = parser.add_subparsers(dest='command')
        for command in CommandLineEnvironment.DefaultCommands.keys():
            command_parser = subparsers.add_parser(command)
            # Commands may contribute extra arguments through an optional
            # ``make_<command>_parser`` hook on this class.
            maker = getattr(self, 'make_%s_parser' % command, False)
            if maker:
                maker(command_parser)
    @staticmethod
    def make_build_parser(parser):
        # Arguments understood only by the ``build`` subcommand.
        parser.add_argument(
            'bundles', nargs='*', metavar='BUNDLE',
            help='Optional bundle names to process. If none are '
                 'specified, then all known bundles will be built.')
        parser.add_argument(
            '--output', '-o', nargs=2, action='append',
            metavar=('BUNDLE', 'FILE'),
            help='Build the given bundle, and use a custom output '
                 'file. Can be given multiple times.')
        parser.add_argument(
            '--directory', '-d',
            help='Write built files to this directory, using the '
                 'basename defined by the bundle. Will offset '
                 'the original bundle output paths on their common '
                 'prefix. Cannot be used with --output.')
        parser.add_argument(
            '--no-cache', action='store_true',
            help='Do not use a cache that might be configured.')
        parser.add_argument(
            '--manifest',
            help='Write a manifest to the given file. Also supports '
                 'the id:arg format, if you want to use a different '
                 'manifest implementation.')
        parser.add_argument(
            '--production', action='store_true',
            help='Forcably turn off debug mode for the build. This '
                 'only has an effect if debug is set to "merge".')
    def _setup_logging(self, ns):
        # Use the logger given at construction time, or build a default
        # one that honours the -v/-q flags.
        if self.log:
            log = self.log
        else:
            log = logging.getLogger('webassets.script')
            if not log.handlers:
                # In theory, this could run multiple times (e.g. tests)
                handler = logging.StreamHandler()
                log.addHandler(handler)
                # Note that setting the level filter at the handler level is
                # better than the logger level, since this is "our" handler,
                # we create it, for the purposes of having a default output.
                # The logger itself the user may be modifying.
                handler.setLevel(logging.DEBUG if ns.verbose else (
                    logging.WARNING if ns.quiet else logging.INFO))
        return log
    def _setup_assets_env(self, ns, log):
        # Resolve the environment: either the one given to __init__, or
        # one loaded via the -m / -c command line options.  May return
        # None if neither is available (handled by run_with_ns).
        env = self.env
        if env is None:
            assert not (ns.module and ns.config)
            if ns.module:
                env = PythonLoader(ns.module).load_environment()
            if ns.config:
                env = YAMLLoader(ns.config).load_environment()
        return env
    def _setup_cmd_env(self, assets_env, log, ns):
        # Wire in our extended watch command so that the config file
        # itself is watched as well.
        return CommandLineEnvironment(assets_env, log, commands={
            'watch': (GenericArgparseImplementation.WatchCommand, (ns,), {})
        })
    def _prepare_command_args(self, ns):
        # Prepare a dict of arguments cleaned of values that are not
        # command-specific, and which the command method would not accept.
        args = vars(ns).copy()
        for action in self.parser._actions:
            dest = action.dest
            if dest in args:
                del args[dest]
        return args
    def run_with_ns(self, ns):
        # Execute the command selected in the parsed argparse namespace.
        log = self._setup_logging(ns)
        env = self._setup_assets_env(ns, log)
        if env is None:
            raise CommandError(
                "Error: No environment given or found. Maybe use -m?")
        cmd = self._setup_cmd_env(env, log, ns)
        # Run the selected command
        args = self._prepare_command_args(ns)
        return cmd.invoke(ns.command, args)
    def run_with_argv(self, argv):
        # Parse argv and dispatch.  argparse calls sys.exit() on errors;
        # we intercept that and return the exit status instead.
        try:
            ns = self.parser.parse_args(argv)
        except SystemExit as e:
            # We do not want the main() function to exit the program.
            # See run() instead.
            return e.args[0]
        return self.run_with_ns(ns)
    def main(self, argv):
        """Parse the given command line.
        The commandline is expected to NOT including what would be sys.argv[0].
        """
        try:
            return self.run_with_argv(argv)
        except CommandError as e:
            print(e)
            return 1
def main(argv, env=None):
    """Run the generic command line interface and return its exit code.
    Work directly with ``GenericArgparseImplementation`` only if you need
    to customize behaviour.
    When ``env`` is None, extra command line options are made available so
    the user can specify/construct the environment on the command line.
    """
    cli = GenericArgparseImplementation(env)
    return cli.main(argv)
def run():
    """Entry point: run ``main`` on sys.argv and exit the process with the
    resulting status code."""
    status = main(sys.argv[1:]) or 0
    sys.exit(status)
if __name__ == '__main__':
    run()
|
Java
|
/* Copyright (c) 2016, Alexander Entinger / LXRobotics
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of motor-controller-highpower-motorshield nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "ioentity.h"
#include <assert.h>
#include <algorithm>
namespace arduinoio {
/**
 * @brief Constructor
 * @param serial shared serial connection, stored in m_serial for use by
 *        this IO entity; the entity starts out unconfigured
 *        (m_isConfigured = false)
 */
ioentity::ioentity(boost::shared_ptr<serial> const &serial) : m_serial(serial), m_isConfigured(false) {
}
/**
 * @brief Destructor
 * Empties the pin vector (m_pinVect).
 * NOTE(review): clearing a member container in the destructor is
 * redundant -- members are destroyed automatically afterwards.
 */
ioentity::~ioentity() {
    m_pinVect.clear();
}
} // end of namespace arduinoio
|
Java
|
{-# LANGUAGE FlexibleInstances #-}
-- ghc options
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- {-# OPTIONS_GHC -fno-warn-name-shadowing #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- {-# OPTIONS_GHC -fno-warn-missing-signatures #-}
-- {-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
-- {-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
-- {-# OPTIONS_GHC -fno-warn-incomplete-uni-patterns #-}
-- |
-- Copyright : (c) Andreas Reuleaux 2015
-- License : BSD2
-- Maintainer: Andreas Reuleaux <rx@a-rx.info>
-- Stability : experimental
-- Portability: non-portable
--
-- This module provides pretty printing functionality for Pire's
-- abstract and concrete syntax: names
module Pire.Pretty.Nm where
import Pire.Syntax.Nm
-- import Data.Text as T
import Text.PrettyPrint as TPP
import Pire.Pretty.Common
import Pire.Pretty.Ws()
-- instance Disp s => Disp (Nm_ s) where
-- disp (Nm_ nm (Ws ws)) = do
-- dnm <- disp nm
-- return $ dnm <> (text $ T.unpack ws)
-- | Pretty printing for 'Nm1' names: a bare name is rendered as-is; the
-- variant carrying trailing whitespace renders the name followed by the
-- rendered whitespace (concrete-syntax layout is preserved).
instance Disp s => Disp (Nm1 s) where
    disp (Nm1 nm) = disp nm
    disp (Nm1_ nm ws) = do
      dnm <- disp nm
      dws <- disp ws
      return $ dnm <> dws
-- | Pretty printing for 'Nm' names, mirroring the 'Nm1' instance above:
-- a bare name renders as-is; a name with trailing whitespace appends the
-- rendered whitespace after the rendered name.
instance Disp s => Disp (Nm s s) where
    disp (Nm nm) = disp nm
    disp (Nm_ nm ws) = do
      dnm <- disp nm
      dws <- disp ws
      return $ dnm <> dws
|
Java
|
/*
xxHash - Extremely Fast Hash algorithm
Header File
Copyright (C) 2012-2016, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- xxHash source repository : https://github.com/Cyan4973/xxHash
*/
/* Notice extracted from xxHash homepage :
xxHash is an extremely fast Hash algorithm, running at RAM speed limits.
It also successfully passes all tests from the SMHasher suite.
Comparison (single thread, Windows Seven 32 bits, using SMHasher on a Core 2 Duo
@3GHz)
Name Speed Q.Score Author
xxHash 5.4 GB/s 10
CrapWow 3.2 GB/s 2 Andrew
MumurHash 3a 2.7 GB/s 10 Austin Appleby
SpookyHash 2.0 GB/s 10 Bob Jenkins
SBox 1.4 GB/s 9 Bret Mulvey
Lookup3 1.2 GB/s 9 Bob Jenkins
SuperFastHash 1.2 GB/s 1 Paul Hsieh
CityHash64 1.05 GB/s 10 Pike & Alakuijala
FNV 0.55 GB/s 5 Fowler, Noll, Vo
CRC32 0.43 GB/s 9
MD5-32 0.33 GB/s 10 Ronald L. Rivest
SHA1-32 0.28 GB/s 10
Q.Score is a measure of quality of the hash function.
It depends on successfully passing SMHasher test set.
10 is a perfect score.
A 64-bit version, named XXH64, is available since r35.
It offers much better speed, but for 64-bit applications only.
Name Speed on 64 bits Speed on 32 bits
XXH64 13.8 GB/s 1.9 GB/s
XXH32 6.8 GB/s 6.0 GB/s
*/
#ifndef XXHASH_H_5627135585666179
#define XXHASH_H_5627135585666179 1
#if defined(__cplusplus)
extern "C" {
#endif
/* ****************************
* Definitions
******************************/
#include <stddef.h> /* size_t */
typedef enum { XXH_OK = 0, XXH_ERROR } XXH_errorcode;
/* ****************************
* API modifier
******************************/
/** XXH_INLINE_ALL (and XXH_PRIVATE_API)
* This is useful to include xxhash functions in `static` mode
* in order to inline them, and remove their symbol from the public list.
* Inlining can offer dramatic performance improvement on small keys.
* Methodology :
* #define XXH_INLINE_ALL
* #include "xxhash.h"
* `xxhash.c` is automatically included.
* It's not useful to compile and link it as a separate module.
*/
#if defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)
#ifndef XXH_STATIC_LINKING_ONLY
#define XXH_STATIC_LINKING_ONLY
#endif
#if defined(__GNUC__)
#define XXH_PUBLIC_API static __inline __attribute__((unused))
#elif defined(__cplusplus) || \
(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */)
#define XXH_PUBLIC_API static inline
#elif defined(_MSC_VER)
#define XXH_PUBLIC_API static __inline
#else
/* this version may generate warnings for unused static functions */
#define XXH_PUBLIC_API static
#endif
#else
#define XXH_PUBLIC_API /* do nothing */
#endif /* XXH_INLINE_ALL || XXH_PRIVATE_API */
/*! XXH_NAMESPACE, aka Namespace Emulation :
*
* If you want to include _and expose_ xxHash functions from within your own
* library,
* but also want to avoid symbol collisions with other libraries which may also
* include xxHash,
*
* you can use XXH_NAMESPACE, to automatically prefix any public symbol from
* xxhash library
* with the value of XXH_NAMESPACE (therefore, avoid NULL and numeric values).
*
* Note that no change is required within the calling program as long as it
* includes `xxhash.h` :
* regular symbol name will be automatically translated by this header.
*/
#ifdef XXH_NAMESPACE
#define XXH_CAT(A, B) A##B
#define XXH_NAME2(A, B) XXH_CAT(A, B)
#define XXH_versionNumber XXH_NAME2(XXH_NAMESPACE, XXH_versionNumber)
#define XXH32 XXH_NAME2(XXH_NAMESPACE, XXH32)
#define XXH32_createState XXH_NAME2(XXH_NAMESPACE, XXH32_createState)
#define XXH32_freeState XXH_NAME2(XXH_NAMESPACE, XXH32_freeState)
#define XXH32_reset XXH_NAME2(XXH_NAMESPACE, XXH32_reset)
#define XXH32_update XXH_NAME2(XXH_NAMESPACE, XXH32_update)
#define XXH32_digest XXH_NAME2(XXH_NAMESPACE, XXH32_digest)
#define XXH32_copyState XXH_NAME2(XXH_NAMESPACE, XXH32_copyState)
#define XXH32_canonicalFromHash \
XXH_NAME2(XXH_NAMESPACE, XXH32_canonicalFromHash)
#define XXH32_hashFromCanonical \
XXH_NAME2(XXH_NAMESPACE, XXH32_hashFromCanonical)
#define XXH64 XXH_NAME2(XXH_NAMESPACE, XXH64)
#define XXH64_createState XXH_NAME2(XXH_NAMESPACE, XXH64_createState)
#define XXH64_freeState XXH_NAME2(XXH_NAMESPACE, XXH64_freeState)
#define XXH64_reset XXH_NAME2(XXH_NAMESPACE, XXH64_reset)
#define XXH64_update XXH_NAME2(XXH_NAMESPACE, XXH64_update)
#define XXH64_digest XXH_NAME2(XXH_NAMESPACE, XXH64_digest)
#define XXH64_copyState XXH_NAME2(XXH_NAMESPACE, XXH64_copyState)
#define XXH64_canonicalFromHash \
XXH_NAME2(XXH_NAMESPACE, XXH64_canonicalFromHash)
#define XXH64_hashFromCanonical \
XXH_NAME2(XXH_NAMESPACE, XXH64_hashFromCanonical)
#endif
/* *************************************
* Version
***************************************/
#define XXH_VERSION_MAJOR 0
#define XXH_VERSION_MINOR 6
#define XXH_VERSION_RELEASE 5
#define XXH_VERSION_NUMBER \
(XXH_VERSION_MAJOR * 100 * 100 + XXH_VERSION_MINOR * 100 + \
XXH_VERSION_RELEASE)
XXH_PUBLIC_API unsigned XXH_versionNumber(void);
/*-**********************************************************************
* 32-bit hash
************************************************************************/
typedef unsigned int XXH32_hash_t;
/*! XXH32() :
Calculate the 32-bit hash of sequence "length" bytes stored at memory
address "input".
The memory between input & input+length must be valid (allocated and
read-accessible).
"seed" can be used to alter the result predictably.
Speed on Core 2 Duo @ 3 GHz (single thread, SMHasher benchmark) : 5.4 GB/s
*/
XXH_PUBLIC_API XXH32_hash_t XXH32(const void *input, size_t length,
unsigned int seed);
/*====== Streaming ======*/
typedef struct XXH32_state_s XXH32_state_t; /* incomplete type */
XXH_PUBLIC_API XXH32_state_t *XXH32_createState(void);
XXH_PUBLIC_API XXH_errorcode XXH32_freeState(XXH32_state_t *statePtr);
XXH_PUBLIC_API void XXH32_copyState(XXH32_state_t *dst_state,
const XXH32_state_t *src_state);
XXH_PUBLIC_API XXH_errorcode XXH32_reset(XXH32_state_t *statePtr,
unsigned int seed);
XXH_PUBLIC_API XXH_errorcode XXH32_update(XXH32_state_t *statePtr,
const void *input, size_t length);
XXH_PUBLIC_API XXH32_hash_t XXH32_digest(const XXH32_state_t *statePtr);
/*
* Streaming functions generate the xxHash of an input provided in multiple
* segments.
* Note that, for small input, they are slower than single-call functions, due
* to state management.
* For small inputs, prefer `XXH32()` and `XXH64()`, which are better optimized.
*
* XXH state must first be allocated, using XXH*_createState() .
*
* Start a new hash by initializing state with a seed, using XXH*_reset().
*
* Then, feed the hash state by calling XXH*_update() as many times as
* necessary.
* The function returns an error code, with 0 meaning OK, and any other value
* meaning there is an error.
*
* Finally, a hash value can be produced anytime, by using XXH*_digest().
* This function returns the nn-bits hash as an int or long long.
*
* It's still possible to continue inserting input into the hash state after a
* digest,
* and generate some new hashes later on, by calling again XXH*_digest().
*
* When done, free XXH state space if it was allocated dynamically.
*/
/*====== Canonical representation ======*/
typedef struct {
  unsigned char digest[4]; /* 32-bit hash in canonical (big-endian) byte order */
} XXH32_canonical_t;
XXH_PUBLIC_API void XXH32_canonicalFromHash(XXH32_canonical_t *dst,
XXH32_hash_t hash);
XXH_PUBLIC_API XXH32_hash_t
XXH32_hashFromCanonical(const XXH32_canonical_t *src);
/* Default result type for XXH functions are primitive unsigned 32 and 64 bits.
* The canonical representation uses human-readable write convention, aka
* big-endian (large digits first).
* These functions allow transformation of hash result into and from its
* canonical format.
* This way, hash values can be written into a file / memory, and remain
* comparable on different systems and programs.
*/
#ifndef XXH_NO_LONG_LONG
/*-**********************************************************************
* 64-bit hash
************************************************************************/
typedef unsigned long long XXH64_hash_t;
/*! XXH64() :
Calculate the 64-bit hash of sequence of length "len" stored at memory
address "input".
"seed" can be used to alter the result predictably.
This function runs faster on 64-bit systems, but slower on 32-bit systems
(see benchmark).
*/
XXH_PUBLIC_API XXH64_hash_t XXH64(const void *input, size_t length,
unsigned long long seed);
/*====== Streaming ======*/
typedef struct XXH64_state_s XXH64_state_t; /* incomplete type */
XXH_PUBLIC_API XXH64_state_t *XXH64_createState(void);
XXH_PUBLIC_API XXH_errorcode XXH64_freeState(XXH64_state_t *statePtr);
XXH_PUBLIC_API void XXH64_copyState(XXH64_state_t *dst_state,
const XXH64_state_t *src_state);
XXH_PUBLIC_API XXH_errorcode XXH64_reset(XXH64_state_t *statePtr,
unsigned long long seed);
XXH_PUBLIC_API XXH_errorcode XXH64_update(XXH64_state_t *statePtr,
const void *input, size_t length);
XXH_PUBLIC_API XXH64_hash_t XXH64_digest(const XXH64_state_t *statePtr);
/*====== Canonical representation ======*/
typedef struct {
  unsigned char digest[8]; /* 64-bit hash in canonical (big-endian) byte order */
} XXH64_canonical_t;
XXH_PUBLIC_API void XXH64_canonicalFromHash(XXH64_canonical_t *dst,
XXH64_hash_t hash);
XXH_PUBLIC_API XXH64_hash_t
XXH64_hashFromCanonical(const XXH64_canonical_t *src);
#endif /* XXH_NO_LONG_LONG */
#ifdef XXH_STATIC_LINKING_ONLY
/* ================================================================================================
This section contains declarations which are not guaranteed to remain stable.
They may change in future versions, becoming incompatible with a different
version of the library.
These declarations should only be used with static linking.
Never use them in association with dynamic linking !
===================================================================================================
*/
/* These definitions are only present to allow
* static allocation of XXH state, on stack or in a struct for example.
* Never **ever** use members directly. */
#if !defined(__VMS) && \
(defined(__cplusplus) || \
(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */))
#include <stdint.h>
struct XXH32_state_s {
  /* Opaque internal streaming-hash state.  The definition is exposed
   * only to allow static allocation (on the stack or inside another
   * struct); per the warning above, never read or write members
   * directly -- the layout may change between library versions. */
  uint32_t total_len_32;
  uint32_t large_len;
  uint32_t v1;
  uint32_t v2;
  uint32_t v3;
  uint32_t v4;
  uint32_t mem32[4];
  uint32_t memsize;
  uint32_t
      reserved; /* never read nor write, might be removed in a future version */
}; /* typedef'd to XXH32_state_t */
struct XXH64_state_s {
  /* Opaque internal streaming-hash state (64-bit variant).  Exposed
   * only to allow static allocation; never access members directly. */
  uint64_t total_len;
  uint64_t v1;
  uint64_t v2;
  uint64_t v3;
  uint64_t v4;
  uint64_t mem64[4];
  uint32_t memsize;
  uint32_t reserved[2]; /* never read nor write, might be removed in a future
                           version */
}; /* typedef'd to XXH64_state_t */
#else
struct XXH32_state_s {
  /* Fallback layout for compilers without <stdint.h> (pre-C99, non-C++);
   * "unsigned" stands in for a 32-bit integer here.  Exposed only for
   * static allocation; never access members directly. */
  unsigned total_len_32;
  unsigned large_len;
  unsigned v1;
  unsigned v2;
  unsigned v3;
  unsigned v4;
  unsigned mem32[4];
  unsigned memsize;
  unsigned
      reserved; /* never read nor write, might be removed in a future version */
}; /* typedef'd to XXH32_state_t */
#ifndef XXH_NO_LONG_LONG /* remove 64-bit support */
struct XXH64_state_s {
  /* Fallback 64-bit state for compilers without <stdint.h>; relies on
   * "unsigned long long" (at least 64 bits).  Exposed only for static
   * allocation; never access members directly. */
  unsigned long long total_len;
  unsigned long long v1;
  unsigned long long v2;
  unsigned long long v3;
  unsigned long long v4;
  unsigned long long mem64[4];
  unsigned memsize;
  unsigned reserved[2]; /* never read nor write, might be removed in a future
                           version */
}; /* typedef'd to XXH64_state_t */
#endif
#endif
#if defined(XXH_INLINE_ALL) || defined(XXH_PRIVATE_API)
#include "xxhash.c" /* include xxhash function bodies as `static`, for inlining */
#endif
#endif /* XXH_STATIC_LINKING_ONLY */
#if defined(__cplusplus)
}
#endif
#endif /* XXHASH_H_5627135585666179 */
|
Java
|
/*
* Copyright (c) 2016, The OpenThread Authors.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/**
* @file alarm.c
* Platform abstraction for the alarm
*/
#include <openthread/platform/alarm-milli.h>
#include "platform-da15000.h"
#include "hw_timer0.h"
static bool sIsRunning = false;
static uint32_t sAlarm = 0;
static uint32_t sCounter;
volatile bool sAlarmFired = false;
/* Timer0 reload interrupt handler: advances the tick counter that backs
 * the millisecond clock (see otPlatAlarmMilliGetNow). */
static void timer0_interrupt_cb(void)
{
    sCounter++;
}
/**
 * Service the alarm: if an alarm is armed and its deadline has been
 * reached, disarm it and notify OpenThread via otPlatAlarmMilliFired().
 *
 * The expiry test uses wrap-safe unsigned arithmetic: the alarm is due
 * when (sCounter - sAlarm) mod 2^32 is less than 2^31, i.e. the tick
 * counter has advanced to or past the deadline even across a 32-bit
 * counter wrap.  The previous plain comparison (sAlarm <= sCounter)
 * fired immediately when t0 + dt wrapped past UINT32_MAX, and could
 * stall when the counter wrapped before reaching the deadline.
 */
void da15000AlarmProcess(otInstance *aInstance)
{
    if (sIsRunning && ((uint32_t)(sCounter - sAlarm) < 0x80000000UL))
    {
        sIsRunning = false;
        otPlatAlarmMilliFired(aInstance);
    }
}
/**
 * Initialise hardware Timer0 as the tick source for the millisecond
 * alarm: PWM mode, fast clock divided by 4, reload value 0x07D0 (2000),
 * with timer0_interrupt_cb registered as the interrupt handler.
 * NOTE(review): the tick period depends on the fast-clock frequency --
 * confirm the reload value yields a 1 ms tick on this board.
 */
void da15000AlarmInit(void)
{
    hw_timer0_init(NULL);
    hw_timer0_set_clock_source(HW_TIMER0_CLK_SRC_FAST);
    hw_timer0_set_pwm_mode(HW_TIMER0_MODE_PWM);
    hw_timer0_set_fast_clock_div(HW_TIMER0_FAST_CLK_DIV_4);
    hw_timer0_set_t0_reload(0x07D0, 0x07D0);
    hw_timer0_register_int(timer0_interrupt_cb);
    hw_timer0_set_on_clock_div(false);
}
/**
 * @return the current tick count (interrupt-driven, wraps at 2^32).
 */
uint32_t otPlatAlarmMilliGetNow(void)
{
    return sCounter;
}
/**
 * Arm the millisecond alarm to fire at time t0 + dt (ticks).
 * Expiry is detected later by da15000AlarmProcess(), which invokes the
 * OpenThread callback.
 */
void otPlatAlarmMilliStartAt(otInstance *aInstance, uint32_t t0, uint32_t dt)
{
    OT_UNUSED_VARIABLE(aInstance);
    sAlarm = t0 + dt;
    sIsRunning = true;
    /* NOTE(review): the timer is only enabled while the tick counter is
     * still zero -- presumably it keeps running once started; confirm
     * against the hw_timer0 driver semantics. */
    if (sCounter == 0)
    {
        hw_timer0_enable();
    }
    hw_timer0_unfreeze();
}
/**
 * Disarm the alarm and freeze the underlying hardware timer.
 */
void otPlatAlarmMilliStop(otInstance *aInstance)
{
    OT_UNUSED_VARIABLE(aInstance);
    sIsRunning = false;
    hw_timer0_freeze();
}
|
Java
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
# Extension support is available only when the runtime provides
# ExtendableProtocolMessage; otherwise fall back to the plain base class.
_extension_runtime = hasattr(ProtocolBuffer, 'ExtendableProtocolMessage')
if _extension_runtime:
  _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
  _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
from google.appengine.api.api_base_pb import *
import google.appengine.api.api_base_pb
from google.appengine.datastore.action_pb import *
import google.appengine.datastore.action_pb
from google.appengine.datastore.entity_pb import *
import google.appengine.datastore.entity_pb
from google.appengine.datastore.snapshot_pb import *
import google.appengine.datastore.snapshot_pb
class InternalHeader(ProtocolBuffer.ProtocolMessage):
has_requesting_app_id_ = 0
requesting_app_id_ = ""
has_requesting_project_id_ = 0
requesting_project_id_ = ""
has_requesting_version_id_ = 0
requesting_version_id_ = ""
has_api_settings_ = 0
api_settings_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def requesting_app_id(self): return self.requesting_app_id_
def set_requesting_app_id(self, x):
self.has_requesting_app_id_ = 1
self.requesting_app_id_ = x
def clear_requesting_app_id(self):
if self.has_requesting_app_id_:
self.has_requesting_app_id_ = 0
self.requesting_app_id_ = ""
def has_requesting_app_id(self): return self.has_requesting_app_id_
def requesting_project_id(self): return self.requesting_project_id_
def set_requesting_project_id(self, x):
self.has_requesting_project_id_ = 1
self.requesting_project_id_ = x
def clear_requesting_project_id(self):
if self.has_requesting_project_id_:
self.has_requesting_project_id_ = 0
self.requesting_project_id_ = ""
def has_requesting_project_id(self): return self.has_requesting_project_id_
def requesting_version_id(self): return self.requesting_version_id_
def set_requesting_version_id(self, x):
self.has_requesting_version_id_ = 1
self.requesting_version_id_ = x
def clear_requesting_version_id(self):
if self.has_requesting_version_id_:
self.has_requesting_version_id_ = 0
self.requesting_version_id_ = ""
def has_requesting_version_id(self): return self.has_requesting_version_id_
def api_settings(self): return self.api_settings_
def set_api_settings(self, x):
self.has_api_settings_ = 1
self.api_settings_ = x
def clear_api_settings(self):
if self.has_api_settings_:
self.has_api_settings_ = 0
self.api_settings_ = ""
def has_api_settings(self): return self.has_api_settings_
def MergeFrom(self, x):
assert x is not self
if (x.has_requesting_app_id()): self.set_requesting_app_id(x.requesting_app_id())
if (x.has_requesting_project_id()): self.set_requesting_project_id(x.requesting_project_id())
if (x.has_requesting_version_id()): self.set_requesting_version_id(x.requesting_version_id())
if (x.has_api_settings()): self.set_api_settings(x.api_settings())
def Equals(self, x):
if x is self: return 1
if self.has_requesting_app_id_ != x.has_requesting_app_id_: return 0
if self.has_requesting_app_id_ and self.requesting_app_id_ != x.requesting_app_id_: return 0
if self.has_requesting_project_id_ != x.has_requesting_project_id_: return 0
if self.has_requesting_project_id_ and self.requesting_project_id_ != x.requesting_project_id_: return 0
if self.has_requesting_version_id_ != x.has_requesting_version_id_: return 0
if self.has_requesting_version_id_ and self.requesting_version_id_ != x.requesting_version_id_: return 0
if self.has_api_settings_ != x.has_api_settings_: return 0
if self.has_api_settings_ and self.api_settings_ != x.api_settings_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_requesting_app_id_): n += 1 + self.lengthString(len(self.requesting_app_id_))
if (self.has_requesting_project_id_): n += 1 + self.lengthString(len(self.requesting_project_id_))
if (self.has_requesting_version_id_): n += 1 + self.lengthString(len(self.requesting_version_id_))
if (self.has_api_settings_): n += 1 + self.lengthString(len(self.api_settings_))
return n
def ByteSizePartial(self):
n = 0
if (self.has_requesting_app_id_): n += 1 + self.lengthString(len(self.requesting_app_id_))
if (self.has_requesting_project_id_): n += 1 + self.lengthString(len(self.requesting_project_id_))
if (self.has_requesting_version_id_): n += 1 + self.lengthString(len(self.requesting_version_id_))
if (self.has_api_settings_): n += 1 + self.lengthString(len(self.api_settings_))
return n
def Clear(self):
self.clear_requesting_app_id()
self.clear_requesting_project_id()
self.clear_requesting_version_id()
self.clear_api_settings()
def OutputUnchecked(self, out):
if (self.has_requesting_app_id_):
out.putVarInt32(18)
out.putPrefixedString(self.requesting_app_id_)
if (self.has_api_settings_):
out.putVarInt32(26)
out.putPrefixedString(self.api_settings_)
if (self.has_requesting_project_id_):
out.putVarInt32(34)
out.putPrefixedString(self.requesting_project_id_)
if (self.has_requesting_version_id_):
out.putVarInt32(42)
out.putPrefixedString(self.requesting_version_id_)
def OutputPartial(self, out):
if (self.has_requesting_app_id_):
out.putVarInt32(18)
out.putPrefixedString(self.requesting_app_id_)
if (self.has_api_settings_):
out.putVarInt32(26)
out.putPrefixedString(self.api_settings_)
if (self.has_requesting_project_id_):
out.putVarInt32(34)
out.putPrefixedString(self.requesting_project_id_)
if (self.has_requesting_version_id_):
out.putVarInt32(42)
out.putPrefixedString(self.requesting_version_id_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 18:
self.set_requesting_app_id(d.getPrefixedString())
continue
if tt == 26:
self.set_api_settings(d.getPrefixedString())
continue
if tt == 34:
self.set_requesting_project_id(d.getPrefixedString())
continue
if tt == 42:
self.set_requesting_version_id(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_requesting_app_id_: res+=prefix+("requesting_app_id: %s\n" % self.DebugFormatString(self.requesting_app_id_))
if self.has_requesting_project_id_: res+=prefix+("requesting_project_id: %s\n" % self.DebugFormatString(self.requesting_project_id_))
if self.has_requesting_version_id_: res+=prefix+("requesting_version_id: %s\n" % self.DebugFormatString(self.requesting_version_id_))
if self.has_api_settings_: res+=prefix+("api_settings: %s\n" % self.DebugFormatString(self.api_settings_))
return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    """Expands a sparse {tag: value} dict into a dense tuple of length maxtag+1."""
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants (k<field_name> = proto field number).
  krequesting_app_id = 2
  krequesting_project_id = 4
  krequesting_version_id = 5
  kapi_settings = 3

  # Dense tag -> field-name table used for debug/text output.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    2: "requesting_app_id",
    3: "api_settings",
    4: "requesting_project_id",
    5: "requesting_version_id",
  }, 5)

  # Dense tag -> encoder-type table; unknown tags fall back to MAX_TYPE.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.STRING,
    5: ProtocolBuffer.Encoder.STRING,
  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.InternalHeader'
class Transaction(ProtocolBuffer.ProtocolMessage):
  """Generated message class for apphosting_datastore_v3.Transaction.

  Fields (tag numbers per the k* constants below):
    header       -- optional InternalHeader message, field 4
    handle       -- required fixed64 (put64/get64), field 1
    app          -- required string, field 2
    mark_changes -- optional bool, field 3
  """
  # Class-level defaults; instances shadow these on first set.
  has_header_ = 0
  header_ = None
  has_handle_ = 0
  handle_ = 0
  has_app_ = 0
  app_ = ""
  has_mark_changes_ = 0
  mark_changes_ = 0
  def __init__(self, contents=None):
    # Lock guarding lazy construction of sub-messages (see header()).
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def header(self):
    # Lazily construct the sub-message: check, lock, re-check.
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_
  def mutable_header(self): self.has_header_ = 1; return self.header()
  def clear_header(self):
    # Keep the allocated sub-message around; just clear its contents.
    if self.has_header_:
      self.has_header_ = 0;
      if self.header_ is not None: self.header_.Clear()
  def has_header(self): return self.has_header_
  def handle(self): return self.handle_
  def set_handle(self, x):
    self.has_handle_ = 1
    self.handle_ = x
  def clear_handle(self):
    if self.has_handle_:
      self.has_handle_ = 0
      self.handle_ = 0
  def has_handle(self): return self.has_handle_
  def app(self): return self.app_
  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x
  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""
  def has_app(self): return self.has_app_
  def mark_changes(self): return self.mark_changes_
  def set_mark_changes(self, x):
    self.has_mark_changes_ = 1
    self.mark_changes_ = x
  def clear_mark_changes(self):
    if self.has_mark_changes_:
      self.has_mark_changes_ = 0
      self.mark_changes_ = 0
  def has_mark_changes(self): return self.has_mark_changes_

  def MergeFrom(self, x):
    """Field-wise merge of another Transaction into self (set fields win)."""
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    if (x.has_handle()): self.set_handle(x.handle())
    if (x.has_app()): self.set_app(x.app())
    if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
  def Equals(self, x):
    """Deep equality: both presence flags and values must match."""
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if self.has_handle_ != x.has_handle_: return 0
    if self.has_handle_ and self.handle_ != x.handle_: return 0
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if self.has_mark_changes_ != x.has_mark_changes_: return 0
    if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff required fields (handle, app) are set and sub-messages are initialized."""
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    if (not self.has_handle_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: handle not set.')
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    return initialized
  def ByteSize(self):
    """Serialized size assuming required fields are set.

    The trailing + 10 covers handle (1 tag byte + 8-byte fixed64)
    plus app's 1 tag byte; app's length-prefixed payload is added
    via lengthString above.
    """
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    n += self.lengthString(len(self.app_))
    if (self.has_mark_changes_): n += 2
    return n + 10
  def ByteSizePartial(self):
    """Serialized size counting only fields that are actually set."""
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    if (self.has_handle_):
      n += 9
    if (self.has_app_):
      n += 1
      n += self.lengthString(len(self.app_))
    if (self.has_mark_changes_): n += 2
    return n
  def Clear(self):
    self.clear_header()
    self.clear_handle()
    self.clear_app()
    self.clear_mark_changes()
  def OutputUnchecked(self, out):
    """Serializes to `out`; assumes IsInitialized() (emits handle/app unconditionally).

    Tags: 9 = handle (fixed64), 18 = app (string), 24 = mark_changes
    (varint bool), 34 = header (length-prefixed sub-message).
    """
    out.putVarInt32(9)
    out.put64(self.handle_)
    out.putVarInt32(18)
    out.putPrefixedString(self.app_)
    if (self.has_mark_changes_):
      out.putVarInt32(24)
      out.putBoolean(self.mark_changes_)
    if (self.has_header_):
      out.putVarInt32(34)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)
  def OutputPartial(self, out):
    """Serializes only the fields that are set (required fields may be absent)."""
    if (self.has_handle_):
      out.putVarInt32(9)
      out.put64(self.handle_)
    if (self.has_app_):
      out.putVarInt32(18)
      out.putPrefixedString(self.app_)
    if (self.has_mark_changes_):
      out.putVarInt32(24)
      out.putBoolean(self.mark_changes_)
    if (self.has_header_):
      out.putVarInt32(34)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)
  def TryMerge(self, d):
    """Merges wire-format data from decoder `d`; skips unknown tags, raises on tag 0."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 9:
        self.set_handle(d.get64())
        continue
      if tt == 18:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_mark_changes(d.getBoolean())
        continue
      if tt == 34:
        # Length-prefixed sub-message: decode header from a bounded
        # sub-decoder over the next `length` bytes.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Renders the set fields as indented protobuf-style text."""
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_handle_: res+=prefix+("handle: %s\n" % self.DebugFormatFixed64(self.handle_))
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    """Expands a sparse {tag: value} dict into a dense tuple of length maxtag+1."""
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants.
  kheader = 4
  khandle = 1
  kapp = 2
  kmark_changes = 3

  # Dense tag -> field-name / encoder-type tables for debug output.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "handle",
    2: "app",
    3: "mark_changes",
    4: "header",
  }, 4)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.DOUBLE,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.STRING,
  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Transaction'
class Query_Filter(ProtocolBuffer.ProtocolMessage):
  """Generated message class for the Query.Filter group.

  Fields:
    op       -- required Operator enum (varint, tag 48 = field 6)
    property -- repeated Property sub-message (tag 114 = field 14)

  Encoded as a protobuf *group* inside Query: TryMerge reads until
  the END_GROUP tag (36) rather than until the decoder is exhausted.
  """
  # Operator enum values.
  LESS_THAN     =    1
  LESS_THAN_OR_EQUAL =    2
  GREATER_THAN =    3
  GREATER_THAN_OR_EQUAL =    4
  EQUAL        =    5
  IN           =    6
  EXISTS       =    7

  _Operator_NAMES = {
    1: "LESS_THAN",
    2: "LESS_THAN_OR_EQUAL",
    3: "GREATER_THAN",
    4: "GREATER_THAN_OR_EQUAL",
    5: "EQUAL",
    6: "IN",
    7: "EXISTS",
  }

  def Operator_Name(cls, x): return cls._Operator_NAMES.get(x, "")
  # Pre-decorator-era classmethod wrapping (Python 2.1-style generated code).
  Operator_Name = classmethod(Operator_Name)

  has_op_ = 0
  op_ = 0

  def __init__(self, contents=None):
    self.property_ = []
    if contents is not None: self.MergeFromString(contents)
  def op(self): return self.op_
  def set_op(self, x):
    self.has_op_ = 1
    self.op_ = x
  def clear_op(self):
    if self.has_op_:
      self.has_op_ = 0
      self.op_ = 0
  def has_op(self): return self.has_op_
  def property_size(self): return len(self.property_)
  def property_list(self): return self.property_
  def property(self, i):
    return self.property_[i]
  def mutable_property(self, i):
    return self.property_[i]
  def add_property(self):
    # Appends a fresh Property and returns it for the caller to fill in.
    x = Property()
    self.property_.append(x)
    return x
  def clear_property(self):
    self.property_ = []

  def MergeFrom(self, x):
    """Field-wise merge; repeated `property` entries are appended (copied)."""
    assert x is not self
    if (x.has_op()): self.set_op(x.op())
    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
  def Equals(self, x):
    """Deep equality over op and the repeated property list."""
    if x is self: return 1
    if self.has_op_ != x.has_op_: return 0
    if self.has_op_ and self.op_ != x.op_: return 0
    if len(self.property_) != len(x.property_): return 0
    for e1, e2 in zip(self.property_, x.property_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff required `op` is set and every property is initialized."""
    initialized = 1
    if (not self.has_op_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: op not set.')
    for p in self.property_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    """Serialized size assuming required fields are set (+1 for op's tag byte)."""
    n = 0
    n += self.lengthVarInt64(self.op_)
    n += 1 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
    return n + 1
  def ByteSizePartial(self):
    """Serialized size counting only fields that are actually set."""
    n = 0
    if (self.has_op_):
      n += 1
      n += self.lengthVarInt64(self.op_)
    n += 1 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSizePartial())
    return n
  def Clear(self):
    self.clear_op()
    self.clear_property()
  def OutputUnchecked(self, out):
    """Serializes group contents (no group delimiters; the parent emits those)."""
    out.putVarInt32(48)
    out.putVarInt32(self.op_)
    for i in xrange(len(self.property_)):
      out.putVarInt32(114)
      out.putVarInt32(self.property_[i].ByteSize())
      self.property_[i].OutputUnchecked(out)
  def OutputPartial(self, out):
    """Like OutputUnchecked but skips `op` when unset."""
    if (self.has_op_):
      out.putVarInt32(48)
      out.putVarInt32(self.op_)
    for i in xrange(len(self.property_)):
      out.putVarInt32(114)
      out.putVarInt32(self.property_[i].ByteSizePartial())
      self.property_[i].OutputPartial(out)
  def TryMerge(self, d):
    """Merges group contents from decoder `d` until the END_GROUP tag (36)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 36: break
      if tt == 48:
        self.set_op(d.getVarInt32())
        continue
      if tt == 114:
        # Length-prefixed Property sub-message via a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_property().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Renders set fields as indented text; numbers repeated elements on request."""
    res=""
    if self.has_op_: res+=prefix+("op: %s\n" % self.DebugFormatInt32(self.op_))
    cnt=0
    for e in self.property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("property%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
class Query_Order(ProtocolBuffer.ProtocolMessage):
  """Generated message class for the Query.Order group.

  Fields:
    property  -- required string (tag 82 = field 10)
    direction -- optional Direction enum, defaults to 1 (ASCENDING)
                 (tag 88 = field 11)

  Encoded as a protobuf *group* inside Query: TryMerge reads until
  the END_GROUP tag (76) rather than until the decoder is exhausted.
  """
  # Direction enum values.
  ASCENDING    =    1
  DESCENDING   =    2

  _Direction_NAMES = {
    1: "ASCENDING",
    2: "DESCENDING",
  }

  def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
  # Pre-decorator-era classmethod wrapping (Python 2.1-style generated code).
  Direction_Name = classmethod(Direction_Name)

  has_property_ = 0
  property_ = ""
  has_direction_ = 0
  direction_ = 1

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def property(self): return self.property_
  def set_property(self, x):
    self.has_property_ = 1
    self.property_ = x
  def clear_property(self):
    if self.has_property_:
      self.has_property_ = 0
      self.property_ = ""
  def has_property(self): return self.has_property_
  def direction(self): return self.direction_
  def set_direction(self, x):
    self.has_direction_ = 1
    self.direction_ = x
  def clear_direction(self):
    # Resets to the proto default, ASCENDING (1), not zero.
    if self.has_direction_:
      self.has_direction_ = 0
      self.direction_ = 1
  def has_direction(self): return self.has_direction_

  def MergeFrom(self, x):
    """Field-wise merge of another Query_Order into self (set fields win)."""
    assert x is not self
    if (x.has_property()): self.set_property(x.property())
    if (x.has_direction()): self.set_direction(x.direction())
  def Equals(self, x):
    """Deep equality: both presence flags and values must match."""
    if x is self: return 1
    if self.has_property_ != x.has_property_: return 0
    if self.has_property_ and self.property_ != x.property_: return 0
    if self.has_direction_ != x.has_direction_: return 0
    if self.has_direction_ and self.direction_ != x.direction_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff required `property` is set."""
    initialized = 1
    if (not self.has_property_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: property not set.')
    return initialized
  def ByteSize(self):
    """Serialized size assuming required fields are set (+1 for property's tag byte)."""
    n = 0
    n += self.lengthString(len(self.property_))
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    return n + 1
  def ByteSizePartial(self):
    """Serialized size counting only fields that are actually set."""
    n = 0
    if (self.has_property_):
      n += 1
      n += self.lengthString(len(self.property_))
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    return n
  def Clear(self):
    self.clear_property()
    self.clear_direction()
  def OutputUnchecked(self, out):
    """Serializes group contents (no group delimiters; the parent emits those)."""
    out.putVarInt32(82)
    out.putPrefixedString(self.property_)
    if (self.has_direction_):
      out.putVarInt32(88)
      out.putVarInt32(self.direction_)
  def OutputPartial(self, out):
    """Like OutputUnchecked but skips `property` when unset."""
    if (self.has_property_):
      out.putVarInt32(82)
      out.putPrefixedString(self.property_)
    if (self.has_direction_):
      out.putVarInt32(88)
      out.putVarInt32(self.direction_)
  def TryMerge(self, d):
    """Merges group contents from decoder `d` until the END_GROUP tag (76)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 76: break
      if tt == 82:
        self.set_property(d.getPrefixedString())
        continue
      if tt == 88:
        self.set_direction(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Renders the set fields as indented protobuf-style text."""
    res=""
    if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
    if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
    return res
class Query(ProtocolBuffer.ProtocolMessage):
ORDER_FIRST = 1
ANCESTOR_FIRST = 2
FILTER_FIRST = 3
_Hint_NAMES = {
1: "ORDER_FIRST",
2: "ANCESTOR_FIRST",
3: "FILTER_FIRST",
}
def Hint_Name(cls, x): return cls._Hint_NAMES.get(x, "")
Hint_Name = classmethod(Hint_Name)
has_header_ = 0
header_ = None
has_app_ = 0
app_ = ""
has_name_space_ = 0
name_space_ = ""
has_kind_ = 0
kind_ = ""
has_ancestor_ = 0
ancestor_ = None
has_search_query_ = 0
search_query_ = ""
has_hint_ = 0
hint_ = 0
has_count_ = 0
count_ = 0
has_offset_ = 0
offset_ = 0
has_limit_ = 0
limit_ = 0
has_compiled_cursor_ = 0
compiled_cursor_ = None
has_end_compiled_cursor_ = 0
end_compiled_cursor_ = None
has_require_perfect_plan_ = 0
require_perfect_plan_ = 0
has_keys_only_ = 0
keys_only_ = 0
has_transaction_ = 0
transaction_ = None
has_compile_ = 0
compile_ = 0
has_failover_ms_ = 0
failover_ms_ = 0
has_strong_ = 0
strong_ = 0
has_distinct_ = 0
distinct_ = 0
has_min_safe_time_seconds_ = 0
min_safe_time_seconds_ = 0
has_persist_offset_ = 0
persist_offset_ = 1
def __init__(self, contents=None):
self.filter_ = []
self.order_ = []
self.composite_index_ = []
self.property_name_ = []
self.group_by_property_name_ = []
self.safe_replica_name_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def app(self): return self.app_
def set_app(self, x):
self.has_app_ = 1
self.app_ = x
def clear_app(self):
if self.has_app_:
self.has_app_ = 0
self.app_ = ""
def has_app(self): return self.has_app_
def name_space(self): return self.name_space_
def set_name_space(self, x):
self.has_name_space_ = 1
self.name_space_ = x
def clear_name_space(self):
if self.has_name_space_:
self.has_name_space_ = 0
self.name_space_ = ""
def has_name_space(self): return self.has_name_space_
def kind(self): return self.kind_
def set_kind(self, x):
self.has_kind_ = 1
self.kind_ = x
def clear_kind(self):
if self.has_kind_:
self.has_kind_ = 0
self.kind_ = ""
def has_kind(self): return self.has_kind_
def ancestor(self):
if self.ancestor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.ancestor_ is None: self.ancestor_ = Reference()
finally:
self.lazy_init_lock_.release()
return self.ancestor_
def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
def clear_ancestor(self):
if self.has_ancestor_:
self.has_ancestor_ = 0;
if self.ancestor_ is not None: self.ancestor_.Clear()
def has_ancestor(self): return self.has_ancestor_
def filter_size(self): return len(self.filter_)
def filter_list(self): return self.filter_
def filter(self, i):
return self.filter_[i]
def mutable_filter(self, i):
return self.filter_[i]
def add_filter(self):
x = Query_Filter()
self.filter_.append(x)
return x
def clear_filter(self):
self.filter_ = []
def search_query(self): return self.search_query_
def set_search_query(self, x):
self.has_search_query_ = 1
self.search_query_ = x
def clear_search_query(self):
if self.has_search_query_:
self.has_search_query_ = 0
self.search_query_ = ""
def has_search_query(self): return self.has_search_query_
def order_size(self): return len(self.order_)
def order_list(self): return self.order_
def order(self, i):
return self.order_[i]
def mutable_order(self, i):
return self.order_[i]
def add_order(self):
x = Query_Order()
self.order_.append(x)
return x
def clear_order(self):
self.order_ = []
def hint(self): return self.hint_
def set_hint(self, x):
self.has_hint_ = 1
self.hint_ = x
def clear_hint(self):
if self.has_hint_:
self.has_hint_ = 0
self.hint_ = 0
def has_hint(self): return self.has_hint_
def count(self): return self.count_
def set_count(self, x):
self.has_count_ = 1
self.count_ = x
def clear_count(self):
if self.has_count_:
self.has_count_ = 0
self.count_ = 0
def has_count(self): return self.has_count_
def offset(self): return self.offset_
def set_offset(self, x):
self.has_offset_ = 1
self.offset_ = x
def clear_offset(self):
if self.has_offset_:
self.has_offset_ = 0
self.offset_ = 0
def has_offset(self): return self.has_offset_
def limit(self): return self.limit_
def set_limit(self, x):
self.has_limit_ = 1
self.limit_ = x
def clear_limit(self):
if self.has_limit_:
self.has_limit_ = 0
self.limit_ = 0
def has_limit(self): return self.has_limit_
def compiled_cursor(self):
if self.compiled_cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor()
finally:
self.lazy_init_lock_.release()
return self.compiled_cursor_
def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor()
def clear_compiled_cursor(self):
if self.has_compiled_cursor_:
self.has_compiled_cursor_ = 0;
if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear()
def has_compiled_cursor(self): return self.has_compiled_cursor_
def end_compiled_cursor(self):
if self.end_compiled_cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.end_compiled_cursor_ is None: self.end_compiled_cursor_ = CompiledCursor()
finally:
self.lazy_init_lock_.release()
return self.end_compiled_cursor_
def mutable_end_compiled_cursor(self): self.has_end_compiled_cursor_ = 1; return self.end_compiled_cursor()
def clear_end_compiled_cursor(self):
if self.has_end_compiled_cursor_:
self.has_end_compiled_cursor_ = 0;
if self.end_compiled_cursor_ is not None: self.end_compiled_cursor_.Clear()
def has_end_compiled_cursor(self): return self.has_end_compiled_cursor_
def composite_index_size(self): return len(self.composite_index_)
def composite_index_list(self): return self.composite_index_
def composite_index(self, i):
return self.composite_index_[i]
def mutable_composite_index(self, i):
return self.composite_index_[i]
def add_composite_index(self):
x = CompositeIndex()
self.composite_index_.append(x)
return x
def clear_composite_index(self):
self.composite_index_ = []
def require_perfect_plan(self): return self.require_perfect_plan_
def set_require_perfect_plan(self, x):
self.has_require_perfect_plan_ = 1
self.require_perfect_plan_ = x
def clear_require_perfect_plan(self):
if self.has_require_perfect_plan_:
self.has_require_perfect_plan_ = 0
self.require_perfect_plan_ = 0
def has_require_perfect_plan(self): return self.has_require_perfect_plan_
def keys_only(self): return self.keys_only_
def set_keys_only(self, x):
self.has_keys_only_ = 1
self.keys_only_ = x
def clear_keys_only(self):
if self.has_keys_only_:
self.has_keys_only_ = 0
self.keys_only_ = 0
def has_keys_only(self): return self.has_keys_only_
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
if self.has_transaction_:
self.has_transaction_ = 0;
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def compile(self): return self.compile_
def set_compile(self, x):
self.has_compile_ = 1
self.compile_ = x
def clear_compile(self):
if self.has_compile_:
self.has_compile_ = 0
self.compile_ = 0
def has_compile(self): return self.has_compile_
def failover_ms(self): return self.failover_ms_
def set_failover_ms(self, x):
self.has_failover_ms_ = 1
self.failover_ms_ = x
def clear_failover_ms(self):
if self.has_failover_ms_:
self.has_failover_ms_ = 0
self.failover_ms_ = 0
def has_failover_ms(self): return self.has_failover_ms_
def strong(self): return self.strong_
def set_strong(self, x):
self.has_strong_ = 1
self.strong_ = x
def clear_strong(self):
if self.has_strong_:
self.has_strong_ = 0
self.strong_ = 0
def has_strong(self): return self.has_strong_
def property_name_size(self): return len(self.property_name_)
def property_name_list(self): return self.property_name_
def property_name(self, i):
return self.property_name_[i]
def set_property_name(self, i, x):
self.property_name_[i] = x
def add_property_name(self, x):
self.property_name_.append(x)
def clear_property_name(self):
self.property_name_ = []
def group_by_property_name_size(self): return len(self.group_by_property_name_)
def group_by_property_name_list(self): return self.group_by_property_name_
def group_by_property_name(self, i):
return self.group_by_property_name_[i]
def set_group_by_property_name(self, i, x):
self.group_by_property_name_[i] = x
def add_group_by_property_name(self, x):
self.group_by_property_name_.append(x)
def clear_group_by_property_name(self):
self.group_by_property_name_ = []
def distinct(self): return self.distinct_
def set_distinct(self, x):
self.has_distinct_ = 1
self.distinct_ = x
def clear_distinct(self):
if self.has_distinct_:
self.has_distinct_ = 0
self.distinct_ = 0
def has_distinct(self): return self.has_distinct_
def min_safe_time_seconds(self): return self.min_safe_time_seconds_
def set_min_safe_time_seconds(self, x):
self.has_min_safe_time_seconds_ = 1
self.min_safe_time_seconds_ = x
def clear_min_safe_time_seconds(self):
if self.has_min_safe_time_seconds_:
self.has_min_safe_time_seconds_ = 0
self.min_safe_time_seconds_ = 0
def has_min_safe_time_seconds(self): return self.has_min_safe_time_seconds_
def safe_replica_name_size(self): return len(self.safe_replica_name_)
def safe_replica_name_list(self): return self.safe_replica_name_
def safe_replica_name(self, i):
return self.safe_replica_name_[i]
def set_safe_replica_name(self, i, x):
self.safe_replica_name_[i] = x
def add_safe_replica_name(self, x):
self.safe_replica_name_.append(x)
def clear_safe_replica_name(self):
self.safe_replica_name_ = []
def persist_offset(self): return self.persist_offset_
def set_persist_offset(self, x):
self.has_persist_offset_ = 1
self.persist_offset_ = x
def clear_persist_offset(self):
if self.has_persist_offset_:
self.has_persist_offset_ = 0
self.persist_offset_ = 1
def has_persist_offset(self): return self.has_persist_offset_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_app()): self.set_app(x.app())
if (x.has_name_space()): self.set_name_space(x.name_space())
if (x.has_kind()): self.set_kind(x.kind())
if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
for i in xrange(x.filter_size()): self.add_filter().CopyFrom(x.filter(i))
if (x.has_search_query()): self.set_search_query(x.search_query())
for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i))
if (x.has_hint()): self.set_hint(x.hint())
if (x.has_count()): self.set_count(x.count())
if (x.has_offset()): self.set_offset(x.offset())
if (x.has_limit()): self.set_limit(x.limit())
if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor())
if (x.has_end_compiled_cursor()): self.mutable_end_compiled_cursor().MergeFrom(x.end_compiled_cursor())
for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())
if (x.has_keys_only()): self.set_keys_only(x.keys_only())
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
if (x.has_compile()): self.set_compile(x.compile())
if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms())
if (x.has_strong()): self.set_strong(x.strong())
for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i))
for i in xrange(x.group_by_property_name_size()): self.add_group_by_property_name(x.group_by_property_name(i))
if (x.has_distinct()): self.set_distinct(x.distinct())
if (x.has_min_safe_time_seconds()): self.set_min_safe_time_seconds(x.min_safe_time_seconds())
for i in xrange(x.safe_replica_name_size()): self.add_safe_replica_name(x.safe_replica_name(i))
if (x.has_persist_offset()): self.set_persist_offset(x.persist_offset())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_app_ != x.has_app_: return 0
if self.has_app_ and self.app_ != x.app_: return 0
if self.has_name_space_ != x.has_name_space_: return 0
if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
if self.has_kind_ != x.has_kind_: return 0
if self.has_kind_ and self.kind_ != x.kind_: return 0
if self.has_ancestor_ != x.has_ancestor_: return 0
if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
if len(self.filter_) != len(x.filter_): return 0
for e1, e2 in zip(self.filter_, x.filter_):
if e1 != e2: return 0
if self.has_search_query_ != x.has_search_query_: return 0
if self.has_search_query_ and self.search_query_ != x.search_query_: return 0
if len(self.order_) != len(x.order_): return 0
for e1, e2 in zip(self.order_, x.order_):
if e1 != e2: return 0
if self.has_hint_ != x.has_hint_: return 0
if self.has_hint_ and self.hint_ != x.hint_: return 0
if self.has_count_ != x.has_count_: return 0
if self.has_count_ and self.count_ != x.count_: return 0
if self.has_offset_ != x.has_offset_: return 0
if self.has_offset_ and self.offset_ != x.offset_: return 0
if self.has_limit_ != x.has_limit_: return 0
if self.has_limit_ and self.limit_ != x.limit_: return 0
if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0
if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0
if self.has_end_compiled_cursor_ != x.has_end_compiled_cursor_: return 0
if self.has_end_compiled_cursor_ and self.end_compiled_cursor_ != x.end_compiled_cursor_: return 0
if len(self.composite_index_) != len(x.composite_index_): return 0
for e1, e2 in zip(self.composite_index_, x.composite_index_):
if e1 != e2: return 0
if self.has_require_perfect_plan_ != x.has_require_perfect_plan_: return 0
if self.has_require_perfect_plan_ and self.require_perfect_plan_ != x.require_perfect_plan_: return 0
if self.has_keys_only_ != x.has_keys_only_: return 0
if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if self.has_compile_ != x.has_compile_: return 0
if self.has_compile_ and self.compile_ != x.compile_: return 0
if self.has_failover_ms_ != x.has_failover_ms_: return 0
if self.has_failover_ms_ and self.failover_ms_ != x.failover_ms_: return 0
if self.has_strong_ != x.has_strong_: return 0
if self.has_strong_ and self.strong_ != x.strong_: return 0
if len(self.property_name_) != len(x.property_name_): return 0
for e1, e2 in zip(self.property_name_, x.property_name_):
if e1 != e2: return 0
if len(self.group_by_property_name_) != len(x.group_by_property_name_): return 0
for e1, e2 in zip(self.group_by_property_name_, x.group_by_property_name_):
if e1 != e2: return 0
if self.has_distinct_ != x.has_distinct_: return 0
if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
if self.has_min_safe_time_seconds_ != x.has_min_safe_time_seconds_: return 0
if self.has_min_safe_time_seconds_ and self.min_safe_time_seconds_ != x.min_safe_time_seconds_: return 0
if len(self.safe_replica_name_) != len(x.safe_replica_name_): return 0
for e1, e2 in zip(self.safe_replica_name_, x.safe_replica_name_):
if e1 != e2: return 0
if self.has_persist_offset_ != x.has_persist_offset_: return 0
if self.has_persist_offset_ and self.persist_offset_ != x.persist_offset_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_app_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app not set.')
if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
for p in self.filter_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.order_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
if (self.has_end_compiled_cursor_ and not self.end_compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
for p in self.composite_index_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 2 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(len(self.app_))
if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
n += 2 * len(self.filter_)
for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSize()
if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
n += 2 * len(self.order_)
for i in xrange(len(self.order_)): n += self.order_[i].ByteSize()
if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSize())
if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSize())
n += 2 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
if (self.has_require_perfect_plan_): n += 3
if (self.has_keys_only_): n += 3
if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSize())
if (self.has_compile_): n += 3
if (self.has_failover_ms_): n += 2 + self.lengthVarInt64(self.failover_ms_)
if (self.has_strong_): n += 3
n += 2 * len(self.property_name_)
for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
n += 2 * len(self.group_by_property_name_)
for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i]))
if (self.has_distinct_): n += 3
if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_)
n += 2 * len(self.safe_replica_name_)
for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i]))
if (self.has_persist_offset_): n += 3
return n + 1
  def ByteSizePartial(self):
    """Returns the encoded byte size counting only fields currently set.

    Mirrors ByteSize() but (a) charges required fields (app) only when
    present, and (b) recurses via ByteSizePartial() on nested messages.
    The per-field constants (1, 2, 3) are the encoded sizes of that
    field's wire tag(s); repeated group fields cost 2 tag bytes per
    element (start-group + end-group).
    """
    n = 0
    if (self.has_header_): n += 2 + self.lengthString(self.header_.ByteSizePartial())
    if (self.has_app_):
      n += 1
      n += self.lengthString(len(self.app_))
    if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
    if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial())
    # filter and order are groups: 2 tag bytes per element plus the body.
    n += 2 * len(self.filter_)
    for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSizePartial()
    if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
    n += 2 * len(self.order_)
    for i in xrange(len(self.order_)): n += self.order_[i].ByteSizePartial()
    if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
    if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
    if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
    if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSizePartial())
    if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSizePartial())
    n += 2 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
    # Boolean fields with a 2-byte tag encode as tag + 1 payload byte = 3.
    if (self.has_require_perfect_plan_): n += 3
    if (self.has_keys_only_): n += 3
    if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSizePartial())
    if (self.has_compile_): n += 3
    if (self.has_failover_ms_): n += 2 + self.lengthVarInt64(self.failover_ms_)
    if (self.has_strong_): n += 3
    n += 2 * len(self.property_name_)
    for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
    n += 2 * len(self.group_by_property_name_)
    for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i]))
    if (self.has_distinct_): n += 3
    if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_)
    n += 2 * len(self.safe_replica_name_)
    for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i]))
    if (self.has_persist_offset_): n += 3
    return n
def Clear(self):
self.clear_header()
self.clear_app()
self.clear_name_space()
self.clear_kind()
self.clear_ancestor()
self.clear_filter()
self.clear_search_query()
self.clear_order()
self.clear_hint()
self.clear_count()
self.clear_offset()
self.clear_limit()
self.clear_compiled_cursor()
self.clear_end_compiled_cursor()
self.clear_composite_index()
self.clear_require_perfect_plan()
self.clear_keys_only()
self.clear_transaction()
self.clear_compile()
self.clear_failover_ms()
self.clear_strong()
self.clear_property_name()
self.clear_group_by_property_name()
self.clear_distinct()
self.clear_min_safe_time_seconds()
self.clear_safe_replica_name()
self.clear_persist_offset()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_)
if (self.has_kind_):
out.putVarInt32(26)
out.putPrefixedString(self.kind_)
for i in xrange(len(self.filter_)):
out.putVarInt32(35)
self.filter_[i].OutputUnchecked(out)
out.putVarInt32(36)
if (self.has_search_query_):
out.putVarInt32(66)
out.putPrefixedString(self.search_query_)
for i in xrange(len(self.order_)):
out.putVarInt32(75)
self.order_[i].OutputUnchecked(out)
out.putVarInt32(76)
if (self.has_offset_):
out.putVarInt32(96)
out.putVarInt32(self.offset_)
if (self.has_limit_):
out.putVarInt32(128)
out.putVarInt32(self.limit_)
if (self.has_ancestor_):
out.putVarInt32(138)
out.putVarInt32(self.ancestor_.ByteSize())
self.ancestor_.OutputUnchecked(out)
if (self.has_hint_):
out.putVarInt32(144)
out.putVarInt32(self.hint_)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(154)
out.putVarInt32(self.composite_index_[i].ByteSize())
self.composite_index_[i].OutputUnchecked(out)
if (self.has_require_perfect_plan_):
out.putVarInt32(160)
out.putBoolean(self.require_perfect_plan_)
if (self.has_keys_only_):
out.putVarInt32(168)
out.putBoolean(self.keys_only_)
if (self.has_transaction_):
out.putVarInt32(178)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
if (self.has_count_):
out.putVarInt32(184)
out.putVarInt32(self.count_)
if (self.has_distinct_):
out.putVarInt32(192)
out.putBoolean(self.distinct_)
if (self.has_compile_):
out.putVarInt32(200)
out.putBoolean(self.compile_)
if (self.has_failover_ms_):
out.putVarInt32(208)
out.putVarInt64(self.failover_ms_)
if (self.has_name_space_):
out.putVarInt32(234)
out.putPrefixedString(self.name_space_)
if (self.has_compiled_cursor_):
out.putVarInt32(242)
out.putVarInt32(self.compiled_cursor_.ByteSize())
self.compiled_cursor_.OutputUnchecked(out)
if (self.has_end_compiled_cursor_):
out.putVarInt32(250)
out.putVarInt32(self.end_compiled_cursor_.ByteSize())
self.end_compiled_cursor_.OutputUnchecked(out)
if (self.has_strong_):
out.putVarInt32(256)
out.putBoolean(self.strong_)
for i in xrange(len(self.property_name_)):
out.putVarInt32(266)
out.putPrefixedString(self.property_name_[i])
for i in xrange(len(self.group_by_property_name_)):
out.putVarInt32(274)
out.putPrefixedString(self.group_by_property_name_[i])
if (self.has_min_safe_time_seconds_):
out.putVarInt32(280)
out.putVarInt64(self.min_safe_time_seconds_)
for i in xrange(len(self.safe_replica_name_)):
out.putVarInt32(290)
out.putPrefixedString(self.safe_replica_name_[i])
if (self.has_persist_offset_):
out.putVarInt32(296)
out.putBoolean(self.persist_offset_)
if (self.has_header_):
out.putVarInt32(314)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
  def OutputPartial(self, out):
    """Serializes only the fields that are currently set.

    Identical tag layout to OutputUnchecked(), except the required `app`
    field is also guarded by its presence flag and nested messages are
    sized/emitted via their *Partial variants.
    """
    if (self.has_app_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_)
    if (self.has_kind_):
      out.putVarInt32(26)
      out.putPrefixedString(self.kind_)
    # Group fields: start tag / end tag bracket each element.
    for i in xrange(len(self.filter_)):
      out.putVarInt32(35)
      self.filter_[i].OutputPartial(out)
      out.putVarInt32(36)
    if (self.has_search_query_):
      out.putVarInt32(66)
      out.putPrefixedString(self.search_query_)
    for i in xrange(len(self.order_)):
      out.putVarInt32(75)
      self.order_[i].OutputPartial(out)
      out.putVarInt32(76)
    if (self.has_offset_):
      out.putVarInt32(96)
      out.putVarInt32(self.offset_)
    if (self.has_limit_):
      out.putVarInt32(128)
      out.putVarInt32(self.limit_)
    if (self.has_ancestor_):
      out.putVarInt32(138)
      out.putVarInt32(self.ancestor_.ByteSizePartial())
      self.ancestor_.OutputPartial(out)
    if (self.has_hint_):
      out.putVarInt32(144)
      out.putVarInt32(self.hint_)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(154)
      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
      self.composite_index_[i].OutputPartial(out)
    if (self.has_require_perfect_plan_):
      out.putVarInt32(160)
      out.putBoolean(self.require_perfect_plan_)
    if (self.has_keys_only_):
      out.putVarInt32(168)
      out.putBoolean(self.keys_only_)
    if (self.has_transaction_):
      out.putVarInt32(178)
      out.putVarInt32(self.transaction_.ByteSizePartial())
      self.transaction_.OutputPartial(out)
    if (self.has_count_):
      out.putVarInt32(184)
      out.putVarInt32(self.count_)
    if (self.has_distinct_):
      out.putVarInt32(192)
      out.putBoolean(self.distinct_)
    if (self.has_compile_):
      out.putVarInt32(200)
      out.putBoolean(self.compile_)
    if (self.has_failover_ms_):
      out.putVarInt32(208)
      out.putVarInt64(self.failover_ms_)
    if (self.has_name_space_):
      out.putVarInt32(234)
      out.putPrefixedString(self.name_space_)
    if (self.has_compiled_cursor_):
      out.putVarInt32(242)
      out.putVarInt32(self.compiled_cursor_.ByteSizePartial())
      self.compiled_cursor_.OutputPartial(out)
    if (self.has_end_compiled_cursor_):
      out.putVarInt32(250)
      out.putVarInt32(self.end_compiled_cursor_.ByteSizePartial())
      self.end_compiled_cursor_.OutputPartial(out)
    if (self.has_strong_):
      out.putVarInt32(256)
      out.putBoolean(self.strong_)
    for i in xrange(len(self.property_name_)):
      out.putVarInt32(266)
      out.putPrefixedString(self.property_name_[i])
    for i in xrange(len(self.group_by_property_name_)):
      out.putVarInt32(274)
      out.putPrefixedString(self.group_by_property_name_[i])
    if (self.has_min_safe_time_seconds_):
      out.putVarInt32(280)
      out.putVarInt64(self.min_safe_time_seconds_)
    for i in xrange(len(self.safe_replica_name_)):
      out.putVarInt32(290)
      out.putPrefixedString(self.safe_replica_name_[i])
    if (self.has_persist_offset_):
      out.putVarInt32(296)
      out.putBoolean(self.persist_offset_)
    if (self.has_header_):
      out.putVarInt32(314)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)
  def TryMerge(self, d):
    """Decodes fields from Decoder `d` and merges them into this message.

    Dispatches on the raw wire tag value `tt`. Length-delimited nested
    messages are decoded from a bounded sub-Decoder over the same buffer;
    group fields (filter/order) recurse directly, consuming their own
    end-group tag. A tag of 0 is malformed; unknown tags are skipped.
    """
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_kind(d.getPrefixedString())
        continue
      if tt == 35:
        self.add_filter().TryMerge(d)
        continue
      if tt == 66:
        self.set_search_query(d.getPrefixedString())
        continue
      if tt == 75:
        self.add_order().TryMerge(d)
        continue
      if tt == 96:
        self.set_offset(d.getVarInt32())
        continue
      if tt == 128:
        self.set_limit(d.getVarInt32())
        continue
      if tt == 138:
        # Nested message: read length, decode from a bounded window,
        # then skip the consumed bytes in the parent decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_ancestor().TryMerge(tmp)
        continue
      if tt == 144:
        self.set_hint(d.getVarInt32())
        continue
      if tt == 154:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_composite_index().TryMerge(tmp)
        continue
      if tt == 160:
        self.set_require_perfect_plan(d.getBoolean())
        continue
      if tt == 168:
        self.set_keys_only(d.getBoolean())
        continue
      if tt == 178:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_transaction().TryMerge(tmp)
        continue
      if tt == 184:
        self.set_count(d.getVarInt32())
        continue
      if tt == 192:
        self.set_distinct(d.getBoolean())
        continue
      if tt == 200:
        self.set_compile(d.getBoolean())
        continue
      if tt == 208:
        self.set_failover_ms(d.getVarInt64())
        continue
      if tt == 234:
        self.set_name_space(d.getPrefixedString())
        continue
      if tt == 242:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_compiled_cursor().TryMerge(tmp)
        continue
      if tt == 250:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_end_compiled_cursor().TryMerge(tmp)
        continue
      if tt == 256:
        self.set_strong(d.getBoolean())
        continue
      if tt == 266:
        self.add_property_name(d.getPrefixedString())
        continue
      if tt == 274:
        self.add_group_by_property_name(d.getPrefixedString())
        continue
      if tt == 280:
        self.set_min_safe_time_seconds(d.getVarInt64())
        continue
      if tt == 290:
        self.add_safe_replica_name(d.getPrefixedString())
        continue
      if tt == 296:
        self.set_persist_offset(d.getBoolean())
        continue
      if tt == 314:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue

      # Tag 0 is never valid on the wire; anything else unknown is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Renders this message in protobuf text format.

    `prefix` supplies the indentation for nested messages; when
    `printElemNumber` is truthy, repeated elements are suffixed with
    their zero-based index, e.g. "Filter(0)".
    """
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
    if self.has_ancestor_:
      res+=prefix+"ancestor <\n"
      res+=self.ancestor_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.filter_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Filter%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_search_query_: res+=prefix+("search_query: %s\n" % self.DebugFormatString(self.search_query_))
    cnt=0
    for e in self.order_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Order%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_hint_: res+=prefix+("hint: %s\n" % self.DebugFormatInt32(self.hint_))
    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
    if self.has_compiled_cursor_:
      res+=prefix+"compiled_cursor <\n"
      res+=self.compiled_cursor_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_end_compiled_cursor_:
      res+=prefix+"end_compiled_cursor <\n"
      res+=self.end_compiled_cursor_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.composite_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("composite_index%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_require_perfect_plan_: res+=prefix+("require_perfect_plan: %s\n" % self.DebugFormatBool(self.require_perfect_plan_))
    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
    if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % self.DebugFormatInt64(self.failover_ms_))
    if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_))
    cnt=0
    for e in self.property_name_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    cnt=0
    for e in self.group_by_property_name_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("group_by_property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
    if self.has_min_safe_time_seconds_: res+=prefix+("min_safe_time_seconds: %s\n" % self.DebugFormatInt64(self.min_safe_time_seconds_))
    cnt=0
    for e in self.safe_replica_name_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("safe_replica_name%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_persist_offset_: res+=prefix+("persist_offset: %s\n" % self.DebugFormatBool(self.persist_offset_))
    return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 39
kapp = 1
kname_space = 29
kkind = 3
kancestor = 17
kFilterGroup = 4
kFilterop = 6
kFilterproperty = 14
ksearch_query = 8
kOrderGroup = 9
kOrderproperty = 10
kOrderdirection = 11
khint = 18
kcount = 23
koffset = 12
klimit = 16
kcompiled_cursor = 30
kend_compiled_cursor = 31
kcomposite_index = 19
krequire_perfect_plan = 20
kkeys_only = 21
ktransaction = 22
kcompile = 25
kfailover_ms = 26
kstrong = 32
kproperty_name = 33
kgroup_by_property_name = 34
kdistinct = 24
kmin_safe_time_seconds = 35
ksafe_replica_name = 36
kpersist_offset = 37
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app",
3: "kind",
4: "Filter",
6: "op",
8: "search_query",
9: "Order",
10: "property",
11: "direction",
12: "offset",
14: "property",
16: "limit",
17: "ancestor",
18: "hint",
19: "composite_index",
20: "require_perfect_plan",
21: "keys_only",
22: "transaction",
23: "count",
24: "distinct",
25: "compile",
26: "failover_ms",
29: "name_space",
30: "compiled_cursor",
31: "end_compiled_cursor",
32: "strong",
33: "property_name",
34: "group_by_property_name",
35: "min_safe_time_seconds",
36: "safe_replica_name",
37: "persist_offset",
39: "header",
}, 39)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STARTGROUP,
6: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.STARTGROUP,
10: ProtocolBuffer.Encoder.STRING,
11: ProtocolBuffer.Encoder.NUMERIC,
12: ProtocolBuffer.Encoder.NUMERIC,
14: ProtocolBuffer.Encoder.STRING,
16: ProtocolBuffer.Encoder.NUMERIC,
17: ProtocolBuffer.Encoder.STRING,
18: ProtocolBuffer.Encoder.NUMERIC,
19: ProtocolBuffer.Encoder.STRING,
20: ProtocolBuffer.Encoder.NUMERIC,
21: ProtocolBuffer.Encoder.NUMERIC,
22: ProtocolBuffer.Encoder.STRING,
23: ProtocolBuffer.Encoder.NUMERIC,
24: ProtocolBuffer.Encoder.NUMERIC,
25: ProtocolBuffer.Encoder.NUMERIC,
26: ProtocolBuffer.Encoder.NUMERIC,
29: ProtocolBuffer.Encoder.STRING,
30: ProtocolBuffer.Encoder.STRING,
31: ProtocolBuffer.Encoder.STRING,
32: ProtocolBuffer.Encoder.NUMERIC,
33: ProtocolBuffer.Encoder.STRING,
34: ProtocolBuffer.Encoder.STRING,
35: ProtocolBuffer.Encoder.NUMERIC,
36: ProtocolBuffer.Encoder.STRING,
37: ProtocolBuffer.Encoder.NUMERIC,
39: ProtocolBuffer.Encoder.STRING,
}, 39, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Query'
class CompiledQuery_PrimaryScan(ProtocolBuffer.ProtocolMessage):
  """Generated message: the primary index-scan portion of a CompiledQuery.

  Holds the index name plus either a [start_key, end_key] range (with
  inclusivity flags) or postfix values bounding the scan. All fields are
  optional. NOTE(review): generated code — this block only adds
  documentation; hand edits to logic would be lost on regeneration.
  """
  # Presence flags and default values for each scalar field.
  has_index_name_ = 0
  index_name_ = ""
  has_start_key_ = 0
  start_key_ = ""
  has_start_inclusive_ = 0
  start_inclusive_ = 0
  has_end_key_ = 0
  end_key_ = ""
  has_end_inclusive_ = 0
  end_inclusive_ = 0
  has_end_unapplied_log_timestamp_us_ = 0
  end_unapplied_log_timestamp_us_ = 0

  def __init__(self, contents=None):
    # Repeated string fields live on the instance; optionally initialize
    # from a serialized byte string.
    self.start_postfix_value_ = []
    self.end_postfix_value_ = []
    if contents is not None: self.MergeFromString(contents)

  # --- index_name (optional string) accessors ---
  def index_name(self): return self.index_name_

  def set_index_name(self, x):
    self.has_index_name_ = 1
    self.index_name_ = x

  def clear_index_name(self):
    # Only reset when set, to avoid shadowing the class-level default.
    if self.has_index_name_:
      self.has_index_name_ = 0
      self.index_name_ = ""

  def has_index_name(self): return self.has_index_name_

  # --- start_key (optional string) accessors ---
  def start_key(self): return self.start_key_

  def set_start_key(self, x):
    self.has_start_key_ = 1
    self.start_key_ = x

  def clear_start_key(self):
    if self.has_start_key_:
      self.has_start_key_ = 0
      self.start_key_ = ""

  def has_start_key(self): return self.has_start_key_

  # --- start_inclusive (optional bool) accessors ---
  def start_inclusive(self): return self.start_inclusive_

  def set_start_inclusive(self, x):
    self.has_start_inclusive_ = 1
    self.start_inclusive_ = x

  def clear_start_inclusive(self):
    if self.has_start_inclusive_:
      self.has_start_inclusive_ = 0
      self.start_inclusive_ = 0

  def has_start_inclusive(self): return self.has_start_inclusive_

  # --- end_key (optional string) accessors ---
  def end_key(self): return self.end_key_

  def set_end_key(self, x):
    self.has_end_key_ = 1
    self.end_key_ = x

  def clear_end_key(self):
    if self.has_end_key_:
      self.has_end_key_ = 0
      self.end_key_ = ""

  def has_end_key(self): return self.has_end_key_

  # --- end_inclusive (optional bool) accessors ---
  def end_inclusive(self): return self.end_inclusive_

  def set_end_inclusive(self, x):
    self.has_end_inclusive_ = 1
    self.end_inclusive_ = x

  def clear_end_inclusive(self):
    if self.has_end_inclusive_:
      self.has_end_inclusive_ = 0
      self.end_inclusive_ = 0

  def has_end_inclusive(self): return self.has_end_inclusive_

  # --- start_postfix_value (repeated string) accessors ---
  def start_postfix_value_size(self): return len(self.start_postfix_value_)
  def start_postfix_value_list(self): return self.start_postfix_value_

  def start_postfix_value(self, i):
    return self.start_postfix_value_[i]

  def set_start_postfix_value(self, i, x):
    self.start_postfix_value_[i] = x

  def add_start_postfix_value(self, x):
    self.start_postfix_value_.append(x)

  def clear_start_postfix_value(self):
    self.start_postfix_value_ = []

  # --- end_postfix_value (repeated string) accessors ---
  def end_postfix_value_size(self): return len(self.end_postfix_value_)
  def end_postfix_value_list(self): return self.end_postfix_value_

  def end_postfix_value(self, i):
    return self.end_postfix_value_[i]

  def set_end_postfix_value(self, i, x):
    self.end_postfix_value_[i] = x

  def add_end_postfix_value(self, x):
    self.end_postfix_value_.append(x)

  def clear_end_postfix_value(self):
    self.end_postfix_value_ = []

  # --- end_unapplied_log_timestamp_us (optional int64) accessors ---
  def end_unapplied_log_timestamp_us(self): return self.end_unapplied_log_timestamp_us_

  def set_end_unapplied_log_timestamp_us(self, x):
    self.has_end_unapplied_log_timestamp_us_ = 1
    self.end_unapplied_log_timestamp_us_ = x

  def clear_end_unapplied_log_timestamp_us(self):
    if self.has_end_unapplied_log_timestamp_us_:
      self.has_end_unapplied_log_timestamp_us_ = 0
      self.end_unapplied_log_timestamp_us_ = 0

  def has_end_unapplied_log_timestamp_us(self): return self.has_end_unapplied_log_timestamp_us_


  def MergeFrom(self, x):
    """Merges set fields of `x` into self; repeated fields are appended."""
    assert x is not self
    if (x.has_index_name()): self.set_index_name(x.index_name())
    if (x.has_start_key()): self.set_start_key(x.start_key())
    if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
    if (x.has_end_key()): self.set_end_key(x.end_key())
    if (x.has_end_inclusive()): self.set_end_inclusive(x.end_inclusive())
    for i in xrange(x.start_postfix_value_size()): self.add_start_postfix_value(x.start_postfix_value(i))
    for i in xrange(x.end_postfix_value_size()): self.add_end_postfix_value(x.end_postfix_value(i))
    if (x.has_end_unapplied_log_timestamp_us()): self.set_end_unapplied_log_timestamp_us(x.end_unapplied_log_timestamp_us())

  def Equals(self, x):
    """Field-by-field equality (presence flags must match too)."""
    if x is self: return 1
    if self.has_index_name_ != x.has_index_name_: return 0
    if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
    if self.has_start_key_ != x.has_start_key_: return 0
    if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
    if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
    if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
    if self.has_end_key_ != x.has_end_key_: return 0
    if self.has_end_key_ and self.end_key_ != x.end_key_: return 0
    if self.has_end_inclusive_ != x.has_end_inclusive_: return 0
    if self.has_end_inclusive_ and self.end_inclusive_ != x.end_inclusive_: return 0
    if len(self.start_postfix_value_) != len(x.start_postfix_value_): return 0
    for e1, e2 in zip(self.start_postfix_value_, x.start_postfix_value_):
      if e1 != e2: return 0
    if len(self.end_postfix_value_) != len(x.end_postfix_value_): return 0
    for e1, e2 in zip(self.end_postfix_value_, x.end_postfix_value_):
      if e1 != e2: return 0
    if self.has_end_unapplied_log_timestamp_us_ != x.has_end_unapplied_log_timestamp_us_: return 0
    if self.has_end_unapplied_log_timestamp_us_ and self.end_unapplied_log_timestamp_us_ != x.end_unapplied_log_timestamp_us_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Always initialized: this message has no required fields."""
    initialized = 1
    return initialized

  def ByteSize(self):
    """Encoded size; per-field constants are that field's tag byte count."""
    n = 0
    if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
    if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
    if (self.has_start_inclusive_): n += 2
    if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
    if (self.has_end_inclusive_): n += 2
    n += 2 * len(self.start_postfix_value_)
    for i in xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i]))
    n += 2 * len(self.end_postfix_value_)
    for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i]))
    if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_)
    return n

  def ByteSizePartial(self):
    """Same as ByteSize(); identical here because no field is required."""
    n = 0
    if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
    if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
    if (self.has_start_inclusive_): n += 2
    if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
    if (self.has_end_inclusive_): n += 2
    n += 2 * len(self.start_postfix_value_)
    for i in xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i]))
    n += 2 * len(self.end_postfix_value_)
    for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i]))
    if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_)
    return n

  def Clear(self):
    """Resets all fields to their default/unset state."""
    self.clear_index_name()
    self.clear_start_key()
    self.clear_start_inclusive()
    self.clear_end_key()
    self.clear_end_inclusive()
    self.clear_start_postfix_value()
    self.clear_end_postfix_value()
    self.clear_end_unapplied_log_timestamp_us()

  def OutputUnchecked(self, out):
    """Serializes set fields to `out`; tag bytes precede each value."""
    if (self.has_index_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.index_name_)
    if (self.has_start_key_):
      out.putVarInt32(26)
      out.putPrefixedString(self.start_key_)
    if (self.has_start_inclusive_):
      out.putVarInt32(32)
      out.putBoolean(self.start_inclusive_)
    if (self.has_end_key_):
      out.putVarInt32(42)
      out.putPrefixedString(self.end_key_)
    if (self.has_end_inclusive_):
      out.putVarInt32(48)
      out.putBoolean(self.end_inclusive_)
    if (self.has_end_unapplied_log_timestamp_us_):
      out.putVarInt32(152)
      out.putVarInt64(self.end_unapplied_log_timestamp_us_)
    for i in xrange(len(self.start_postfix_value_)):
      out.putVarInt32(178)
      out.putPrefixedString(self.start_postfix_value_[i])
    for i in xrange(len(self.end_postfix_value_)):
      out.putVarInt32(186)
      out.putPrefixedString(self.end_postfix_value_[i])

  def OutputPartial(self, out):
    """Identical to OutputUnchecked(): every field is already guarded."""
    if (self.has_index_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.index_name_)
    if (self.has_start_key_):
      out.putVarInt32(26)
      out.putPrefixedString(self.start_key_)
    if (self.has_start_inclusive_):
      out.putVarInt32(32)
      out.putBoolean(self.start_inclusive_)
    if (self.has_end_key_):
      out.putVarInt32(42)
      out.putPrefixedString(self.end_key_)
    if (self.has_end_inclusive_):
      out.putVarInt32(48)
      out.putBoolean(self.end_inclusive_)
    if (self.has_end_unapplied_log_timestamp_us_):
      out.putVarInt32(152)
      out.putVarInt64(self.end_unapplied_log_timestamp_us_)
    for i in xrange(len(self.start_postfix_value_)):
      out.putVarInt32(178)
      out.putPrefixedString(self.start_postfix_value_[i])
    for i in xrange(len(self.end_postfix_value_)):
      out.putVarInt32(186)
      out.putPrefixedString(self.end_postfix_value_[i])

  def TryMerge(self, d):
    """Decodes this group from `d`; tag 12 is the enclosing end-group tag."""
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        self.set_index_name(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_start_key(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_start_inclusive(d.getBoolean())
        continue
      if tt == 42:
        self.set_end_key(d.getPrefixedString())
        continue
      if tt == 48:
        self.set_end_inclusive(d.getBoolean())
        continue
      if tt == 152:
        self.set_end_unapplied_log_timestamp_us(d.getVarInt64())
        continue
      if tt == 178:
        self.add_start_postfix_value(d.getPrefixedString())
        continue
      if tt == 186:
        self.add_end_postfix_value(d.getPrefixedString())
        continue

      # Tag 0 is malformed; unknown tags are skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    """Renders the set fields in protobuf text format."""
    res=""
    if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
    if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
    if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
    if self.has_end_key_: res+=prefix+("end_key: %s\n" % self.DebugFormatString(self.end_key_))
    if self.has_end_inclusive_: res+=prefix+("end_inclusive: %s\n" % self.DebugFormatBool(self.end_inclusive_))
    cnt=0
    for e in self.start_postfix_value_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("start_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    cnt=0
    for e in self.end_postfix_value_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("end_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_end_unapplied_log_timestamp_us_: res+=prefix+("end_unapplied_log_timestamp_us: %s\n" % self.DebugFormatInt64(self.end_unapplied_log_timestamp_us_))
    return res
class CompiledQuery_MergeJoinScan(ProtocolBuffer.ProtocolMessage):
  """Generated message: one merge-join index scan of a CompiledQuery.

  Carries a required index_name, repeated prefix_value strings, and an
  optional value_prefix flag. NOTE(review): generated code — this block
  only adds documentation.
  """
  # Presence flags and default values for the scalar fields.
  has_index_name_ = 0
  index_name_ = ""
  has_value_prefix_ = 0
  value_prefix_ = 0

  def __init__(self, contents=None):
    # Repeated field lives on the instance; optionally initialize from a
    # serialized byte string.
    self.prefix_value_ = []
    if contents is not None: self.MergeFromString(contents)

  # --- index_name (required string) accessors ---
  def index_name(self): return self.index_name_

  def set_index_name(self, x):
    self.has_index_name_ = 1
    self.index_name_ = x

  def clear_index_name(self):
    if self.has_index_name_:
      self.has_index_name_ = 0
      self.index_name_ = ""

  def has_index_name(self): return self.has_index_name_

  # --- prefix_value (repeated string) accessors ---
  def prefix_value_size(self): return len(self.prefix_value_)
  def prefix_value_list(self): return self.prefix_value_

  def prefix_value(self, i):
    return self.prefix_value_[i]

  def set_prefix_value(self, i, x):
    self.prefix_value_[i] = x

  def add_prefix_value(self, x):
    self.prefix_value_.append(x)

  def clear_prefix_value(self):
    self.prefix_value_ = []

  # --- value_prefix (optional bool) accessors ---
  def value_prefix(self): return self.value_prefix_

  def set_value_prefix(self, x):
    self.has_value_prefix_ = 1
    self.value_prefix_ = x

  def clear_value_prefix(self):
    if self.has_value_prefix_:
      self.has_value_prefix_ = 0
      self.value_prefix_ = 0

  def has_value_prefix(self): return self.has_value_prefix_


  def MergeFrom(self, x):
    """Merges set fields of `x` into self; prefix_value is appended."""
    assert x is not self
    if (x.has_index_name()): self.set_index_name(x.index_name())
    for i in xrange(x.prefix_value_size()): self.add_prefix_value(x.prefix_value(i))
    if (x.has_value_prefix()): self.set_value_prefix(x.value_prefix())

  def Equals(self, x):
    """Field-by-field equality (presence flags must match too)."""
    if x is self: return 1
    if self.has_index_name_ != x.has_index_name_: return 0
    if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
    if len(self.prefix_value_) != len(x.prefix_value_): return 0
    for e1, e2 in zip(self.prefix_value_, x.prefix_value_):
      if e1 != e2: return 0
    if self.has_value_prefix_ != x.has_value_prefix_: return 0
    if self.has_value_prefix_ and self.value_prefix_ != x.value_prefix_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 0 (with a debug note) when required index_name is unset."""
    initialized = 1
    if (not self.has_index_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: index_name not set.')
    return initialized

  def ByteSize(self):
    """Encoded size assuming required fields are set (hence the +1 tag)."""
    n = 0
    n += self.lengthString(len(self.index_name_))
    n += 1 * len(self.prefix_value_)
    for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
    if (self.has_value_prefix_): n += 3
    return n + 1

  def ByteSizePartial(self):
    """Encoded size counting only fields that are actually set."""
    n = 0
    if (self.has_index_name_):
      n += 1
      n += self.lengthString(len(self.index_name_))
    n += 1 * len(self.prefix_value_)
    for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
    if (self.has_value_prefix_): n += 3
    return n

  def Clear(self):
    """Resets all fields to their default/unset state."""
    self.clear_index_name()
    self.clear_prefix_value()
    self.clear_value_prefix()

  def OutputUnchecked(self, out):
    """Serializes to `out`; index_name is written unconditionally."""
    out.putVarInt32(66)
    out.putPrefixedString(self.index_name_)
    for i in xrange(len(self.prefix_value_)):
      out.putVarInt32(74)
      out.putPrefixedString(self.prefix_value_[i])
    if (self.has_value_prefix_):
      out.putVarInt32(160)
      out.putBoolean(self.value_prefix_)

  def OutputPartial(self, out):
    """Like OutputUnchecked(), but index_name is presence-guarded too."""
    if (self.has_index_name_):
      out.putVarInt32(66)
      out.putPrefixedString(self.index_name_)
    for i in xrange(len(self.prefix_value_)):
      out.putVarInt32(74)
      out.putPrefixedString(self.prefix_value_[i])
    if (self.has_value_prefix_):
      out.putVarInt32(160)
      out.putBoolean(self.value_prefix_)

  def TryMerge(self, d):
    """Decodes this group from `d`; tag 60 is the enclosing end-group tag."""
    while 1:
      tt = d.getVarInt32()
      if tt == 60: break
      if tt == 66:
        self.set_index_name(d.getPrefixedString())
        continue
      if tt == 74:
        self.add_prefix_value(d.getPrefixedString())
        continue
      if tt == 160:
        self.set_value_prefix(d.getBoolean())
        continue

      # Tag 0 is malformed; unknown tags are skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    """Renders the set fields in protobuf text format."""
    res=""
    if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
    cnt=0
    for e in self.prefix_value_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("prefix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_value_prefix_: res+=prefix+("value_prefix: %s\n" % self.DebugFormatBool(self.value_prefix_))
    return res
class CompiledQuery_EntityFilter(ProtocolBuffer.ProtocolMessage):
  """Generated message for the optional EntityFilter group of CompiledQuery.

  Fields (tag numbers per the kEntityFilter* constants below):
    distinct (14, bool)       - whether results must be de-duplicated.
    kind     (17, string)     - entity kind to restrict results to.
    ancestor (18, Reference)  - lazily-allocated ancestor key filter.

  Generated code: do not edit by hand.
  """
  # Presence flags and default values live on the class; instances shadow
  # them on first set_*/mutable_* call.
  has_distinct_ = 0
  distinct_ = 0
  has_kind_ = 0
  kind_ = ""
  has_ancestor_ = 0
  ancestor_ = None

  def __init__(self, contents=None):
    # Lock guarding lazy construction of the ancestor_ submessage.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  # --- distinct accessors -------------------------------------------------
  def distinct(self): return self.distinct_

  def set_distinct(self, x):
    self.has_distinct_ = 1
    self.distinct_ = x

  def clear_distinct(self):
    # Only clear if present, so that reading the class-level default is cheap.
    if self.has_distinct_:
      self.has_distinct_ = 0
      self.distinct_ = 0

  def has_distinct(self): return self.has_distinct_

  # --- kind accessors -----------------------------------------------------
  def kind(self): return self.kind_

  def set_kind(self, x):
    self.has_kind_ = 1
    self.kind_ = x

  def clear_kind(self):
    if self.has_kind_:
      self.has_kind_ = 0
      self.kind_ = ""

  def has_kind(self): return self.has_kind_

  # --- ancestor accessors (lazily allocated submessage) ---------------------
  def ancestor(self):
    # Double-checked lazy init under lazy_init_lock_.
    if self.ancestor_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.ancestor_ is None: self.ancestor_ = Reference()
      finally:
        self.lazy_init_lock_.release()
    return self.ancestor_

  def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()

  def clear_ancestor(self):
    # Clear the submessage in place rather than dropping it, to avoid
    # re-allocating on the next mutable_ancestor() call.
    if self.has_ancestor_:
      self.has_ancestor_ = 0;
      if self.ancestor_ is not None: self.ancestor_.Clear()

  def has_ancestor(self): return self.has_ancestor_

  def MergeFrom(self, x):
    # Field-wise merge of another CompiledQuery_EntityFilter into self.
    assert x is not self
    if (x.has_distinct()): self.set_distinct(x.distinct())
    if (x.has_kind()): self.set_kind(x.kind())
    if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())

  def Equals(self, x):
    # Presence flags must match as well as values.
    if x is self: return 1
    if self.has_distinct_ != x.has_distinct_: return 0
    if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
    if self.has_kind_ != x.has_kind_: return 0
    if self.has_kind_ and self.kind_ != x.kind_: return 0
    if self.has_ancestor_ != x.has_ancestor_: return 0
    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # All fields here are optional; only the ancestor submessage (if present)
    # can render this message uninitialized.
    initialized = 1
    if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    # Serialized size in bytes: 2-byte tags plus length-prefixed payloads.
    n = 0
    if (self.has_distinct_): n += 2
    if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
    return n

  def ByteSizePartial(self):
    # Like ByteSize but tolerates uninitialized submessages.
    n = 0
    if (self.has_distinct_): n += 2
    if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial())
    return n

  def Clear(self):
    self.clear_distinct()
    self.clear_kind()
    self.clear_ancestor()

  def OutputUnchecked(self, out):
    # Wire tags: 112 = distinct (bool), 138 = kind (string),
    # 146 = ancestor (length-delimited Reference).
    if (self.has_distinct_):
      out.putVarInt32(112)
      out.putBoolean(self.distinct_)
    if (self.has_kind_):
      out.putVarInt32(138)
      out.putPrefixedString(self.kind_)
    if (self.has_ancestor_):
      out.putVarInt32(146)
      out.putVarInt32(self.ancestor_.ByteSize())
      self.ancestor_.OutputUnchecked(out)

  def OutputPartial(self, out):
    # Same layout as OutputUnchecked, using partial sizes/serialization.
    if (self.has_distinct_):
      out.putVarInt32(112)
      out.putBoolean(self.distinct_)
    if (self.has_kind_):
      out.putVarInt32(138)
      out.putPrefixedString(self.kind_)
    if (self.has_ancestor_):
      out.putVarInt32(146)
      out.putVarInt32(self.ancestor_.ByteSizePartial())
      self.ancestor_.OutputPartial(out)

  def TryMerge(self, d):
    # Decode until the END_GROUP tag (108); unknown fields are skipped.
    while 1:
      tt = d.getVarInt32()
      if tt == 108: break
      if tt == 112:
        self.set_distinct(d.getBoolean())
        continue
      if tt == 138:
        self.set_kind(d.getPrefixedString())
        continue
      if tt == 146:
        # Length-delimited submessage: decode into a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_ancestor().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    # Indented text dump; nested messages use "<" ... ">" delimiters.
    res=""
    if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
    if self.has_ancestor_:
      res+=prefix+"ancestor <\n"
      res+=self.ancestor_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
class CompiledQuery(ProtocolBuffer.ProtocolMessage):
  """Generated message describing a compiled datastore query plan.

  Groups/fields (tag numbers per the k* constants at the bottom of the class):
    PrimaryScan   (group 1, required)          - the main index scan.
    MergeJoinScan (group 7, repeated)          - additional scans merged in.
    index_def     (21, Index, lazy)            - index definition used.
    offset        (10, int32) / limit (11)     - result windowing.
    keys_only     (12, bool, required)         - whether only keys are fetched.
    property_name (24, repeated string)        - projected properties.
    distinct_infix_size (25, int32)            - distinct-prefix length.
    EntityFilter  (group 13, optional)         - kind/ancestor/distinct filter.
    plan_label    (26, string)                 - human-readable plan label.

  Generated code: do not edit by hand.
  """
  # Presence flags and scalar defaults (class-level; shadowed per instance).
  has_primaryscan_ = 0
  has_index_def_ = 0
  index_def_ = None
  has_offset_ = 0
  offset_ = 0
  has_limit_ = 0
  limit_ = 0
  has_keys_only_ = 0
  keys_only_ = 0
  has_distinct_infix_size_ = 0
  distinct_infix_size_ = 0
  has_entityfilter_ = 0
  entityfilter_ = None
  has_plan_label_ = 0
  plan_label_ = ""

  def __init__(self, contents=None):
    # primaryscan_ is required, so it is allocated eagerly; repeated fields
    # start empty; index_def_/entityfilter_ are lazily allocated under
    # lazy_init_lock_.
    self.primaryscan_ = CompiledQuery_PrimaryScan()
    self.mergejoinscan_ = []
    self.property_name_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  # --- primaryscan (required group) ---------------------------------------
  def primaryscan(self): return self.primaryscan_

  def mutable_primaryscan(self): self.has_primaryscan_ = 1; return self.primaryscan_

  def clear_primaryscan(self):self.has_primaryscan_ = 0; self.primaryscan_.Clear()

  def has_primaryscan(self): return self.has_primaryscan_

  # --- mergejoinscan (repeated group) -------------------------------------
  def mergejoinscan_size(self): return len(self.mergejoinscan_)

  def mergejoinscan_list(self): return self.mergejoinscan_

  def mergejoinscan(self, i):
    return self.mergejoinscan_[i]

  def mutable_mergejoinscan(self, i):
    return self.mergejoinscan_[i]

  def add_mergejoinscan(self):
    # Append a fresh element and return it for the caller to populate.
    x = CompiledQuery_MergeJoinScan()
    self.mergejoinscan_.append(x)
    return x

  def clear_mergejoinscan(self):
    self.mergejoinscan_ = []

  # --- index_def (lazily allocated submessage) ------------------------------
  def index_def(self):
    # Double-checked lazy init under lazy_init_lock_.
    if self.index_def_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.index_def_ is None: self.index_def_ = Index()
      finally:
        self.lazy_init_lock_.release()
    return self.index_def_

  def mutable_index_def(self): self.has_index_def_ = 1; return self.index_def()

  def clear_index_def(self):
    # Clear in place to avoid re-allocating on the next access.
    if self.has_index_def_:
      self.has_index_def_ = 0;
      if self.index_def_ is not None: self.index_def_.Clear()

  def has_index_def(self): return self.has_index_def_

  # --- offset ---------------------------------------------------------------
  def offset(self): return self.offset_

  def set_offset(self, x):
    self.has_offset_ = 1
    self.offset_ = x

  def clear_offset(self):
    if self.has_offset_:
      self.has_offset_ = 0
      self.offset_ = 0

  def has_offset(self): return self.has_offset_

  # --- limit ----------------------------------------------------------------
  def limit(self): return self.limit_

  def set_limit(self, x):
    self.has_limit_ = 1
    self.limit_ = x

  def clear_limit(self):
    if self.has_limit_:
      self.has_limit_ = 0
      self.limit_ = 0

  def has_limit(self): return self.has_limit_

  # --- keys_only (required) -------------------------------------------------
  def keys_only(self): return self.keys_only_

  def set_keys_only(self, x):
    self.has_keys_only_ = 1
    self.keys_only_ = x

  def clear_keys_only(self):
    if self.has_keys_only_:
      self.has_keys_only_ = 0
      self.keys_only_ = 0

  def has_keys_only(self): return self.has_keys_only_

  # --- property_name (repeated string) -------------------------------------
  def property_name_size(self): return len(self.property_name_)

  def property_name_list(self): return self.property_name_

  def property_name(self, i):
    return self.property_name_[i]

  def set_property_name(self, i, x):
    self.property_name_[i] = x

  def add_property_name(self, x):
    self.property_name_.append(x)

  def clear_property_name(self):
    self.property_name_ = []

  # --- distinct_infix_size --------------------------------------------------
  def distinct_infix_size(self): return self.distinct_infix_size_

  def set_distinct_infix_size(self, x):
    self.has_distinct_infix_size_ = 1
    self.distinct_infix_size_ = x

  def clear_distinct_infix_size(self):
    if self.has_distinct_infix_size_:
      self.has_distinct_infix_size_ = 0
      self.distinct_infix_size_ = 0

  def has_distinct_infix_size(self): return self.has_distinct_infix_size_

  # --- entityfilter (lazily allocated optional group) -----------------------
  def entityfilter(self):
    if self.entityfilter_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.entityfilter_ is None: self.entityfilter_ = CompiledQuery_EntityFilter()
      finally:
        self.lazy_init_lock_.release()
    return self.entityfilter_

  def mutable_entityfilter(self): self.has_entityfilter_ = 1; return self.entityfilter()

  def clear_entityfilter(self):
    if self.has_entityfilter_:
      self.has_entityfilter_ = 0;
      if self.entityfilter_ is not None: self.entityfilter_.Clear()

  def has_entityfilter(self): return self.has_entityfilter_

  # --- plan_label -----------------------------------------------------------
  def plan_label(self): return self.plan_label_

  def set_plan_label(self, x):
    self.has_plan_label_ = 1
    self.plan_label_ = x

  def clear_plan_label(self):
    if self.has_plan_label_:
      self.has_plan_label_ = 0
      self.plan_label_ = ""

  def has_plan_label(self): return self.has_plan_label_

  def MergeFrom(self, x):
    # Field-wise merge; repeated fields are appended (CopyFrom per element).
    assert x is not self
    if (x.has_primaryscan()): self.mutable_primaryscan().MergeFrom(x.primaryscan())
    for i in xrange(x.mergejoinscan_size()): self.add_mergejoinscan().CopyFrom(x.mergejoinscan(i))
    if (x.has_index_def()): self.mutable_index_def().MergeFrom(x.index_def())
    if (x.has_offset()): self.set_offset(x.offset())
    if (x.has_limit()): self.set_limit(x.limit())
    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
    for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i))
    if (x.has_distinct_infix_size()): self.set_distinct_infix_size(x.distinct_infix_size())
    if (x.has_entityfilter()): self.mutable_entityfilter().MergeFrom(x.entityfilter())
    if (x.has_plan_label()): self.set_plan_label(x.plan_label())

  def Equals(self, x):
    # Presence flags, values, and repeated-field order must all match.
    if x is self: return 1
    if self.has_primaryscan_ != x.has_primaryscan_: return 0
    if self.has_primaryscan_ and self.primaryscan_ != x.primaryscan_: return 0
    if len(self.mergejoinscan_) != len(x.mergejoinscan_): return 0
    for e1, e2 in zip(self.mergejoinscan_, x.mergejoinscan_):
      if e1 != e2: return 0
    if self.has_index_def_ != x.has_index_def_: return 0
    if self.has_index_def_ and self.index_def_ != x.index_def_: return 0
    if self.has_offset_ != x.has_offset_: return 0
    if self.has_offset_ and self.offset_ != x.offset_: return 0
    if self.has_limit_ != x.has_limit_: return 0
    if self.has_limit_ and self.limit_ != x.limit_: return 0
    if self.has_keys_only_ != x.has_keys_only_: return 0
    if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
    if len(self.property_name_) != len(x.property_name_): return 0
    for e1, e2 in zip(self.property_name_, x.property_name_):
      if e1 != e2: return 0
    if self.has_distinct_infix_size_ != x.has_distinct_infix_size_: return 0
    if self.has_distinct_infix_size_ and self.distinct_infix_size_ != x.distinct_infix_size_: return 0
    if self.has_entityfilter_ != x.has_entityfilter_: return 0
    if self.has_entityfilter_ and self.entityfilter_ != x.entityfilter_: return 0
    if self.has_plan_label_ != x.has_plan_label_: return 0
    if self.has_plan_label_ and self.plan_label_ != x.plan_label_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # primaryscan and keys_only are required; submessages must themselves
    # be initialized.  Missing-field names are appended to debug_strs.
    initialized = 1
    if (not self.has_primaryscan_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: primaryscan not set.')
    elif not self.primaryscan_.IsInitialized(debug_strs): initialized = 0
    for p in self.mergejoinscan_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_index_def_ and not self.index_def_.IsInitialized(debug_strs)): initialized = 0
    if (not self.has_keys_only_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: keys_only not set.')
    if (self.has_entityfilter_ and not self.entityfilter_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    # Groups cost 2 tag bytes each (START_GROUP + END_GROUP); the trailing
    # "+ 4" covers the primaryscan group tags plus keys_only tag+value,
    # both of which are always serialized.
    n = 0
    n += self.primaryscan_.ByteSize()
    n += 2 * len(self.mergejoinscan_)
    for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSize()
    if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSize())
    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
    if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
    n += 2 * len(self.property_name_)
    for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
    if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_)
    if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSize()
    if (self.has_plan_label_): n += 2 + self.lengthString(len(self.plan_label_))
    return n + 4

  def ByteSizePartial(self):
    # Like ByteSize, but required fields are counted only when present.
    n = 0
    if (self.has_primaryscan_):
      n += 2
      n += self.primaryscan_.ByteSizePartial()
    n += 2 * len(self.mergejoinscan_)
    for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSizePartial()
    if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSizePartial())
    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
    if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
    if (self.has_keys_only_):
      n += 2
    n += 2 * len(self.property_name_)
    for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
    if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_)
    if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSizePartial()
    if (self.has_plan_label_): n += 2 + self.lengthString(len(self.plan_label_))
    return n

  def Clear(self):
    self.clear_primaryscan()
    self.clear_mergejoinscan()
    self.clear_index_def()
    self.clear_offset()
    self.clear_limit()
    self.clear_keys_only()
    self.clear_property_name()
    self.clear_distinct_infix_size()
    self.clear_entityfilter()
    self.clear_plan_label()

  def OutputUnchecked(self, out):
    # Wire layout.  Groups are bracketed by START/END tags:
    # 11/12 = PrimaryScan, 59/60 = MergeJoinScan, 107/108 = EntityFilter.
    # Scalars: 80 = offset, 88 = limit, 96 = keys_only, 170 = index_def,
    # 194 = property_name, 200 = distinct_infix_size, 210 = plan_label.
    # primaryscan and keys_only are required and emitted unconditionally.
    out.putVarInt32(11)
    self.primaryscan_.OutputUnchecked(out)
    out.putVarInt32(12)
    for i in xrange(len(self.mergejoinscan_)):
      out.putVarInt32(59)
      self.mergejoinscan_[i].OutputUnchecked(out)
      out.putVarInt32(60)
    if (self.has_offset_):
      out.putVarInt32(80)
      out.putVarInt32(self.offset_)
    if (self.has_limit_):
      out.putVarInt32(88)
      out.putVarInt32(self.limit_)
    out.putVarInt32(96)
    out.putBoolean(self.keys_only_)
    if (self.has_entityfilter_):
      out.putVarInt32(107)
      self.entityfilter_.OutputUnchecked(out)
      out.putVarInt32(108)
    if (self.has_index_def_):
      out.putVarInt32(170)
      out.putVarInt32(self.index_def_.ByteSize())
      self.index_def_.OutputUnchecked(out)
    for i in xrange(len(self.property_name_)):
      out.putVarInt32(194)
      out.putPrefixedString(self.property_name_[i])
    if (self.has_distinct_infix_size_):
      out.putVarInt32(200)
      out.putVarInt32(self.distinct_infix_size_)
    if (self.has_plan_label_):
      out.putVarInt32(210)
      out.putPrefixedString(self.plan_label_)

  def OutputPartial(self, out):
    # Same layout as OutputUnchecked, but required fields are guarded by
    # their presence flags and partial sizes/serialization are used.
    if (self.has_primaryscan_):
      out.putVarInt32(11)
      self.primaryscan_.OutputPartial(out)
      out.putVarInt32(12)
    for i in xrange(len(self.mergejoinscan_)):
      out.putVarInt32(59)
      self.mergejoinscan_[i].OutputPartial(out)
      out.putVarInt32(60)
    if (self.has_offset_):
      out.putVarInt32(80)
      out.putVarInt32(self.offset_)
    if (self.has_limit_):
      out.putVarInt32(88)
      out.putVarInt32(self.limit_)
    if (self.has_keys_only_):
      out.putVarInt32(96)
      out.putBoolean(self.keys_only_)
    if (self.has_entityfilter_):
      out.putVarInt32(107)
      self.entityfilter_.OutputPartial(out)
      out.putVarInt32(108)
    if (self.has_index_def_):
      out.putVarInt32(170)
      out.putVarInt32(self.index_def_.ByteSizePartial())
      self.index_def_.OutputPartial(out)
    for i in xrange(len(self.property_name_)):
      out.putVarInt32(194)
      out.putPrefixedString(self.property_name_[i])
    if (self.has_distinct_infix_size_):
      out.putVarInt32(200)
      out.putVarInt32(self.distinct_infix_size_)
    if (self.has_plan_label_):
      out.putVarInt32(210)
      out.putPrefixedString(self.plan_label_)

  def TryMerge(self, d):
    # Top-level message: decode until the decoder is exhausted.  Group
    # fields (11, 59, 107) recurse into the same decoder; length-delimited
    # fields (170) use a bounded sub-decoder.  Unknown fields are skipped.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.mutable_primaryscan().TryMerge(d)
        continue
      if tt == 59:
        self.add_mergejoinscan().TryMerge(d)
        continue
      if tt == 80:
        self.set_offset(d.getVarInt32())
        continue
      if tt == 88:
        self.set_limit(d.getVarInt32())
        continue
      if tt == 96:
        self.set_keys_only(d.getBoolean())
        continue
      if tt == 107:
        self.mutable_entityfilter().TryMerge(d)
        continue
      if tt == 170:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_index_def().TryMerge(tmp)
        continue
      if tt == 194:
        self.add_property_name(d.getPrefixedString())
        continue
      if tt == 200:
        self.set_distinct_infix_size(d.getVarInt32())
        continue
      if tt == 210:
        self.set_plan_label(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    # Indented text dump; groups use "{ }", submessages use "< >".
    res=""
    if self.has_primaryscan_:
      res+=prefix+"PrimaryScan {\n"
      res+=self.primaryscan_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    cnt=0
    for e in self.mergejoinscan_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("MergeJoinScan%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_index_def_:
      res+=prefix+"index_def <\n"
      res+=self.index_def_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
    cnt=0
    for e in self.property_name_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_distinct_infix_size_: res+=prefix+("distinct_infix_size: %s\n" % self.DebugFormatInt32(self.distinct_infix_size_))
    if self.has_entityfilter_:
      res+=prefix+"EntityFilter {\n"
      res+=self.entityfilter_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_plan_label_: res+=prefix+("plan_label: %s\n" % self.DebugFormatString(self.plan_label_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expand a sparse {tag: value} dict into a dense tuple indexed by tag
    # number, filling gaps with `default`.  (Static helper; note: no self.)
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field/tag number constants, mirroring the .proto definition.
  kPrimaryScanGroup = 1
  kPrimaryScanindex_name = 2
  kPrimaryScanstart_key = 3
  kPrimaryScanstart_inclusive = 4
  kPrimaryScanend_key = 5
  kPrimaryScanend_inclusive = 6
  kPrimaryScanstart_postfix_value = 22
  kPrimaryScanend_postfix_value = 23
  kPrimaryScanend_unapplied_log_timestamp_us = 19
  kMergeJoinScanGroup = 7
  kMergeJoinScanindex_name = 8
  kMergeJoinScanprefix_value = 9
  kMergeJoinScanvalue_prefix = 20
  kindex_def = 21
  koffset = 10
  klimit = 11
  kkeys_only = 12
  kproperty_name = 24
  kdistinct_infix_size = 25
  kEntityFilterGroup = 13
  kEntityFilterdistinct = 14
  kEntityFilterkind = 17
  kEntityFilterancestor = 18
  kplan_label = 26

  # Dense tag-number -> field-name table used for debug formatting.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "PrimaryScan",
    2: "index_name",
    3: "start_key",
    4: "start_inclusive",
    5: "end_key",
    6: "end_inclusive",
    7: "MergeJoinScan",
    8: "index_name",
    9: "prefix_value",
    10: "offset",
    11: "limit",
    12: "keys_only",
    13: "EntityFilter",
    14: "distinct",
    17: "kind",
    18: "ancestor",
    19: "end_unapplied_log_timestamp_us",
    20: "value_prefix",
    21: "index_def",
    22: "start_postfix_value",
    23: "end_postfix_value",
    24: "property_name",
    25: "distinct_infix_size",
    26: "plan_label",
  }, 26)

  # Dense tag-number -> wire-type table used when skipping unknown fields.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STARTGROUP,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.NUMERIC,
    7: ProtocolBuffer.Encoder.STARTGROUP,
    8: ProtocolBuffer.Encoder.STRING,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.NUMERIC,
    11: ProtocolBuffer.Encoder.NUMERIC,
    12: ProtocolBuffer.Encoder.NUMERIC,
    13: ProtocolBuffer.Encoder.STARTGROUP,
    14: ProtocolBuffer.Encoder.NUMERIC,
    17: ProtocolBuffer.Encoder.STRING,
    18: ProtocolBuffer.Encoder.STRING,
    19: ProtocolBuffer.Encoder.NUMERIC,
    20: ProtocolBuffer.Encoder.NUMERIC,
    21: ProtocolBuffer.Encoder.STRING,
    22: ProtocolBuffer.Encoder.STRING,
    23: ProtocolBuffer.Encoder.STRING,
    24: ProtocolBuffer.Encoder.STRING,
    25: ProtocolBuffer.Encoder.NUMERIC,
    26: ProtocolBuffer.Encoder.STRING,
  }, 26, ProtocolBuffer.Encoder.MAX_TYPE)

  # Unused legacy style hooks kept by the generator.
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledQuery'
class CompiledCursor_PositionIndexValue(ProtocolBuffer.ProtocolMessage):
  """Generated message for the IndexValue group inside CompiledCursor.Position.

  Fields (tag numbers per the kPositionIndexValue* constants below):
    property (30, string)               - optional property name.
    value    (31, PropertyValue, req.)  - the index value at this position.

  Generated code: do not edit by hand.
  """
  has_property_ = 0
  property_ = ""
  has_value_ = 0

  def __init__(self, contents=None):
    # value is a required submessage, so it is allocated eagerly.
    self.value_ = PropertyValue()
    if contents is not None: self.MergeFromString(contents)

  # --- property accessors ---------------------------------------------------
  def property(self): return self.property_

  def set_property(self, x):
    self.has_property_ = 1
    self.property_ = x

  def clear_property(self):
    if self.has_property_:
      self.has_property_ = 0
      self.property_ = ""

  def has_property(self): return self.has_property_

  # --- value accessors ------------------------------------------------------
  def value(self): return self.value_

  def mutable_value(self): self.has_value_ = 1; return self.value_

  def clear_value(self):self.has_value_ = 0; self.value_.Clear()

  def has_value(self): return self.has_value_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_property()): self.set_property(x.property())
    if (x.has_value()): self.mutable_value().MergeFrom(x.value())

  def Equals(self, x):
    # Presence flags must match as well as values.
    if x is self: return 1
    if self.has_property_ != x.has_property_: return 0
    if self.has_property_ and self.property_ != x.property_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # value is required; report it to debug_strs when missing.
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    elif not self.value_.IsInitialized(debug_strs): initialized = 0
    return initialized

  def ByteSize(self):
    # "+ 2" covers the always-emitted value field's 2-byte tag.
    n = 0
    if (self.has_property_): n += 2 + self.lengthString(len(self.property_))
    n += self.lengthString(self.value_.ByteSize())
    return n + 2

  def ByteSizePartial(self):
    # Like ByteSize, but the required value field counts only when present.
    n = 0
    if (self.has_property_): n += 2 + self.lengthString(len(self.property_))
    if (self.has_value_):
      n += 2
      n += self.lengthString(self.value_.ByteSizePartial())
    return n

  def Clear(self):
    self.clear_property()
    self.clear_value()

  def OutputUnchecked(self, out):
    # Wire tags: 242 = property (string), 250 = value (length-delimited
    # PropertyValue, required and emitted unconditionally).
    if (self.has_property_):
      out.putVarInt32(242)
      out.putPrefixedString(self.property_)
    out.putVarInt32(250)
    out.putVarInt32(self.value_.ByteSize())
    self.value_.OutputUnchecked(out)

  def OutputPartial(self, out):
    # Same layout but value is guarded by its presence flag.
    if (self.has_property_):
      out.putVarInt32(242)
      out.putPrefixedString(self.property_)
    if (self.has_value_):
      out.putVarInt32(250)
      out.putVarInt32(self.value_.ByteSizePartial())
      self.value_.OutputPartial(out)

  def TryMerge(self, d):
    # Decode until the END_GROUP tag (236); unknown fields are skipped.
    while 1:
      tt = d.getVarInt32()
      if tt == 236: break
      if tt == 242:
        self.set_property(d.getPrefixedString())
        continue
      if tt == 250:
        # Length-delimited submessage: decode into a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_value().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
    if self.has_value_:
      res+=prefix+"value <\n"
      res+=self.value_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
class CompiledCursor_Position(ProtocolBuffer.ProtocolMessage):
  """Generated message for the Position group inside CompiledCursor.

  Fields (tag numbers per the kPosition* constants below):
    start_key        (27, string)                  - opaque index key.
    IndexValue       (group 29, repeated)          - per-property values.
    key              (32, Reference, lazy)         - entity key at position.
    start_inclusive  (28, bool, default 1)         - include the start row.
    before_ascending (33, bool)                    - cursor-side semantics.

  Generated code: do not edit by hand.
  """
  # Presence flags and defaults; note start_inclusive_ defaults to 1 (true).
  has_start_key_ = 0
  start_key_ = ""
  has_key_ = 0
  key_ = None
  has_start_inclusive_ = 0
  start_inclusive_ = 1
  has_before_ascending_ = 0
  before_ascending_ = 0

  def __init__(self, contents=None):
    self.indexvalue_ = []
    # Lock guarding lazy construction of the key_ submessage.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  # --- start_key accessors --------------------------------------------------
  def start_key(self): return self.start_key_

  def set_start_key(self, x):
    self.has_start_key_ = 1
    self.start_key_ = x

  def clear_start_key(self):
    if self.has_start_key_:
      self.has_start_key_ = 0
      self.start_key_ = ""

  def has_start_key(self): return self.has_start_key_

  # --- indexvalue (repeated group) ------------------------------------------
  def indexvalue_size(self): return len(self.indexvalue_)

  def indexvalue_list(self): return self.indexvalue_

  def indexvalue(self, i):
    return self.indexvalue_[i]

  def mutable_indexvalue(self, i):
    return self.indexvalue_[i]

  def add_indexvalue(self):
    # Append a fresh element and return it for the caller to populate.
    x = CompiledCursor_PositionIndexValue()
    self.indexvalue_.append(x)
    return x

  def clear_indexvalue(self):
    self.indexvalue_ = []

  # --- key (lazily allocated Reference submessage) ---------------------------
  def key(self):
    # Double-checked lazy init under lazy_init_lock_.
    if self.key_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.key_ is None: self.key_ = Reference()
      finally:
        self.lazy_init_lock_.release()
    return self.key_

  def mutable_key(self): self.has_key_ = 1; return self.key()

  def clear_key(self):
    # Clear in place to avoid re-allocating on the next access.
    if self.has_key_:
      self.has_key_ = 0;
      if self.key_ is not None: self.key_.Clear()

  def has_key(self): return self.has_key_

  # --- start_inclusive accessors (defaults to true) --------------------------
  def start_inclusive(self): return self.start_inclusive_

  def set_start_inclusive(self, x):
    self.has_start_inclusive_ = 1
    self.start_inclusive_ = x

  def clear_start_inclusive(self):
    if self.has_start_inclusive_:
      self.has_start_inclusive_ = 0
      self.start_inclusive_ = 1

  def has_start_inclusive(self): return self.has_start_inclusive_

  # --- before_ascending accessors -------------------------------------------
  def before_ascending(self): return self.before_ascending_

  def set_before_ascending(self, x):
    self.has_before_ascending_ = 1
    self.before_ascending_ = x

  def clear_before_ascending(self):
    if self.has_before_ascending_:
      self.has_before_ascending_ = 0
      self.before_ascending_ = 0

  def has_before_ascending(self): return self.has_before_ascending_

  def MergeFrom(self, x):
    # Field-wise merge; repeated indexvalue elements are appended.
    assert x is not self
    if (x.has_start_key()): self.set_start_key(x.start_key())
    for i in xrange(x.indexvalue_size()): self.add_indexvalue().CopyFrom(x.indexvalue(i))
    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
    if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
    if (x.has_before_ascending()): self.set_before_ascending(x.before_ascending())

  def Equals(self, x):
    # Presence flags, values, and repeated-field order must all match.
    if x is self: return 1
    if self.has_start_key_ != x.has_start_key_: return 0
    if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
    if len(self.indexvalue_) != len(x.indexvalue_): return 0
    for e1, e2 in zip(self.indexvalue_, x.indexvalue_):
      if e1 != e2: return 0
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
    if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
    if self.has_before_ascending_ != x.has_before_ascending_: return 0
    if self.has_before_ascending_ and self.before_ascending_ != x.before_ascending_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields of its own; submessages must be initialized.
    initialized = 1
    for p in self.indexvalue_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    # Each repeated indexvalue group costs 4 tag bytes (2-byte START_GROUP
    # plus 2-byte END_GROUP) in addition to its payload.
    n = 0
    if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_))
    n += 4 * len(self.indexvalue_)
    for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSize()
    if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSize())
    if (self.has_start_inclusive_): n += 3
    if (self.has_before_ascending_): n += 3
    return n

  def ByteSizePartial(self):
    # Like ByteSize, using partial submessage sizes.
    n = 0
    if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_))
    n += 4 * len(self.indexvalue_)
    for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSizePartial()
    if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSizePartial())
    if (self.has_start_inclusive_): n += 3
    if (self.has_before_ascending_): n += 3
    return n

  def Clear(self):
    self.clear_start_key()
    self.clear_indexvalue()
    self.clear_key()
    self.clear_start_inclusive()
    self.clear_before_ascending()

  def OutputUnchecked(self, out):
    # Wire tags: 218 = start_key, 224 = start_inclusive, 235/236 = repeated
    # IndexValue group, 258 = key (length-delimited Reference),
    # 264 = before_ascending.
    if (self.has_start_key_):
      out.putVarInt32(218)
      out.putPrefixedString(self.start_key_)
    if (self.has_start_inclusive_):
      out.putVarInt32(224)
      out.putBoolean(self.start_inclusive_)
    for i in xrange(len(self.indexvalue_)):
      out.putVarInt32(235)
      self.indexvalue_[i].OutputUnchecked(out)
      out.putVarInt32(236)
    if (self.has_key_):
      out.putVarInt32(258)
      out.putVarInt32(self.key_.ByteSize())
      self.key_.OutputUnchecked(out)
    if (self.has_before_ascending_):
      out.putVarInt32(264)
      out.putBoolean(self.before_ascending_)

  def OutputPartial(self, out):
    # Same layout, using partial sizes/serialization.
    if (self.has_start_key_):
      out.putVarInt32(218)
      out.putPrefixedString(self.start_key_)
    if (self.has_start_inclusive_):
      out.putVarInt32(224)
      out.putBoolean(self.start_inclusive_)
    for i in xrange(len(self.indexvalue_)):
      out.putVarInt32(235)
      self.indexvalue_[i].OutputPartial(out)
      out.putVarInt32(236)
    if (self.has_key_):
      out.putVarInt32(258)
      out.putVarInt32(self.key_.ByteSizePartial())
      self.key_.OutputPartial(out)
    if (self.has_before_ascending_):
      out.putVarInt32(264)
      out.putBoolean(self.before_ascending_)

  def TryMerge(self, d):
    # Decode until the END_GROUP tag (20); unknown fields are skipped.
    while 1:
      tt = d.getVarInt32()
      if tt == 20: break
      if tt == 218:
        self.set_start_key(d.getPrefixedString())
        continue
      if tt == 224:
        self.set_start_inclusive(d.getBoolean())
        continue
      if tt == 235:
        # Group field: the element decodes itself up to its END_GROUP tag.
        self.add_indexvalue().TryMerge(d)
        continue
      if tt == 258:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_key().TryMerge(tmp)
        continue
      if tt == 264:
        self.set_before_ascending(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    # Indented text dump; groups use "{ }", submessages use "< >".
    res=""
    if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
    cnt=0
    for e in self.indexvalue_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("IndexValue%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_key_:
      res+=prefix+"key <\n"
      res+=self.key_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
    if self.has_before_ascending_: res+=prefix+("before_ascending: %s\n" % self.DebugFormatBool(self.before_ascending_))
    return res
class CompiledCursor(ProtocolBuffer.ProtocolMessage):
has_position_ = 0
position_ = None
has_postfix_position_ = 0
postfix_position_ = None
has_absolute_position_ = 0
absolute_position_ = None
  def __init__(self, contents=None):
    # All three position variants are lazily allocated under this lock;
    # optionally parse `contents` (serialized bytes) into self.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  # Accessors for the lazily-allocated `position` group
  # (CompiledCursor_Position submessage).
  def position(self):
    # Double-checked lazy init under lazy_init_lock_.
    if self.position_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.position_ is None: self.position_ = CompiledCursor_Position()
      finally:
        self.lazy_init_lock_.release()
    return self.position_

  def mutable_position(self): self.has_position_ = 1; return self.position()

  def clear_position(self):
    # Clear the submessage in place rather than dropping it.
    if self.has_position_:
      self.has_position_ = 0;
      if self.position_ is not None: self.position_.Clear()

  def has_position(self): return self.has_position_
  # Accessors for the lazily-allocated `postfix_position` field
  # (IndexPostfix submessage).
  def postfix_position(self):
    if self.postfix_position_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.postfix_position_ is None: self.postfix_position_ = IndexPostfix()
      finally:
        self.lazy_init_lock_.release()
    return self.postfix_position_

  def mutable_postfix_position(self): self.has_postfix_position_ = 1; return self.postfix_position()

  def clear_postfix_position(self):
    if self.has_postfix_position_:
      self.has_postfix_position_ = 0;
      if self.postfix_position_ is not None: self.postfix_position_.Clear()

  def has_postfix_position(self): return self.has_postfix_position_
  # Accessors for the lazily-allocated `absolute_position` field
  # (IndexPosition submessage).
  def absolute_position(self):
    if self.absolute_position_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.absolute_position_ is None: self.absolute_position_ = IndexPosition()
      finally:
        self.lazy_init_lock_.release()
    return self.absolute_position_

  def mutable_absolute_position(self): self.has_absolute_position_ = 1; return self.absolute_position()

  def clear_absolute_position(self):
    if self.has_absolute_position_:
      self.has_absolute_position_ = 0;
      if self.absolute_position_ is not None: self.absolute_position_.Clear()

  def has_absolute_position(self): return self.has_absolute_position_
  def MergeFrom(self, x):
    # Field-wise merge of another CompiledCursor into self.
    assert x is not self
    if (x.has_position()): self.mutable_position().MergeFrom(x.position())
    if (x.has_postfix_position()): self.mutable_postfix_position().MergeFrom(x.postfix_position())
    if (x.has_absolute_position()): self.mutable_absolute_position().MergeFrom(x.absolute_position())
  def Equals(self, x):
    # Presence flags must match as well as submessage values.
    if x is self: return 1
    if self.has_position_ != x.has_position_: return 0
    if self.has_position_ and self.position_ != x.position_: return 0
    if self.has_postfix_position_ != x.has_postfix_position_: return 0
    if self.has_postfix_position_ and self.postfix_position_ != x.postfix_position_: return 0
    if self.has_absolute_position_ != x.has_absolute_position_: return 0
    if self.has_absolute_position_ and self.absolute_position_ != x.absolute_position_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # No required fields of its own; each present submessage must itself
    # be initialized.
    initialized = 1
    if (self.has_position_ and not self.position_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_postfix_position_ and not self.postfix_position_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_absolute_position_ and not self.absolute_position_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    # position is a group (2 tag bytes); the other two are length-delimited
    # fields (1 tag byte + length prefix + payload).
    n = 0
    if (self.has_position_): n += 2 + self.position_.ByteSize()
    if (self.has_postfix_position_): n += 1 + self.lengthString(self.postfix_position_.ByteSize())
    if (self.has_absolute_position_): n += 1 + self.lengthString(self.absolute_position_.ByteSize())
    return n
  def ByteSizePartial(self):
    # Like ByteSize, using partial submessage sizes.
    n = 0
    if (self.has_position_): n += 2 + self.position_.ByteSizePartial()
    if (self.has_postfix_position_): n += 1 + self.lengthString(self.postfix_position_.ByteSizePartial())
    if (self.has_absolute_position_): n += 1 + self.lengthString(self.absolute_position_.ByteSizePartial())
    return n
  def Clear(self):
    # Reset all fields to their unset/default state.
    self.clear_position()
    self.clear_postfix_position()
    self.clear_absolute_position()
  def OutputUnchecked(self, out):
    # Wire tags: 10 = postfix_position (length-delimited IndexPostfix),
    # 19/20 = Position group, 26 = absolute_position (length-delimited
    # IndexPosition).  Note postfix_position (field 1) is emitted first.
    if (self.has_postfix_position_):
      out.putVarInt32(10)
      out.putVarInt32(self.postfix_position_.ByteSize())
      self.postfix_position_.OutputUnchecked(out)
    if (self.has_position_):
      out.putVarInt32(19)
      self.position_.OutputUnchecked(out)
      out.putVarInt32(20)
    if (self.has_absolute_position_):
      out.putVarInt32(26)
      out.putVarInt32(self.absolute_position_.ByteSize())
      self.absolute_position_.OutputUnchecked(out)
# Same layout as OutputUnchecked, but uses the *Partial size/output
# variants so children missing required fields can still be written.
def OutputPartial(self, out):
  if (self.has_postfix_position_):
    out.putVarInt32(10)
    out.putVarInt32(self.postfix_position_.ByteSizePartial())
    self.postfix_position_.OutputPartial(out)
  if (self.has_position_):
    out.putVarInt32(19)
    self.position_.OutputPartial(out)
    out.putVarInt32(20)
  if (self.has_absolute_position_):
    out.putVarInt32(26)
    out.putVarInt32(self.absolute_position_.ByteSizePartial())
    self.absolute_position_.OutputPartial(out)
# Decode fields from decoder `d` until its buffer is exhausted, merging
# into self.  Length-delimited fields (tags 10 and 26) are decoded via a
# bounded sub-decoder; the group field (tag 19) is decoded in place --
# the child's TryMerge consumes up to its END_GROUP tag.  Unknown tags
# are skipped; tag 0 is never valid and raises a decode error.
def TryMerge(self, d):
  while d.avail() > 0:
    tt = d.getVarInt32()
    if tt == 10:
      length = d.getVarInt32()
      tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
      d.skip(length)
      self.mutable_postfix_position().TryMerge(tmp)
      continue
    if tt == 19:
      self.mutable_position().TryMerge(d)
      continue
    if tt == 26:
      length = d.getVarInt32()
      tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
      d.skip(length)
      self.mutable_absolute_position().TryMerge(tmp)
      continue
    if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
    d.skipData(tt)
# Human-readable debug text: groups print with braces, sub-messages with
# angle brackets, each child indented two extra spaces via `prefix`.
def __str__(self, prefix="", printElemNumber=0):
  res=""
  if self.has_position_:
    res+=prefix+"Position {\n"
    res+=self.position_.__str__(prefix + "  ", printElemNumber)
    res+=prefix+"}\n"
  if self.has_postfix_position_:
    res+=prefix+"postfix_position <\n"
    res+=self.postfix_position_.__str__(prefix + "  ", printElemNumber)
    res+=prefix+">\n"
  if self.has_absolute_position_:
    res+=prefix+"absolute_position <\n"
    res+=self.absolute_position_.__str__(prefix + "  ", printElemNumber)
    res+=prefix+">\n"
  return res
# Class-body helper (note: no `self` -- it is invoked while the class
# body is still being evaluated): densify a sparse {tag: value} mapping
# into a tuple indexed by tag number, padding gaps with `default`.
def _BuildTagLookupTable(sparse, maxtag, default=None):
  return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
# Wire-format metadata for apphosting_datastore_v3.CompiledCursor.
# The k* constants are field numbers; the nested Position group is
# field 2 and its members occupy tags 27-33 (IndexValue, tag 29, is a
# group nested inside Position per _TYPES below).
kPositionGroup = 2
kPositionstart_key = 27
kPositionIndexValueGroup = 29
kPositionIndexValueproperty = 30
kPositionIndexValuevalue = 31
kPositionkey = 32
kPositionstart_inclusive = 28
kPositionbefore_ascending = 33
kpostfix_position = 1
kabsolute_position = 3
# Field-number -> field-name table, used for text/debug formatting.
_TEXT = _BuildTagLookupTable({
  0: "ErrorCode",
  1: "postfix_position",
  2: "Position",
  3: "absolute_position",
  27: "start_key",
  28: "start_inclusive",
  29: "IndexValue",
  30: "property",
  31: "value",
  32: "key",
  33: "before_ascending",
}, 33)
# Field-number -> encoder wire-type table, used by generic skip/format code.
_TYPES = _BuildTagLookupTable({
  0: ProtocolBuffer.Encoder.NUMERIC,
  1: ProtocolBuffer.Encoder.STRING,
  2: ProtocolBuffer.Encoder.STARTGROUP,
  3: ProtocolBuffer.Encoder.STRING,
  27: ProtocolBuffer.Encoder.STRING,
  28: ProtocolBuffer.Encoder.NUMERIC,
  29: ProtocolBuffer.Encoder.STARTGROUP,
  30: ProtocolBuffer.Encoder.STRING,
  31: ProtocolBuffer.Encoder.STRING,
  32: ProtocolBuffer.Encoder.STRING,
  33: ProtocolBuffer.Encoder.NUMERIC,
}, 33, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledCursor'
class Cursor(ProtocolBuffer.ProtocolMessage):
  """apphosting_datastore_v3.Cursor: an opaque datastore query cursor.

  Fields:
    cursor (field 1, required fixed64): the cursor position value.
    app    (field 2, optional string):  id of the owning application.

  NOTE(review): this appears to be auto-generated protocol-buffer code;
  tag numbers and emission order are wire-format-critical.
  """
  # Class-level defaults double as the "unset" values; has_* flags track
  # explicit presence independently of the value.
  has_cursor_ = 0
  cursor_ = 0
  has_app_ = 0
  app_ = ""
  def __init__(self, contents=None):
    # Optionally decode an initial state from a serialized byte string.
    if contents is not None: self.MergeFromString(contents)
  def cursor(self): return self.cursor_
  def set_cursor(self, x):
    self.has_cursor_ = 1
    self.cursor_ = x
  def clear_cursor(self):
    if self.has_cursor_:
      self.has_cursor_ = 0
      self.cursor_ = 0
  def has_cursor(self): return self.has_cursor_
  def app(self): return self.app_
  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x
  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""
  def has_app(self): return self.has_app_
  def MergeFrom(self, x):
    # Copy every field that is set on x over this message's fields.
    assert x is not self
    if (x.has_cursor()): self.set_cursor(x.cursor())
    if (x.has_app()): self.set_app(x.app())
  def Equals(self, x):
    # Presence-and-value equality; returns 1/0.
    if x is self: return 1
    if self.has_cursor_ != x.has_cursor_: return 0
    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # cursor is the only required field.
    initialized = 1
    if (not self.has_cursor_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: cursor not set.')
    return initialized
  def ByteSize(self):
    # The constant 9 = 1 tag byte + 8 bytes for the required fixed64
    # cursor, which is counted unconditionally here.
    n = 0
    if (self.has_app_): n += 1 + self.lengthString(len(self.app_))
    return n + 9
  def ByteSizePartial(self):
    # Like ByteSize, but counts cursor only when it is actually set.
    n = 0
    if (self.has_cursor_):
      n += 9
    if (self.has_app_): n += 1 + self.lengthString(len(self.app_))
    return n
  def Clear(self):
    self.clear_cursor()
    self.clear_app()
  def OutputUnchecked(self, out):
    # Wire tags: 9 = field 1 fixed64 (cursor), 18 = field 2 string (app).
    out.putVarInt32(9)
    out.put64(self.cursor_)
    if (self.has_app_):
      out.putVarInt32(18)
      out.putPrefixedString(self.app_)
  def OutputPartial(self, out):
    # Emits only fields that are present (cursor may be absent here).
    if (self.has_cursor_):
      out.putVarInt32(9)
      out.put64(self.cursor_)
    if (self.has_app_):
      out.putVarInt32(18)
      out.putPrefixedString(self.app_)
  def TryMerge(self, d):
    # Decode from d until exhausted; unknown tags are skipped, tag 0 is
    # invalid on the wire and raises a decode error.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 9:
        self.set_cursor(d.get64())
        continue
      if tt == 18:
        self.set_app(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # One "name: value" debug line per set field.
    res=""
    if self.has_cursor_: res+=prefix+("cursor: %s\n" % self.DebugFormatFixed64(self.cursor_))
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Class-body helper (no self): densify a sparse {tag: value} dict
    # into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field numbers and tag lookup tables for debug/skip machinery.
  kcursor = 1
  kapp = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "cursor",
    2: "app",
  }, 2)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.DOUBLE,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cursor'
class Error(ProtocolBuffer.ProtocolMessage):
  """apphosting_datastore_v3.Error: an empty message whose class-level
  constants enumerate the datastore service error codes.

  The message itself carries no fields; only the ErrorCode enum values
  and their name table matter.  All serialization methods are therefore
  trivial no-ops generated for interface uniformity.
  """
  BAD_REQUEST          =    1
  CONCURRENT_TRANSACTION =    2
  INTERNAL_ERROR       =    3
  NEED_INDEX           =    4
  TIMEOUT              =    5
  PERMISSION_DENIED    =    6
  BIGTABLE_ERROR       =    7
  COMMITTED_BUT_STILL_APPLYING =    8
  CAPABILITY_DISABLED  =    9
  TRY_ALTERNATE_BACKEND =   10
  SAFE_TIME_TOO_OLD    =   11
  # Numeric code -> symbolic name, for ErrorCode_Name below.
  _ErrorCode_NAMES = {
    1: "BAD_REQUEST",
    2: "CONCURRENT_TRANSACTION",
    3: "INTERNAL_ERROR",
    4: "NEED_INDEX",
    5: "TIMEOUT",
    6: "PERMISSION_DENIED",
    7: "BIGTABLE_ERROR",
    8: "COMMITTED_BUT_STILL_APPLYING",
    9: "CAPABILITY_DISABLED",
    10: "TRY_ALTERNATE_BACKEND",
    11: "SAFE_TIME_TOO_OLD",
  }
  # Returns the symbolic name for a code, or "" for unknown codes.
  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)
  def __init__(self, contents=None):
    # The bare `pass` is a generated artifact (no fields to initialize).
    pass
    if contents is not None: self.MergeFromString(contents)
  def MergeFrom(self, x):
    assert x is not self
  def Equals(self, x):
    # All Error instances are equal: there is no field state to compare.
    if x is self: return 1
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized
  def ByteSize(self):
    n = 0
    return n
  def ByteSizePartial(self):
    n = 0
    return n
  def Clear(self):
    pass
  def OutputUnchecked(self, out):
    pass
  def OutputPartial(self, out):
    pass
  def TryMerge(self, d):
    # No known fields: every tag is skipped; tag 0 is invalid.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Class-body helper (no self): densify a sparse {tag: value} dict
    # into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Error'
class Cost_CommitCost(ProtocolBuffer.ProtocolMessage):
  """Nested CommitCost group (field 5) of apphosting_datastore_v3.Cost.

  Fields:
    requested_entity_puts    (field 6, optional varint)
    requested_entity_deletes (field 7, optional varint)

  As a protobuf *group*, its decoder loop terminates on the parent's
  END_GROUP tag (44 = field 5, wire type 4) rather than on buffer end.
  """
  has_requested_entity_puts_ = 0
  requested_entity_puts_ = 0
  has_requested_entity_deletes_ = 0
  requested_entity_deletes_ = 0
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def requested_entity_puts(self): return self.requested_entity_puts_
  def set_requested_entity_puts(self, x):
    self.has_requested_entity_puts_ = 1
    self.requested_entity_puts_ = x
  def clear_requested_entity_puts(self):
    if self.has_requested_entity_puts_:
      self.has_requested_entity_puts_ = 0
      self.requested_entity_puts_ = 0
  def has_requested_entity_puts(self): return self.has_requested_entity_puts_
  def requested_entity_deletes(self): return self.requested_entity_deletes_
  def set_requested_entity_deletes(self, x):
    self.has_requested_entity_deletes_ = 1
    self.requested_entity_deletes_ = x
  def clear_requested_entity_deletes(self):
    if self.has_requested_entity_deletes_:
      self.has_requested_entity_deletes_ = 0
      self.requested_entity_deletes_ = 0
  def has_requested_entity_deletes(self): return self.has_requested_entity_deletes_
  def MergeFrom(self, x):
    # Copy every field that is set on x over this message's fields.
    assert x is not self
    if (x.has_requested_entity_puts()): self.set_requested_entity_puts(x.requested_entity_puts())
    if (x.has_requested_entity_deletes()): self.set_requested_entity_deletes(x.requested_entity_deletes())
  def Equals(self, x):
    # Presence-and-value equality; returns 1/0.
    if x is self: return 1
    if self.has_requested_entity_puts_ != x.has_requested_entity_puts_: return 0
    if self.has_requested_entity_puts_ and self.requested_entity_puts_ != x.requested_entity_puts_: return 0
    if self.has_requested_entity_deletes_ != x.has_requested_entity_deletes_: return 0
    if self.has_requested_entity_deletes_ and self.requested_entity_deletes_ != x.requested_entity_deletes_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # No required fields.
    initialized = 1
    return initialized
  def ByteSize(self):
    # Each present field costs 1 tag byte plus a varint-encoded value.
    n = 0
    if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_)
    if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_)
    return n
  def ByteSizePartial(self):
    n = 0
    if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_)
    if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_)
    return n
  def Clear(self):
    self.clear_requested_entity_puts()
    self.clear_requested_entity_deletes()
  def OutputUnchecked(self, out):
    # Wire tags: 48 = field 6 varint, 56 = field 7 varint.
    if (self.has_requested_entity_puts_):
      out.putVarInt32(48)
      out.putVarInt32(self.requested_entity_puts_)
    if (self.has_requested_entity_deletes_):
      out.putVarInt32(56)
      out.putVarInt32(self.requested_entity_deletes_)
  def OutputPartial(self, out):
    if (self.has_requested_entity_puts_):
      out.putVarInt32(48)
      out.putVarInt32(self.requested_entity_puts_)
    if (self.has_requested_entity_deletes_):
      out.putVarInt32(56)
      out.putVarInt32(self.requested_entity_deletes_)
  def TryMerge(self, d):
    # Group decode: loop until the enclosing END_GROUP tag 44 is seen.
    while 1:
      tt = d.getVarInt32()
      if tt == 44: break
      if tt == 48:
        self.set_requested_entity_puts(d.getVarInt32())
        continue
      if tt == 56:
        self.set_requested_entity_deletes(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # One "name: value" debug line per set field.
    res=""
    if self.has_requested_entity_puts_: res+=prefix+("requested_entity_puts: %s\n" % self.DebugFormatInt32(self.requested_entity_puts_))
    if self.has_requested_entity_deletes_: res+=prefix+("requested_entity_deletes: %s\n" % self.DebugFormatInt32(self.requested_entity_deletes_))
    return res
class Cost(ProtocolBuffer.ProtocolMessage):
  """apphosting_datastore_v3.Cost: write-cost accounting for a datastore
  operation.

  Fields (all optional):
    index_writes              (field 1, varint)
    index_write_bytes         (field 2, varint)
    entity_writes             (field 3, varint)
    entity_write_bytes        (field 4, varint)
    CommitCost                (field 5, group -> Cost_CommitCost)
    approximate_storage_delta (field 6... NOTE: wire field number is 8)
    id_sequence_updates       (field 9, varint)

  NOTE(review): appears auto-generated; tag numbers and emission order
  are wire-format-critical.
  """
  has_index_writes_ = 0
  index_writes_ = 0
  has_index_write_bytes_ = 0
  index_write_bytes_ = 0
  has_entity_writes_ = 0
  entity_writes_ = 0
  has_entity_write_bytes_ = 0
  entity_write_bytes_ = 0
  has_commitcost_ = 0
  commitcost_ = None
  has_approximate_storage_delta_ = 0
  approximate_storage_delta_ = 0
  has_id_sequence_updates_ = 0
  id_sequence_updates_ = 0
  def __init__(self, contents=None):
    # Per-instance lock guarding lazy construction of commitcost_
    # (Python 2 `thread` module -- presumably imported at file top).
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def index_writes(self): return self.index_writes_
  def set_index_writes(self, x):
    self.has_index_writes_ = 1
    self.index_writes_ = x
  def clear_index_writes(self):
    if self.has_index_writes_:
      self.has_index_writes_ = 0
      self.index_writes_ = 0
  def has_index_writes(self): return self.has_index_writes_
  def index_write_bytes(self): return self.index_write_bytes_
  def set_index_write_bytes(self, x):
    self.has_index_write_bytes_ = 1
    self.index_write_bytes_ = x
  def clear_index_write_bytes(self):
    if self.has_index_write_bytes_:
      self.has_index_write_bytes_ = 0
      self.index_write_bytes_ = 0
  def has_index_write_bytes(self): return self.has_index_write_bytes_
  def entity_writes(self): return self.entity_writes_
  def set_entity_writes(self, x):
    self.has_entity_writes_ = 1
    self.entity_writes_ = x
  def clear_entity_writes(self):
    if self.has_entity_writes_:
      self.has_entity_writes_ = 0
      self.entity_writes_ = 0
  def has_entity_writes(self): return self.has_entity_writes_
  def entity_write_bytes(self): return self.entity_write_bytes_
  def set_entity_write_bytes(self, x):
    self.has_entity_write_bytes_ = 1
    self.entity_write_bytes_ = x
  def clear_entity_write_bytes(self):
    if self.has_entity_write_bytes_:
      self.has_entity_write_bytes_ = 0
      self.entity_write_bytes_ = 0
  def has_entity_write_bytes(self): return self.has_entity_write_bytes_
  def commitcost(self):
    # Lazily construct the sub-message under the lock so a plain read
    # never races the first allocation.
    if self.commitcost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.commitcost_ is None: self.commitcost_ = Cost_CommitCost()
      finally:
        self.lazy_init_lock_.release()
    return self.commitcost_
  def mutable_commitcost(self): self.has_commitcost_ = 1; return self.commitcost()
  def clear_commitcost(self):
    # Keep the allocated sub-message but reset its contents and presence.
    if self.has_commitcost_:
      self.has_commitcost_ = 0;
      if self.commitcost_ is not None: self.commitcost_.Clear()
  def has_commitcost(self): return self.has_commitcost_
  def approximate_storage_delta(self): return self.approximate_storage_delta_
  def set_approximate_storage_delta(self, x):
    self.has_approximate_storage_delta_ = 1
    self.approximate_storage_delta_ = x
  def clear_approximate_storage_delta(self):
    if self.has_approximate_storage_delta_:
      self.has_approximate_storage_delta_ = 0
      self.approximate_storage_delta_ = 0
  def has_approximate_storage_delta(self): return self.has_approximate_storage_delta_
  def id_sequence_updates(self): return self.id_sequence_updates_
  def set_id_sequence_updates(self, x):
    self.has_id_sequence_updates_ = 1
    self.id_sequence_updates_ = x
  def clear_id_sequence_updates(self):
    if self.has_id_sequence_updates_:
      self.has_id_sequence_updates_ = 0
      self.id_sequence_updates_ = 0
  def has_id_sequence_updates(self): return self.has_id_sequence_updates_
  def MergeFrom(self, x):
    # Copy every field that is set on x; the sub-message is merged
    # recursively rather than replaced.
    assert x is not self
    if (x.has_index_writes()): self.set_index_writes(x.index_writes())
    if (x.has_index_write_bytes()): self.set_index_write_bytes(x.index_write_bytes())
    if (x.has_entity_writes()): self.set_entity_writes(x.entity_writes())
    if (x.has_entity_write_bytes()): self.set_entity_write_bytes(x.entity_write_bytes())
    if (x.has_commitcost()): self.mutable_commitcost().MergeFrom(x.commitcost())
    if (x.has_approximate_storage_delta()): self.set_approximate_storage_delta(x.approximate_storage_delta())
    if (x.has_id_sequence_updates()): self.set_id_sequence_updates(x.id_sequence_updates())
  def Equals(self, x):
    # Presence-and-value equality; returns 1/0.
    if x is self: return 1
    if self.has_index_writes_ != x.has_index_writes_: return 0
    if self.has_index_writes_ and self.index_writes_ != x.index_writes_: return 0
    if self.has_index_write_bytes_ != x.has_index_write_bytes_: return 0
    if self.has_index_write_bytes_ and self.index_write_bytes_ != x.index_write_bytes_: return 0
    if self.has_entity_writes_ != x.has_entity_writes_: return 0
    if self.has_entity_writes_ and self.entity_writes_ != x.entity_writes_: return 0
    if self.has_entity_write_bytes_ != x.has_entity_write_bytes_: return 0
    if self.has_entity_write_bytes_ and self.entity_write_bytes_ != x.entity_write_bytes_: return 0
    if self.has_commitcost_ != x.has_commitcost_: return 0
    if self.has_commitcost_ and self.commitcost_ != x.commitcost_: return 0
    if self.has_approximate_storage_delta_ != x.has_approximate_storage_delta_: return 0
    if self.has_approximate_storage_delta_ and self.approximate_storage_delta_ != x.approximate_storage_delta_: return 0
    if self.has_id_sequence_updates_ != x.has_id_sequence_updates_: return 0
    if self.has_id_sequence_updates_ and self.id_sequence_updates_ != x.id_sequence_updates_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # No required fields of its own; only the group must be initialized.
    initialized = 1
    if (self.has_commitcost_ and not self.commitcost_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    # Varint fields cost 1 tag byte + varint value; the CommitCost group
    # (field 5) costs 2 bytes for its START/END group tags.
    n = 0
    if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_)
    if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_)
    if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_)
    if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_)
    if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSize()
    if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_)
    if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_)
    return n
  def ByteSizePartial(self):
    n = 0
    if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_)
    if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_)
    if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_)
    if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_)
    if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSizePartial()
    if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_)
    if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_)
    return n
  def Clear(self):
    self.clear_index_writes()
    self.clear_index_write_bytes()
    self.clear_entity_writes()
    self.clear_entity_write_bytes()
    self.clear_commitcost()
    self.clear_approximate_storage_delta()
    self.clear_id_sequence_updates()
  def OutputUnchecked(self, out):
    # Wire tags: 8/16/24/32 = fields 1-4 varint; 43/44 = field 5
    # START_GROUP/END_GROUP; 64 = field 8 varint; 72 = field 9 varint.
    if (self.has_index_writes_):
      out.putVarInt32(8)
      out.putVarInt32(self.index_writes_)
    if (self.has_index_write_bytes_):
      out.putVarInt32(16)
      out.putVarInt32(self.index_write_bytes_)
    if (self.has_entity_writes_):
      out.putVarInt32(24)
      out.putVarInt32(self.entity_writes_)
    if (self.has_entity_write_bytes_):
      out.putVarInt32(32)
      out.putVarInt32(self.entity_write_bytes_)
    if (self.has_commitcost_):
      out.putVarInt32(43)
      self.commitcost_.OutputUnchecked(out)
      out.putVarInt32(44)
    if (self.has_approximate_storage_delta_):
      out.putVarInt32(64)
      out.putVarInt32(self.approximate_storage_delta_)
    if (self.has_id_sequence_updates_):
      out.putVarInt32(72)
      out.putVarInt32(self.id_sequence_updates_)
  def OutputPartial(self, out):
    if (self.has_index_writes_):
      out.putVarInt32(8)
      out.putVarInt32(self.index_writes_)
    if (self.has_index_write_bytes_):
      out.putVarInt32(16)
      out.putVarInt32(self.index_write_bytes_)
    if (self.has_entity_writes_):
      out.putVarInt32(24)
      out.putVarInt32(self.entity_writes_)
    if (self.has_entity_write_bytes_):
      out.putVarInt32(32)
      out.putVarInt32(self.entity_write_bytes_)
    if (self.has_commitcost_):
      out.putVarInt32(43)
      self.commitcost_.OutputPartial(out)
      out.putVarInt32(44)
    if (self.has_approximate_storage_delta_):
      out.putVarInt32(64)
      out.putVarInt32(self.approximate_storage_delta_)
    if (self.has_id_sequence_updates_):
      out.putVarInt32(72)
      out.putVarInt32(self.id_sequence_updates_)
  def TryMerge(self, d):
    # Decode until exhausted.  The group (tag 43) decodes in place: the
    # child's TryMerge consumes up to END_GROUP tag 44.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_index_writes(d.getVarInt32())
        continue
      if tt == 16:
        self.set_index_write_bytes(d.getVarInt32())
        continue
      if tt == 24:
        self.set_entity_writes(d.getVarInt32())
        continue
      if tt == 32:
        self.set_entity_write_bytes(d.getVarInt32())
        continue
      if tt == 43:
        self.mutable_commitcost().TryMerge(d)
        continue
      if tt == 64:
        self.set_approximate_storage_delta(d.getVarInt32())
        continue
      if tt == 72:
        self.set_id_sequence_updates(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text; the group prints with braces like a nested scope.
    res=""
    if self.has_index_writes_: res+=prefix+("index_writes: %s\n" % self.DebugFormatInt32(self.index_writes_))
    if self.has_index_write_bytes_: res+=prefix+("index_write_bytes: %s\n" % self.DebugFormatInt32(self.index_write_bytes_))
    if self.has_entity_writes_: res+=prefix+("entity_writes: %s\n" % self.DebugFormatInt32(self.entity_writes_))
    if self.has_entity_write_bytes_: res+=prefix+("entity_write_bytes: %s\n" % self.DebugFormatInt32(self.entity_write_bytes_))
    if self.has_commitcost_:
      res+=prefix+"CommitCost {\n"
      res+=self.commitcost_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_approximate_storage_delta_: res+=prefix+("approximate_storage_delta: %s\n" % self.DebugFormatInt32(self.approximate_storage_delta_))
    if self.has_id_sequence_updates_: res+=prefix+("id_sequence_updates: %s\n" % self.DebugFormatInt32(self.id_sequence_updates_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Class-body helper (no self): densify a sparse {tag: value} dict
    # into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field numbers; CommitCost's members (6, 7) live inside the group.
  kindex_writes = 1
  kindex_write_bytes = 2
  kentity_writes = 3
  kentity_write_bytes = 4
  kCommitCostGroup = 5
  kCommitCostrequested_entity_puts = 6
  kCommitCostrequested_entity_deletes = 7
  kapproximate_storage_delta = 8
  kid_sequence_updates = 9
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "index_writes",
    2: "index_write_bytes",
    3: "entity_writes",
    4: "entity_write_bytes",
    5: "CommitCost",
    6: "requested_entity_puts",
    7: "requested_entity_deletes",
    8: "approximate_storage_delta",
    9: "id_sequence_updates",
  }, 9)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STARTGROUP,
    6: ProtocolBuffer.Encoder.NUMERIC,
    7: ProtocolBuffer.Encoder.NUMERIC,
    8: ProtocolBuffer.Encoder.NUMERIC,
    9: ProtocolBuffer.Encoder.NUMERIC,
  }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cost'
class GetRequest(ProtocolBuffer.ProtocolMessage):
  """apphosting_datastore_v3.GetRequest: a batch entity-fetch request.

  Fields:
    key            (field 1, repeated Reference): keys to fetch.
    transaction    (field 2, optional Transaction)
    failover_ms    (field 3, optional varint)
    strong         (field 4, optional bool)
    allow_deferred (field 5, optional bool)
    header         (field 6, optional InternalHeader)

  NOTE(review): appears auto-generated; tag numbers and emission order
  are wire-format-critical.
  """
  has_header_ = 0
  header_ = None
  has_transaction_ = 0
  transaction_ = None
  has_failover_ms_ = 0
  failover_ms_ = 0
  has_strong_ = 0
  strong_ = 0
  has_allow_deferred_ = 0
  allow_deferred_ = 0
  def __init__(self, contents=None):
    # key_ is per-instance (repeated field); the lock guards lazy
    # construction of the optional sub-messages.
    self.key_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def header(self):
    # Lazily construct the sub-message under the lock.
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_
  def mutable_header(self): self.has_header_ = 1; return self.header()
  def clear_header(self):
    # Keep the allocated sub-message but reset its contents and presence.
    if self.has_header_:
      self.has_header_ = 0;
      if self.header_ is not None: self.header_.Clear()
  def has_header(self): return self.has_header_
  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_
  def key(self, i):
    return self.key_[i]
  def mutable_key(self, i):
    return self.key_[i]
  def add_key(self):
    # Append a fresh Reference and return it for the caller to fill in.
    x = Reference()
    self.key_.append(x)
    return x
  def clear_key(self):
    self.key_ = []
  def transaction(self):
    if self.transaction_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.transaction_ is None: self.transaction_ = Transaction()
      finally:
        self.lazy_init_lock_.release()
    return self.transaction_
  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
  def clear_transaction(self):
    if self.has_transaction_:
      self.has_transaction_ = 0;
      if self.transaction_ is not None: self.transaction_.Clear()
  def has_transaction(self): return self.has_transaction_
  def failover_ms(self): return self.failover_ms_
  def set_failover_ms(self, x):
    self.has_failover_ms_ = 1
    self.failover_ms_ = x
  def clear_failover_ms(self):
    if self.has_failover_ms_:
      self.has_failover_ms_ = 0
      self.failover_ms_ = 0
  def has_failover_ms(self): return self.has_failover_ms_
  def strong(self): return self.strong_
  def set_strong(self, x):
    self.has_strong_ = 1
    self.strong_ = x
  def clear_strong(self):
    if self.has_strong_:
      self.has_strong_ = 0
      self.strong_ = 0
  def has_strong(self): return self.has_strong_
  def allow_deferred(self): return self.allow_deferred_
  def set_allow_deferred(self, x):
    self.has_allow_deferred_ = 1
    self.allow_deferred_ = x
  def clear_allow_deferred(self):
    if self.has_allow_deferred_:
      self.has_allow_deferred_ = 0
      self.allow_deferred_ = 0
  def has_allow_deferred(self): return self.has_allow_deferred_
  def MergeFrom(self, x):
    # Copy set scalar fields; repeated keys are appended (copied), and
    # sub-messages are merged recursively.
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
    if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms())
    if (x.has_strong()): self.set_strong(x.strong())
    if (x.has_allow_deferred()): self.set_allow_deferred(x.allow_deferred())
  def Equals(self, x):
    # Presence-and-value equality; repeated keys compare element-wise.
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if self.has_transaction_ != x.has_transaction_: return 0
    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
    if self.has_failover_ms_ != x.has_failover_ms_: return 0
    if self.has_failover_ms_ and self.failover_ms_ != x.failover_ms_: return 0
    if self.has_strong_ != x.has_strong_: return 0
    if self.has_strong_ and self.strong_ != x.strong_: return 0
    if self.has_allow_deferred_ != x.has_allow_deferred_: return 0
    if self.has_allow_deferred_ and self.allow_deferred_ != x.allow_deferred_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # No required fields of its own; all present/repeated sub-messages
    # must themselves be initialized.
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    # Length-delimited fields: 1 tag byte + varint length prefix each;
    # the two bools (fields 4, 5) cost a fixed 2 bytes when present.
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
    if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_)
    if (self.has_strong_): n += 2
    if (self.has_allow_deferred_): n += 2
    return n
  def ByteSizePartial(self):
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
    if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_)
    if (self.has_strong_): n += 2
    if (self.has_allow_deferred_): n += 2
    return n
  def Clear(self):
    self.clear_header()
    self.clear_key()
    self.clear_transaction()
    self.clear_failover_ms()
    self.clear_strong()
    self.clear_allow_deferred()
  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 (key, repeated); 18 = field 2
    # (transaction); 24 = field 3 varint; 32/40 = fields 4/5 bool;
    # 50 = field 6 (header).  header is emitted last by design.
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)
    if (self.has_transaction_):
      out.putVarInt32(18)
      out.putVarInt32(self.transaction_.ByteSize())
      self.transaction_.OutputUnchecked(out)
    if (self.has_failover_ms_):
      out.putVarInt32(24)
      out.putVarInt64(self.failover_ms_)
    if (self.has_strong_):
      out.putVarInt32(32)
      out.putBoolean(self.strong_)
    if (self.has_allow_deferred_):
      out.putVarInt32(40)
      out.putBoolean(self.allow_deferred_)
    if (self.has_header_):
      out.putVarInt32(50)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)
  def OutputPartial(self, out):
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSizePartial())
      self.key_[i].OutputPartial(out)
    if (self.has_transaction_):
      out.putVarInt32(18)
      out.putVarInt32(self.transaction_.ByteSizePartial())
      self.transaction_.OutputPartial(out)
    if (self.has_failover_ms_):
      out.putVarInt32(24)
      out.putVarInt64(self.failover_ms_)
    if (self.has_strong_):
      out.putVarInt32(32)
      out.putBoolean(self.strong_)
    if (self.has_allow_deferred_):
      out.putVarInt32(40)
      out.putBoolean(self.allow_deferred_)
    if (self.has_header_):
      out.putVarInt32(50)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)
  def TryMerge(self, d):
    # Decode until exhausted; length-delimited fields are decoded via a
    # bounded sub-decoder.  Unknown tags skipped; tag 0 is invalid.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_transaction().TryMerge(tmp)
        continue
      if tt == 24:
        self.set_failover_ms(d.getVarInt64())
        continue
      if tt == 32:
        self.set_strong(d.getBoolean())
        continue
      if tt == 40:
        self.set_allow_deferred(d.getBoolean())
        continue
      if tt == 50:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text; repeated keys can be numbered via printElemNumber.
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % self.DebugFormatInt64(self.failover_ms_))
    if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_))
    if self.has_allow_deferred_: res+=prefix+("allow_deferred: %s\n" % self.DebugFormatBool(self.allow_deferred_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Class-body helper (no self): densify a sparse {tag: value} dict
    # into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field numbers and tag lookup tables for debug/skip machinery.
  kheader = 6
  kkey = 1
  ktransaction = 2
  kfailover_ms = 3
  kstrong = 4
  kallow_deferred = 5
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "key",
    2: "transaction",
    3: "failover_ms",
    4: "strong",
    5: "allow_deferred",
    6: "header",
  }, 6)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.NUMERIC,
    6: ProtocolBuffer.Encoder.STRING,
  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetRequest'
class GetResponse_Entity(ProtocolBuffer.ProtocolMessage):
  """Nested Entity group of apphosting_datastore_v3.GetResponse.

  Fields:
    entity  (field 2, optional EntityProto)
    key     (field 4, optional Reference)
    version (field 3, optional varint)

  NOTE(review): appears auto-generated; as a group, decoding terminates
  on the parent's END_GROUP tag 12 (field 1) in TryMerge below.
  """
  has_entity_ = 0
  entity_ = None
  has_key_ = 0
  key_ = None
  has_version_ = 0
  version_ = 0
# Per-instance lock guards lazy construction of the optional
# sub-messages; optionally decode from a serialized byte string.
def __init__(self, contents=None):
  self.lazy_init_lock_ = thread.allocate_lock()
  if contents is not None: self.MergeFromString(contents)
# entity (field 2): lazily constructed under lazy_init_lock_ so a plain
# read never races the first allocation.  The standard generated
# accessor quartet follows: getter, mutable_*, clear_*, has_*.
def entity(self):
  if self.entity_ is None:
    self.lazy_init_lock_.acquire()
    try:
      if self.entity_ is None: self.entity_ = EntityProto()
    finally:
      self.lazy_init_lock_.release()
  return self.entity_
def mutable_entity(self): self.has_entity_ = 1; return self.entity()
def clear_entity(self):
  # Keep the allocated sub-message but reset its contents and presence.
  if self.has_entity_:
    self.has_entity_ = 0;
    if self.entity_ is not None: self.entity_.Clear()
def has_entity(self): return self.has_entity_
# key (field 4): same lazy-construction pattern as entity above.
def key(self):
  if self.key_ is None:
    self.lazy_init_lock_.acquire()
    try:
      if self.key_ is None: self.key_ = Reference()
    finally:
      self.lazy_init_lock_.release()
  return self.key_
def mutable_key(self): self.has_key_ = 1; return self.key()
def clear_key(self):
  if self.has_key_:
    self.has_key_ = 0;
    if self.key_ is not None: self.key_.Clear()
def has_key(self): return self.has_key_
# version (field 3): plain scalar accessors with a presence flag.
def version(self): return self.version_
def set_version(self, x):
  self.has_version_ = 1
  self.version_ = x
def clear_version(self):
  if self.has_version_:
    self.has_version_ = 0
    self.version_ = 0
def has_version(self): return self.has_version_
# Copy every field that is set on x; sub-messages merge recursively.
def MergeFrom(self, x):
  assert x is not self
  if (x.has_entity()): self.mutable_entity().MergeFrom(x.entity())
  if (x.has_key()): self.mutable_key().MergeFrom(x.key())
  if (x.has_version()): self.set_version(x.version())
# Presence-and-value equality; returns 1/0 in generated-protobuf style.
def Equals(self, x):
  if x is self: return 1
  if self.has_entity_ != x.has_entity_: return 0
  if self.has_entity_ and self.entity_ != x.entity_: return 0
  if self.has_key_ != x.has_key_: return 0
  if self.has_key_ and self.key_ != x.key_: return 0
  if self.has_version_ != x.has_version_: return 0
  if self.has_version_ and self.version_ != x.version_: return 0
  return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_entity_ and not self.entity_.IsInitialized(debug_strs)): initialized = 0
if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSize())
if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize())
if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSizePartial())
if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial())
if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
return n
def Clear(self):
self.clear_entity()
self.clear_key()
self.clear_version()
def OutputUnchecked(self, out):
if (self.has_entity_):
out.putVarInt32(18)
out.putVarInt32(self.entity_.ByteSize())
self.entity_.OutputUnchecked(out)
if (self.has_version_):
out.putVarInt32(24)
out.putVarInt64(self.version_)
if (self.has_key_):
out.putVarInt32(34)
out.putVarInt32(self.key_.ByteSize())
self.key_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_entity_):
out.putVarInt32(18)
out.putVarInt32(self.entity_.ByteSizePartial())
self.entity_.OutputPartial(out)
if (self.has_version_):
out.putVarInt32(24)
out.putVarInt64(self.version_)
if (self.has_key_):
out.putVarInt32(34)
out.putVarInt32(self.key_.ByteSizePartial())
self.key_.OutputPartial(out)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_entity().TryMerge(tmp)
continue
if tt == 24:
self.set_version(d.getVarInt64())
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_key().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_entity_:
res+=prefix+"entity <\n"
res+=self.entity_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_key_:
res+=prefix+"key <\n"
res+=self.key_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
return res
class GetResponse(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message: response to a datastore v3 Get RPC.

  Fields (tag numbers per the k* constants below): repeated Entity group
  (tag 1, one slot per requested key), repeated deferred keys (tag 5, keys
  whose lookup must be retried), and the in_order bool (tag 6, default
  True) indicating results are in request order.
  """
  # in_order defaults to 1 (True) when unset.
  has_in_order_ = 0
  in_order_ = 1
  def __init__(self, contents=None):
    self.entity_ = []
    self.deferred_ = []
    if contents is not None: self.MergeFromString(contents)
  def entity_size(self): return len(self.entity_)
  def entity_list(self): return self.entity_
  def entity(self, i):
    return self.entity_[i]
  def mutable_entity(self, i):
    return self.entity_[i]
  def add_entity(self):
    # Appends and returns a fresh empty Entity group slot.
    x = GetResponse_Entity()
    self.entity_.append(x)
    return x
  def clear_entity(self):
    self.entity_ = []
  def deferred_size(self): return len(self.deferred_)
  def deferred_list(self): return self.deferred_
  def deferred(self, i):
    return self.deferred_[i]
  def mutable_deferred(self, i):
    return self.deferred_[i]
  def add_deferred(self):
    x = Reference()
    self.deferred_.append(x)
    return x
  def clear_deferred(self):
    self.deferred_ = []
  def in_order(self): return self.in_order_
  def set_in_order(self, x):
    self.has_in_order_ = 1
    self.in_order_ = x
  def clear_in_order(self):
    # Reset restores the default value of 1, not 0.
    if self.has_in_order_:
      self.has_in_order_ = 0
      self.in_order_ = 1
  def has_in_order(self): return self.has_in_order_
  def MergeFrom(self, x):
    # Repeated fields are appended; scalar fields are overwritten when set.
    assert x is not self
    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
    for i in xrange(x.deferred_size()): self.add_deferred().CopyFrom(x.deferred(i))
    if (x.has_in_order()): self.set_in_order(x.in_order())
  def Equals(self, x):
    if x is self: return 1
    if len(self.entity_) != len(x.entity_): return 0
    for e1, e2 in zip(self.entity_, x.entity_):
      if e1 != e2: return 0
    if len(self.deferred_) != len(x.deferred_): return 0
    for e1, e2 in zip(self.deferred_, x.deferred_):
      if e1 != e2: return 0
    if self.has_in_order_ != x.has_in_order_: return 0
    if self.has_in_order_ and self.in_order_ != x.in_order_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.entity_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.deferred_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    n = 0
    # Each Entity group costs 2 tag bytes (STARTGROUP + ENDGROUP).
    n += 2 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSize()
    n += 1 * len(self.deferred_)
    for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSize())
    if (self.has_in_order_): n += 2
    return n
  def ByteSizePartial(self):
    # Like ByteSize but tolerates uninitialized sub-messages.
    n = 0
    n += 2 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSizePartial()
    n += 1 * len(self.deferred_)
    for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSizePartial())
    if (self.has_in_order_): n += 2
    return n
  def Clear(self):
    self.clear_entity()
    self.clear_deferred()
    self.clear_in_order()
  def OutputUnchecked(self, out):
    # Entity groups are bracketed by tags 11 (STARTGROUP) and 12 (ENDGROUP);
    # 42 = field 5 (deferred, length-delimited), 48 = field 6 (in_order, varint).
    for i in xrange(len(self.entity_)):
      out.putVarInt32(11)
      self.entity_[i].OutputUnchecked(out)
      out.putVarInt32(12)
    for i in xrange(len(self.deferred_)):
      out.putVarInt32(42)
      out.putVarInt32(self.deferred_[i].ByteSize())
      self.deferred_[i].OutputUnchecked(out)
    if (self.has_in_order_):
      out.putVarInt32(48)
      out.putBoolean(self.in_order_)
  def OutputPartial(self, out):
    # Same wire layout as OutputUnchecked, using partial sizes.
    for i in xrange(len(self.entity_)):
      out.putVarInt32(11)
      self.entity_[i].OutputPartial(out)
      out.putVarInt32(12)
    for i in xrange(len(self.deferred_)):
      out.putVarInt32(42)
      out.putVarInt32(self.deferred_[i].ByteSizePartial())
      self.deferred_[i].OutputPartial(out)
    if (self.has_in_order_):
      out.putVarInt32(48)
      out.putBoolean(self.in_order_)
  def TryMerge(self, d):
    # Decode until the buffer is exhausted; the Entity group's own TryMerge
    # consumes everything up to and including its ENDGROUP tag.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_entity().TryMerge(d)
        continue
      if tt == 42:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_deferred().TryMerge(tmp)
        continue
      if tt == 48:
        self.set_in_order(d.getBoolean())
        continue
      # Tag 0 is invalid; unknown fields are skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering; element indices appear when printElemNumber is set.
    res=""
    cnt=0
    for e in self.entity_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Entity%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    cnt=0
    for e in self.deferred_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("deferred%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_in_order_: res+=prefix+("in_order: %s\n" % self.DebugFormatBool(self.in_order_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expands a sparse {tag: value} dict into a dense tuple indexed by tag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field tag numbers (Entity* tags belong to the nested group's fields).
  kEntityGroup = 1
  kEntityentity = 2
  kEntitykey = 4
  kEntityversion = 3
  kdeferred = 5
  kin_order = 6
  # Tag -> field-name table used for debug text formatting.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "Entity",
    2: "entity",
    3: "version",
    4: "key",
    5: "deferred",
    6: "in_order",
  }, 6)
  # Tag -> wire-type table used by the generic encoder/decoder.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STARTGROUP,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.STRING,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.NUMERIC,
  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetResponse'
class PutRequest(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message: request for the datastore v3 Put RPC.

  Carries the entities to write (tag 1), an optional transaction (tag 2),
  composite indexes (tag 3), the trusted/force/mark_changes flags
  (tags 4/7/8), snapshots (tag 9), the auto-id allocation policy (tag 10),
  and an internal request header (tag 11).
  """
  # AutoIdPolicy enum values.
  CURRENT      =    0
  SEQUENTIAL   =    1
  _AutoIdPolicy_NAMES = {
    0: "CURRENT",
    1: "SEQUENTIAL",
  }
  # Maps an AutoIdPolicy value to its name, or "" if unknown.
  def AutoIdPolicy_Name(cls, x): return cls._AutoIdPolicy_NAMES.get(x, "")
  AutoIdPolicy_Name = classmethod(AutoIdPolicy_Name)
  # Presence flags and default values for the optional scalar/message fields.
  has_header_ = 0
  header_ = None
  has_transaction_ = 0
  transaction_ = None
  has_trusted_ = 0
  trusted_ = 0
  has_force_ = 0
  force_ = 0
  has_mark_changes_ = 0
  mark_changes_ = 0
  has_auto_id_policy_ = 0
  auto_id_policy_ = 0
  def __init__(self, contents=None):
    self.entity_ = []
    self.composite_index_ = []
    self.snapshot_ = []
    # Lock guarding lazy construction of the header_/transaction_ sub-messages.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def header(self):
    # Lazily builds an empty InternalHeader on first access (double-checked
    # under lazy_init_lock_); does NOT set the presence flag.
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_
  def mutable_header(self): self.has_header_ = 1; return self.header()
  def clear_header(self):
    # Clears in place rather than dropping the reference (avoids races with
    # the lazy-init path above).
    if self.has_header_:
      self.has_header_ = 0;
      if self.header_ is not None: self.header_.Clear()
  def has_header(self): return self.has_header_
  def entity_size(self): return len(self.entity_)
  def entity_list(self): return self.entity_
  def entity(self, i):
    return self.entity_[i]
  def mutable_entity(self, i):
    return self.entity_[i]
  def add_entity(self):
    # Appends and returns a fresh empty EntityProto.
    x = EntityProto()
    self.entity_.append(x)
    return x
  def clear_entity(self):
    self.entity_ = []
  def transaction(self):
    # Lazily builds an empty Transaction on first access, as with header().
    if self.transaction_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.transaction_ is None: self.transaction_ = Transaction()
      finally:
        self.lazy_init_lock_.release()
    return self.transaction_
  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
  def clear_transaction(self):
    if self.has_transaction_:
      self.has_transaction_ = 0;
      if self.transaction_ is not None: self.transaction_.Clear()
  def has_transaction(self): return self.has_transaction_
  def composite_index_size(self): return len(self.composite_index_)
  def composite_index_list(self): return self.composite_index_
  def composite_index(self, i):
    return self.composite_index_[i]
  def mutable_composite_index(self, i):
    return self.composite_index_[i]
  def add_composite_index(self):
    x = CompositeIndex()
    self.composite_index_.append(x)
    return x
  def clear_composite_index(self):
    self.composite_index_ = []
  def trusted(self): return self.trusted_
  def set_trusted(self, x):
    self.has_trusted_ = 1
    self.trusted_ = x
  def clear_trusted(self):
    if self.has_trusted_:
      self.has_trusted_ = 0
      self.trusted_ = 0
  def has_trusted(self): return self.has_trusted_
  def force(self): return self.force_
  def set_force(self, x):
    self.has_force_ = 1
    self.force_ = x
  def clear_force(self):
    if self.has_force_:
      self.has_force_ = 0
      self.force_ = 0
  def has_force(self): return self.has_force_
  def mark_changes(self): return self.mark_changes_
  def set_mark_changes(self, x):
    self.has_mark_changes_ = 1
    self.mark_changes_ = x
  def clear_mark_changes(self):
    if self.has_mark_changes_:
      self.has_mark_changes_ = 0
      self.mark_changes_ = 0
  def has_mark_changes(self): return self.has_mark_changes_
  def snapshot_size(self): return len(self.snapshot_)
  def snapshot_list(self): return self.snapshot_
  def snapshot(self, i):
    return self.snapshot_[i]
  def mutable_snapshot(self, i):
    return self.snapshot_[i]
  def add_snapshot(self):
    x = Snapshot()
    self.snapshot_.append(x)
    return x
  def clear_snapshot(self):
    self.snapshot_ = []
  def auto_id_policy(self): return self.auto_id_policy_
  def set_auto_id_policy(self, x):
    self.has_auto_id_policy_ = 1
    self.auto_id_policy_ = x
  def clear_auto_id_policy(self):
    if self.has_auto_id_policy_:
      self.has_auto_id_policy_ = 0
      self.auto_id_policy_ = 0
  def has_auto_id_policy(self): return self.has_auto_id_policy_
  def MergeFrom(self, x):
    # Repeated fields are appended; scalar/message fields overwrite when set.
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
    if (x.has_trusted()): self.set_trusted(x.trusted())
    if (x.has_force()): self.set_force(x.force())
    if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
    for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
    if (x.has_auto_id_policy()): self.set_auto_id_policy(x.auto_id_policy())
  def Equals(self, x):
    # Structural equality: same presence flags and same field values.
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if len(self.entity_) != len(x.entity_): return 0
    for e1, e2 in zip(self.entity_, x.entity_):
      if e1 != e2: return 0
    if self.has_transaction_ != x.has_transaction_: return 0
    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
    if len(self.composite_index_) != len(x.composite_index_): return 0
    for e1, e2 in zip(self.composite_index_, x.composite_index_):
      if e1 != e2: return 0
    if self.has_trusted_ != x.has_trusted_: return 0
    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
    if self.has_force_ != x.has_force_: return 0
    if self.has_force_ and self.force_ != x.force_: return 0
    if self.has_mark_changes_ != x.has_mark_changes_: return 0
    if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
    if len(self.snapshot_) != len(x.snapshot_): return 0
    for e1, e2 in zip(self.snapshot_, x.snapshot_):
      if e1 != e2: return 0
    if self.has_auto_id_policy_ != x.has_auto_id_policy_: return 0
    if self.has_auto_id_policy_ and self.auto_id_policy_ != x.auto_id_policy_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Recursively checks every set/contained sub-message.
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    for p in self.entity_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
    for p in self.composite_index_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.snapshot_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # Serialized size in bytes; 1-byte tags for fields 1-15, 2 for bools
    # whose tag is above 3 (tag byte + value byte).
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    n += 1 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSize())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
    if (self.has_trusted_): n += 2
    if (self.has_force_): n += 2
    if (self.has_mark_changes_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
    if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
    return n
  def ByteSizePartial(self):
    # Like ByteSize but tolerates uninitialized sub-messages.
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    n += 1 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSizePartial())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
    if (self.has_trusted_): n += 2
    if (self.has_force_): n += 2
    if (self.has_mark_changes_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
    if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
    return n
  def Clear(self):
    self.clear_header()
    self.clear_entity()
    self.clear_transaction()
    self.clear_composite_index()
    self.clear_trusted()
    self.clear_force()
    self.clear_mark_changes()
    self.clear_snapshot()
    self.clear_auto_id_policy()
  def OutputUnchecked(self, out):
    # Serialize set fields. Tag bytes: 10=entity(1), 18=transaction(2),
    # 26=composite_index(3), 32=trusted(4), 56=force(7), 64=mark_changes(8),
    # 74=snapshot(9), 80=auto_id_policy(10), 90=header(11).
    for i in xrange(len(self.entity_)):
      out.putVarInt32(10)
      out.putVarInt32(self.entity_[i].ByteSize())
      self.entity_[i].OutputUnchecked(out)
    if (self.has_transaction_):
      out.putVarInt32(18)
      out.putVarInt32(self.transaction_.ByteSize())
      self.transaction_.OutputUnchecked(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(26)
      out.putVarInt32(self.composite_index_[i].ByteSize())
      self.composite_index_[i].OutputUnchecked(out)
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)
    if (self.has_force_):
      out.putVarInt32(56)
      out.putBoolean(self.force_)
    if (self.has_mark_changes_):
      out.putVarInt32(64)
      out.putBoolean(self.mark_changes_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSize())
      self.snapshot_[i].OutputUnchecked(out)
    if (self.has_auto_id_policy_):
      out.putVarInt32(80)
      out.putVarInt32(self.auto_id_policy_)
    if (self.has_header_):
      out.putVarInt32(90)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)
  def OutputPartial(self, out):
    # Same wire layout as OutputUnchecked, using partial sizes.
    for i in xrange(len(self.entity_)):
      out.putVarInt32(10)
      out.putVarInt32(self.entity_[i].ByteSizePartial())
      self.entity_[i].OutputPartial(out)
    if (self.has_transaction_):
      out.putVarInt32(18)
      out.putVarInt32(self.transaction_.ByteSizePartial())
      self.transaction_.OutputPartial(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(26)
      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
      self.composite_index_[i].OutputPartial(out)
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)
    if (self.has_force_):
      out.putVarInt32(56)
      out.putBoolean(self.force_)
    if (self.has_mark_changes_):
      out.putVarInt32(64)
      out.putBoolean(self.mark_changes_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSizePartial())
      self.snapshot_[i].OutputPartial(out)
    if (self.has_auto_id_policy_):
      out.putVarInt32(80)
      out.putVarInt32(self.auto_id_policy_)
    if (self.has_header_):
      out.putVarInt32(90)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)
  def TryMerge(self, d):
    # Decode fields from d until the buffer is exhausted; unknown fields
    # are skipped, tag 0 is a decode error.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_entity().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_transaction().TryMerge(tmp)
        continue
      if tt == 26:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_composite_index().TryMerge(tmp)
        continue
      if tt == 32:
        self.set_trusted(d.getBoolean())
        continue
      if tt == 56:
        self.set_force(d.getBoolean())
        continue
      if tt == 64:
        self.set_mark_changes(d.getBoolean())
        continue
      if tt == 74:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_snapshot().TryMerge(tmp)
        continue
      if tt == 80:
        self.set_auto_id_policy(d.getVarInt32())
        continue
      if tt == 90:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering; element indices appear when printElemNumber is set.
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.entity_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("entity%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.composite_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("composite_index%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
    if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
    if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
    cnt=0
    for e in self.snapshot_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("snapshot%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_auto_id_policy_: res+=prefix+("auto_id_policy: %s\n" % self.DebugFormatInt32(self.auto_id_policy_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expands a sparse {tag: value} dict into a dense tuple indexed by tag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field tag numbers.
  kheader = 11
  kentity = 1
  ktransaction = 2
  kcomposite_index = 3
  ktrusted = 4
  kforce = 7
  kmark_changes = 8
  ksnapshot = 9
  kauto_id_policy = 10
  # Tag -> field-name table used for debug text formatting.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "entity",
    2: "transaction",
    3: "composite_index",
    4: "trusted",
    7: "force",
    8: "mark_changes",
    9: "snapshot",
    10: "auto_id_policy",
    11: "header",
  }, 11)
  # Tag -> wire-type table used by the generic encoder/decoder.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    7: ProtocolBuffer.Encoder.NUMERIC,
    8: ProtocolBuffer.Encoder.NUMERIC,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.NUMERIC,
    11: ProtocolBuffer.Encoder.STRING,
  }, 11, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutRequest'
class PutResponse(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message: response to the datastore v3 Put RPC.

  Carries the written entities' keys (tag 1), the operation cost (tag 2),
  and the resulting entity versions (repeated int64, tag 3).
  """
  has_cost_ = 0
  cost_ = None
  def __init__(self, contents=None):
    self.key_ = []
    self.version_ = []
    # Lock guarding lazy construction of the cost_ sub-message.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_
  def key(self, i):
    return self.key_[i]
  def mutable_key(self, i):
    return self.key_[i]
  def add_key(self):
    # Appends and returns a fresh empty Reference.
    x = Reference()
    self.key_.append(x)
    return x
  def clear_key(self):
    self.key_ = []
  def cost(self):
    # Lazily builds an empty Cost on first access (double-checked under
    # lazy_init_lock_); does NOT set the presence flag.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_
  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
  def clear_cost(self):
    # Clears in place rather than dropping the reference (avoids races with
    # the lazy-init path above).
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()
  def has_cost(self): return self.has_cost_
  def version_size(self): return len(self.version_)
  def version_list(self): return self.version_
  def version(self, i):
    return self.version_[i]
  def set_version(self, i, x):
    self.version_[i] = x
  def add_version(self, x):
    self.version_.append(x)
  def clear_version(self):
    self.version_ = []
  def MergeFrom(self, x):
    # Repeated fields are appended; cost overwrites when set.
    assert x is not self
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
    for i in xrange(x.version_size()): self.add_version(x.version(i))
  def Equals(self, x):
    # Structural equality: same presence flags and same field values.
    if x is self: return 1
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    if len(self.version_) != len(x.version_): return 0
    for e1, e2 in zip(self.version_, x.version_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    # Serialized size in bytes; 1 byte per field tag plus payload length.
    n = 0
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
    return n
  def ByteSizePartial(self):
    # Like ByteSize but tolerates uninitialized sub-messages.
    n = 0
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
    return n
  def Clear(self):
    self.clear_key()
    self.clear_cost()
    self.clear_version()
  def OutputUnchecked(self, out):
    # Serialize set fields. Tag bytes: 10 = field 1 (key, length-delimited),
    # 18 = field 2 (cost, length-delimited), 24 = field 3 (version, varint).
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)
    if (self.has_cost_):
      out.putVarInt32(18)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(24)
      out.putVarInt64(self.version_[i])
  def OutputPartial(self, out):
    # Same wire layout as OutputUnchecked, using partial sizes.
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSizePartial())
      self.key_[i].OutputPartial(out)
    if (self.has_cost_):
      out.putVarInt32(18)
      out.putVarInt32(self.cost_.ByteSizePartial())
      self.cost_.OutputPartial(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(24)
      out.putVarInt64(self.version_[i])
  def TryMerge(self, d):
    # Decode until the buffer is exhausted; unknown fields are skipped,
    # tag 0 is a decode error.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if tt == 24:
        self.add_version(d.getVarInt64())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering; element indices appear when printElemNumber is set.
    res=""
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.version_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Expands a sparse {tag: value} dict into a dense tuple indexed by tag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field tag numbers.
  kkey = 1
  kcost = 2
  kversion = 3
  # Tag -> field-name table used for debug text formatting.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "key",
    2: "cost",
    3: "version",
  }, 3)
  # Tag -> wire-type table used by the generic encoder/decoder.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutResponse'
class TouchRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_force_ = 0
force_ = 0
def __init__(self, contents=None):
self.key_ = []
self.composite_index_ = []
self.snapshot_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def key_size(self): return len(self.key_)
def key_list(self): return self.key_
def key(self, i):
return self.key_[i]
def mutable_key(self, i):
return self.key_[i]
def add_key(self):
x = Reference()
self.key_.append(x)
return x
def clear_key(self):
self.key_ = []
def composite_index_size(self): return len(self.composite_index_)
def composite_index_list(self): return self.composite_index_
def composite_index(self, i):
return self.composite_index_[i]
def mutable_composite_index(self, i):
return self.composite_index_[i]
def add_composite_index(self):
x = CompositeIndex()
self.composite_index_.append(x)
return x
def clear_composite_index(self):
self.composite_index_ = []
def force(self): return self.force_
def set_force(self, x):
self.has_force_ = 1
self.force_ = x
def clear_force(self):
if self.has_force_:
self.has_force_ = 0
self.force_ = 0
def has_force(self): return self.has_force_
def snapshot_size(self): return len(self.snapshot_)
def snapshot_list(self): return self.snapshot_
def snapshot(self, i):
return self.snapshot_[i]
def mutable_snapshot(self, i):
return self.snapshot_[i]
def add_snapshot(self):
x = Snapshot()
self.snapshot_.append(x)
return x
def clear_snapshot(self):
self.snapshot_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
if (x.has_force()): self.set_force(x.force())
for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if len(self.key_) != len(x.key_): return 0
for e1, e2 in zip(self.key_, x.key_):
if e1 != e2: return 0
if len(self.composite_index_) != len(x.composite_index_): return 0
for e1, e2 in zip(self.composite_index_, x.composite_index_):
if e1 != e2: return 0
if self.has_force_ != x.has_force_: return 0
if self.has_force_ and self.force_ != x.force_: return 0
if len(self.snapshot_) != len(x.snapshot_): return 0
for e1, e2 in zip(self.snapshot_, x.snapshot_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
for p in self.key_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.composite_index_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.snapshot_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
if (self.has_force_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
if (self.has_force_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_header()
self.clear_key()
self.clear_composite_index()
self.clear_force()
self.clear_snapshot()
def OutputUnchecked(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSize())
self.key_[i].OutputUnchecked(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(18)
out.putVarInt32(self.composite_index_[i].ByteSize())
self.composite_index_[i].OutputUnchecked(out)
if (self.has_force_):
out.putVarInt32(24)
out.putBoolean(self.force_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSize())
self.snapshot_[i].OutputUnchecked(out)
if (self.has_header_):
out.putVarInt32(82)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSizePartial())
self.key_[i].OutputPartial(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(18)
out.putVarInt32(self.composite_index_[i].ByteSizePartial())
self.composite_index_[i].OutputPartial(out)
if (self.has_force_):
out.putVarInt32(24)
out.putBoolean(self.force_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSizePartial())
self.snapshot_[i].OutputPartial(out)
if (self.has_header_):
out.putVarInt32(82)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
  """Parse fields from decoder `d`, merging into this message.

  Length-delimited submessages are parsed through a bounded sub-decoder over
  d's buffer, then skipped in `d` itself to keep its offset consistent.
  Unknown tags are skipped; tag 0 indicates a malformed stream and raises.
  """
  while d.avail() > 0:
    tt = d.getVarInt32()
    if tt == 10:
      length = d.getVarInt32()
      tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
      d.skip(length)
      self.add_key().TryMerge(tmp)
      continue
    if tt == 18:
      length = d.getVarInt32()
      tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
      d.skip(length)
      self.add_composite_index().TryMerge(tmp)
      continue
    if tt == 24:
      self.set_force(d.getBoolean())
      continue
    if tt == 74:
      length = d.getVarInt32()
      tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
      d.skip(length)
      self.add_snapshot().TryMerge(tmp)
      continue
    if tt == 82:
      length = d.getVarInt32()
      tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
      d.skip(length)
      self.mutable_header().TryMerge(tmp)
      continue
    if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
    d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
  """Render a human-readable text form of the message.

  `prefix` is the indentation prepended to each line; nested messages are
  rendered with an extra level of indentation. When `printElemNumber` is
  truthy, repeated fields are annotated with their element index.
  """
  res=""
  if self.has_header_:
    res+=prefix+"header <\n"
    res+=self.header_.__str__(prefix + " ", printElemNumber)
    res+=prefix+">\n"
  cnt=0
  for e in self.key_:
    elm=""
    if printElemNumber: elm="(%d)" % cnt
    res+=prefix+("key%s <\n" % elm)
    res+=e.__str__(prefix + " ", printElemNumber)
    res+=prefix+">\n"
    cnt+=1
  cnt=0
  for e in self.composite_index_:
    elm=""
    if printElemNumber: elm="(%d)" % cnt
    res+=prefix+("composite_index%s <\n" % elm)
    res+=e.__str__(prefix + " ", printElemNumber)
    res+=prefix+">\n"
    cnt+=1
  if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
  cnt=0
  for e in self.snapshot_:
    elm=""
    if printElemNumber: elm="(%d)" % cnt
    res+=prefix+("snapshot%s <\n" % elm)
    res+=e.__str__(prefix + " ", printElemNumber)
    res+=prefix+">\n"
    cnt+=1
  return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
  """Expand a sparse {field_number: value} dict into a dense tuple of
  length maxtag+1, filling gaps with `default`."""
  return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

# Proto field numbers (not wire tags) for this message's fields.
kheader = 10
kkey = 1
kcomposite_index = 2
kforce = 3
ksnapshot = 9

# Dense field-number -> field-name table used for text formatting.
_TEXT = _BuildTagLookupTable({
  0: "ErrorCode",
  1: "key",
  2: "composite_index",
  3: "force",
  9: "snapshot",
  10: "header",
}, 10)

# Dense field-number -> encoder-type table used by the generic codec.
_TYPES = _BuildTagLookupTable({
  0: ProtocolBuffer.Encoder.NUMERIC,
  1: ProtocolBuffer.Encoder.STRING,
  2: ProtocolBuffer.Encoder.STRING,
  3: ProtocolBuffer.Encoder.NUMERIC,
  9: ProtocolBuffer.Encoder.STRING,
  10: ProtocolBuffer.Encoder.STRING,
}, 10, ProtocolBuffer.Encoder.MAX_TYPE)

_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchRequest'
class TouchResponse(ProtocolBuffer.ProtocolMessage):
  """Response message for a datastore Touch RPC.

  Single optional field: cost (field 1, Cost submessage), allocated lazily
  under lazy_init_lock_ so an untouched response creates no Cost object.
  Generated protocol-buffer code; do not hand-edit the wire logic.
  """
  has_cost_ = 0
  cost_ = None
  def __init__(self, contents=None):
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def cost(self):
    # Lazy, double-checked creation of the optional Cost submessage.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_
  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
  def clear_cost(self):
    # Keep the allocated instance (if any); just clear its contents.
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()
  def has_cost(self): return self.has_cost_
  def MergeFrom(self, x):
    """Merge all set fields of another TouchResponse `x` into self."""
    assert x is not self
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff all required fields, recursively, are set."""
    initialized = 1
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    """Encoded size in bytes, assuming the message is fully initialized."""
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    return n
  def ByteSizePartial(self):
    """Encoded size in bytes, tolerating unset required subfields."""
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
    return n
  def Clear(self):
    self.clear_cost()
  def OutputUnchecked(self, out):
    """Serialize to `out` without initialization checks (cost wire tag 10)."""
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)
  def OutputPartial(self, out):
    """Like OutputUnchecked, but uses partial sizes for nested messages."""
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSizePartial())
      self.cost_.OutputPartial(out)
  def TryMerge(self, d):
    """Parse fields from decoder `d`; unknown tags skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    """Expand a sparse {field_number: value} dict into a dense tuple."""
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Proto field numbers.
  kcost = 1
  # Dense field-number -> name / encoder-type tables for the generic codec.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "cost",
  }, 1)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchResponse'
class DeleteRequest(ProtocolBuffer.ProtocolMessage):
  """Request message for a datastore Delete RPC.

  Fields (proto numbers): key=6 (repeated Reference), transaction=5,
  trusted=4, force=7, mark_changes=8, snapshot=9 (repeated Snapshot),
  header=10 (InternalHeader), composite_index=11 (repeated CompositeIndex).
  Optional submessage singletons are allocated lazily under
  lazy_init_lock_. Generated protocol-buffer code; do not hand-edit the
  wire logic.
  """
  has_header_ = 0
  header_ = None
  has_transaction_ = 0
  transaction_ = None
  has_trusted_ = 0
  trusted_ = 0
  has_force_ = 0
  force_ = 0
  has_mark_changes_ = 0
  mark_changes_ = 0
  def __init__(self, contents=None):
    self.key_ = []
    self.composite_index_ = []
    self.snapshot_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  # --- header (optional InternalHeader, field 10) ---
  def header(self):
    # Lazy, double-checked creation under the lock.
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_
  def mutable_header(self): self.has_header_ = 1; return self.header()
  def clear_header(self):
    if self.has_header_:
      self.has_header_ = 0;
      if self.header_ is not None: self.header_.Clear()
  def has_header(self): return self.has_header_
  # --- key (repeated Reference, field 6) ---
  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_
  def key(self, i):
    return self.key_[i]
  def mutable_key(self, i):
    return self.key_[i]
  def add_key(self):
    x = Reference()
    self.key_.append(x)
    return x
  def clear_key(self):
    self.key_ = []
  # --- transaction (optional Transaction, field 5) ---
  def transaction(self):
    if self.transaction_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.transaction_ is None: self.transaction_ = Transaction()
      finally:
        self.lazy_init_lock_.release()
    return self.transaction_
  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
  def clear_transaction(self):
    if self.has_transaction_:
      self.has_transaction_ = 0;
      if self.transaction_ is not None: self.transaction_.Clear()
  def has_transaction(self): return self.has_transaction_
  # --- composite_index (repeated CompositeIndex, field 11) ---
  def composite_index_size(self): return len(self.composite_index_)
  def composite_index_list(self): return self.composite_index_
  def composite_index(self, i):
    return self.composite_index_[i]
  def mutable_composite_index(self, i):
    return self.composite_index_[i]
  def add_composite_index(self):
    x = CompositeIndex()
    self.composite_index_.append(x)
    return x
  def clear_composite_index(self):
    self.composite_index_ = []
  # --- trusted (optional bool, field 4) ---
  def trusted(self): return self.trusted_
  def set_trusted(self, x):
    self.has_trusted_ = 1
    self.trusted_ = x
  def clear_trusted(self):
    if self.has_trusted_:
      self.has_trusted_ = 0
      self.trusted_ = 0
  def has_trusted(self): return self.has_trusted_
  # --- force (optional bool, field 7) ---
  def force(self): return self.force_
  def set_force(self, x):
    self.has_force_ = 1
    self.force_ = x
  def clear_force(self):
    if self.has_force_:
      self.has_force_ = 0
      self.force_ = 0
  def has_force(self): return self.has_force_
  # --- mark_changes (optional bool, field 8) ---
  def mark_changes(self): return self.mark_changes_
  def set_mark_changes(self, x):
    self.has_mark_changes_ = 1
    self.mark_changes_ = x
  def clear_mark_changes(self):
    if self.has_mark_changes_:
      self.has_mark_changes_ = 0
      self.mark_changes_ = 0
  def has_mark_changes(self): return self.has_mark_changes_
  # --- snapshot (repeated Snapshot, field 9) ---
  def snapshot_size(self): return len(self.snapshot_)
  def snapshot_list(self): return self.snapshot_
  def snapshot(self, i):
    return self.snapshot_[i]
  def mutable_snapshot(self, i):
    return self.snapshot_[i]
  def add_snapshot(self):
    x = Snapshot()
    self.snapshot_.append(x)
    return x
  def clear_snapshot(self):
    self.snapshot_ = []
  def MergeFrom(self, x):
    """Merge all set fields of another DeleteRequest `x` into self."""
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
    if (x.has_trusted()): self.set_trusted(x.trusted())
    if (x.has_force()): self.set_force(x.force())
    if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
    for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if self.has_transaction_ != x.has_transaction_: return 0
    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
    if len(self.composite_index_) != len(x.composite_index_): return 0
    for e1, e2 in zip(self.composite_index_, x.composite_index_):
      if e1 != e2: return 0
    if self.has_trusted_ != x.has_trusted_: return 0
    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
    if self.has_force_ != x.has_force_: return 0
    if self.has_force_ and self.force_ != x.force_: return 0
    if self.has_mark_changes_ != x.has_mark_changes_: return 0
    if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
    if len(self.snapshot_) != len(x.snapshot_): return 0
    for e1, e2 in zip(self.snapshot_, x.snapshot_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff all set submessages are recursively initialized."""
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
    for p in self.composite_index_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.snapshot_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    """Encoded size in bytes; booleans cost 2 bytes (tag + value)."""
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
    if (self.has_trusted_): n += 2
    if (self.has_force_): n += 2
    if (self.has_mark_changes_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
    return n
  def ByteSizePartial(self):
    """Encoded size in bytes, tolerating unset required subfields."""
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
    if (self.has_trusted_): n += 2
    if (self.has_force_): n += 2
    if (self.has_mark_changes_): n += 2
    n += 1 * len(self.snapshot_)
    for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
    return n
  def Clear(self):
    self.clear_header()
    self.clear_key()
    self.clear_transaction()
    self.clear_composite_index()
    self.clear_trusted()
    self.clear_force()
    self.clear_mark_changes()
    self.clear_snapshot()
  def OutputUnchecked(self, out):
    """Serialize to `out` without initialization checks.

    Wire tags: trusted=32, transaction=42, key=50, force=56,
    mark_changes=64, snapshot=74, header=82, composite_index=90.
    Emission order defines the byte stream and must not change.
    """
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)
    if (self.has_transaction_):
      out.putVarInt32(42)
      out.putVarInt32(self.transaction_.ByteSize())
      self.transaction_.OutputUnchecked(out)
    for i in xrange(len(self.key_)):
      out.putVarInt32(50)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)
    if (self.has_force_):
      out.putVarInt32(56)
      out.putBoolean(self.force_)
    if (self.has_mark_changes_):
      out.putVarInt32(64)
      out.putBoolean(self.mark_changes_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSize())
      self.snapshot_[i].OutputUnchecked(out)
    if (self.has_header_):
      out.putVarInt32(82)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(90)
      out.putVarInt32(self.composite_index_[i].ByteSize())
      self.composite_index_[i].OutputUnchecked(out)
  def OutputPartial(self, out):
    """Like OutputUnchecked, but uses partial sizes for nested messages."""
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)
    if (self.has_transaction_):
      out.putVarInt32(42)
      out.putVarInt32(self.transaction_.ByteSizePartial())
      self.transaction_.OutputPartial(out)
    for i in xrange(len(self.key_)):
      out.putVarInt32(50)
      out.putVarInt32(self.key_[i].ByteSizePartial())
      self.key_[i].OutputPartial(out)
    if (self.has_force_):
      out.putVarInt32(56)
      out.putBoolean(self.force_)
    if (self.has_mark_changes_):
      out.putVarInt32(64)
      out.putBoolean(self.mark_changes_)
    for i in xrange(len(self.snapshot_)):
      out.putVarInt32(74)
      out.putVarInt32(self.snapshot_[i].ByteSizePartial())
      self.snapshot_[i].OutputPartial(out)
    if (self.has_header_):
      out.putVarInt32(82)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(90)
      out.putVarInt32(self.composite_index_[i].ByteSizePartial())
      self.composite_index_[i].OutputPartial(out)
  def TryMerge(self, d):
    """Parse fields from decoder `d`; submessages use a bounded sub-decoder,
    unknown tags are skipped, and tag 0 raises (malformed stream)."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 32:
        self.set_trusted(d.getBoolean())
        continue
      if tt == 42:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_transaction().TryMerge(tmp)
        continue
      if tt == 50:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if tt == 56:
        self.set_force(d.getBoolean())
        continue
      if tt == 64:
        self.set_mark_changes(d.getBoolean())
        continue
      if tt == 74:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_snapshot().TryMerge(tmp)
        continue
      if tt == 82:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue
      if tt == 90:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_composite_index().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text form; repeated elements get indices when
    printElemNumber is truthy."""
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.composite_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("composite_index%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
    if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
    if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
    cnt=0
    for e in self.snapshot_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("snapshot%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    """Expand a sparse {field_number: value} dict into a dense tuple."""
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Proto field numbers.
  kheader = 10
  kkey = 6
  ktransaction = 5
  kcomposite_index = 11
  ktrusted = 4
  kforce = 7
  kmark_changes = 8
  ksnapshot = 9
  # Dense field-number -> name / encoder-type tables for the generic codec.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    4: "trusted",
    5: "transaction",
    6: "key",
    7: "force",
    8: "mark_changes",
    9: "snapshot",
    10: "header",
    11: "composite_index",
  }, 11)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.STRING,
    7: ProtocolBuffer.Encoder.NUMERIC,
    8: ProtocolBuffer.Encoder.NUMERIC,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.STRING,
    11: ProtocolBuffer.Encoder.STRING,
  }, 11, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteRequest'
class DeleteResponse(ProtocolBuffer.ProtocolMessage):
  """Response message for a datastore Delete RPC.

  Fields: cost (field 1, optional Cost submessage, lazily allocated) and
  version (field 3, repeated int64). Generated protocol-buffer code; do
  not hand-edit the wire logic.
  """
  has_cost_ = 0
  cost_ = None
  def __init__(self, contents=None):
    self.version_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def cost(self):
    # Lazy, double-checked creation of the optional Cost submessage.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_
  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
  def clear_cost(self):
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()
  def has_cost(self): return self.has_cost_
  # --- version (repeated int64, field 3) ---
  def version_size(self): return len(self.version_)
  def version_list(self): return self.version_
  def version(self, i):
    return self.version_[i]
  def set_version(self, i, x):
    self.version_[i] = x
  def add_version(self, x):
    self.version_.append(x)
  def clear_version(self):
    self.version_ = []
  def MergeFrom(self, x):
    """Merge all set fields of another DeleteResponse `x` into self."""
    assert x is not self
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
    for i in xrange(x.version_size()): self.add_version(x.version(i))
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    if len(self.version_) != len(x.version_): return 0
    for e1, e2 in zip(self.version_, x.version_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff the cost submessage, when set, is initialized."""
    initialized = 1
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    """Encoded size: each version entry costs 1 tag byte + its varint."""
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
    return n
  def ByteSizePartial(self):
    """Encoded size, tolerating unset required subfields."""
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
    return n
  def Clear(self):
    self.clear_cost()
    self.clear_version()
  def OutputUnchecked(self, out):
    """Serialize to `out` (wire tags: cost=10, version=24)."""
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(24)
      out.putVarInt64(self.version_[i])
  def OutputPartial(self, out):
    """Like OutputUnchecked, but uses partial sizes for nested messages."""
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSizePartial())
      self.cost_.OutputPartial(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(24)
      out.putVarInt64(self.version_[i])
  def TryMerge(self, d):
    """Parse fields from decoder `d`; unknown tags skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if tt == 24:
        self.add_version(d.getVarInt64())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.version_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    """Expand a sparse {field_number: value} dict into a dense tuple."""
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Proto field numbers.
  kcost = 1
  kversion = 3
  # Dense field-number -> name / encoder-type tables for the generic codec.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "cost",
    3: "version",
  }, 3)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteResponse'
class NextRequest(ProtocolBuffer.ProtocolMessage):
  """Request message for a datastore Next RPC (fetch more query results).

  Fields: cursor (field 1, REQUIRED Cursor — eagerly constructed in
  __init__, unlike the lazily-built optional submessages), count (2),
  compile (3), offset (4), header (5, optional InternalHeader).
  Generated protocol-buffer code; do not hand-edit the wire logic.
  """
  has_header_ = 0
  header_ = None
  has_cursor_ = 0
  has_count_ = 0
  count_ = 0
  has_offset_ = 0
  offset_ = 0
  has_compile_ = 0
  compile_ = 0
  def __init__(self, contents=None):
    # cursor is required, so its instance always exists.
    self.cursor_ = Cursor()
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def header(self):
    # Lazy, double-checked creation of the optional header submessage.
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_
  def mutable_header(self): self.has_header_ = 1; return self.header()
  def clear_header(self):
    if self.has_header_:
      self.has_header_ = 0;
      if self.header_ is not None: self.header_.Clear()
  def has_header(self): return self.has_header_
  def cursor(self): return self.cursor_
  def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor_
  def clear_cursor(self):self.has_cursor_ = 0; self.cursor_.Clear()
  def has_cursor(self): return self.has_cursor_
  # --- count (optional int32, field 2) ---
  def count(self): return self.count_
  def set_count(self, x):
    self.has_count_ = 1
    self.count_ = x
  def clear_count(self):
    if self.has_count_:
      self.has_count_ = 0
      self.count_ = 0
  def has_count(self): return self.has_count_
  # --- offset (optional int32, field 4) ---
  def offset(self): return self.offset_
  def set_offset(self, x):
    self.has_offset_ = 1
    self.offset_ = x
  def clear_offset(self):
    if self.has_offset_:
      self.has_offset_ = 0
      self.offset_ = 0
  def has_offset(self): return self.has_offset_
  # --- compile (optional bool, field 3) ---
  def compile(self): return self.compile_
  def set_compile(self, x):
    self.has_compile_ = 1
    self.compile_ = x
  def clear_compile(self):
    if self.has_compile_:
      self.has_compile_ = 0
      self.compile_ = 0
  def has_compile(self): return self.has_compile_
  def MergeFrom(self, x):
    """Merge all set fields of another NextRequest `x` into self."""
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
    if (x.has_count()): self.set_count(x.count())
    if (x.has_offset()): self.set_offset(x.offset())
    if (x.has_compile()): self.set_compile(x.compile())
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if self.has_cursor_ != x.has_cursor_: return 0
    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
    if self.has_count_ != x.has_count_: return 0
    if self.has_count_ and self.count_ != x.count_: return 0
    if self.has_offset_ != x.has_offset_: return 0
    if self.has_offset_ and self.offset_ != x.offset_: return 0
    if self.has_compile_ != x.has_compile_: return 0
    if self.has_compile_ and self.compile_ != x.compile_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff the required cursor field is set and initialized."""
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    if (not self.has_cursor_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: cursor not set.')
    elif not self.cursor_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    """Encoded size; the trailing +1 is the tag byte of the required
    cursor field, which is always emitted."""
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    n += self.lengthString(self.cursor_.ByteSize())
    if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
    if (self.has_compile_): n += 2
    return n + 1
  def ByteSizePartial(self):
    """Encoded size, counting cursor only when actually set."""
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    if (self.has_cursor_):
      n += 1
      n += self.lengthString(self.cursor_.ByteSizePartial())
    if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
    if (self.has_compile_): n += 2
    return n
  def Clear(self):
    self.clear_header()
    self.clear_cursor()
    self.clear_count()
    self.clear_offset()
    self.clear_compile()
  def OutputUnchecked(self, out):
    """Serialize to `out`; the required cursor (tag 10) is written
    unconditionally. Other wire tags: count=16, compile=24, offset=32,
    header=42."""
    out.putVarInt32(10)
    out.putVarInt32(self.cursor_.ByteSize())
    self.cursor_.OutputUnchecked(out)
    if (self.has_count_):
      out.putVarInt32(16)
      out.putVarInt32(self.count_)
    if (self.has_compile_):
      out.putVarInt32(24)
      out.putBoolean(self.compile_)
    if (self.has_offset_):
      out.putVarInt32(32)
      out.putVarInt32(self.offset_)
    if (self.has_header_):
      out.putVarInt32(42)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)
  def OutputPartial(self, out):
    """Like OutputUnchecked, but cursor is emitted only when set."""
    if (self.has_cursor_):
      out.putVarInt32(10)
      out.putVarInt32(self.cursor_.ByteSizePartial())
      self.cursor_.OutputPartial(out)
    if (self.has_count_):
      out.putVarInt32(16)
      out.putVarInt32(self.count_)
    if (self.has_compile_):
      out.putVarInt32(24)
      out.putBoolean(self.compile_)
    if (self.has_offset_):
      out.putVarInt32(32)
      out.putVarInt32(self.offset_)
    if (self.has_header_):
      out.putVarInt32(42)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)
  def TryMerge(self, d):
    """Parse fields from decoder `d`; unknown tags skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cursor().TryMerge(tmp)
        continue
      if tt == 16:
        self.set_count(d.getVarInt32())
        continue
      if tt == 24:
        self.set_compile(d.getBoolean())
        continue
      if tt == 32:
        self.set_offset(d.getVarInt32())
        continue
      if tt == 42:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_cursor_:
      res+=prefix+"cursor <\n"
      res+=self.cursor_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
    if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    """Expand a sparse {field_number: value} dict into a dense tuple."""
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Proto field numbers.
  kheader = 5
  kcursor = 1
  kcount = 2
  koffset = 4
  kcompile = 3
  # Dense field-number -> name / encoder-type tables for the generic codec.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "cursor",
    2: "count",
    3: "compile",
    4: "offset",
    5: "header",
  }, 5)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.NextRequest'
class QueryResult(ProtocolBuffer.ProtocolMessage):
has_cursor_ = 0
cursor_ = None
has_skipped_results_ = 0
skipped_results_ = 0
has_more_results_ = 0
more_results_ = 0
has_keys_only_ = 0
keys_only_ = 0
has_index_only_ = 0
index_only_ = 0
has_small_ops_ = 0
small_ops_ = 0
has_compiled_query_ = 0
compiled_query_ = None
has_compiled_cursor_ = 0
compiled_cursor_ = None
has_skipped_results_compiled_cursor_ = 0
skipped_results_compiled_cursor_ = None
def __init__(self, contents=None):
self.result_ = []
self.index_ = []
self.version_ = []
self.result_compiled_cursor_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def cursor(self):
if self.cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cursor_ is None: self.cursor_ = Cursor()
finally:
self.lazy_init_lock_.release()
return self.cursor_
def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor()
def clear_cursor(self):
if self.has_cursor_:
self.has_cursor_ = 0;
if self.cursor_ is not None: self.cursor_.Clear()
def has_cursor(self): return self.has_cursor_
def result_size(self): return len(self.result_)
def result_list(self): return self.result_
def result(self, i):
return self.result_[i]
def mutable_result(self, i):
return self.result_[i]
def add_result(self):
x = EntityProto()
self.result_.append(x)
return x
def clear_result(self):
self.result_ = []
def skipped_results(self): return self.skipped_results_
def set_skipped_results(self, x):
self.has_skipped_results_ = 1
self.skipped_results_ = x
def clear_skipped_results(self):
if self.has_skipped_results_:
self.has_skipped_results_ = 0
self.skipped_results_ = 0
def has_skipped_results(self): return self.has_skipped_results_
def more_results(self): return self.more_results_
def set_more_results(self, x):
self.has_more_results_ = 1
self.more_results_ = x
def clear_more_results(self):
if self.has_more_results_:
self.has_more_results_ = 0
self.more_results_ = 0
def has_more_results(self): return self.has_more_results_
def keys_only(self): return self.keys_only_
def set_keys_only(self, x):
self.has_keys_only_ = 1
self.keys_only_ = x
def clear_keys_only(self):
if self.has_keys_only_:
self.has_keys_only_ = 0
self.keys_only_ = 0
def has_keys_only(self): return self.has_keys_only_
def index_only(self): return self.index_only_
def set_index_only(self, x):
self.has_index_only_ = 1
self.index_only_ = x
def clear_index_only(self):
if self.has_index_only_:
self.has_index_only_ = 0
self.index_only_ = 0
def has_index_only(self): return self.has_index_only_
def small_ops(self): return self.small_ops_
def set_small_ops(self, x):
self.has_small_ops_ = 1
self.small_ops_ = x
def clear_small_ops(self):
if self.has_small_ops_:
self.has_small_ops_ = 0
self.small_ops_ = 0
def has_small_ops(self): return self.has_small_ops_
def compiled_query(self):
if self.compiled_query_ is None:
self.lazy_init_lock_.acquire()
try:
if self.compiled_query_ is None: self.compiled_query_ = CompiledQuery()
finally:
self.lazy_init_lock_.release()
return self.compiled_query_
def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query()
def clear_compiled_query(self):
if self.has_compiled_query_:
self.has_compiled_query_ = 0;
if self.compiled_query_ is not None: self.compiled_query_.Clear()
def has_compiled_query(self): return self.has_compiled_query_
def compiled_cursor(self):
if self.compiled_cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor()
finally:
self.lazy_init_lock_.release()
return self.compiled_cursor_
def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor()
def clear_compiled_cursor(self):
if self.has_compiled_cursor_:
self.has_compiled_cursor_ = 0;
if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear()
def has_compiled_cursor(self): return self.has_compiled_cursor_
def index_size(self): return len(self.index_)
def index_list(self): return self.index_
def index(self, i):
return self.index_[i]
def mutable_index(self, i):
return self.index_[i]
def add_index(self):
x = CompositeIndex()
self.index_.append(x)
return x
def clear_index(self):
self.index_ = []
def version_size(self): return len(self.version_)
def version_list(self): return self.version_
def version(self, i):
return self.version_[i]
def set_version(self, i, x):
self.version_[i] = x
def add_version(self, x):
self.version_.append(x)
def clear_version(self):
self.version_ = []
def result_compiled_cursor_size(self): return len(self.result_compiled_cursor_)
def result_compiled_cursor_list(self): return self.result_compiled_cursor_
def result_compiled_cursor(self, i):
return self.result_compiled_cursor_[i]
def mutable_result_compiled_cursor(self, i):
return self.result_compiled_cursor_[i]
def add_result_compiled_cursor(self):
x = CompiledCursor()
self.result_compiled_cursor_.append(x)
return x
def clear_result_compiled_cursor(self):
self.result_compiled_cursor_ = []
  def skipped_results_compiled_cursor(self):
    """Returns the skipped_results_compiled_cursor sub-message, built lazily.

    Double-checked locking (re-check under lazy_init_lock_) ensures only one
    CompiledCursor is constructed even with concurrent first reads.  Does NOT
    set the presence bit; use the mutable_ accessor for that.
    """
    if self.skipped_results_compiled_cursor_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.skipped_results_compiled_cursor_ is None: self.skipped_results_compiled_cursor_ = CompiledCursor()
      finally:
        self.lazy_init_lock_.release()
    return self.skipped_results_compiled_cursor_
def mutable_skipped_results_compiled_cursor(self): self.has_skipped_results_compiled_cursor_ = 1; return self.skipped_results_compiled_cursor()
def clear_skipped_results_compiled_cursor(self):
if self.has_skipped_results_compiled_cursor_:
self.has_skipped_results_compiled_cursor_ = 0;
if self.skipped_results_compiled_cursor_ is not None: self.skipped_results_compiled_cursor_.Clear()
def has_skipped_results_compiled_cursor(self): return self.has_skipped_results_compiled_cursor_
  def MergeFrom(self, x):
    """Merges every field that is set on another QueryResult `x` into self.

    Singular sub-messages are merged recursively; scalars are overwritten;
    repeated fields are appended (copies, so self does not alias x).
    """
    assert x is not self
    if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
    for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
    if (x.has_skipped_results()): self.set_skipped_results(x.skipped_results())
    if (x.has_more_results()): self.set_more_results(x.more_results())
    if (x.has_keys_only()): self.set_keys_only(x.keys_only())
    if (x.has_index_only()): self.set_index_only(x.index_only())
    if (x.has_small_ops()): self.set_small_ops(x.small_ops())
    if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query())
    if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor())
    for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
    for i in xrange(x.version_size()): self.add_version(x.version(i))
    for i in xrange(x.result_compiled_cursor_size()): self.add_result_compiled_cursor().CopyFrom(x.result_compiled_cursor(i))
    if (x.has_skipped_results_compiled_cursor()): self.mutable_skipped_results_compiled_cursor().MergeFrom(x.skipped_results_compiled_cursor())
def Equals(self, x):
if x is self: return 1
if self.has_cursor_ != x.has_cursor_: return 0
if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
if len(self.result_) != len(x.result_): return 0
for e1, e2 in zip(self.result_, x.result_):
if e1 != e2: return 0
if self.has_skipped_results_ != x.has_skipped_results_: return 0
if self.has_skipped_results_ and self.skipped_results_ != x.skipped_results_: return 0
if self.has_more_results_ != x.has_more_results_: return 0
if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
if self.has_keys_only_ != x.has_keys_only_: return 0
if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
if self.has_index_only_ != x.has_index_only_: return 0
if self.has_index_only_ and self.index_only_ != x.index_only_: return 0
if self.has_small_ops_ != x.has_small_ops_: return 0
if self.has_small_ops_ and self.small_ops_ != x.small_ops_: return 0
if self.has_compiled_query_ != x.has_compiled_query_: return 0
if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0
if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0
if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0
if len(self.index_) != len(x.index_): return 0
for e1, e2 in zip(self.index_, x.index_):
if e1 != e2: return 0
if len(self.version_) != len(x.version_): return 0
for e1, e2 in zip(self.version_, x.version_):
if e1 != e2: return 0
if len(self.result_compiled_cursor_) != len(x.result_compiled_cursor_): return 0
for e1, e2 in zip(self.result_compiled_cursor_, x.result_compiled_cursor_):
if e1 != e2: return 0
if self.has_skipped_results_compiled_cursor_ != x.has_skipped_results_compiled_cursor_: return 0
if self.has_skipped_results_compiled_cursor_ and self.skipped_results_compiled_cursor_ != x.skipped_results_compiled_cursor_: return 0
return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 if all required fields (recursively) are set, else 0.

    more_results is the only required field of this message; sub-messages
    are validated recursively.  If `debug_strs` is a list, a human-readable
    reason is appended for each missing required field.
    """
    initialized = 1
    if (self.has_cursor_ and not self.cursor_.IsInitialized(debug_strs)): initialized = 0
    for p in self.result_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (not self.has_more_results_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: more_results not set.')
    if (self.has_compiled_query_ and not self.compiled_query_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
    for p in self.index_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.result_compiled_cursor_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_skipped_results_compiled_cursor_ and not self.skipped_results_compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    """Serialized size in bytes, assuming all required fields are set.

    Per set field: 1 byte of tag overhead (2 for bool fields with tag > 15
    is not needed here; the `+ 2` below covers the required more_results
    field: 1 tag byte + 1 bool byte, always written by OutputUnchecked).
    """
    n = 0
    if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSize())
    n += 1 * len(self.result_)
    for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
    if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
    if (self.has_keys_only_): n += 2
    if (self.has_index_only_): n += 2
    if (self.has_small_ops_): n += 2
    if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSize())
    if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSize())
    n += 1 * len(self.index_)
    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
    n += 1 * len(self.result_compiled_cursor_)
    for i in xrange(len(self.result_compiled_cursor_)): n += self.lengthString(self.result_compiled_cursor_[i].ByteSize())
    if (self.has_skipped_results_compiled_cursor_): n += 1 + self.lengthString(self.skipped_results_compiled_cursor_.ByteSize())
    return n + 2
  def ByteSizePartial(self):
    """Serialized size counting only fields that are actually set.

    Unlike ByteSize(), the required more_results field contributes its
    2 bytes only when present, matching OutputPartial().
    """
    n = 0
    if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSizePartial())
    n += 1 * len(self.result_)
    for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSizePartial())
    if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
    if (self.has_more_results_):
      n += 2
    if (self.has_keys_only_): n += 2
    if (self.has_index_only_): n += 2
    if (self.has_small_ops_): n += 2
    if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSizePartial())
    if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSizePartial())
    n += 1 * len(self.index_)
    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial())
    n += 1 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
    n += 1 * len(self.result_compiled_cursor_)
    for i in xrange(len(self.result_compiled_cursor_)): n += self.lengthString(self.result_compiled_cursor_[i].ByteSizePartial())
    if (self.has_skipped_results_compiled_cursor_): n += 1 + self.lengthString(self.skipped_results_compiled_cursor_.ByteSizePartial())
    return n
  def Clear(self):
    """Resets every field of this message to its unset/default state."""
    self.clear_cursor()
    self.clear_result()
    self.clear_skipped_results()
    self.clear_more_results()
    self.clear_keys_only()
    self.clear_index_only()
    self.clear_small_ops()
    self.clear_compiled_query()
    self.clear_compiled_cursor()
    self.clear_index()
    self.clear_version()
    self.clear_result_compiled_cursor()
    self.clear_skipped_results_compiled_cursor()
  def OutputUnchecked(self, out):
    """Serializes to `out` without checking required fields.

    Each putVarInt32(N) below is the precomputed key byte
    (field_number << 3 | wire_type).  NOTE: the required more_results
    field (key 24) is written unconditionally.
    """
    if (self.has_cursor_):
      out.putVarInt32(10)
      out.putVarInt32(self.cursor_.ByteSize())
      self.cursor_.OutputUnchecked(out)
    for i in xrange(len(self.result_)):
      out.putVarInt32(18)
      out.putVarInt32(self.result_[i].ByteSize())
      self.result_[i].OutputUnchecked(out)
    out.putVarInt32(24)
    out.putBoolean(self.more_results_)
    if (self.has_keys_only_):
      out.putVarInt32(32)
      out.putBoolean(self.keys_only_)
    if (self.has_compiled_query_):
      out.putVarInt32(42)
      out.putVarInt32(self.compiled_query_.ByteSize())
      self.compiled_query_.OutputUnchecked(out)
    if (self.has_compiled_cursor_):
      out.putVarInt32(50)
      out.putVarInt32(self.compiled_cursor_.ByteSize())
      self.compiled_cursor_.OutputUnchecked(out)
    if (self.has_skipped_results_):
      out.putVarInt32(56)
      out.putVarInt32(self.skipped_results_)
    for i in xrange(len(self.index_)):
      out.putVarInt32(66)
      out.putVarInt32(self.index_[i].ByteSize())
      self.index_[i].OutputUnchecked(out)
    if (self.has_index_only_):
      out.putVarInt32(72)
      out.putBoolean(self.index_only_)
    if (self.has_small_ops_):
      out.putVarInt32(80)
      out.putBoolean(self.small_ops_)
    for i in xrange(len(self.version_)):
      out.putVarInt32(88)
      out.putVarInt64(self.version_[i])
    for i in xrange(len(self.result_compiled_cursor_)):
      out.putVarInt32(98)
      out.putVarInt32(self.result_compiled_cursor_[i].ByteSize())
      self.result_compiled_cursor_[i].OutputUnchecked(out)
    if (self.has_skipped_results_compiled_cursor_):
      out.putVarInt32(106)
      out.putVarInt32(self.skipped_results_compiled_cursor_.ByteSize())
      self.skipped_results_compiled_cursor_.OutputUnchecked(out)
  def OutputPartial(self, out):
    """Serializes only the fields that are set (safe on uninitialized
    messages); unlike OutputUnchecked, more_results is written only when
    present."""
    if (self.has_cursor_):
      out.putVarInt32(10)
      out.putVarInt32(self.cursor_.ByteSizePartial())
      self.cursor_.OutputPartial(out)
    for i in xrange(len(self.result_)):
      out.putVarInt32(18)
      out.putVarInt32(self.result_[i].ByteSizePartial())
      self.result_[i].OutputPartial(out)
    if (self.has_more_results_):
      out.putVarInt32(24)
      out.putBoolean(self.more_results_)
    if (self.has_keys_only_):
      out.putVarInt32(32)
      out.putBoolean(self.keys_only_)
    if (self.has_compiled_query_):
      out.putVarInt32(42)
      out.putVarInt32(self.compiled_query_.ByteSizePartial())
      self.compiled_query_.OutputPartial(out)
    if (self.has_compiled_cursor_):
      out.putVarInt32(50)
      out.putVarInt32(self.compiled_cursor_.ByteSizePartial())
      self.compiled_cursor_.OutputPartial(out)
    if (self.has_skipped_results_):
      out.putVarInt32(56)
      out.putVarInt32(self.skipped_results_)
    for i in xrange(len(self.index_)):
      out.putVarInt32(66)
      out.putVarInt32(self.index_[i].ByteSizePartial())
      self.index_[i].OutputPartial(out)
    if (self.has_index_only_):
      out.putVarInt32(72)
      out.putBoolean(self.index_only_)
    if (self.has_small_ops_):
      out.putVarInt32(80)
      out.putBoolean(self.small_ops_)
    for i in xrange(len(self.version_)):
      out.putVarInt32(88)
      out.putVarInt64(self.version_[i])
    for i in xrange(len(self.result_compiled_cursor_)):
      out.putVarInt32(98)
      out.putVarInt32(self.result_compiled_cursor_[i].ByteSizePartial())
      self.result_compiled_cursor_[i].OutputPartial(out)
    if (self.has_skipped_results_compiled_cursor_):
      out.putVarInt32(106)
      out.putVarInt32(self.skipped_results_compiled_cursor_.ByteSizePartial())
      self.skipped_results_compiled_cursor_.OutputPartial(out)
  def TryMerge(self, d):
    """Decodes from Decoder `d`, merging fields into self.

    Dispatches on each key byte (field_number << 3 | wire_type); unknown
    fields are skipped via skipData, and key 0 is a framing error.
    Length-delimited sub-messages are decoded from a bounded sub-Decoder.
    """
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cursor().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_result().TryMerge(tmp)
        continue
      if tt == 24:
        self.set_more_results(d.getBoolean())
        continue
      if tt == 32:
        self.set_keys_only(d.getBoolean())
        continue
      if tt == 42:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_compiled_query().TryMerge(tmp)
        continue
      if tt == 50:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_compiled_cursor().TryMerge(tmp)
        continue
      if tt == 56:
        self.set_skipped_results(d.getVarInt32())
        continue
      if tt == 66:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_index().TryMerge(tmp)
        continue
      if tt == 72:
        self.set_index_only(d.getBoolean())
        continue
      if tt == 80:
        self.set_small_ops(d.getBoolean())
        continue
      if tt == 88:
        self.add_version(d.getVarInt64())
        continue
      if tt == 98:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_result_compiled_cursor().TryMerge(tmp)
        continue
      if tt == 106:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_skipped_results_compiled_cursor().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Debug text rendering; sub-messages are indented inside `< >` and
    repeated fields get `(n)` suffixes when printElemNumber is true."""
    res=""
    if self.has_cursor_:
      res+=prefix+"cursor <\n"
      res+=self.cursor_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.result_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("result%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_skipped_results_: res+=prefix+("skipped_results: %s\n" % self.DebugFormatInt32(self.skipped_results_))
    if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
    if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
    if self.has_index_only_: res+=prefix+("index_only: %s\n" % self.DebugFormatBool(self.index_only_))
    if self.has_small_ops_: res+=prefix+("small_ops: %s\n" % self.DebugFormatBool(self.small_ops_))
    if self.has_compiled_query_:
      res+=prefix+"compiled_query <\n"
      res+=self.compiled_query_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_compiled_cursor_:
      res+=prefix+"compiled_cursor <\n"
      res+=self.compiled_cursor_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("index%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    cnt=0
    for e in self.version_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
      cnt+=1
    cnt=0
    for e in self.result_compiled_cursor_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("result_compiled_cursor%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_skipped_results_compiled_cursor_:
      res+=prefix+"skipped_results_compiled_cursor <\n"
      res+=self.skipped_results_compiled_cursor_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcursor = 1
kresult = 2
kskipped_results = 7
kmore_results = 3
kkeys_only = 4
kindex_only = 9
ksmall_ops = 10
kcompiled_query = 5
kcompiled_cursor = 6
kindex = 8
kversion = 11
kresult_compiled_cursor = 12
kskipped_results_compiled_cursor = 13
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cursor",
2: "result",
3: "more_results",
4: "keys_only",
5: "compiled_query",
6: "compiled_cursor",
7: "skipped_results",
8: "index",
9: "index_only",
10: "small_ops",
11: "version",
12: "result_compiled_cursor",
13: "skipped_results_compiled_cursor",
}, 13)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.STRING,
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.NUMERIC,
10: ProtocolBuffer.Encoder.NUMERIC,
11: ProtocolBuffer.Encoder.NUMERIC,
12: ProtocolBuffer.Encoder.STRING,
13: ProtocolBuffer.Encoder.STRING,
}, 13, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.QueryResult'
class AllocateIdsRequest(ProtocolBuffer.ProtocolMessage):
  """Generated message: datastore v3 AllocateIds RPC request.

  Fields: optional header (InternalHeader), model_key (Reference),
  size, max (varint), repeated reserve (Reference), trusted (bool).
  Generated protocol-buffer code: the encode/decode bodies below define
  the wire format; do not hand-edit the logic.
  """
  # Presence flags and default values for each optional field.
  has_header_ = 0
  header_ = None
  has_model_key_ = 0
  model_key_ = None
  has_size_ = 0
  size_ = 0
  has_max_ = 0
  max_ = 0
  has_trusted_ = 0
  trusted_ = 0
  def __init__(self, contents=None):
    # Lock guards lazy construction of the optional sub-messages.
    self.reserve_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  # -- header accessors (lazy, double-checked under lazy_init_lock_) --
  def header(self):
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_
  def mutable_header(self): self.has_header_ = 1; return self.header()
  def clear_header(self):
    if self.has_header_:
      self.has_header_ = 0;
      if self.header_ is not None: self.header_.Clear()
  def has_header(self): return self.has_header_
  # -- model_key accessors --
  def model_key(self):
    if self.model_key_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.model_key_ is None: self.model_key_ = Reference()
      finally:
        self.lazy_init_lock_.release()
    return self.model_key_
  def mutable_model_key(self): self.has_model_key_ = 1; return self.model_key()
  def clear_model_key(self):
    if self.has_model_key_:
      self.has_model_key_ = 0;
      if self.model_key_ is not None: self.model_key_.Clear()
  def has_model_key(self): return self.has_model_key_
  # -- size accessors --
  def size(self): return self.size_
  def set_size(self, x):
    self.has_size_ = 1
    self.size_ = x
  def clear_size(self):
    if self.has_size_:
      self.has_size_ = 0
      self.size_ = 0
  def has_size(self): return self.has_size_
  # -- max accessors --
  def max(self): return self.max_
  def set_max(self, x):
    self.has_max_ = 1
    self.max_ = x
  def clear_max(self):
    if self.has_max_:
      self.has_max_ = 0
      self.max_ = 0
  def has_max(self): return self.has_max_
  # -- repeated reserve accessors --
  def reserve_size(self): return len(self.reserve_)
  def reserve_list(self): return self.reserve_
  def reserve(self, i):
    return self.reserve_[i]
  def mutable_reserve(self, i):
    return self.reserve_[i]
  def add_reserve(self):
    x = Reference()
    self.reserve_.append(x)
    return x
  def clear_reserve(self):
    self.reserve_ = []
  # -- trusted accessors --
  def trusted(self): return self.trusted_
  def set_trusted(self, x):
    self.has_trusted_ = 1
    self.trusted_ = x
  def clear_trusted(self):
    if self.has_trusted_:
      self.has_trusted_ = 0
      self.trusted_ = 0
  def has_trusted(self): return self.has_trusted_
  def MergeFrom(self, x):
    # Merges all set fields of another AllocateIdsRequest into self.
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    if (x.has_model_key()): self.mutable_model_key().MergeFrom(x.model_key())
    if (x.has_size()): self.set_size(x.size())
    if (x.has_max()): self.set_max(x.max())
    for i in xrange(x.reserve_size()): self.add_reserve().CopyFrom(x.reserve(i))
    if (x.has_trusted()): self.set_trusted(x.trusted())
  def Equals(self, x):
    # Field-by-field equality (presence bits and values); returns 1/0.
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if self.has_model_key_ != x.has_model_key_: return 0
    if self.has_model_key_ and self.model_key_ != x.model_key_: return 0
    if self.has_size_ != x.has_size_: return 0
    if self.has_size_ and self.size_ != x.size_: return 0
    if self.has_max_ != x.has_max_: return 0
    if self.has_max_ and self.max_ != x.max_: return 0
    if len(self.reserve_) != len(x.reserve_): return 0
    for e1, e2 in zip(self.reserve_, x.reserve_):
      if e1 != e2: return 0
    if self.has_trusted_ != x.has_trusted_: return 0
    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # No required fields of its own; validates sub-messages recursively.
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_model_key_ and not self.model_key_.IsInitialized(debug_strs)): initialized = 0
    for p in self.reserve_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # Serialized size: 1 tag byte per set field (+ payload / length prefix).
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    if (self.has_model_key_): n += 1 + self.lengthString(self.model_key_.ByteSize())
    if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_)
    if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_)
    n += 1 * len(self.reserve_)
    for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSize())
    if (self.has_trusted_): n += 2
    return n
  def ByteSizePartial(self):
    # Same as ByteSize (all fields optional), but sized partially.
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    if (self.has_model_key_): n += 1 + self.lengthString(self.model_key_.ByteSizePartial())
    if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_)
    if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_)
    n += 1 * len(self.reserve_)
    for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSizePartial())
    if (self.has_trusted_): n += 2
    return n
  def Clear(self):
    # Resets every field to its unset/default state.
    self.clear_header()
    self.clear_model_key()
    self.clear_size()
    self.clear_max()
    self.clear_reserve()
    self.clear_trusted()
  def OutputUnchecked(self, out):
    # Serializes set fields; key bytes are (field_number << 3 | wire_type).
    if (self.has_model_key_):
      out.putVarInt32(10)
      out.putVarInt32(self.model_key_.ByteSize())
      self.model_key_.OutputUnchecked(out)
    if (self.has_size_):
      out.putVarInt32(16)
      out.putVarInt64(self.size_)
    if (self.has_max_):
      out.putVarInt32(24)
      out.putVarInt64(self.max_)
    if (self.has_header_):
      out.putVarInt32(34)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)
    for i in xrange(len(self.reserve_)):
      out.putVarInt32(42)
      out.putVarInt32(self.reserve_[i].ByteSize())
      self.reserve_[i].OutputUnchecked(out)
    if (self.has_trusted_):
      out.putVarInt32(48)
      out.putBoolean(self.trusted_)
  def OutputPartial(self, out):
    # Partial serialization: identical layout, partial sub-message sizes.
    if (self.has_model_key_):
      out.putVarInt32(10)
      out.putVarInt32(self.model_key_.ByteSizePartial())
      self.model_key_.OutputPartial(out)
    if (self.has_size_):
      out.putVarInt32(16)
      out.putVarInt64(self.size_)
    if (self.has_max_):
      out.putVarInt32(24)
      out.putVarInt64(self.max_)
    if (self.has_header_):
      out.putVarInt32(34)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)
    for i in xrange(len(self.reserve_)):
      out.putVarInt32(42)
      out.putVarInt32(self.reserve_[i].ByteSizePartial())
      self.reserve_[i].OutputPartial(out)
    if (self.has_trusted_):
      out.putVarInt32(48)
      out.putBoolean(self.trusted_)
  def TryMerge(self, d):
    # Decoder loop keyed on tag bytes; unknown fields skipped, key 0 is
    # a framing error.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_model_key().TryMerge(tmp)
        continue
      if tt == 16:
        self.set_size(d.getVarInt64())
        continue
      if tt == 24:
        self.set_max(d.getVarInt64())
        continue
      if tt == 34:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue
      if tt == 42:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_reserve().TryMerge(tmp)
        continue
      if tt == 48:
        self.set_trusted(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering; nested messages indented inside `< >`.
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_model_key_:
      res+=prefix+"model_key <\n"
      res+=self.model_key_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_size_: res+=prefix+("size: %s\n" % self.DebugFormatInt64(self.size_))
    if self.has_max_: res+=prefix+("max: %s\n" % self.DebugFormatInt64(self.max_))
    cnt=0
    for e in self.reserve_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("reserve%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field-number constants and tag lookup tables (index 0 = RPC error slot).
  kheader = 4
  kmodel_key = 1
  ksize = 2
  kmax = 3
  kreserve = 5
  ktrusted = 6
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "model_key",
    2: "size",
    3: "max",
    4: "header",
    5: "reserve",
    6: "trusted",
  }, 6)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.STRING,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.NUMERIC,
  }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsRequest'
class AllocateIdsResponse(ProtocolBuffer.ProtocolMessage):
  """Generated message: datastore v3 AllocateIds RPC response.

  Fields: required start, end (varint); optional cost (Cost).
  Generated protocol-buffer code: the encode/decode bodies below define
  the wire format; do not hand-edit the logic.
  """
  # Presence flags and default values for each field.
  has_start_ = 0
  start_ = 0
  has_end_ = 0
  end_ = 0
  has_cost_ = 0
  cost_ = None
  def __init__(self, contents=None):
    # Lock guards lazy construction of the optional cost sub-message.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  # -- start accessors --
  def start(self): return self.start_
  def set_start(self, x):
    self.has_start_ = 1
    self.start_ = x
  def clear_start(self):
    if self.has_start_:
      self.has_start_ = 0
      self.start_ = 0
  def has_start(self): return self.has_start_
  # -- end accessors --
  def end(self): return self.end_
  def set_end(self, x):
    self.has_end_ = 1
    self.end_ = x
  def clear_end(self):
    if self.has_end_:
      self.has_end_ = 0
      self.end_ = 0
  def has_end(self): return self.has_end_
  # -- cost accessors (lazy, double-checked under lazy_init_lock_) --
  def cost(self):
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_
  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
  def clear_cost(self):
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()
  def has_cost(self): return self.has_cost_
  def MergeFrom(self, x):
    # Merges all set fields of another AllocateIdsResponse into self.
    assert x is not self
    if (x.has_start()): self.set_start(x.start())
    if (x.has_end()): self.set_end(x.end())
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
  def Equals(self, x):
    # Field-by-field equality (presence bits and values); returns 1/0.
    if x is self: return 1
    if self.has_start_ != x.has_start_: return 0
    if self.has_start_ and self.start_ != x.start_: return 0
    if self.has_end_ != x.has_end_: return 0
    if self.has_end_ and self.end_ != x.end_: return 0
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # start and end are required; cost is validated recursively if set.
    initialized = 1
    if (not self.has_start_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: start not set.')
    if (not self.has_end_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: end not set.')
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    # `+ 2` covers the always-written tag bytes of required start and end.
    n = 0
    n += self.lengthVarInt64(self.start_)
    n += self.lengthVarInt64(self.end_)
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    return n + 2
  def ByteSizePartial(self):
    # Counts required fields only when actually set (matches OutputPartial).
    n = 0
    if (self.has_start_):
      n += 1
      n += self.lengthVarInt64(self.start_)
    if (self.has_end_):
      n += 1
      n += self.lengthVarInt64(self.end_)
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
    return n
  def Clear(self):
    # Resets every field to its unset/default state.
    self.clear_start()
    self.clear_end()
    self.clear_cost()
  def OutputUnchecked(self, out):
    # Required start (key 8) and end (key 16) are written unconditionally.
    out.putVarInt32(8)
    out.putVarInt64(self.start_)
    out.putVarInt32(16)
    out.putVarInt64(self.end_)
    if (self.has_cost_):
      out.putVarInt32(26)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)
  def OutputPartial(self, out):
    # Partial serialization: required fields written only when set.
    if (self.has_start_):
      out.putVarInt32(8)
      out.putVarInt64(self.start_)
    if (self.has_end_):
      out.putVarInt32(16)
      out.putVarInt64(self.end_)
    if (self.has_cost_):
      out.putVarInt32(26)
      out.putVarInt32(self.cost_.ByteSizePartial())
      self.cost_.OutputPartial(out)
  def TryMerge(self, d):
    # Decoder loop keyed on tag bytes; key 0 is a framing error.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_start(d.getVarInt64())
        continue
      if tt == 16:
        self.set_end(d.getVarInt64())
        continue
      if tt == 26:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering; nested cost indented inside `< >`.
    res=""
    if self.has_start_: res+=prefix+("start: %s\n" % self.DebugFormatInt64(self.start_))
    if self.has_end_: res+=prefix+("end: %s\n" % self.DebugFormatInt64(self.end_))
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field-number constants and tag lookup tables (index 0 = RPC error slot).
  kstart = 1
  kend = 2
  kcost = 3
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "start",
    2: "end",
    3: "cost",
  }, 3)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.NUMERIC,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.STRING,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsResponse'
class CompositeIndices(ProtocolBuffer.ProtocolMessage):
  """Generated message: a list of CompositeIndex entries.

  Single repeated field: index (CompositeIndex).  Generated
  protocol-buffer code: the encode/decode bodies below define the wire
  format; do not hand-edit the logic.
  """
  def __init__(self, contents=None):
    self.index_ = []
    if contents is not None: self.MergeFromString(contents)
  # -- repeated index accessors --
  def index_size(self): return len(self.index_)
  def index_list(self): return self.index_
  def index(self, i):
    return self.index_[i]
  def mutable_index(self, i):
    return self.index_[i]
  def add_index(self):
    x = CompositeIndex()
    self.index_.append(x)
    return x
  def clear_index(self):
    self.index_ = []
  def MergeFrom(self, x):
    # Appends copies of x's entries onto self's repeated field.
    assert x is not self
    for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
  def Equals(self, x):
    # Element-wise equality of the repeated field; returns 1/0.
    if x is self: return 1
    if len(self.index_) != len(x.index_): return 0
    for e1, e2 in zip(self.index_, x.index_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # No required fields of its own; validates entries recursively.
    initialized = 1
    for p in self.index_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # 1 tag byte + length-prefixed payload per entry.
    n = 0
    n += 1 * len(self.index_)
    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
    return n
  def ByteSizePartial(self):
    n = 0
    n += 1 * len(self.index_)
    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial())
    return n
  def Clear(self):
    self.clear_index()
  def OutputUnchecked(self, out):
    # Each entry: key 10 (field 1, length-delimited) + size + payload.
    for i in xrange(len(self.index_)):
      out.putVarInt32(10)
      out.putVarInt32(self.index_[i].ByteSize())
      self.index_[i].OutputUnchecked(out)
  def OutputPartial(self, out):
    for i in xrange(len(self.index_)):
      out.putVarInt32(10)
      out.putVarInt32(self.index_[i].ByteSizePartial())
      self.index_[i].OutputPartial(out)
  def TryMerge(self, d):
    # Decoder loop; unknown fields skipped, key 0 is a framing error.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_index().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering of each entry inside `< >`.
    res=""
    cnt=0
    for e in self.index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("index%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field-number constant and tag lookup tables (index 0 = RPC error slot).
  kindex = 1
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "index",
  }, 1)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
  }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompositeIndices'
class AddActionsRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_transaction_ = 0
  def __init__(self, contents=None):
    """Builds an empty request; if `contents` is given, parses it as a
    serialized AddActionsRequest via MergeFromString."""
    # transaction is required, so it is built eagerly (no lazy init).
    self.transaction_ = Transaction()
    self.action_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def header(self):
    """Returns the header sub-message, building it lazily.

    Double-checked locking (re-check under lazy_init_lock_) ensures only
    one InternalHeader is constructed.  Does NOT set the presence bit;
    use mutable_header() for that.
    """
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def transaction(self): return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction_
def clear_transaction(self):self.has_transaction_ = 0; self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def action_size(self): return len(self.action_)
def action_list(self): return self.action_
def action(self, i):
return self.action_[i]
def mutable_action(self, i):
return self.action_[i]
def add_action(self):
x = Action()
self.action_.append(x)
return x
def clear_action(self):
self.action_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
for i in xrange(x.action_size()): self.add_action().CopyFrom(x.action(i))
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if len(self.action_) != len(x.action_): return 0
for e1, e2 in zip(self.action_, x.action_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_transaction_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: transaction not set.')
elif not self.transaction_.IsInitialized(debug_strs): initialized = 0
for p in self.action_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(self.transaction_.ByteSize())
n += 1 * len(self.action_)
for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSize())
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_transaction_):
n += 1
n += self.lengthString(self.transaction_.ByteSizePartial())
n += 1 * len(self.action_)
for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_header()
self.clear_transaction()
self.clear_action()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
for i in xrange(len(self.action_)):
out.putVarInt32(18)
out.putVarInt32(self.action_[i].ByteSize())
self.action_[i].OutputUnchecked(out)
if (self.has_header_):
out.putVarInt32(26)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_transaction_):
out.putVarInt32(10)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
for i in xrange(len(self.action_)):
out.putVarInt32(18)
out.putVarInt32(self.action_[i].ByteSizePartial())
self.action_[i].OutputPartial(out)
if (self.has_header_):
out.putVarInt32(26)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_action().TryMerge(tmp)
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.action_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("action%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 3
ktransaction = 1
kaction = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "transaction",
2: "action",
3: "header",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsRequest'
class AddActionsResponse(ProtocolBuffer.ProtocolMessage):
  """Generated response for AddActions; the message defines no fields."""

  def __init__(self, contents=None):
    # No field state to set up; optionally merge a serialized payload.
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    # With no fields, merging from a distinct instance is a no-op.
    assert x is not self

  def Equals(self, x):
    # Two field-less messages are always equal.
    return 1

  def IsInitialized(self, debug_strs=None):
    # Nothing is required, so the message is always initialized.
    return 1

  def ByteSize(self):
    return 0

  def ByteSizePartial(self):
    return 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def OutputPartial(self, out):
    pass

  def TryMerge(self, d):
    # Consume and discard every tag; tag 0 is never valid on the wire.
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0: raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    return ""

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify {tag: value} into a tuple covering tags 0..maxtag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1 + maxtag)])

  _TEXT = _BuildTagLookupTable({0: "ErrorCode"}, 0)
  _TYPES = _BuildTagLookupTable({0: ProtocolBuffer.Encoder.NUMERIC}, 0,
                                ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsResponse'
class BeginTransactionRequest(ProtocolBuffer.ProtocolMessage):
  """Generated message: start a datastore transaction.

  Fields: required app (tag 1), optional allow_multiple_eg (tag 2, bool),
  optional header (tag 3, lazily constructed).
  """
  has_header_ = 0
  header_ = None
  has_app_ = 0
  app_ = ""
  has_allow_multiple_eg_ = 0
  allow_multiple_eg_ = 0
  def __init__(self, contents=None):
    # Lock guarding lazy construction of header_ in header().
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def header(self):
    # Double-checked lazy init of the optional sub-message under the lock.
    if self.header_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.header_ is None: self.header_ = InternalHeader()
      finally:
        self.lazy_init_lock_.release()
    return self.header_
  def mutable_header(self): self.has_header_ = 1; return self.header()
  def clear_header(self):
    # Reuse the existing sub-message object instead of discarding it.
    if self.has_header_:
      self.has_header_ = 0;
      if self.header_ is not None: self.header_.Clear()
  def has_header(self): return self.has_header_
  def app(self): return self.app_
  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x
  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""
  def has_app(self): return self.has_app_
  def allow_multiple_eg(self): return self.allow_multiple_eg_
  def set_allow_multiple_eg(self, x):
    self.has_allow_multiple_eg_ = 1
    self.allow_multiple_eg_ = x
  def clear_allow_multiple_eg(self):
    if self.has_allow_multiple_eg_:
      self.has_allow_multiple_eg_ = 0
      self.allow_multiple_eg_ = 0
  def has_allow_multiple_eg(self): return self.has_allow_multiple_eg_
  def MergeFrom(self, x):
    # Field-wise merge: only fields present on `x` are copied over.
    assert x is not self
    if (x.has_header()): self.mutable_header().MergeFrom(x.header())
    if (x.has_app()): self.set_app(x.app())
    if (x.has_allow_multiple_eg()): self.set_allow_multiple_eg(x.allow_multiple_eg())
  def Equals(self, x):
    if x is self: return 1
    if self.has_header_ != x.has_header_: return 0
    if self.has_header_ and self.header_ != x.header_: return 0
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if self.has_allow_multiple_eg_ != x.has_allow_multiple_eg_: return 0
    if self.has_allow_multiple_eg_ and self.allow_multiple_eg_ != x.allow_multiple_eg_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # `app` is the only required field.
    initialized = 1
    if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    return initialized
  def ByteSize(self):
    # +2 for allow_multiple_eg = 1 tag byte + 1 bool byte;
    # trailing +1 covers the app field's tag byte.
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
    n += self.lengthString(len(self.app_))
    if (self.has_allow_multiple_eg_): n += 2
    return n + 1
  def ByteSizePartial(self):
    # Like ByteSize, but counts only the fields actually present.
    n = 0
    if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
    if (self.has_app_):
      n += 1
      n += self.lengthString(len(self.app_))
    if (self.has_allow_multiple_eg_): n += 2
    return n
  def Clear(self):
    self.clear_header()
    self.clear_app()
    self.clear_allow_multiple_eg()
  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 (app, length-delimited), 16 = field 2
    # (allow_multiple_eg, varint), 26 = field 3 (header, length-delimited).
    out.putVarInt32(10)
    out.putPrefixedString(self.app_)
    if (self.has_allow_multiple_eg_):
      out.putVarInt32(16)
      out.putBoolean(self.allow_multiple_eg_)
    if (self.has_header_):
      out.putVarInt32(26)
      out.putVarInt32(self.header_.ByteSize())
      self.header_.OutputUnchecked(out)
  def OutputPartial(self, out):
    # Same layout as OutputUnchecked, but skips unset required fields.
    if (self.has_app_):
      out.putVarInt32(10)
      out.putPrefixedString(self.app_)
    if (self.has_allow_multiple_eg_):
      out.putVarInt32(16)
      out.putBoolean(self.allow_multiple_eg_)
    if (self.has_header_):
      out.putVarInt32(26)
      out.putVarInt32(self.header_.ByteSizePartial())
      self.header_.OutputPartial(out)
  def TryMerge(self, d):
    # Decode field-by-field; unknown tags are skipped, tag 0 is malformed.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_allow_multiple_eg(d.getBoolean())
        continue
      if tt == 26:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_header().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_header_:
      res+=prefix+"header <\n"
      res+=self.header_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    if self.has_allow_multiple_eg_: res+=prefix+("allow_multiple_eg: %s\n" % self.DebugFormatBool(self.allow_multiple_eg_))
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify {tag: value} into a tuple covering tags 0..maxtag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field tag numbers.
  kheader = 3
  kapp = 1
  kallow_multiple_eg = 2
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "app",
    2: "allow_multiple_eg",
    3: "header",
  }, 3)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.NUMERIC,
    3: ProtocolBuffer.Encoder.STRING,
  }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.BeginTransactionRequest'
class CommitResponse_Version(ProtocolBuffer.ProtocolMessage):
  """Generated nested group `Version` of CommitResponse.

  Fields: required root_entity_key (tag 4), required version (tag 5,
  int64). Encoded as a proto1-style group, so decoding stops at the
  enclosing group's end tag rather than at end of buffer.
  """
  has_root_entity_key_ = 0
  has_version_ = 0
  version_ = 0
  def __init__(self, contents=None):
    self.root_entity_key_ = Reference()
    if contents is not None: self.MergeFromString(contents)
  def root_entity_key(self): return self.root_entity_key_
  def mutable_root_entity_key(self): self.has_root_entity_key_ = 1; return self.root_entity_key_
  def clear_root_entity_key(self):self.has_root_entity_key_ = 0; self.root_entity_key_.Clear()
  def has_root_entity_key(self): return self.has_root_entity_key_
  def version(self): return self.version_
  def set_version(self, x):
    self.has_version_ = 1
    self.version_ = x
  def clear_version(self):
    if self.has_version_:
      self.has_version_ = 0
      self.version_ = 0
  def has_version(self): return self.has_version_
  def MergeFrom(self, x):
    # Field-wise merge: only fields present on `x` are copied over.
    assert x is not self
    if (x.has_root_entity_key()): self.mutable_root_entity_key().MergeFrom(x.root_entity_key())
    if (x.has_version()): self.set_version(x.version())
  def Equals(self, x):
    if x is self: return 1
    if self.has_root_entity_key_ != x.has_root_entity_key_: return 0
    if self.has_root_entity_key_ and self.root_entity_key_ != x.root_entity_key_: return 0
    if self.has_version_ != x.has_version_: return 0
    if self.has_version_ and self.version_ != x.version_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Both fields are required.
    initialized = 1
    if (not self.has_root_entity_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: root_entity_key not set.')
    elif not self.root_entity_key_.IsInitialized(debug_strs): initialized = 0
    if (not self.has_version_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: version not set.')
    return initialized
  def ByteSize(self):
    # Trailing +2 covers the two fields' tag bytes.
    n = 0
    n += self.lengthString(self.root_entity_key_.ByteSize())
    n += self.lengthVarInt64(self.version_)
    return n + 2
  def ByteSizePartial(self):
    # Like ByteSize, but counts only the fields actually present.
    n = 0
    if (self.has_root_entity_key_):
      n += 1
      n += self.lengthString(self.root_entity_key_.ByteSizePartial())
    if (self.has_version_):
      n += 1
      n += self.lengthVarInt64(self.version_)
    return n
  def Clear(self):
    self.clear_root_entity_key()
    self.clear_version()
  def OutputUnchecked(self, out):
    # Wire tags: 34 = field 4 (root_entity_key, length-delimited),
    # 40 = field 5 (version, varint).
    out.putVarInt32(34)
    out.putVarInt32(self.root_entity_key_.ByteSize())
    self.root_entity_key_.OutputUnchecked(out)
    out.putVarInt32(40)
    out.putVarInt64(self.version_)
  def OutputPartial(self, out):
    # Same layout as OutputUnchecked, but skips unset required fields.
    if (self.has_root_entity_key_):
      out.putVarInt32(34)
      out.putVarInt32(self.root_entity_key_.ByteSizePartial())
      self.root_entity_key_.OutputPartial(out)
    if (self.has_version_):
      out.putVarInt32(40)
      out.putVarInt64(self.version_)
  def TryMerge(self, d):
    # Group decoding: loop until the END_GROUP tag (28) for field 3.
    while 1:
      tt = d.getVarInt32()
      if tt == 28: break
      if tt == 34:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_root_entity_key().TryMerge(tmp)
        continue
      if tt == 40:
        self.set_version(d.getVarInt64())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_root_entity_key_:
      res+=prefix+"root_entity_key <\n"
      res+=self.root_entity_key_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
    return res
class CommitResponse(ProtocolBuffer.ProtocolMessage):
  """Generated message: result of committing a transaction.

  Fields: optional cost (tag 1, lazily constructed), repeated group
  Version (tag 3, encoded with START_GROUP/END_GROUP tags 27/28).
  """
  has_cost_ = 0
  cost_ = None
  def __init__(self, contents=None):
    self.version_ = []
    # Lock guarding lazy construction of cost_ in cost().
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def cost(self):
    # Double-checked lazy init of the optional sub-message under the lock.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_
  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
  def clear_cost(self):
    # Reuse the existing sub-message object instead of discarding it.
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()
  def has_cost(self): return self.has_cost_
  # Accessors for the repeated `Version` group.
  def version_size(self): return len(self.version_)
  def version_list(self): return self.version_
  def version(self, i):
    return self.version_[i]
  def mutable_version(self, i):
    return self.version_[i]
  def add_version(self):
    x = CommitResponse_Version()
    self.version_.append(x)
    return x
  def clear_version(self):
    self.version_ = []
  def MergeFrom(self, x):
    # Field-wise merge: set fields copy over, repeated groups append.
    assert x is not self
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
    for i in xrange(x.version_size()): self.add_version().CopyFrom(x.version(i))
  def Equals(self, x):
    if x is self: return 1
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    if len(self.version_) != len(x.version_): return 0
    for e1, e2 in zip(self.version_, x.version_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # No required fields of its own; sub-messages must be internally valid.
    initialized = 1
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    for p in self.version_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # Each group costs 2 tag bytes (start + end) plus its contents.
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    n += 2 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.version_[i].ByteSize()
    return n
  def ByteSizePartial(self):
    # Like ByteSize, but counts only the fields actually present.
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
    n += 2 * len(self.version_)
    for i in xrange(len(self.version_)): n += self.version_[i].ByteSizePartial()
    return n
  def Clear(self):
    self.clear_cost()
    self.clear_version()
  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 (cost, length-delimited); 27/28 =
    # START_GROUP/END_GROUP for the repeated field 3 (Version).
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(27)
      self.version_[i].OutputUnchecked(out)
      out.putVarInt32(28)
  def OutputPartial(self, out):
    # Same layout as OutputUnchecked, but tolerates unset required
    # sub-fields inside the groups.
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSizePartial())
      self.cost_.OutputPartial(out)
    for i in xrange(len(self.version_)):
      out.putVarInt32(27)
      self.version_[i].OutputPartial(out)
      out.putVarInt32(28)
  def TryMerge(self, d):
    # Decode field-by-field; group members consume their own END_GROUP tag.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if tt == 27:
        self.add_version().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.version_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Version%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify {tag: value} into a tuple covering tags 0..maxtag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field tag numbers (group members use the nested-field numbering).
  kcost = 1
  kVersionGroup = 3
  kVersionroot_entity_key = 4
  kVersionversion = 5
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "cost",
    3: "Version",
    4: "root_entity_key",
    5: "version",
  }, 5)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STARTGROUP,
    4: ProtocolBuffer.Encoder.STRING,
    5: ProtocolBuffer.Encoder.NUMERIC,
  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CommitResponse'
# Hook point for the proto extension runtime; nothing extra is needed here.
if _extension_runtime:
  pass
# Public API of this generated module: every message class defined in the file.
__all__ = ['InternalHeader','Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','CompiledCursor','CompiledCursor_PositionIndexValue','CompiledCursor_Position','Cursor','Error','Cost','Cost_CommitCost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','TouchRequest','TouchResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','AddActionsRequest','AddActionsResponse','BeginTransactionRequest','CommitResponse','CommitResponse_Version']
|
Java
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.