code stringlengths 3 1.01M | repo_name stringlengths 5 116 | path stringlengths 3 311 | language stringclasses 30 values | license stringclasses 15 values | size int64 3 1.01M |
|---|---|---|---|---|---|
/*
* Copyright 2002-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* CC/LICENSE
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __THREADPOOL_H_
#define __THREADPOOL_H_
#include <vector>
#include <bgcc.h>
#include "session_thread.h"
#include "singleton.h"
namespace ims {
/**
 * A pool of session worker threads with a configurable size and task timeout.
 * Reference-counted via bgcc::Shareable; typically accessed through the
 * session_thrd_mgr singleton (see typedef below).
 */
class threadpool_t : public bgcc::Shareable {
public:
    // Default pool: 20 threads, 2000 ms task timeout, not yet started.
    threadpool_t():
        _size(20),
        _timeout(2000),
        _isstart(false) {
    }

    // Custom-sized pool; defined in the .cpp file.
    threadpool_t(int32_t size, int32_t task_timeout);

    ~threadpool_t();

    // Start/stop the worker threads; defined in the .cpp file.
    void start();
    void stop();

    // Returns the preferred worker thread — presumably the least-loaded one;
    // TODO confirm the selection policy in the .cpp implementation.
    session_thread_ptr get_prefer_thread();

    // Reconfigures pool size and timeout.
    // NOTE(review): does not acquire _mutex and does not resize _threads, so it
    // looks intended to be called before start() — confirm with callers.
    void set(int32_t size, int32_t timeout) {
        _size = size;
        _timeout = timeout;
    }

    // Configured number of worker threads (not necessarily started yet).
    int32_t size() {
        return _size;
    }

private:
    int32_t _size;                              // number of worker threads
    int32_t _timeout;                           // task timeout (presumably milliseconds — TODO confirm)
    std::vector<session_thread_ptr> _threads;   // the worker threads, populated on start()
    bgcc::Mutex _mutex;                         // guards pool state in the .cpp implementation
    bool _isstart;                              // true once start() has been called
};
typedef singleton_t<threadpool_t> session_thrd_mgr;
}
#endif //__THREADPOOL_H_
/* vim: set ts=4 sw=4 sts=4 tw=100 noet: */
| sdgdsffdsfff/CC | platform/ims/inc/core/threadpool.h | C | apache-2.0 | 1,605 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/iot/model/RelatedResource.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace IoT
{
namespace Model
{
// Default-constructed resource: enum sentinel NOT_SET and all "has been set"
// flags false, so Jsonize() would produce an empty JSON object.
RelatedResource::RelatedResource() : 
    m_resourceType(ResourceType::NOT_SET),
    m_resourceTypeHasBeenSet(false),
    m_resourceIdentifierHasBeenSet(false),
    m_additionalInfoHasBeenSet(false)
{
}

// Deserializing constructor: initializes to the same defaults, then delegates
// to operator= to populate whichever fields exist in the JSON payload.
RelatedResource::RelatedResource(JsonView jsonValue) : 
    m_resourceType(ResourceType::NOT_SET),
    m_resourceTypeHasBeenSet(false),
    m_resourceIdentifierHasBeenSet(false),
    m_additionalInfoHasBeenSet(false)
{
  *this = jsonValue;
}
// Populates this resource from a JSON view. Each field is copied only when it
// is present in the payload, and the corresponding "has been set" flag is
// raised so Jsonize() will serialize it back out.
RelatedResource& RelatedResource::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("resourceType"))
  {
    m_resourceType = ResourceTypeMapper::GetResourceTypeForName(jsonValue.GetString("resourceType"));
    m_resourceTypeHasBeenSet = true;
  }

  if(jsonValue.ValueExists("resourceIdentifier"))
  {
    m_resourceIdentifier = jsonValue.GetObject("resourceIdentifier");
    m_resourceIdentifierHasBeenSet = true;
  }

  if(jsonValue.ValueExists("additionalInfo"))
  {
    // Flatten the JSON object into the string-to-string member map.
    const Aws::Map<Aws::String, JsonView> infoEntries = jsonValue.GetObject("additionalInfo").GetAllObjects();
    for(const auto& infoEntry : infoEntries)
    {
      m_additionalInfo[infoEntry.first] = infoEntry.second.AsString();
    }
    m_additionalInfoHasBeenSet = true;
  }

  return *this;
}
// Serializes this resource to JSON. Only fields whose "has been set" flag is
// true are emitted, mirroring the deserialization in operator=.
JsonValue RelatedResource::Jsonize() const
{
  JsonValue payload;

  if(m_resourceTypeHasBeenSet)
  {
    payload.WithString("resourceType", ResourceTypeMapper::GetNameForResourceType(m_resourceType));
  }

  if(m_resourceIdentifierHasBeenSet)
  {
    payload.WithObject("resourceIdentifier", m_resourceIdentifier.Jsonize());
  }

  if(m_additionalInfoHasBeenSet)
  {
    // Build the nested string map, then move it into the payload.
    JsonValue infoMap;
    for(const auto& infoEntry : m_additionalInfo)
    {
      infoMap.WithString(infoEntry.first, infoEntry.second);
    }
    payload.WithObject("additionalInfo", std::move(infoMap));
  }

  return payload;
}
} // namespace Model
} // namespace IoT
} // namespace Aws
| awslabs/aws-sdk-cpp | aws-cpp-sdk-iot/source/model/RelatedResource.cpp | C++ | apache-2.0 | 2,333 |
<!-- Category list view: rows are rendered server-side into {$categorys};
     submitting posts the (re)ordering back to Category/listorder. -->
<form name="myform" action="{:U('Category/listorder')}" method="post">
<div class="pad-lr-10">
<div class="table-list">
    <table width="100%" cellspacing="0">
        <thead>
            <tr>
                <th width="40">{:L('listorder')}</th>
                <th width="40">catid</th>
                <th>{:L('catname')}</th>
                <th width="80">{:L('use_module')}</th>
                <th width="40">{:L('ismenu')}</th>
				<th width="30">{:L('fangwen')}</th>
                <th width="220">{:L('manage')}</th>
            </tr>
        </thead>
        <tbody>
         {$categorys}
        </tbody>
    </table>
    <div class="btn"><input type="submit" class="button" name="dosubmit" value="{:L('listorder')}" /></div></div>
</div>
</div>
<!-- NOTE(review): the closing tags below outnumber the elements opened in this
     fragment; presumably they close wrappers emitted by a shared header/layout
     template — confirm against the enclosing templates before changing. -->
</form></div>
| selecterskyphp/framework | yourphp/App/Tpl/Admin/Default/Category_index.html | HTML | apache-2.0 | 734 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Test class to verify that metadata is consistent before and after a snapshot attempt.
*/
@Category({MediumTests.class, ClientTests.class})
public class TestSnapshotMetadata {
  private static final Log LOG = LogFactory.getLog(TestSnapshotMetadata.class);

  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  private static final int NUM_RS = 2;
  private static final String STRING_TABLE_NAME = "TestSnapshotMetadata";

  // One column family per non-default property under test, so that a
  // snapshot/clone/restore round trip can be checked for metadata fidelity.
  private static final String MAX_VERSIONS_FAM_STR = "fam_max_columns";
  private static final byte[] MAX_VERSIONS_FAM = Bytes.toBytes(MAX_VERSIONS_FAM_STR);

  private static final String COMPRESSED_FAM_STR = "fam_compressed";
  private static final byte[] COMPRESSED_FAM = Bytes.toBytes(COMPRESSED_FAM_STR);

  private static final String BLOCKSIZE_FAM_STR = "fam_blocksize";
  private static final byte[] BLOCKSIZE_FAM = Bytes.toBytes(BLOCKSIZE_FAM_STR);

  private static final String BLOOMFILTER_FAM_STR = "fam_bloomfilter";
  private static final byte[] BLOOMFILTER_FAM = Bytes.toBytes(BLOOMFILTER_FAM_STR);

  private static final String TEST_CONF_CUSTOM_VALUE = "TestCustomConf";
  private static final String TEST_CUSTOM_VALUE = "TestCustomValue";

  private static final byte[][] families = {
    MAX_VERSIONS_FAM, BLOOMFILTER_FAM, COMPRESSED_FAM, BLOCKSIZE_FAM
  };

  // The non-default values applied to the families above.
  private static final DataBlockEncoding DATA_BLOCK_ENCODING_TYPE = DataBlockEncoding.FAST_DIFF;
  private static final BloomType BLOOM_TYPE = BloomType.ROW;
  private static final int BLOCK_SIZE = 98;
  private static final int MAX_VERSIONS = 8;

  private Admin admin;
  private String originalTableDescription;
  private HTableDescriptor originalTableDescriptor;
  TableName originalTableName;

  private static FileSystem fs;
  private static Path rootDir;

  @BeforeClass
  public static void setupCluster() throws Exception {
    setupConf(UTIL.getConfiguration());
    UTIL.startMiniCluster(NUM_RS);

    fs = UTIL.getHBaseCluster().getMaster().getMasterFileSystem().getFileSystem();
    rootDir = UTIL.getHBaseCluster().getMaster().getMasterFileSystem().getRootDir();
  }

  @AfterClass
  public static void cleanupTest() throws Exception {
    try {
      UTIL.shutdownMiniCluster();
    } catch (Exception e) {
      LOG.warn("failure shutting down cluster", e);
    }
  }

  private static void setupConf(Configuration conf) {
    // enable snapshot support
    conf.setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
    // disable the ui
    conf.setInt("hbase.regionsever.info.port", -1);
    // change the flush size to a small amount, regulating number of store files
    conf.setInt("hbase.hregion.memstore.flush.size", 25000);
    // so make sure we get a compaction when doing a load, but keep around
    // some files in the store
    conf.setInt("hbase.hstore.compaction.min", 10);
    conf.setInt("hbase.hstore.compactionThreshold", 10);
    // block writes if we get to 12 store files
    conf.setInt("hbase.hstore.blockingStoreFiles", 12);
    conf.setInt("hbase.regionserver.msginterval", 100);
    conf.setBoolean("hbase.master.enabletable.roundrobin", true);
    // Avoid potentially aggressive splitting which would cause snapshot to fail
    conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
      ConstantSizeRegionSplitPolicy.class.getName());
  }

  @Before
  public void setup() throws Exception {
    admin = UTIL.getAdmin();
    createTableWithNonDefaultProperties();
  }

  @After
  public void tearDown() throws Exception {
    SnapshotTestingUtils.deleteAllSnapshots(admin);
  }

  /*
   * Create a table that has non-default properties so we can see if they hold
   */
  private void createTableWithNonDefaultProperties() throws Exception {
    final long startTime = System.currentTimeMillis();
    final String sourceTableNameAsString = STRING_TABLE_NAME + startTime;
    originalTableName = TableName.valueOf(sourceTableNameAsString);

    // enable replication on a column family
    HColumnDescriptor maxVersionsColumn = new HColumnDescriptor(MAX_VERSIONS_FAM);
    HColumnDescriptor bloomFilterColumn = new HColumnDescriptor(BLOOMFILTER_FAM);
    HColumnDescriptor dataBlockColumn = new HColumnDescriptor(COMPRESSED_FAM);
    HColumnDescriptor blockSizeColumn = new HColumnDescriptor(BLOCKSIZE_FAM);

    maxVersionsColumn.setMaxVersions(MAX_VERSIONS);
    bloomFilterColumn.setBloomFilterType(BLOOM_TYPE);
    dataBlockColumn.setDataBlockEncoding(DATA_BLOCK_ENCODING_TYPE);
    blockSizeColumn.setBlocksize(BLOCK_SIZE);

    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(sourceTableNameAsString));
    htd.addFamily(maxVersionsColumn);
    htd.addFamily(bloomFilterColumn);
    htd.addFamily(dataBlockColumn);
    htd.addFamily(blockSizeColumn);
    htd.setValue(TEST_CUSTOM_VALUE, TEST_CUSTOM_VALUE);
    htd.setConfiguration(TEST_CONF_CUSTOM_VALUE, TEST_CONF_CUSTOM_VALUE);
    assertTrue(htd.getConfiguration().size() > 0);

    admin.createTable(htd);
    // Open the new table once to make sure it is reachable before we record
    // its descriptor. (The former redundant reassignment of originalTableName
    // that followed here has been removed — it was already set above.)
    Table original = UTIL.getConnection().getTable(originalTableName);
    originalTableDescriptor = admin.getTableDescriptor(originalTableName);
    originalTableDescription = originalTableDescriptor.toStringCustomizedValues();

    original.close();
  }

  /**
   * Verify that the describe for a cloned table matches the describe from the original.
   */
  @Test (timeout=300000)
  public void testDescribeMatchesAfterClone() throws Exception {
    // Clone the original table
    final String clonedTableNameAsString = "clone" + originalTableName;
    final TableName clonedTableName = TableName.valueOf(clonedTableNameAsString);
    final String snapshotNameAsString = "snapshot" + originalTableName
        + System.currentTimeMillis();
    final byte[] snapshotName = Bytes.toBytes(snapshotNameAsString);

    // restore the snapshot into a cloned table and examine the output
    List<byte[]> familiesList = new ArrayList<>();
    Collections.addAll(familiesList, families);

    // Create a snapshot in which all families are empty
    SnapshotTestingUtils.createSnapshotAndValidate(admin, originalTableName, null,
      familiesList, snapshotNameAsString, rootDir, fs, /* onlineSnapshot= */ false);

    admin.cloneSnapshot(snapshotName, clonedTableName);
    Table clonedTable = UTIL.getConnection().getTable(clonedTableName);
    HTableDescriptor cloneHtd = admin.getTableDescriptor(clonedTableName);
    // The clone's customized-values description should only differ by the table name.
    assertEquals(
      originalTableDescription.replace(originalTableName.getNameAsString(),clonedTableNameAsString),
      cloneHtd.toStringCustomizedValues());

    // Verify the custom fields. Note: JUnit's assertEquals takes (expected,
    // actual) — keep that order so failure messages read correctly.
    assertEquals(originalTableDescriptor.getValues().size(),
      cloneHtd.getValues().size());
    assertEquals(originalTableDescriptor.getConfiguration().size(),
      cloneHtd.getConfiguration().size());
    assertEquals(TEST_CUSTOM_VALUE, cloneHtd.getValue(TEST_CUSTOM_VALUE));
    assertEquals(TEST_CONF_CUSTOM_VALUE, cloneHtd.getConfigurationValue(TEST_CONF_CUSTOM_VALUE));
    assertEquals(originalTableDescriptor.getValues(), cloneHtd.getValues());
    assertEquals(originalTableDescriptor.getConfiguration(), cloneHtd.getConfiguration());

    admin.enableTable(originalTableName);
    clonedTable.close();
  }

  /**
   * Verify that the describe for a restored table matches the describe for one the original.
   */
  @Test (timeout=300000)
  public void testDescribeMatchesAfterRestore() throws Exception {
    runRestoreWithAdditionalMetadata(false);
  }

  /**
   * Verify that if metadata changed after a snapshot was taken, that the old metadata replaces the
   * new metadata during a restore
   */
  @Test (timeout=300000)
  public void testDescribeMatchesAfterMetadataChangeAndRestore() throws Exception {
    runRestoreWithAdditionalMetadata(true);
  }

  /**
   * Verify that when the table is empty, making metadata changes after the restore does not affect
   * the restored table's original metadata
   * @throws Exception
   */
  @Test (timeout=300000)
  public void testDescribeOnEmptyTableMatchesAfterMetadataChangeAndRestore() throws Exception {
    runRestoreWithAdditionalMetadata(true, false);
  }

  private void runRestoreWithAdditionalMetadata(boolean changeMetadata) throws Exception {
    runRestoreWithAdditionalMetadata(changeMetadata, true);
  }

  private void runRestoreWithAdditionalMetadata(boolean changeMetadata, boolean addData)
      throws Exception {

    if (admin.isTableDisabled(originalTableName)) {
      admin.enableTable(originalTableName);
    }

    // populate it with data
    final byte[] familyForUpdate = BLOCKSIZE_FAM;

    List<byte[]> familiesWithDataList = new ArrayList<>();
    List<byte[]> emptyFamiliesList = new ArrayList<>();
    if (addData) {
      Table original = UTIL.getConnection().getTable(originalTableName);
      UTIL.loadTable(original, familyForUpdate); // family arbitrarily chosen
      original.close();

      for (byte[] family : families) {
        if (family != familyForUpdate) {
          emptyFamiliesList.add(family);
        }
      }
      familiesWithDataList.add(familyForUpdate);
    } else {
      Collections.addAll(emptyFamiliesList, families);
    }

    // take a "disabled" snapshot
    final String snapshotNameAsString = "snapshot" + originalTableName
        + System.currentTimeMillis();
    final byte[] snapshotName = Bytes.toBytes(snapshotNameAsString);

    SnapshotTestingUtils.createSnapshotAndValidate(admin, originalTableName,
      familiesWithDataList, emptyFamiliesList, snapshotNameAsString, rootDir, fs,
      /* onlineSnapshot= */ false);

    admin.enableTable(originalTableName);

    if (changeMetadata) {
      final String newFamilyNameAsString = "newFamily" + System.currentTimeMillis();
      final byte[] newFamilyName = Bytes.toBytes(newFamilyNameAsString);

      admin.disableTable(originalTableName);
      HColumnDescriptor hcd = new HColumnDescriptor(newFamilyName);
      admin.addColumnFamily(originalTableName, hcd);
      assertTrue("New column family was not added.",
        admin.getTableDescriptor(originalTableName).toString().contains(newFamilyNameAsString));
    }

    // restore it
    if (!admin.isTableDisabled(originalTableName)) {
      admin.disableTable(originalTableName);
    }

    admin.restoreSnapshot(snapshotName);
    admin.enableTable(originalTableName);

    // verify that the description is reverted
    Table original = UTIL.getConnection().getTable(originalTableName);
    try {
      // assertEquals gives a useful diff on failure, unlike assertTrue(equals).
      assertEquals(originalTableDescriptor, admin.getTableDescriptor(originalTableName));
      assertEquals(originalTableDescriptor, original.getTableDescriptor());
    } finally {
      original.close();
    }
  }
}
| gustavoanatoly/hbase | hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java | Java | apache-2.0 | 12,884 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.config;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.annotation.docs.Documentation;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.table.api.SqlDialect;
import static org.apache.flink.configuration.ConfigOptions.key;
/**
* This class holds {@link org.apache.flink.configuration.ConfigOption}s used by
* table planner.
*
* <p>NOTE: All option keys in this class must start with "table".
*/
@PublicEvolving
public class TableConfigOptions {
	// Pure constant holder; never instantiated.
	private TableConfigOptions() {}

	/** Toggle for the OPTIONS hint that overrides table options in SQL queries. */
	@Documentation.TableOption(execMode = Documentation.ExecMode.BATCH_STREAMING)
	public static final ConfigOption<Boolean> TABLE_DYNAMIC_TABLE_OPTIONS_ENABLED =
			key("table.dynamic-table-options.enabled")
					.booleanType()
					.defaultValue(false)
					.withDescription("Enable or disable the OPTIONS hint used to specify table options " +
							"dynamically, if disabled, an exception would be thrown " +
							"if any OPTIONS hint is specified");

	/** SQL dialect used for parsing; defaults to {@link SqlDialect#DEFAULT} (lower-cased name). */
	@Documentation.TableOption(execMode = Documentation.ExecMode.BATCH_STREAMING)
	public static final ConfigOption<String> TABLE_SQL_DIALECT = key("table.sql-dialect")
			.stringType()
			.defaultValue(SqlDialect.DEFAULT.name().toLowerCase())
			.withDescription("The SQL dialect defines how to parse a SQL query. " +
					"A different SQL dialect may support different SQL grammar. " +
					"Currently supported dialects are: default and hive");

	/** Session time zone id; the sentinel "default" means resolve to the system default zone. */
	@Documentation.TableOption(execMode = Documentation.ExecMode.BATCH_STREAMING)
	public static final ConfigOption<String> LOCAL_TIME_ZONE = key("table.local-time-zone")
			.stringType()
			// special value to decide whether to use ZoneId.systemDefault() in TableConfig.getLocalTimeZone()
			.defaultValue("default")
			.withDescription("The local time zone defines current session time zone id. It is used when converting to/from " +
					"<code>TIMESTAMP WITH LOCAL TIME ZONE</code>. Internally, timestamps with local time zone are always represented in the UTC time zone. " +
					"However, when converting to data types that don't include a time zone (e.g. TIMESTAMP, TIME, or simply STRING), " +
					"the session time zone is used during conversion. The input of option is either an abbreviation such as \"PST\", a full name " +
					"such as \"America/Los_Angeles\", or a custom timezone id such as \"GMT-8:00\".");

	/** Threshold (in characters) above which generated code is split into sub-functions. */
	@Documentation.TableOption(execMode = Documentation.ExecMode.BATCH_STREAMING)
	public static final ConfigOption<Integer> MAX_LENGTH_GENERATED_CODE =
			key("table.generated-code.max-length")
					.intType()
					.defaultValue(64000)
					.withDescription("Specifies a threshold where generated code will be split into sub-function calls. " +
							"Java has a maximum method length of 64 KB. This setting allows for finer granularity if necessary.");
}
| darionyaphet/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/config/TableConfigOptions.java | Java | apache-2.0 | 3,646 |
# Scenario: interrupt an in-progress stack create with an update and verify
# that convergence finishes against the second template.
# NOTE: `test`, `reality`, `engine`, `Template`, `RsrcDef` and `verify` are
# injected by the scenario framework that executes this file.

def check_resource_count(expected_count):
    # Assert how many resources currently exist in the simulated "reality".
    test.assertEqual(expected_count, len(reality.all_resources()))

# Four resources in a linear dependency chain A -> B -> C -> D.
example_template = Template({
    'A': RsrcDef({}, []),
    'B': RsrcDef({'a': '4alpha'}, ['A']),
    'C': RsrcDef({'a': 'foo'}, ['B']),
    'D': RsrcDef({'a': 'bar'}, ['C']),
})
engine.create_stack('foo', example_template)
# Process only one engine event, leaving the create incomplete.
engine.noop(1)

# Same graph, but C and D carry different properties.
example_template2 = Template({
    'A': RsrcDef({}, []),
    'B': RsrcDef({'a': '4alpha'}, ['A']),
    'C': RsrcDef({'a': 'blarg'}, ['B']),
    'D': RsrcDef({'a': 'wibble'}, ['C']),
})
engine.update_stack('foo', example_template2)
# Only A and B should exist at this point — presumably create got through
# A before the interrupt; confirm against the framework's event ordering.
engine.call(check_resource_count, 2)
# Drain the remaining events so the update can converge.
engine.noop(11)
engine.call(verify, example_template2)
| zaneb/heat-convergence-prototype | scenarios/update_interrupt_create.py | Python | apache-2.0 | 673 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.deltaspike.test.core.api.config;
import org.apache.deltaspike.core.api.config.ConfigResolver;
import org.apache.deltaspike.core.spi.config.ConfigSource;
import java.util.Arrays;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;
/**
* A ConfigSource which is backed by a ThreadLocal.
* So it can be dynamically configured even for parallel tests.
*
* Note that you MUST call the {@link #clear()} method at the end of a method which uses this ConfigSource.
*/
public class ConfigurableTestConfigSource implements ConfigSource
{
    // Per-thread property map so parallel tests can configure independent values.
    private static ThreadLocal<Map<String, String>> props = new ThreadLocal<>();

    // Callback handed in by the Config to get notified about changed keys.
    private Consumer<Set<String>> reportAttributeChange;

    @Override
    public int getOrdinal()
    {
        // High ordinal so values from this source win over standard sources.
        return 500;
    }

    /**
     * @return the instance of this ConfigSource registered with the current Config
     */
    public static ConfigurableTestConfigSource instance() {
        return (ConfigurableTestConfigSource) Arrays.stream(ConfigResolver.getConfig().getConfigSources())
            .filter(cs -> cs instanceof ConfigurableTestConfigSource)
            .findFirst()
            .get();
    }

    @Override
    public Map<String, String> getProperties()
    {
        // Lazily create the current thread's map on first access.
        Map<String, String> propMap = props.get();
        if (propMap == null)
        {
            propMap = new ConcurrentHashMap<>();
            props.set(propMap);
        }
        return propMap;
    }

    @Override
    public String getPropertyValue(String key)
    {
        return getProperties().get(key);
    }

    @Override
    public String getConfigName()
    {
        return this.getClass().getSimpleName();
    }

    @Override
    public boolean isScannable()
    {
        return true;
    }

    /**
     * Discards the current thread's values.
     * Must be called at the end of every test method which used this ConfigSource.
     */
    public void clear()
    {
        // ThreadLocal#remove() alone discards the entry; the former
        // props.set(null) call before it was redundant.
        props.remove();
    }

    public void setValues(Map<String, String> values)
    {
        getProperties().putAll(values);

        // now notify our Config that some values got changed
        reportAttributeChange.accept(values.keySet());
    }

    @Override
    public void setOnAttributeChange(Consumer<Set<String>> reportAttributeChange)
    {
        this.reportAttributeChange = reportAttributeChange;
    }
}
| struberg/deltaspike | deltaspike/core/impl/src/test/java/org/apache/deltaspike/test/core/api/config/ConfigurableTestConfigSource.java | Java | apache-2.0 | 3,013 |
<?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/analytics/admin/v1alpha/analytics_admin.proto
namespace Google\Analytics\Admin\V1alpha;
use Google\Protobuf\Internal\GPBType;
use Google\Protobuf\Internal\RepeatedField;
use Google\Protobuf\Internal\GPBUtil;
/**
* Request message for ListConversionEvents RPC
*
* Generated from protobuf message <code>google.analytics.admin.v1alpha.ListConversionEventsRequest</code>
*/
// NOTE: protobuf-generated message class ("DO NOT EDIT" per the file header);
// only comments are added here — regenerate from the .proto for real changes.
class ListConversionEventsRequest extends \Google\Protobuf\Internal\Message
{
    /**
     * Required. The resource name of the parent property.
     * Example: 'properties/123'
     *
     * Generated from protobuf field <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = {</code>
     */
    private $parent = '';
    /**
     * The maximum number of resources to return.
     * If unspecified, at most 50 resources will be returned.
     * The maximum value is 200; (higher values will be coerced to the maximum)
     *
     * Generated from protobuf field <code>int32 page_size = 2;</code>
     */
    private $page_size = 0;
    /**
     * A page token, received from a previous `ListConversionEvents` call.
     * Provide this to retrieve the subsequent page.
     * When paginating, all other parameters provided to `ListConversionEvents`
     * must match the call that provided the page token.
     *
     * Generated from protobuf field <code>string page_token = 3;</code>
     */
    private $page_token = '';

    /**
     * Constructor.
     *
     * @param array $data {
     *     Optional. Data for populating the Message object.
     *
     *     @type string $parent
     *           Required. The resource name of the parent property.
     *           Example: 'properties/123'
     *     @type int $page_size
     *           The maximum number of resources to return.
     *           If unspecified, at most 50 resources will be returned.
     *           The maximum value is 200; (higher values will be coerced to the maximum)
     *     @type string $page_token
     *           A page token, received from a previous `ListConversionEvents` call.
     *           Provide this to retrieve the subsequent page.
     *           When paginating, all other parameters provided to `ListConversionEvents`
     *           must match the call that provided the page token.
     * }
     */
    public function __construct($data = NULL) {
        // Ensure the descriptor pool for this proto package is registered
        // before the base Message constructor consumes $data.
        \GPBMetadata\Google\Analytics\Admin\V1Alpha\AnalyticsAdmin::initOnce();
        parent::__construct($data);
    }

    /**
     * Required. The resource name of the parent property.
     * Example: 'properties/123'
     *
     * Generated from protobuf field <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = {</code>
     * @return string
     */
    public function getParent()
    {
        return $this->parent;
    }

    /**
     * Required. The resource name of the parent property.
     * Example: 'properties/123'
     *
     * Generated from protobuf field <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = {</code>
     * @param string $var
     * @return $this
     */
    public function setParent($var)
    {
        // checkString with True presumably enforces a valid UTF-8 string —
        // confirm against the protobuf PHP runtime.
        GPBUtil::checkString($var, True);
        $this->parent = $var;

        return $this;
    }

    /**
     * The maximum number of resources to return.
     * If unspecified, at most 50 resources will be returned.
     * The maximum value is 200; (higher values will be coerced to the maximum)
     *
     * Generated from protobuf field <code>int32 page_size = 2;</code>
     * @return int
     */
    public function getPageSize()
    {
        return $this->page_size;
    }

    /**
     * The maximum number of resources to return.
     * If unspecified, at most 50 resources will be returned.
     * The maximum value is 200; (higher values will be coerced to the maximum)
     *
     * Generated from protobuf field <code>int32 page_size = 2;</code>
     * @param int $var
     * @return $this
     */
    public function setPageSize($var)
    {
        GPBUtil::checkInt32($var);
        $this->page_size = $var;

        return $this;
    }

    /**
     * A page token, received from a previous `ListConversionEvents` call.
     * Provide this to retrieve the subsequent page.
     * When paginating, all other parameters provided to `ListConversionEvents`
     * must match the call that provided the page token.
     *
     * Generated from protobuf field <code>string page_token = 3;</code>
     * @return string
     */
    public function getPageToken()
    {
        return $this->page_token;
    }

    /**
     * A page token, received from a previous `ListConversionEvents` call.
     * Provide this to retrieve the subsequent page.
     * When paginating, all other parameters provided to `ListConversionEvents`
     * must match the call that provided the page token.
     *
     * Generated from protobuf field <code>string page_token = 3;</code>
     * @param string $var
     * @return $this
     */
    public function setPageToken($var)
    {
        GPBUtil::checkString($var, True);
        $this->page_token = $var;

        return $this;
    }

}
| googleapis/php-analytics-admin | src/V1alpha/ListConversionEventsRequest.php | PHP | apache-2.0 | 5,285 |
/**
* Copyright 2015-2017 Linagora, Université Joseph Fourier, Floralis
*
* The present code is developed in the scope of the joint LINAGORA -
* Université Joseph Fourier - Floralis research program and is designated
* as a "Result" pursuant to the terms and conditions of the LINAGORA
* - Université Joseph Fourier - Floralis research program. Each copyright
* holder of Results enumerated here above fully & independently holds complete
* ownership of the complete Intellectual Property rights applicable to the whole
* of said Results, and may freely exploit it in any manner which does not infringe
* the moral rights of the other copyright holders.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.roboconf.core.commands;
import static net.roboconf.core.errors.ErrorDetails.instruction;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import net.roboconf.core.errors.ErrorCode;
import net.roboconf.core.model.ParsingError;
import net.roboconf.core.model.beans.AbstractApplication;
import net.roboconf.core.utils.Utils;
/**
* @author Vincent Zurczak - Linagora
*/
public class CommandsParser {
private final Logger logger = Logger.getLogger( getClass().getName());
private final List<ParsingError> parsingErrors = new ArrayList<> ();
private final Context context;
final List<AbstractCommandInstruction> instructions = new ArrayList<> ();
	/**
	 * Constructor.
	 * <p>
	 * Reads and parses the commands file immediately; use
	 * {@link #getParsingErrors()} afterwards to check the result.
	 * </p>
	 * @param app an application (not null)
	 * @param commandsFile a file containing commands (not null)
	 */
	public CommandsParser( AbstractApplication app, File commandsFile ) {
		this.context = new Context( app, commandsFile );
		parse();
	}
	/**
	 * Constructor.
	 * <p>
	 * Parses the given instructions text directly (no commands file involved).
	 * A null text simply yields an empty instruction list.
	 * </p>
	 * @param app an application (not null)
	 * @param instructionsText instructions text (not null)
	 */
	public CommandsParser( AbstractApplication app, String instructionsText ) {
		this.context = new Context( app, null );
		if( instructionsText != null )
			parse( instructionsText );
	}
/**
* @return a non-null list of errors
*/
public List<ParsingError> getParsingErrors() {
List<ParsingError> result = new ArrayList<>( this.parsingErrors );
if( this.context.getCommandFile() != null && ! this.context.getCommandFile().exists())
result.add( 0, new ParsingError( ErrorCode.CMD_INEXISTING_COMMAND_FILE, this.context.getCommandFile(), 1 ));
else if( this.instructions.isEmpty())
result.add( 0, new ParsingError( ErrorCode.CMD_NO_INSTRUCTION, this.context.getCommandFile(), 1 ));
return result;
}
	/**
	 * @return the instructions (never null, possibly empty; the live internal list)
	 */
	public List<AbstractCommandInstruction> getInstructions() {
		return this.instructions;
	}
/**
* Injects (valid) context variables in commands text.
* @param line a non-null line
* @param context a non-null map considered as the context
* @return the line, after it was updated
* <p>
* All the <code>$(sth)</code> variables will have been replaced by the value
* associated with the <i>sth</i> key in <code>context</code>.
* </p>
*/
public static String injectContextVariables( String line, Map<String,String> context ) {
String result = line;
for( Map.Entry<String,String> entry : context.entrySet())
result = result.replace( "$(" + entry.getKey() + ")", entry.getValue());
return result;
}
/**
* Parses the whole file and extracts instructions.
*/
private void parse() {
try {
// We assume these files are not that big.
String fileContent = Utils.readFileContent( this.context.getCommandFile());
parse( fileContent );
} catch( IOException e ) {
this.logger.severe( "A commands file could not be read. File path: " + this.context.getName());
}
}
/**
* Parses the whole file and extracts instructions.
* @param instructionsText a non-null string to parse
*/
private void parse( String instructionsText ) {
// Allow line breaks in commands. But we must keep the lines count.
// So, we replace escaped line breaks by a particular separator.
// They will be used to count lines.
final String sep = "!@!";
instructionsText = instructionsText.replaceAll( "\\\\\n\\s*", sep );
// Parse line by line.
int lineNumber = 0;
for( String string : Utils.splitNicely( instructionsText, "\n" ) ) {
String line = string.trim();
lineNumber ++;
// Remove comments
line = line.replaceFirst( "#.*", "" );
// Skip empty lines
if( line.isEmpty())
continue;
// Update lines count
int lineLength = line.length();
line = line.replace( sep, "" );
int lineCountOffset = (lineLength - line.length()) / sep.length();
// Verify disabled variables.
boolean disabled = false;
for( String disabledVariableName : this.context.disabledVariables ) {
if( line.contains( "$(" + disabledVariableName + ")" )) {
disabled = true;
break;
}
}
// Update the line with variables
line = injectContextVariables( line, this.context.variables );
// Find the instruction
AbstractCommandInstruction instr = parse( line, lineNumber );
if( instr != null ) {
instr.setDisabled( disabled );
// Being disabled should not make the validation fail.
// Query variables that were not resolved were "mocked" for the validation.
List<ParsingError> errors = instr.validate();
if( errors.isEmpty())
instr.updateContext();
else
this.parsingErrors.addAll( errors );
this.instructions.add( instr );
} else {
this.logger.severe( "An invalid instruction was found in " + this.context.getName() + ": " + line );
this.parsingErrors.add( new ParsingError(
ErrorCode.CMD_UNRECOGNIZED_INSTRUCTION,
this.context.getCommandFile(),
lineNumber,
instruction( line )));
}
// Update the line number
lineNumber += lineCountOffset;
}
}
/**
* Parses a single line and extracts an instructions when possible.
* @param line a text line
* @param lineNumber the line number
* @return an instruction, or null if none could be recognized
*/
private AbstractCommandInstruction parse( String line, int lineNumber ) {
AbstractCommandInstruction result = null;
String toLowerCase = line.toLowerCase();
if( toLowerCase.startsWith( AssociateTargetCommandInstruction.PREFIX ))
result = new AssociateTargetCommandInstruction( this.context, line, lineNumber );
else if( toLowerCase.startsWith( ChangeStateCommandInstruction.PREFIX ))
result = new ChangeStateCommandInstruction( this.context, line, lineNumber );
else if( BulkCommandInstructions.isBulkInstruction( toLowerCase ))
result = new BulkCommandInstructions( this.context, line, lineNumber );
else if( toLowerCase.startsWith( EmailCommandInstruction.PREFIX ))
result = new EmailCommandInstruction( this.context, line, lineNumber );
else if( toLowerCase.startsWith( DefineVariableCommandInstruction.PREFIX ))
result = new DefineVariableCommandInstruction( this.context, line, lineNumber );
else if( toLowerCase.startsWith( CreateInstanceCommandInstruction.PREFIX ))
result = new CreateInstanceCommandInstruction( this.context, line, lineNumber );
else if( toLowerCase.startsWith( ReplicateCommandInstruction.PREFIX ))
result = new ReplicateCommandInstruction( this.context, line, lineNumber );
else if( toLowerCase.startsWith( RenameCommandInstruction.PREFIX ))
result = new RenameCommandInstruction( this.context, line, lineNumber );
else if( toLowerCase.startsWith( WriteCommandInstruction.WRITE_PREFIX ))
result = new WriteCommandInstruction( this.context, line, lineNumber );
else if( toLowerCase.startsWith( AppendCommandInstruction.APPEND_PREFIX ))
result = new AppendCommandInstruction( this.context, line, lineNumber );
else if( toLowerCase.startsWith( ExecuteCommandInstruction.PREFIX ))
result = new ExecuteCommandInstruction( this.context, line, lineNumber );
return result;
}
}
| gibello/roboconf | core/roboconf-core/src/main/java/net/roboconf/core/commands/CommandsParser.java | Java | apache-2.0 | 8,460 |
// ------------------------------------------------------------------------------------------
// Copyright 2015 Sitecore Corporation A/S
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
// -------------------------------------------------------------------------------------------
jQuery(function ($) {
    // Checkout-page behaviors: payment-method gating, terms-of-conditions
    // toggle, account creation, login/logout and the shipping-address popup.
    // NOTE(review): `.live()` was removed in jQuery 1.9 — this file presumably
    // targets an older jQuery; verify before upgrading the library.
    /* Remove this code after we would have decision about Credit Card payment methods */
    // Block order confirmation while no method (or an unsupported credit-card
    // method) is selected in the payment-methods drop-down.
    $("input[id*='btnConfirm']").live("click", function (event) {
        var paymentCode = $("select[id*='ddlPaymentMethods'] option:selected").val();
        if (paymentCode == "nonSelected" || paymentCode == "Visa" ||
            paymentCode == "MasterCard" || paymentCode == "AmericanExpress") {
            event.preventDefault();
            return false;
        }
        else {
            return true;
        }
    });
    // Remembers the checkout button's tooltip while the button is enabled, so
    // it can be restored when the terms checkbox is unticked again.
    var checkoutTitle;
    $("input[name*='termsOfConditions']").click(function () {
        var checked = $(this).attr("checked");
        var elem = $(".proceedToCheckout");
        if (checked) {
            elem.removeAttr('disabled');
            elem.parent().attr("class", "btnContainer");
            checkoutTitle = $(".proceedToCheckout").attr('title');
            $(".proceedToCheckout").attr('title', '');
        }
        else {
            elem.attr('disabled', 'disabled');
            elem.parent().attr("class", "btnContainer disabled");
            $(".proceedToCheckout").attr('title', checkoutTitle);
        }
    });
    // Defense in depth: even if the button is clicked while it should be
    // disabled, only proceed when the terms checkbox is actually ticked.
    $(".proceedToCheckout").live("click", function (event) {
        if ($("input[name*='termsOfConditions']").attr("checked")) {
            return true;
        }
        else {
            event.preventDefault();
        }
    });
    // Creates a customer account server-side, then re-renders both the
    // account panel and the login panel in place.
    $("#btnCreateAccount").live("click", function () {
        var container = $(this).parent();
        $.ajax({
            type: "POST",
            url: "/layouts/ecommerce/Examples/ajax.asmx/CreateCustomerAccount",
            data: "{}",
            contentType: "application/json; charset=utf-8",
            dataType: "json",
            success: function (msg) {
                // NOTE(review): the inner callbacks reuse the name `msg`,
                // shadowing the outer AJAX response — intentional? confirm.
                LoadSublayout("Ecommerce/Examples/Check Out Process/CreateAccount", null, function (msg) {
                    container.animate({ opacity: 0.3 }, 200).replaceWith(msg.d);
                    container.css({ opacity: 0.3 }).animate({ opacity: 1 }, 600);
                });
                LoadSublayout("Ecommerce/Examples/Check Out Process/LoginPanel", null, function (msg) {
                    $("#ph_login").animate({ opacity: 0.3 }, 200).replaceWith(msg.d);
                    $("#ph_login").css({ opacity: 0.3 }).animate({ opacity: 1 }, 600);
                });
            }
        });
    });
    // Mirror field values between paired inputs identified by Sitecore field
    // GUIDs (e.g. keep the hidden copy in sync with the visible e-mail field).
    $("input[name*='152BA8ACD8704019A7D01281B549761B']").blur(function () {
        $("input[name$='19848439AFDB4649BCD2ACAF7085B322']").val($(this).val());
    });
    // Create Account Page
    $("input[name*='C30A6C21E4EA44FE9B40CD70D290293B']").blur(function () {
        $("input[name$='D121B8508B6A47349B0DBA2099FF2D98']").val($(this).val());
    });
    // Initialize the mirrored fields once the DOM is ready.
    $(document).ready(function () {
        $("input[name*='19848439AFDB4649BCD2ACAF7085B322']").val($("input[name*='152BA8ACD8704019A7D01281B549761B']").val());
        $("input[name$='D121B8508B6A47349B0DBA2099FF2D98']").val($("input[name*='C30A6C21E4EA44FE9B40CD70D290293B']").val());
    });
    // Log out on the server, then reload the current page.
    $("a[id*='btnLogOut']").live("click", function () {
        $.ajax({
            type: "POST",
            url: "/layouts/ecommerce/Examples/ajax.asmx/LogOutCurrentUser",
            data: "{}",
            contentType: "application/json; charset=utf-8",
            dataType: "json",
            success: function (msg) {
                window.location.href = window.location.href;
                return false;
            }
        });
    });
    $("a[id*='btnLogIn']").live("click", function () {
        $.ajax({
            type: "POST",
            url: "/layouts/ecommerce/Examples/ajax.asmx/LoginUser",
            data: "{}",
            contentType: "application/json; charset=utf-8",
            dataType: "json",
            success: function (msg) {
                return true;
            }
        });
    });
    // Opens the shipping-address form as a centered modal over a dark overlay.
    $("a[id*='changeDestination']").click(function () {
        var maxWidth = 350;
        var maxHeight = 300;
        // Clamp the dialog to 70% of the viewport when it is smaller than the max size.
        var w = ($(window).width() < maxWidth) ? $(window).width() * 0.7 : maxWidth;
        var h = ($(window).height() < maxHeight) ? $(window).height() * 0.7 : maxHeight;
        var lPos = (($(document).width() - w) / 2); // +$(document).scrollLeft();
        var tPos = (($(window).height() - h) / 2) + $(document).scrollTop();
        $("#ShippingAdressForm-overlay").css({ 'position': 'absolute', 'top': '0px', 'left': '0px', 'z-index': '0', 'background': '#000' });
        $("#ShippingAdressForm-overlay").css({ width: $(document).width() + 'px', height: $(document).height() + 'px', opacity: 0.7 }).show(); //.fadeIn();
        $("#ShippingAdressForm").css({ position: 'absolute', top: '200px', left: '200px', display: 'none', width: '600px', 'z-index': '1', 'background-color': 'Transparent', overflow: 'auto' });
        $("#ShippingAdressForm").css({ 'left': lPos + 'px', 'width': w + 'px', 'top': tPos + 'px', 'height': h + 'px', 'background-color': 'Transparent' }).fadeIn();
        // Reload the state list whenever another country is chosen.
        $("select[id*='ddlShippingCountries']").change(function () {
            var countryCode = $(this).val();
            $.ajax({
                type: "POST",
                url: "/layouts/ecommerce/Examples/ajax.asmx/GetCountryStates",
                data: "{countryCode:'" + countryCode + "'}",
                contentType: "application/json; charset=utf-8",
                dataType: "json",
                success: function (msg) {
                    $("select[id*='ddlShippingStates']").html(msg.d);
                }
            });
        });
        // Validate country selection, save the address, then reload the page.
        $("input[id*='confirmShippingForm']").click(function () {
            var countryCode = $("select[id*='ddlShippingCountries']").val();
            var state = $("select[id*='ddlShippingStates']").val();
            if (countryCode == 'NotSelected') {
                alert("Please select country!");
                return false;
            }
            if (state == null) {
                state = "";
            }
            $.ajax({
                type: "POST",
                url: "/layouts/ecommerce/Examples/ajax.asmx/SaveShippingAddress",
                data: "{countryCode:'" + countryCode + "',state:'" + state + "'}",
                contentType: "application/json; charset=utf-8",
                dataType: "json",
                success: function (msg) {
                    window.location.href = window.location.href;
                }
            });
        });
        // Dismiss the modal without saving.
        $("input[id*='cancelShippingForm']").click(function () {
            $("#ShippingAdressForm-overlay").fadeOut();
            $("#ShippingAdressForm").fadeOut();
        });
        return false;
    });
});
| Sitecore/SES-7.5 | code/Examples/Sitecore.Ecommerce.WebSite/layouts/Ecommerce/Examples/Check Out Process/jscripts/CheckOut.js | JavaScript | apache-2.0 | 6,911 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using Desktop.CSharp.Analyzers;
using Desktop.VisualBasic.Analyzers;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.Diagnostics;
using Test.Utilities;
namespace Desktop.Analyzers.UnitTests
{
public class DoNotMarkServicedComponentsWithWebMethodFixerTests : CodeFixTestBase
{
protected override DiagnosticAnalyzer GetBasicDiagnosticAnalyzer()
{
return new BasicDoNotMarkServicedComponentsWithWebMethodAnalyzer();
}
protected override DiagnosticAnalyzer GetCSharpDiagnosticAnalyzer()
{
return new CSharpDoNotMarkServicedComponentsWithWebMethodAnalyzer();
}
protected override CodeFixProvider GetBasicCodeFixProvider()
{
return new BasicDoNotMarkServicedComponentsWithWebMethodFixer();
}
protected override CodeFixProvider GetCSharpCodeFixProvider()
{
return new CSharpDoNotMarkServicedComponentsWithWebMethodFixer();
}
}
} | genlu/roslyn-analyzers | src/Desktop.Analyzers/UnitTests/DoNotMarkServicedComponentsWithWebMethodTests.Fixer.cs | C# | apache-2.0 | 1,164 |
<!DOCTYPE html>
<!-- Widget digital-signature test case ta-37/37b. This page must never be
     displayed: a conforming user agent must fail to validate the widget that
     contains it. The red background and "Fail" heading make an erroneous
     render immediately visible to the tester. -->
<html>
 <head>
  <title>Test 37b</title>
 </head>
 <body style="background:#f00;">
  <h1>Fail</h1>
  <p>If you can see this, the test 37b has failed. To pass, the user agent must fail to validate the widget.</p>
 </body>
</html>
| krishnabangalore/Webinos-Platform | webinos/web_root/tests/widget_dig_sig_tests/test-cases/ta-37/37b/index.html | HTML | apache-2.0 | 237 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Basic end-to-end example for the Kudu Python client: connect, create a
# table, write rows (insert/upsert/update/delete), then scan with a predicate.
from datetime import datetime
import argparse
import kudu
from kudu.client import Partitioning
# Parse arguments
# Note: the defaults are scalars while explicit values arrive as lists
# (nargs='+'); kudu.connect accepts either form.
parser = argparse.ArgumentParser(description='Basic Example for Kudu Python.')
parser.add_argument('--masters', '-m', nargs='+', default='localhost',
                    help='The master address(es) to connect to Kudu.')
parser.add_argument('--ports', '-p', nargs='+', default='7051',
                    help='The master server port(s) to connect to Kudu.')
args = parser.parse_args()
# Connect to Kudu master server(s).
client = kudu.connect(host=args.masters, port=args.ports)
# Define a schema for a new table.
# Two equivalent builder styles are shown: fluent chaining and keyword args.
builder = kudu.schema_builder()
builder.add_column('key').type(kudu.int64).nullable(False).primary_key()
builder.add_column('ts_val', type_=kudu.unixtime_micros, nullable=False, compression='lz4')
schema = builder.build()
# Define the partitioning schema.
partitioning = Partitioning().add_hash_partitions(column_names=['key'], num_buckets=3)
# Delete table if it already exists.
if client.table_exists('python-example'):
  client.delete_table('python-example')
# Create a new table.
client.create_table('python-example', schema, partitioning)
# Open a table.
table = client.table('python-example')
# Create a new session so that we can apply write operations.
session = client.new_session()
# Insert a row.
# Timestamp columns accept a datetime, an ISO-ish string, or a
# (string, format) tuple — one of each is demonstrated below.
op = table.new_insert({'key': 1, 'ts_val': datetime.utcnow()})
session.apply(op)
# Upsert a row.
op = table.new_upsert({'key': 2, 'ts_val': "2016-01-01T00:00:00.000000"})
session.apply(op)
# Update a row.
op = table.new_update({'key': 1, 'ts_val': ("2017-01-01", "%Y-%m-%d")})
session.apply(op)
# Delete a row.
op = table.new_delete({'key': 2})
session.apply(op)
# Flush write operations, if failures occur, print them.
try:
  session.flush()
except kudu.KuduBadStatus:
  print(session.get_pending_errors())
# Create a scanner and add a predicate.
scanner = table.scanner()
scanner.add_predicate(table['ts_val'] == datetime(2017, 1, 1))
# Open scanner and print all tuples.
# Note: This doesn't scale for large scans
# The expected output: [(1, datetime.datetime(2017, 1, 1, 0, 0, tzinfo=<UTC>))]
print(scanner.open().read_all_tuples())
| helifu/kudu | examples/python/basic-python-example/basic_example.py | Python | apache-2.0 | 2,977 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/email/model/CreateReceiptRuleSetRequest.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>
using namespace Aws::SES::Model;
using namespace Aws::Utils;
// Default-constructs the request; the rule set name starts out unset, so it
// is omitted from the serialized payload until a name is assigned.
CreateReceiptRuleSetRequest::CreateReceiptRuleSetRequest() :
    m_ruleSetNameHasBeenSet(false)
{
}
// Serializes this request into the application/x-www-form-urlencoded body
// expected by the SES Query API: Action, optional RuleSetName, API Version.
Aws::String CreateReceiptRuleSetRequest::SerializePayload() const
{
  Aws::StringStream payload;
  payload << "Action=CreateReceiptRuleSet&";
  if(m_ruleSetNameHasBeenSet)
  {
    // The user-supplied name must be URL-encoded before being embedded.
    payload << "RuleSetName=" << StringUtils::URLEncode(m_ruleSetName.c_str()) << "&";
  }
  payload << "Version=2010-12-01";
  return payload.str();
}
// Used when the operation sends its parameters as URI query parameters
// instead of a request body: reuse the serialized payload as the query string.
void CreateReceiptRuleSetRequest::DumpBodyToUrl(Aws::Http::URI& uri ) const
{
  uri.SetQueryString(SerializePayload());
}
| jt70471/aws-sdk-cpp | aws-cpp-sdk-email/source/model/CreateReceiptRuleSetRequest.cpp | C++ | apache-2.0 | 868 |
# Migration that adds the gadgets_oauth_clients table, storing the OAuth
# client credentials registered for each gadget URL.
class CreateGadgetsOauthClients < ActiveRecord::Migration
  def self.up
    create_table :gadgets_oauth_clients do |t|
      # All payload columns are plain strings; declare them in one pass.
      %w(gadget_url client_id client_secret service_name redirect_uri).each do |column|
        t.string column
      end
      t.timestamps
    end
  end

  def self.down
    drop_table :gadgets_oauth_clients
  end
end
| varshavaradarajan/functional-tests | gadget_renderer/vendor/plugins/tw_studios_gadgets/db/migrate/20100714215915_create_gadgets_oauth_clients.rb | Ruby | apache-2.0 | 359 |
/*
* Copyright 2020 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.client.endpoint;
import com.linecorp.armeria.client.Endpoint;
import com.linecorp.armeria.common.Flags;
import com.linecorp.armeria.common.annotation.Nullable;
/**
* An {@link EndpointGroupException} raised when the resolution of an {@link EndpointGroup} fails
* because there are no {@link Endpoint}s in the {@link EndpointGroup}.
*/
public final class EmptyEndpointGroupException extends EndpointGroupException {

    private static final long serialVersionUID = 7595286618131200852L;

    // Shared instance built without message, cause, suppression or stack trace;
    // handed out when verbose exceptions are not sampled.
    static final EmptyEndpointGroupException INSTANCE = new EmptyEndpointGroupException(false);

    /**
     * Returns an {@link EmptyEndpointGroupException} which may be a singleton or a new instance, depending on
     * {@link Flags#verboseExceptionSampler()}'s decision. If {@code endpointGroup} is non-null, a new
     * instance is always returned.
     */
    public static EmptyEndpointGroupException get(@Nullable EndpointGroup endpointGroup) {
        return endpointGroup == null ? get()
                                     : new EmptyEndpointGroupException(endpointGroup);
    }

    /**
     * Returns an {@link EmptyEndpointGroupException} which may be a singleton or a new instance, depending on
     * {@link Flags#verboseExceptionSampler()}'s decision.
     */
    public static EmptyEndpointGroupException get() {
        if (Flags.verboseExceptionSampler().isSampled(EmptyEndpointGroupException.class)) {
            return new EmptyEndpointGroupException();
        }
        return INSTANCE;
    }

    private EmptyEndpointGroupException() {}

    private EmptyEndpointGroupException(EndpointGroup endpointGroup) {
        super("Unable to select endpoints from: " + endpointGroup);
    }

    private EmptyEndpointGroupException(@SuppressWarnings("unused") boolean dummy) {
        super(null, null, false, false);
    }
}
| line/armeria | core/src/main/java/com/linecorp/armeria/client/endpoint/EmptyEndpointGroupException.java | Java | apache-2.0 | 2,481 |
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include <unistd.h>
#include "paddle/legacy/utils/Logging.h"
#include "paddle/legacy/utils/Flags.h"
#include "SparseParameterDistribution.h"
DEFINE_bool(check_sparse_distribution_in_pserver,
false,
"check whether sparse parameter exhibts balanced distribution at "
"all pservers");
DEFINE_bool(show_check_sparse_distribution_log,
false,
"show logs details for sparse parameter distribution in pserver");
DEFINE_int32(check_sparse_distribution_batches,
100,
"run sparse parameter distribution check for N batches");
DEFINE_double(
check_sparse_distribution_ratio,
0.6,
"if parameters dispatched to different pservers exhibit unbalanced "
" distribution for check_sparse_distribution_ratio * "
" check_sparse_distribution_batches times, crash program");
DEFINE_double(check_sparse_distribution_unbalance_degree,
2.0,
"the ratio of maximum data size and minimun data size for "
"different pserver");
namespace paddle {
// Creates a probe with one per-pserver byte counter per service; all
// statistics (total bytes, batches seen, unbalanced-batch count) start at zero.
SparseParameterDistribution::SparseParameterDistribution(size_t serviceNum) {
  totBytes_ = 0;
  data_.resize(serviceNum);
  batchPassed_ = 0;
  unbalanceCnt_ = 0;
}
// Records that `dataSize` bytes of sparse parameters were dispatched to
// pserver `serverId`. No-op once checking is disabled or the configured
// number of batches has already been observed.
void SparseParameterDistribution::probeDistribution(int serverId,
                                                    size_t dataSize) {
  const bool stillChecking =
      FLAGS_check_sparse_distribution_in_pserver &&
      batchPassed_ <= FLAGS_check_sparse_distribution_batches;
  if (!stillChecking) {
    return;
  }
  CHECK_LT((size_t)serverId, data_.size())
      << "invalid sparse parameter distribution probe";
  totBytes_ += dataSize;
  data_[serverId] += dataSize;
}
// Evaluates the balance of the bytes recorded since the last call, counts the
// batch as unbalanced if any pserver deviates too far from the average, and
// resets the per-batch counters. After the configured number of batches it
// crashes (CHECK_LE) when too many batches were unbalanced.
void SparseParameterDistribution::checkAndResetDistribution() {
  if (!FLAGS_check_sparse_distribution_in_pserver ||
      batchPassed_ >= FLAGS_check_sparse_distribution_batches) {
    return;
  }
  /// at runtime, prepareSendData is called by many contexts,
  /// so need to check if data is available.
  if (!totBytes_) {
    return;
  }
  /// check if distribution is balanced
  auto avgSize = totBytes_ / data_.size();
  auto unbalanceDegree = FLAGS_check_sparse_distribution_unbalance_degree;
  // A single pserver above avg*degree or below avg/degree marks the whole
  // batch as unbalanced.
  for (auto& dataSize : data_) {
    if (dataSize > unbalanceDegree * avgSize ||
        dataSize * unbalanceDegree < avgSize) {
      unbalanceCnt_++;
      break;
    }
  }
  // Helper that logs every pserver's byte count on one line (in KB).
  auto printData = [&]() {
    std::stringstream ss;
    for (auto& dataSize : data_) {
      ss << dataSize * 0.001 << "KB ";
    }
    ss << std::endl;
    LOG(INFO) << ss.str();
  };
  /// show all sparse data size for different pserver
  if (FLAGS_show_check_sparse_distribution_log) {
    LOG(INFO) << "sparse distribution:";
    printData();
  }
  totBytes_ = 0;
  batchPassed_++;
  // On the final monitored batch, report and enforce the unbalance ratio.
  if (batchPassed_ == FLAGS_check_sparse_distribution_batches) {
    LOG(INFO) << "show last parameter distribution sample:";
    printData();
    LOG(INFO) << "total unbalanced batches: " << unbalanceCnt_
              << " in passed batches: " << batchPassed_;
    CHECK_LE((float)unbalanceCnt_ / (float)batchPassed_,
             FLAGS_check_sparse_distribution_ratio)
        << "unbalanced sparse parameter distribution for different pserver. "
        << "it could be caused by unbalanced sparse ids distribution, try "
        << "to shuffle dimensions in input samples";
  }
  std::fill(data_.begin(), data_.end(), 0);
}
} // namespace paddle
| QiJune/Paddle | paddle/legacy/pserver/SparseParameterDistribution.cpp | C++ | apache-2.0 | 3,981 |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// Automatically generated file; DO NOT EDIT.
#pragma once
#include <cstdint>
#include <cstring>
#include <xsimd/xsimd.hpp>
#include "arrow/util/dispatch.h"
#include "arrow/util/ubsan.h"
namespace arrow {
namespace internal {
namespace {
using ::arrow::util::SafeLoad;
template <DispatchLevel level>
struct UnpackBits128 {
using simd_batch = xsimd::batch<uint32_t, 4>;
  // Bit width 0: every value is implicitly zero, so just clear the 32 outputs;
  // no input words are consumed. (Generated code — do not hand-edit.)
  inline static const uint32_t* unpack0_32(const uint32_t* in, uint32_t* out) {
    memset(out, 0x0, 32 * sizeof(*out));
    out += 32;
    return in;
  }
  // Decodes 32 values packed at 1 bit each, four values per 128-bit batch;
  // all 32 bits come from input word 0, so the input advances by 1 word.
  // (Generated code — do not hand-edit.)
  inline static const uint32_t* unpack1_32(const uint32_t* in, uint32_t* out) {
    uint32_t mask = 0x1;
    simd_batch masks(mask);
    simd_batch words, shifts;
    simd_batch results;
    // extract 1-bit bundles 0 to 3
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 0, 1, 2, 3 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 1-bit bundles 4 to 7
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 4, 5, 6, 7 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 1-bit bundles 8 to 11
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 8, 9, 10, 11 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 1-bit bundles 12 to 15
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 12, 13, 14, 15 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 1-bit bundles 16 to 19
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 16, 17, 18, 19 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 1-bit bundles 20 to 23
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 20, 21, 22, 23 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 1-bit bundles 24 to 27
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 24, 25, 26, 27 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 1-bit bundles 28 to 31
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 28, 29, 30, 31 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    in += 1;
    return in;
  }
  // Decodes 32 values packed at 2 bits each: bundles 0-15 come from input
  // word 0, bundles 16-31 from word 1; advances the input by 2 words.
  // (Generated code — do not hand-edit.)
  inline static const uint32_t* unpack2_32(const uint32_t* in, uint32_t* out) {
    uint32_t mask = 0x3;
    simd_batch masks(mask);
    simd_batch words, shifts;
    simd_batch results;
    // extract 2-bit bundles 0 to 3
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 0, 2, 4, 6 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 2-bit bundles 4 to 7
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 8, 10, 12, 14 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 2-bit bundles 8 to 11
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 16, 18, 20, 22 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 2-bit bundles 12 to 15
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 24, 26, 28, 30 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 2-bit bundles 16 to 19
    words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
    shifts = simd_batch{ 0, 2, 4, 6 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 2-bit bundles 20 to 23
    words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
    shifts = simd_batch{ 8, 10, 12, 14 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 2-bit bundles 24 to 27
    words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
    shifts = simd_batch{ 16, 18, 20, 22 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 2-bit bundles 28 to 31
    words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
    shifts = simd_batch{ 24, 26, 28, 30 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    in += 2;
    return in;
  }
  // Decodes 32 values packed at 3 bits each across 3 input words. Values that
  // straddle a word boundary are reassembled by OR-ing the tail of one word
  // with the head of the next before shifting/masking. Advances input by 3.
  // (Generated code — do not hand-edit.)
  inline static const uint32_t* unpack3_32(const uint32_t* in, uint32_t* out) {
    uint32_t mask = 0x7;
    simd_batch masks(mask);
    simd_batch words, shifts;
    simd_batch results;
    // extract 3-bit bundles 0 to 3
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 0, 3, 6, 9 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 3-bit bundles 4 to 7
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 12, 15, 18, 21 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 3-bit bundles 8 to 11
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 30 | SafeLoad<uint32_t>(in + 1) << 2, SafeLoad<uint32_t>(in + 1) };
    shifts = simd_batch{ 24, 27, 0, 1 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 3-bit bundles 12 to 15
    words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
    shifts = simd_batch{ 4, 7, 10, 13 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 3-bit bundles 16 to 19
    words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
    shifts = simd_batch{ 16, 19, 22, 25 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 3-bit bundles 20 to 23
    words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) >> 31 | SafeLoad<uint32_t>(in + 2) << 1, SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) };
    shifts = simd_batch{ 28, 0, 2, 5 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 3-bit bundles 24 to 27
    words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) };
    shifts = simd_batch{ 8, 11, 14, 17 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 3-bit bundles 28 to 31
    words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) };
    shifts = simd_batch{ 20, 23, 26, 29 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    in += 3;
    return in;
  }
  // Decodes 32 values packed at 4 bits each: exactly 8 values per input word,
  // so no value straddles a word boundary; advances the input by 4 words.
  // (Generated code — do not hand-edit.)
  inline static const uint32_t* unpack4_32(const uint32_t* in, uint32_t* out) {
    uint32_t mask = 0xf;
    simd_batch masks(mask);
    simd_batch words, shifts;
    simd_batch results;
    // extract 4-bit bundles 0 to 3
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 0, 4, 8, 12 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 4-bit bundles 4 to 7
    words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
    shifts = simd_batch{ 16, 20, 24, 28 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 4-bit bundles 8 to 11
    words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
    shifts = simd_batch{ 0, 4, 8, 12 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 4-bit bundles 12 to 15
    words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
    shifts = simd_batch{ 16, 20, 24, 28 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 4-bit bundles 16 to 19
    words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) };
    shifts = simd_batch{ 0, 4, 8, 12 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 4-bit bundles 20 to 23
    words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) };
    shifts = simd_batch{ 16, 20, 24, 28 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 4-bit bundles 24 to 27
    words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) };
    shifts = simd_batch{ 0, 4, 8, 12 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // extract 4-bit bundles 28 to 31
    words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) };
    shifts = simd_batch{ 16, 20, 24, 28 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    in += 4;
    return in;
  }
// Unpack 32 contiguously bit-packed 5-bit values from `in` into `out`.
// Each step builds a SIMD batch of the four source words containing the next
// four values — OR-stitching two adjacent words when a value straddles a
// 32-bit word boundary — then shifts each lane so its value sits at bit 0 and
// masks to 5 bits. Consumes 5 input words (5 * 32 = 32 * 5 bits) and returns
// the advanced input pointer.
inline static const uint32_t* unpack5_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x1f;  // low 5 bits
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  // extract 5-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
  shifts = simd_batch{ 0, 5, 10, 15 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 5-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 30 | SafeLoad<uint32_t>(in + 1) << 2, SafeLoad<uint32_t>(in + 1) };
  shifts = simd_batch{ 20, 25, 0, 3 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 5-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
  shifts = simd_batch{ 8, 13, 18, 23 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 5-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 1) >> 28 | SafeLoad<uint32_t>(in + 2) << 4, SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) };
  shifts = simd_batch{ 0, 1, 6, 11 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 5-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) >> 31 | SafeLoad<uint32_t>(in + 3) << 1 };
  shifts = simd_batch{ 16, 21, 26, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 5-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) };
  shifts = simd_batch{ 4, 9, 14, 19 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 5-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) >> 29 | SafeLoad<uint32_t>(in + 4) << 3, SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) };
  shifts = simd_batch{ 24, 0, 2, 7 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 5-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) };
  shifts = simd_batch{ 12, 17, 22, 27 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  in += 5;
  return in;
}
inline static const uint32_t* unpack6_32(const uint32_t* in, uint32_t* out) {
uint32_t mask = 0x3f;
simd_batch masks(mask);
simd_batch words, shifts;
simd_batch results;
// extract 6-bit bundles 0 to 3
words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
shifts = simd_batch{ 0, 6, 12, 18 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 6-bit bundles 4 to 7
words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 30 | SafeLoad<uint32_t>(in + 1) << 2, SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
shifts = simd_batch{ 24, 0, 4, 10 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 6-bit bundles 8 to 11
words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) >> 28 | SafeLoad<uint32_t>(in + 2) << 4, SafeLoad<uint32_t>(in + 2) };
shifts = simd_batch{ 16, 22, 0, 2 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 6-bit bundles 12 to 15
words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) };
shifts = simd_batch{ 8, 14, 20, 26 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 6-bit bundles 16 to 19
words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) };
shifts = simd_batch{ 0, 6, 12, 18 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 6-bit bundles 20 to 23
words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) >> 30 | SafeLoad<uint32_t>(in + 4) << 2, SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) };
shifts = simd_batch{ 24, 0, 4, 10 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 6-bit bundles 24 to 27
words = simd_batch{ SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) >> 28 | SafeLoad<uint32_t>(in + 5) << 4, SafeLoad<uint32_t>(in + 5) };
shifts = simd_batch{ 16, 22, 0, 2 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 6-bit bundles 28 to 31
words = simd_batch{ SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) };
shifts = simd_batch{ 8, 14, 20, 26 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
in += 6;
return in;
}
// Unpack 32 contiguously bit-packed 7-bit values from `in` into `out`.
// Each step builds a SIMD batch of the four source words containing the next
// four values — OR-stitching two adjacent words when a value straddles a
// 32-bit word boundary — then shifts each lane so its value sits at bit 0 and
// masks to 7 bits. Consumes 7 input words (7 * 32 = 32 * 7 bits) and returns
// the advanced input pointer.
inline static const uint32_t* unpack7_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x7f;  // low 7 bits
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  // extract 7-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
  shifts = simd_batch{ 0, 7, 14, 21 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 7-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 0) >> 28 | SafeLoad<uint32_t>(in + 1) << 4, SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
  shifts = simd_batch{ 0, 3, 10, 17 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 7-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) >> 31 | SafeLoad<uint32_t>(in + 2) << 1, SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) };
  shifts = simd_batch{ 24, 0, 6, 13 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 7-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) >> 27 | SafeLoad<uint32_t>(in + 3) << 5, SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) };
  shifts = simd_batch{ 20, 0, 2, 9 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 7-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) >> 30 | SafeLoad<uint32_t>(in + 4) << 2, SafeLoad<uint32_t>(in + 4) };
  shifts = simd_batch{ 16, 23, 0, 5 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 7-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) >> 26 | SafeLoad<uint32_t>(in + 5) << 6, SafeLoad<uint32_t>(in + 5) };
  shifts = simd_batch{ 12, 19, 0, 1 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 7-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) >> 29 | SafeLoad<uint32_t>(in + 6) << 3 };
  shifts = simd_batch{ 8, 15, 22, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 7-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6) };
  shifts = simd_batch{ 4, 11, 18, 25 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  in += 7;
  return in;
}
inline static const uint32_t* unpack8_32(const uint32_t* in, uint32_t* out) {
uint32_t mask = 0xff;
simd_batch masks(mask);
simd_batch words, shifts;
simd_batch results;
// extract 8-bit bundles 0 to 3
words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) };
shifts = simd_batch{ 0, 8, 16, 24 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 8-bit bundles 4 to 7
words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) };
shifts = simd_batch{ 0, 8, 16, 24 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 8-bit bundles 8 to 11
words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) };
shifts = simd_batch{ 0, 8, 16, 24 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 8-bit bundles 12 to 15
words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) };
shifts = simd_batch{ 0, 8, 16, 24 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 8-bit bundles 16 to 19
words = simd_batch{ SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) };
shifts = simd_batch{ 0, 8, 16, 24 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 8-bit bundles 20 to 23
words = simd_batch{ SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) };
shifts = simd_batch{ 0, 8, 16, 24 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 8-bit bundles 24 to 27
words = simd_batch{ SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6) };
shifts = simd_batch{ 0, 8, 16, 24 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 8-bit bundles 28 to 31
words = simd_batch{ SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) };
shifts = simd_batch{ 0, 8, 16, 24 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
in += 8;
return in;
}
// Unpack 32 contiguously bit-packed 9-bit values from `in` into `out`.
// Each step builds a SIMD batch of the four source words containing the next
// four values — OR-stitching two adjacent words when a value straddles a
// 32-bit word boundary — then shifts each lane so its value sits at bit 0 and
// masks to 9 bits. Consumes 9 input words (9 * 32 = 32 * 9 bits) and returns
// the advanced input pointer.
inline static const uint32_t* unpack9_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x1ff;  // low 9 bits
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  // extract 9-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 27 | SafeLoad<uint32_t>(in + 1) << 5 };
  shifts = simd_batch{ 0, 9, 18, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 9-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) >> 31 | SafeLoad<uint32_t>(in + 2) << 1 };
  shifts = simd_batch{ 4, 13, 22, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 9-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) >> 26 | SafeLoad<uint32_t>(in + 3) << 6, SafeLoad<uint32_t>(in + 3) };
  shifts = simd_batch{ 8, 17, 0, 3 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 9-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) >> 30 | SafeLoad<uint32_t>(in + 4) << 2, SafeLoad<uint32_t>(in + 4) };
  shifts = simd_batch{ 12, 21, 0, 7 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 9-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) >> 25 | SafeLoad<uint32_t>(in + 5) << 7, SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) };
  shifts = simd_batch{ 16, 0, 2, 11 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 9-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) >> 29 | SafeLoad<uint32_t>(in + 6) << 3, SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6) };
  shifts = simd_batch{ 20, 0, 6, 15 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 9-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 6) >> 24 | SafeLoad<uint32_t>(in + 7) << 8, SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) };
  shifts = simd_batch{ 0, 1, 10, 19 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 9-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 7) >> 28 | SafeLoad<uint32_t>(in + 8) << 4, SafeLoad<uint32_t>(in + 8), SafeLoad<uint32_t>(in + 8), SafeLoad<uint32_t>(in + 8) };
  shifts = simd_batch{ 0, 5, 14, 23 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  in += 9;
  return in;
}
inline static const uint32_t* unpack10_32(const uint32_t* in, uint32_t* out) {
uint32_t mask = 0x3ff;
simd_batch masks(mask);
simd_batch words, shifts;
simd_batch results;
// extract 10-bit bundles 0 to 3
words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 30 | SafeLoad<uint32_t>(in + 1) << 2 };
shifts = simd_batch{ 0, 10, 20, 0 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 10-bit bundles 4 to 7
words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) >> 28 | SafeLoad<uint32_t>(in + 2) << 4, SafeLoad<uint32_t>(in + 2) };
shifts = simd_batch{ 8, 18, 0, 6 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 10-bit bundles 8 to 11
words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) >> 26 | SafeLoad<uint32_t>(in + 3) << 6, SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) };
shifts = simd_batch{ 16, 0, 4, 14 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 10-bit bundles 12 to 15
words = simd_batch{ SafeLoad<uint32_t>(in + 3) >> 24 | SafeLoad<uint32_t>(in + 4) << 8, SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) };
shifts = simd_batch{ 0, 2, 12, 22 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 10-bit bundles 16 to 19
words = simd_batch{ SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) >> 30 | SafeLoad<uint32_t>(in + 6) << 2 };
shifts = simd_batch{ 0, 10, 20, 0 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 10-bit bundles 20 to 23
words = simd_batch{ SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6) >> 28 | SafeLoad<uint32_t>(in + 7) << 4, SafeLoad<uint32_t>(in + 7) };
shifts = simd_batch{ 8, 18, 0, 6 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 10-bit bundles 24 to 27
words = simd_batch{ SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) >> 26 | SafeLoad<uint32_t>(in + 8) << 6, SafeLoad<uint32_t>(in + 8), SafeLoad<uint32_t>(in + 8) };
shifts = simd_batch{ 16, 0, 4, 14 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 10-bit bundles 28 to 31
words = simd_batch{ SafeLoad<uint32_t>(in + 8) >> 24 | SafeLoad<uint32_t>(in + 9) << 8, SafeLoad<uint32_t>(in + 9), SafeLoad<uint32_t>(in + 9), SafeLoad<uint32_t>(in + 9) };
shifts = simd_batch{ 0, 2, 12, 22 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
in += 10;
return in;
}
// Unpack 32 contiguously bit-packed 11-bit values from `in` into `out`.
// Each step builds a SIMD batch of the four source words containing the next
// four values — OR-stitching two adjacent words when a value straddles a
// 32-bit word boundary — then shifts each lane so its value sits at bit 0 and
// masks to 11 bits. Consumes 11 input words (11 * 32 = 32 * 11 bits) and
// returns the advanced input pointer.
inline static const uint32_t* unpack11_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x7ff;  // low 11 bits
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  // extract 11-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 22 | SafeLoad<uint32_t>(in + 1) << 10, SafeLoad<uint32_t>(in + 1) };
  shifts = simd_batch{ 0, 11, 0, 1 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 11-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) >> 23 | SafeLoad<uint32_t>(in + 2) << 9, SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) };
  shifts = simd_batch{ 12, 0, 2, 13 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 11-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 2) >> 24 | SafeLoad<uint32_t>(in + 3) << 8, SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) >> 25 | SafeLoad<uint32_t>(in + 4) << 7 };
  shifts = simd_batch{ 0, 3, 14, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 11-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) >> 26 | SafeLoad<uint32_t>(in + 5) << 6, SafeLoad<uint32_t>(in + 5) };
  shifts = simd_batch{ 4, 15, 0, 5 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 11-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) >> 27 | SafeLoad<uint32_t>(in + 6) << 5, SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6) };
  shifts = simd_batch{ 16, 0, 6, 17 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 11-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 6) >> 28 | SafeLoad<uint32_t>(in + 7) << 4, SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) >> 29 | SafeLoad<uint32_t>(in + 8) << 3 };
  shifts = simd_batch{ 0, 7, 18, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 11-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 8), SafeLoad<uint32_t>(in + 8), SafeLoad<uint32_t>(in + 8) >> 30 | SafeLoad<uint32_t>(in + 9) << 2, SafeLoad<uint32_t>(in + 9) };
  shifts = simd_batch{ 8, 19, 0, 9 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 11-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 9), SafeLoad<uint32_t>(in + 9) >> 31 | SafeLoad<uint32_t>(in + 10) << 1, SafeLoad<uint32_t>(in + 10), SafeLoad<uint32_t>(in + 10) };
  shifts = simd_batch{ 20, 0, 10, 21 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  in += 11;
  return in;
}
inline static const uint32_t* unpack12_32(const uint32_t* in, uint32_t* out) {
uint32_t mask = 0xfff;
simd_batch masks(mask);
simd_batch words, shifts;
simd_batch results;
// extract 12-bit bundles 0 to 3
words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 24 | SafeLoad<uint32_t>(in + 1) << 8, SafeLoad<uint32_t>(in + 1) };
shifts = simd_batch{ 0, 12, 0, 4 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 12-bit bundles 4 to 7
words = simd_batch{ SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) >> 28 | SafeLoad<uint32_t>(in + 2) << 4, SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) };
shifts = simd_batch{ 16, 0, 8, 20 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 12-bit bundles 8 to 11
words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) >> 24 | SafeLoad<uint32_t>(in + 4) << 8, SafeLoad<uint32_t>(in + 4) };
shifts = simd_batch{ 0, 12, 0, 4 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 12-bit bundles 12 to 15
words = simd_batch{ SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) >> 28 | SafeLoad<uint32_t>(in + 5) << 4, SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) };
shifts = simd_batch{ 16, 0, 8, 20 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 12-bit bundles 16 to 19
words = simd_batch{ SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6) >> 24 | SafeLoad<uint32_t>(in + 7) << 8, SafeLoad<uint32_t>(in + 7) };
shifts = simd_batch{ 0, 12, 0, 4 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 12-bit bundles 20 to 23
words = simd_batch{ SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) >> 28 | SafeLoad<uint32_t>(in + 8) << 4, SafeLoad<uint32_t>(in + 8), SafeLoad<uint32_t>(in + 8) };
shifts = simd_batch{ 16, 0, 8, 20 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 12-bit bundles 24 to 27
words = simd_batch{ SafeLoad<uint32_t>(in + 9), SafeLoad<uint32_t>(in + 9), SafeLoad<uint32_t>(in + 9) >> 24 | SafeLoad<uint32_t>(in + 10) << 8, SafeLoad<uint32_t>(in + 10) };
shifts = simd_batch{ 0, 12, 0, 4 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 12-bit bundles 28 to 31
words = simd_batch{ SafeLoad<uint32_t>(in + 10), SafeLoad<uint32_t>(in + 10) >> 28 | SafeLoad<uint32_t>(in + 11) << 4, SafeLoad<uint32_t>(in + 11), SafeLoad<uint32_t>(in + 11) };
shifts = simd_batch{ 16, 0, 8, 20 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
in += 12;
return in;
}
// Unpack 32 contiguously bit-packed 13-bit values from `in` into `out`.
// Each step builds a SIMD batch of the four source words containing the next
// four values — OR-stitching two adjacent words when a value straddles a
// 32-bit word boundary — then shifts each lane so its value sits at bit 0 and
// masks to 13 bits. Consumes 13 input words (13 * 32 = 32 * 13 bits) and
// returns the advanced input pointer.
inline static const uint32_t* unpack13_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x1fff;  // low 13 bits
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  // extract 13-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 26 | SafeLoad<uint32_t>(in + 1) << 6, SafeLoad<uint32_t>(in + 1) };
  shifts = simd_batch{ 0, 13, 0, 7 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 13-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 1) >> 20 | SafeLoad<uint32_t>(in + 2) << 12, SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) >> 27 | SafeLoad<uint32_t>(in + 3) << 5 };
  shifts = simd_batch{ 0, 1, 14, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 13-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) >> 21 | SafeLoad<uint32_t>(in + 4) << 11, SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) };
  shifts = simd_batch{ 8, 0, 2, 15 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 13-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 4) >> 28 | SafeLoad<uint32_t>(in + 5) << 4, SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) >> 22 | SafeLoad<uint32_t>(in + 6) << 10, SafeLoad<uint32_t>(in + 6) };
  shifts = simd_batch{ 0, 9, 0, 3 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 13-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6) >> 29 | SafeLoad<uint32_t>(in + 7) << 3, SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) >> 23 | SafeLoad<uint32_t>(in + 8) << 9 };
  shifts = simd_batch{ 16, 0, 10, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 13-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 8), SafeLoad<uint32_t>(in + 8), SafeLoad<uint32_t>(in + 8) >> 30 | SafeLoad<uint32_t>(in + 9) << 2, SafeLoad<uint32_t>(in + 9) };
  shifts = simd_batch{ 4, 17, 0, 11 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 13-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 9) >> 24 | SafeLoad<uint32_t>(in + 10) << 8, SafeLoad<uint32_t>(in + 10), SafeLoad<uint32_t>(in + 10), SafeLoad<uint32_t>(in + 10) >> 31 | SafeLoad<uint32_t>(in + 11) << 1 };
  shifts = simd_batch{ 0, 5, 18, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 13-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 11), SafeLoad<uint32_t>(in + 11) >> 25 | SafeLoad<uint32_t>(in + 12) << 7, SafeLoad<uint32_t>(in + 12), SafeLoad<uint32_t>(in + 12) };
  shifts = simd_batch{ 12, 0, 6, 19 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  in += 13;
  return in;
}
inline static const uint32_t* unpack14_32(const uint32_t* in, uint32_t* out) {
uint32_t mask = 0x3fff;
simd_batch masks(mask);
simd_batch words, shifts;
simd_batch results;
// extract 14-bit bundles 0 to 3
words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 28 | SafeLoad<uint32_t>(in + 1) << 4, SafeLoad<uint32_t>(in + 1) };
shifts = simd_batch{ 0, 14, 0, 10 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 14-bit bundles 4 to 7
words = simd_batch{ SafeLoad<uint32_t>(in + 1) >> 24 | SafeLoad<uint32_t>(in + 2) << 8, SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) >> 20 | SafeLoad<uint32_t>(in + 3) << 12, SafeLoad<uint32_t>(in + 3) };
shifts = simd_batch{ 0, 6, 0, 2 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 14-bit bundles 8 to 11
words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) >> 30 | SafeLoad<uint32_t>(in + 4) << 2, SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) >> 26 | SafeLoad<uint32_t>(in + 5) << 6 };
shifts = simd_batch{ 16, 0, 12, 0 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 14-bit bundles 12 to 15
words = simd_batch{ SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) >> 22 | SafeLoad<uint32_t>(in + 6) << 10, SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6) };
shifts = simd_batch{ 8, 0, 4, 18 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 14-bit bundles 16 to 19
words = simd_batch{ SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) >> 28 | SafeLoad<uint32_t>(in + 8) << 4, SafeLoad<uint32_t>(in + 8) };
shifts = simd_batch{ 0, 14, 0, 10 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 14-bit bundles 20 to 23
words = simd_batch{ SafeLoad<uint32_t>(in + 8) >> 24 | SafeLoad<uint32_t>(in + 9) << 8, SafeLoad<uint32_t>(in + 9), SafeLoad<uint32_t>(in + 9) >> 20 | SafeLoad<uint32_t>(in + 10) << 12, SafeLoad<uint32_t>(in + 10) };
shifts = simd_batch{ 0, 6, 0, 2 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 14-bit bundles 24 to 27
words = simd_batch{ SafeLoad<uint32_t>(in + 10), SafeLoad<uint32_t>(in + 10) >> 30 | SafeLoad<uint32_t>(in + 11) << 2, SafeLoad<uint32_t>(in + 11), SafeLoad<uint32_t>(in + 11) >> 26 | SafeLoad<uint32_t>(in + 12) << 6 };
shifts = simd_batch{ 16, 0, 12, 0 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
// extract 14-bit bundles 28 to 31
words = simd_batch{ SafeLoad<uint32_t>(in + 12), SafeLoad<uint32_t>(in + 12) >> 22 | SafeLoad<uint32_t>(in + 13) << 10, SafeLoad<uint32_t>(in + 13), SafeLoad<uint32_t>(in + 13) };
shifts = simd_batch{ 8, 0, 4, 18 };
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += 4;
in += 14;
return in;
}
// Unpack 32 contiguously bit-packed 15-bit values from `in` into `out`.
// Each step builds a SIMD batch of the four source words containing the next
// four values — OR-stitching two adjacent words when a value straddles a
// 32-bit word boundary — then shifts each lane so its value sits at bit 0 and
// masks to 15 bits. Consumes 15 input words (15 * 32 = 32 * 15 bits) and
// returns the advanced input pointer.
inline static const uint32_t* unpack15_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x7fff;  // low 15 bits
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  // extract 15-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 30 | SafeLoad<uint32_t>(in + 1) << 2, SafeLoad<uint32_t>(in + 1) };
  shifts = simd_batch{ 0, 15, 0, 13 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 15-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 1) >> 28 | SafeLoad<uint32_t>(in + 2) << 4, SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) >> 26 | SafeLoad<uint32_t>(in + 3) << 6, SafeLoad<uint32_t>(in + 3) };
  shifts = simd_batch{ 0, 11, 0, 9 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 15-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 3) >> 24 | SafeLoad<uint32_t>(in + 4) << 8, SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) >> 22 | SafeLoad<uint32_t>(in + 5) << 10, SafeLoad<uint32_t>(in + 5) };
  shifts = simd_batch{ 0, 7, 0, 5 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 15-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 5) >> 20 | SafeLoad<uint32_t>(in + 6) << 12, SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6) >> 18 | SafeLoad<uint32_t>(in + 7) << 14, SafeLoad<uint32_t>(in + 7) };
  shifts = simd_batch{ 0, 3, 0, 1 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 15-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) >> 31 | SafeLoad<uint32_t>(in + 8) << 1, SafeLoad<uint32_t>(in + 8), SafeLoad<uint32_t>(in + 8) >> 29 | SafeLoad<uint32_t>(in + 9) << 3 };
  shifts = simd_batch{ 16, 0, 14, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 15-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 9), SafeLoad<uint32_t>(in + 9) >> 27 | SafeLoad<uint32_t>(in + 10) << 5, SafeLoad<uint32_t>(in + 10), SafeLoad<uint32_t>(in + 10) >> 25 | SafeLoad<uint32_t>(in + 11) << 7 };
  shifts = simd_batch{ 12, 0, 10, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 15-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 11), SafeLoad<uint32_t>(in + 11) >> 23 | SafeLoad<uint32_t>(in + 12) << 9, SafeLoad<uint32_t>(in + 12), SafeLoad<uint32_t>(in + 12) >> 21 | SafeLoad<uint32_t>(in + 13) << 11 };
  shifts = simd_batch{ 8, 0, 6, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 15-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 13), SafeLoad<uint32_t>(in + 13) >> 19 | SafeLoad<uint32_t>(in + 14) << 13, SafeLoad<uint32_t>(in + 14), SafeLoad<uint32_t>(in + 14) };
  shifts = simd_batch{ 4, 0, 2, 17 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  in += 15;
  return in;
}
inline static const uint32_t* unpack16_32(const uint32_t* in, uint32_t* out) {
  // Unpacks 32 values of 16 bits each from `in` into `out`, 4 values per
  // SIMD batch. Each input word holds exactly two values (low and high
  // halves), so the extraction pattern repeats every 2 input words / 4
  // outputs. Consumes 16 input words; returns the advanced `in` pointer.
  uint32_t mask = 0xffff;  // low 16 bits of each lane
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  for (int pass = 0; pass < 8; ++pass) {
    const uint32_t* base = in + 2 * pass;
    // two values from each of the two words covered by this pass
    words = simd_batch{ SafeLoad<uint32_t>(base + 0), SafeLoad<uint32_t>(base + 0),
                        SafeLoad<uint32_t>(base + 1), SafeLoad<uint32_t>(base + 1) };
    shifts = simd_batch{ 0, 16, 0, 16 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
  }
  in += 16;  // 32 values * 16 bits == 16 words
  return in;
}
// Unpacks 32 values of 17 bits each from `in` into `out`, 4 values per SIMD
// batch. A value that straddles a 32-bit word boundary is reassembled by
// OR-ing the shifted halves of the two adjacent input words before the final
// shift-and-mask. Consumes 17 input words; returns the advanced `in` pointer.
inline static const uint32_t* unpack17_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x1ffff;  // low 17 bits of each lane
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  // extract 17-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 17 | SafeLoad<uint32_t>(in + 1) << 15, SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) >> 19 | SafeLoad<uint32_t>(in + 2) << 13 };
  shifts = simd_batch{ 0, 0, 2, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 17-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) >> 21 | SafeLoad<uint32_t>(in + 3) << 11, SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) >> 23 | SafeLoad<uint32_t>(in + 4) << 9 };
  shifts = simd_batch{ 4, 0, 6, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 17-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) >> 25 | SafeLoad<uint32_t>(in + 5) << 7, SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) >> 27 | SafeLoad<uint32_t>(in + 6) << 5 };
  shifts = simd_batch{ 8, 0, 10, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 17-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 6), SafeLoad<uint32_t>(in + 6) >> 29 | SafeLoad<uint32_t>(in + 7) << 3, SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) >> 31 | SafeLoad<uint32_t>(in + 8) << 1 };
  shifts = simd_batch{ 12, 0, 14, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 17-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 8) >> 16 | SafeLoad<uint32_t>(in + 9) << 16, SafeLoad<uint32_t>(in + 9), SafeLoad<uint32_t>(in + 9) >> 18 | SafeLoad<uint32_t>(in + 10) << 14, SafeLoad<uint32_t>(in + 10) };
  shifts = simd_batch{ 0, 1, 0, 3 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 17-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 10) >> 20 | SafeLoad<uint32_t>(in + 11) << 12, SafeLoad<uint32_t>(in + 11), SafeLoad<uint32_t>(in + 11) >> 22 | SafeLoad<uint32_t>(in + 12) << 10, SafeLoad<uint32_t>(in + 12) };
  shifts = simd_batch{ 0, 5, 0, 7 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 17-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 12) >> 24 | SafeLoad<uint32_t>(in + 13) << 8, SafeLoad<uint32_t>(in + 13), SafeLoad<uint32_t>(in + 13) >> 26 | SafeLoad<uint32_t>(in + 14) << 6, SafeLoad<uint32_t>(in + 14) };
  shifts = simd_batch{ 0, 9, 0, 11 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 17-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 14) >> 28 | SafeLoad<uint32_t>(in + 15) << 4, SafeLoad<uint32_t>(in + 15), SafeLoad<uint32_t>(in + 15) >> 30 | SafeLoad<uint32_t>(in + 16) << 2, SafeLoad<uint32_t>(in + 16) };
  shifts = simd_batch{ 0, 13, 0, 15 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  in += 17;  // 32 values * 17 bits == 17 words consumed
  return in;
}
inline static const uint32_t* unpack18_32(const uint32_t* in, uint32_t* out) {
  // Unpacks 32 values of 18 bits each from `in` into `out`, 4 values per
  // SIMD batch. The bit layout repeats every 9 input words / 16 outputs, so
  // the 32 outputs are produced by two identical passes. Values straddling a
  // word boundary are stitched together with a shift-and-or of the two
  // adjacent words. Consumes 18 input words; returns the advanced pointer.
  uint32_t mask = 0x3ffff;  // low 18 bits of each lane
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  for (int half = 0; half < 2; ++half) {
    const uint32_t* base = in + 9 * half;
    // values 0 to 3 of this half
    words = simd_batch{ SafeLoad<uint32_t>(base + 0),
                        SafeLoad<uint32_t>(base + 0) >> 18 | SafeLoad<uint32_t>(base + 1) << 14,
                        SafeLoad<uint32_t>(base + 1),
                        SafeLoad<uint32_t>(base + 1) >> 22 | SafeLoad<uint32_t>(base + 2) << 10 };
    shifts = simd_batch{ 0, 0, 4, 0 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // values 4 to 7 of this half
    words = simd_batch{ SafeLoad<uint32_t>(base + 2),
                        SafeLoad<uint32_t>(base + 2) >> 26 | SafeLoad<uint32_t>(base + 3) << 6,
                        SafeLoad<uint32_t>(base + 3),
                        SafeLoad<uint32_t>(base + 3) >> 30 | SafeLoad<uint32_t>(base + 4) << 2 };
    shifts = simd_batch{ 8, 0, 12, 0 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // values 8 to 11 of this half
    words = simd_batch{ SafeLoad<uint32_t>(base + 4) >> 16 | SafeLoad<uint32_t>(base + 5) << 16,
                        SafeLoad<uint32_t>(base + 5),
                        SafeLoad<uint32_t>(base + 5) >> 20 | SafeLoad<uint32_t>(base + 6) << 12,
                        SafeLoad<uint32_t>(base + 6) };
    shifts = simd_batch{ 0, 2, 0, 6 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // values 12 to 15 of this half
    words = simd_batch{ SafeLoad<uint32_t>(base + 6) >> 24 | SafeLoad<uint32_t>(base + 7) << 8,
                        SafeLoad<uint32_t>(base + 7),
                        SafeLoad<uint32_t>(base + 7) >> 28 | SafeLoad<uint32_t>(base + 8) << 4,
                        SafeLoad<uint32_t>(base + 8) };
    shifts = simd_batch{ 0, 10, 0, 14 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
  }
  in += 18;  // 32 values * 18 bits == 18 words
  return in;
}
// Unpacks 32 values of 19 bits each from `in` into `out`, 4 values per SIMD
// batch. A value that straddles a 32-bit word boundary is reassembled by
// OR-ing the shifted halves of the two adjacent input words before the final
// shift-and-mask. Consumes 19 input words; returns the advanced `in` pointer.
inline static const uint32_t* unpack19_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x7ffff;  // low 19 bits of each lane
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  // extract 19-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 19 | SafeLoad<uint32_t>(in + 1) << 13, SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) >> 25 | SafeLoad<uint32_t>(in + 2) << 7 };
  shifts = simd_batch{ 0, 0, 6, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 19-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 2), SafeLoad<uint32_t>(in + 2) >> 31 | SafeLoad<uint32_t>(in + 3) << 1, SafeLoad<uint32_t>(in + 3) >> 18 | SafeLoad<uint32_t>(in + 4) << 14, SafeLoad<uint32_t>(in + 4) };
  shifts = simd_batch{ 12, 0, 0, 5 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 19-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 4) >> 24 | SafeLoad<uint32_t>(in + 5) << 8, SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) >> 30 | SafeLoad<uint32_t>(in + 6) << 2, SafeLoad<uint32_t>(in + 6) >> 17 | SafeLoad<uint32_t>(in + 7) << 15 };
  shifts = simd_batch{ 0, 11, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 19-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) >> 23 | SafeLoad<uint32_t>(in + 8) << 9, SafeLoad<uint32_t>(in + 8), SafeLoad<uint32_t>(in + 8) >> 29 | SafeLoad<uint32_t>(in + 9) << 3 };
  shifts = simd_batch{ 4, 0, 10, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 19-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 9) >> 16 | SafeLoad<uint32_t>(in + 10) << 16, SafeLoad<uint32_t>(in + 10), SafeLoad<uint32_t>(in + 10) >> 22 | SafeLoad<uint32_t>(in + 11) << 10, SafeLoad<uint32_t>(in + 11) };
  shifts = simd_batch{ 0, 3, 0, 9 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 19-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 11) >> 28 | SafeLoad<uint32_t>(in + 12) << 4, SafeLoad<uint32_t>(in + 12) >> 15 | SafeLoad<uint32_t>(in + 13) << 17, SafeLoad<uint32_t>(in + 13), SafeLoad<uint32_t>(in + 13) >> 21 | SafeLoad<uint32_t>(in + 14) << 11 };
  shifts = simd_batch{ 0, 0, 2, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 19-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 14), SafeLoad<uint32_t>(in + 14) >> 27 | SafeLoad<uint32_t>(in + 15) << 5, SafeLoad<uint32_t>(in + 15) >> 14 | SafeLoad<uint32_t>(in + 16) << 18, SafeLoad<uint32_t>(in + 16) };
  shifts = simd_batch{ 8, 0, 0, 1 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 19-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 16) >> 20 | SafeLoad<uint32_t>(in + 17) << 12, SafeLoad<uint32_t>(in + 17), SafeLoad<uint32_t>(in + 17) >> 26 | SafeLoad<uint32_t>(in + 18) << 6, SafeLoad<uint32_t>(in + 18) };
  shifts = simd_batch{ 0, 7, 0, 13 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  in += 19;  // 32 values * 19 bits == 19 words consumed
  return in;
}
inline static const uint32_t* unpack20_32(const uint32_t* in, uint32_t* out) {
  // Unpacks 32 values of 20 bits each from `in` into `out`, 4 values per
  // SIMD batch. The bit layout repeats every 5 input words / 8 outputs, so
  // the 32 outputs are produced by four identical passes. Values straddling
  // a word boundary are stitched together with a shift-and-or of the two
  // adjacent words. Consumes 20 input words; returns the advanced pointer.
  uint32_t mask = 0xfffff;  // low 20 bits of each lane
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  for (int pass = 0; pass < 4; ++pass) {
    const uint32_t* base = in + 5 * pass;
    // first four of the eight values covered by this pass
    words = simd_batch{ SafeLoad<uint32_t>(base + 0),
                        SafeLoad<uint32_t>(base + 0) >> 20 | SafeLoad<uint32_t>(base + 1) << 12,
                        SafeLoad<uint32_t>(base + 1),
                        SafeLoad<uint32_t>(base + 1) >> 28 | SafeLoad<uint32_t>(base + 2) << 4 };
    shifts = simd_batch{ 0, 0, 8, 0 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // last four values of this pass
    words = simd_batch{ SafeLoad<uint32_t>(base + 2) >> 16 | SafeLoad<uint32_t>(base + 3) << 16,
                        SafeLoad<uint32_t>(base + 3),
                        SafeLoad<uint32_t>(base + 3) >> 24 | SafeLoad<uint32_t>(base + 4) << 8,
                        SafeLoad<uint32_t>(base + 4) };
    shifts = simd_batch{ 0, 4, 0, 12 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
  }
  in += 20;  // 32 values * 20 bits == 20 words
  return in;
}
// Unpacks 32 values of 21 bits each from `in` into `out`, 4 values per SIMD
// batch. A value that straddles a 32-bit word boundary is reassembled by
// OR-ing the shifted halves of the two adjacent input words before the final
// shift-and-mask. Consumes 21 input words; returns the advanced `in` pointer.
inline static const uint32_t* unpack21_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x1fffff;  // low 21 bits of each lane
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  // extract 21-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 21 | SafeLoad<uint32_t>(in + 1) << 11, SafeLoad<uint32_t>(in + 1), SafeLoad<uint32_t>(in + 1) >> 31 | SafeLoad<uint32_t>(in + 2) << 1 };
  shifts = simd_batch{ 0, 0, 10, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 21-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 2) >> 20 | SafeLoad<uint32_t>(in + 3) << 12, SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) >> 30 | SafeLoad<uint32_t>(in + 4) << 2, SafeLoad<uint32_t>(in + 4) >> 19 | SafeLoad<uint32_t>(in + 5) << 13 };
  shifts = simd_batch{ 0, 9, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 21-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) >> 29 | SafeLoad<uint32_t>(in + 6) << 3, SafeLoad<uint32_t>(in + 6) >> 18 | SafeLoad<uint32_t>(in + 7) << 14, SafeLoad<uint32_t>(in + 7) };
  shifts = simd_batch{ 8, 0, 0, 7 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 21-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 7) >> 28 | SafeLoad<uint32_t>(in + 8) << 4, SafeLoad<uint32_t>(in + 8) >> 17 | SafeLoad<uint32_t>(in + 9) << 15, SafeLoad<uint32_t>(in + 9), SafeLoad<uint32_t>(in + 9) >> 27 | SafeLoad<uint32_t>(in + 10) << 5 };
  shifts = simd_batch{ 0, 0, 6, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 21-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 10) >> 16 | SafeLoad<uint32_t>(in + 11) << 16, SafeLoad<uint32_t>(in + 11), SafeLoad<uint32_t>(in + 11) >> 26 | SafeLoad<uint32_t>(in + 12) << 6, SafeLoad<uint32_t>(in + 12) >> 15 | SafeLoad<uint32_t>(in + 13) << 17 };
  shifts = simd_batch{ 0, 5, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 21-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 13), SafeLoad<uint32_t>(in + 13) >> 25 | SafeLoad<uint32_t>(in + 14) << 7, SafeLoad<uint32_t>(in + 14) >> 14 | SafeLoad<uint32_t>(in + 15) << 18, SafeLoad<uint32_t>(in + 15) };
  shifts = simd_batch{ 4, 0, 0, 3 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 21-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 15) >> 24 | SafeLoad<uint32_t>(in + 16) << 8, SafeLoad<uint32_t>(in + 16) >> 13 | SafeLoad<uint32_t>(in + 17) << 19, SafeLoad<uint32_t>(in + 17), SafeLoad<uint32_t>(in + 17) >> 23 | SafeLoad<uint32_t>(in + 18) << 9 };
  shifts = simd_batch{ 0, 0, 2, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 21-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 18) >> 12 | SafeLoad<uint32_t>(in + 19) << 20, SafeLoad<uint32_t>(in + 19), SafeLoad<uint32_t>(in + 19) >> 22 | SafeLoad<uint32_t>(in + 20) << 10, SafeLoad<uint32_t>(in + 20) };
  shifts = simd_batch{ 0, 1, 0, 11 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  in += 21;  // 32 values * 21 bits == 21 words consumed
  return in;
}
inline static const uint32_t* unpack22_32(const uint32_t* in, uint32_t* out) {
  // Unpacks 32 values of 22 bits each from `in` into `out`, 4 values per
  // SIMD batch. The bit layout repeats every 11 input words / 16 outputs,
  // so the 32 outputs are produced by two identical passes. Values
  // straddling a word boundary are stitched together with a shift-and-or of
  // the two adjacent words. Consumes 22 input words; returns the advanced
  // pointer.
  uint32_t mask = 0x3fffff;  // low 22 bits of each lane
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  for (int half = 0; half < 2; ++half) {
    const uint32_t* base = in + 11 * half;
    // values 0 to 3 of this half
    words = simd_batch{ SafeLoad<uint32_t>(base + 0),
                        SafeLoad<uint32_t>(base + 0) >> 22 | SafeLoad<uint32_t>(base + 1) << 10,
                        SafeLoad<uint32_t>(base + 1) >> 12 | SafeLoad<uint32_t>(base + 2) << 20,
                        SafeLoad<uint32_t>(base + 2) };
    shifts = simd_batch{ 0, 0, 0, 2 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // values 4 to 7 of this half
    words = simd_batch{ SafeLoad<uint32_t>(base + 2) >> 24 | SafeLoad<uint32_t>(base + 3) << 8,
                        SafeLoad<uint32_t>(base + 3) >> 14 | SafeLoad<uint32_t>(base + 4) << 18,
                        SafeLoad<uint32_t>(base + 4),
                        SafeLoad<uint32_t>(base + 4) >> 26 | SafeLoad<uint32_t>(base + 5) << 6 };
    shifts = simd_batch{ 0, 0, 4, 0 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // values 8 to 11 of this half
    words = simd_batch{ SafeLoad<uint32_t>(base + 5) >> 16 | SafeLoad<uint32_t>(base + 6) << 16,
                        SafeLoad<uint32_t>(base + 6),
                        SafeLoad<uint32_t>(base + 6) >> 28 | SafeLoad<uint32_t>(base + 7) << 4,
                        SafeLoad<uint32_t>(base + 7) >> 18 | SafeLoad<uint32_t>(base + 8) << 14 };
    shifts = simd_batch{ 0, 6, 0, 0 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
    // values 12 to 15 of this half
    words = simd_batch{ SafeLoad<uint32_t>(base + 8),
                        SafeLoad<uint32_t>(base + 8) >> 30 | SafeLoad<uint32_t>(base + 9) << 2,
                        SafeLoad<uint32_t>(base + 9) >> 20 | SafeLoad<uint32_t>(base + 10) << 12,
                        SafeLoad<uint32_t>(base + 10) };
    shifts = simd_batch{ 8, 0, 0, 10 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
  }
  in += 22;  // 32 values * 22 bits == 22 words
  return in;
}
// Unpacks 32 values of 23 bits each from `in` into `out`, 4 values per SIMD
// batch. A value that straddles a 32-bit word boundary is reassembled by
// OR-ing the shifted halves of the two adjacent input words before the final
// shift-and-mask. Consumes 23 input words; returns the advanced `in` pointer.
inline static const uint32_t* unpack23_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x7fffff;  // low 23 bits of each lane
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  // extract 23-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 23 | SafeLoad<uint32_t>(in + 1) << 9, SafeLoad<uint32_t>(in + 1) >> 14 | SafeLoad<uint32_t>(in + 2) << 18, SafeLoad<uint32_t>(in + 2) };
  shifts = simd_batch{ 0, 0, 0, 5 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 23-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 2) >> 28 | SafeLoad<uint32_t>(in + 3) << 4, SafeLoad<uint32_t>(in + 3) >> 19 | SafeLoad<uint32_t>(in + 4) << 13, SafeLoad<uint32_t>(in + 4) >> 10 | SafeLoad<uint32_t>(in + 5) << 22, SafeLoad<uint32_t>(in + 5) };
  shifts = simd_batch{ 0, 0, 0, 1 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 23-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 5) >> 24 | SafeLoad<uint32_t>(in + 6) << 8, SafeLoad<uint32_t>(in + 6) >> 15 | SafeLoad<uint32_t>(in + 7) << 17, SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) >> 29 | SafeLoad<uint32_t>(in + 8) << 3 };
  shifts = simd_batch{ 0, 0, 6, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 23-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 8) >> 20 | SafeLoad<uint32_t>(in + 9) << 12, SafeLoad<uint32_t>(in + 9) >> 11 | SafeLoad<uint32_t>(in + 10) << 21, SafeLoad<uint32_t>(in + 10), SafeLoad<uint32_t>(in + 10) >> 25 | SafeLoad<uint32_t>(in + 11) << 7 };
  shifts = simd_batch{ 0, 0, 2, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 23-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 11) >> 16 | SafeLoad<uint32_t>(in + 12) << 16, SafeLoad<uint32_t>(in + 12), SafeLoad<uint32_t>(in + 12) >> 30 | SafeLoad<uint32_t>(in + 13) << 2, SafeLoad<uint32_t>(in + 13) >> 21 | SafeLoad<uint32_t>(in + 14) << 11 };
  shifts = simd_batch{ 0, 7, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 23-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 14) >> 12 | SafeLoad<uint32_t>(in + 15) << 20, SafeLoad<uint32_t>(in + 15), SafeLoad<uint32_t>(in + 15) >> 26 | SafeLoad<uint32_t>(in + 16) << 6, SafeLoad<uint32_t>(in + 16) >> 17 | SafeLoad<uint32_t>(in + 17) << 15 };
  shifts = simd_batch{ 0, 3, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 23-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 17), SafeLoad<uint32_t>(in + 17) >> 31 | SafeLoad<uint32_t>(in + 18) << 1, SafeLoad<uint32_t>(in + 18) >> 22 | SafeLoad<uint32_t>(in + 19) << 10, SafeLoad<uint32_t>(in + 19) >> 13 | SafeLoad<uint32_t>(in + 20) << 19 };
  shifts = simd_batch{ 8, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 23-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 20), SafeLoad<uint32_t>(in + 20) >> 27 | SafeLoad<uint32_t>(in + 21) << 5, SafeLoad<uint32_t>(in + 21) >> 18 | SafeLoad<uint32_t>(in + 22) << 14, SafeLoad<uint32_t>(in + 22) };
  shifts = simd_batch{ 4, 0, 0, 9 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  in += 23;  // 32 values * 23 bits == 23 words consumed
  return in;
}
inline static const uint32_t* unpack24_32(const uint32_t* in, uint32_t* out) {
  // Unpacks 32 values of 24 bits each from `in` into `out`, 4 values per
  // SIMD batch. The bit layout repeats every 3 input words / 4 outputs, so
  // the 32 outputs are produced by eight identical passes. Values straddling
  // a word boundary are stitched together with a shift-and-or of the two
  // adjacent words. Consumes 24 input words; returns the advanced pointer.
  uint32_t mask = 0xffffff;  // low 24 bits of each lane
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  for (int pass = 0; pass < 8; ++pass) {
    const uint32_t* base = in + 3 * pass;
    // four values spanning the three words of this pass
    words = simd_batch{ SafeLoad<uint32_t>(base + 0),
                        SafeLoad<uint32_t>(base + 0) >> 24 | SafeLoad<uint32_t>(base + 1) << 8,
                        SafeLoad<uint32_t>(base + 1) >> 16 | SafeLoad<uint32_t>(base + 2) << 16,
                        SafeLoad<uint32_t>(base + 2) };
    shifts = simd_batch{ 0, 0, 0, 8 };
    results = (words >> shifts) & masks;
    results.store_unaligned(out);
    out += 4;
  }
  in += 24;  // 32 values * 24 bits == 24 words
  return in;
}
// Unpacks 32 values of 25 bits each from `in` into `out`, 4 values per SIMD
// batch. A value that straddles a 32-bit word boundary is reassembled by
// OR-ing the shifted halves of the two adjacent input words before the final
// shift-and-mask. Consumes 25 input words; returns the advanced `in` pointer.
inline static const uint32_t* unpack25_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x1ffffff;  // low 25 bits of each lane
  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;
  // extract 25-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 25 | SafeLoad<uint32_t>(in + 1) << 7, SafeLoad<uint32_t>(in + 1) >> 18 | SafeLoad<uint32_t>(in + 2) << 14, SafeLoad<uint32_t>(in + 2) >> 11 | SafeLoad<uint32_t>(in + 3) << 21 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 25-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 3), SafeLoad<uint32_t>(in + 3) >> 29 | SafeLoad<uint32_t>(in + 4) << 3, SafeLoad<uint32_t>(in + 4) >> 22 | SafeLoad<uint32_t>(in + 5) << 10, SafeLoad<uint32_t>(in + 5) >> 15 | SafeLoad<uint32_t>(in + 6) << 17 };
  shifts = simd_batch{ 4, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 25-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 6) >> 8 | SafeLoad<uint32_t>(in + 7) << 24, SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) >> 26 | SafeLoad<uint32_t>(in + 8) << 6, SafeLoad<uint32_t>(in + 8) >> 19 | SafeLoad<uint32_t>(in + 9) << 13 };
  shifts = simd_batch{ 0, 1, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 25-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 9) >> 12 | SafeLoad<uint32_t>(in + 10) << 20, SafeLoad<uint32_t>(in + 10), SafeLoad<uint32_t>(in + 10) >> 30 | SafeLoad<uint32_t>(in + 11) << 2, SafeLoad<uint32_t>(in + 11) >> 23 | SafeLoad<uint32_t>(in + 12) << 9 };
  shifts = simd_batch{ 0, 5, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 25-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 12) >> 16 | SafeLoad<uint32_t>(in + 13) << 16, SafeLoad<uint32_t>(in + 13) >> 9 | SafeLoad<uint32_t>(in + 14) << 23, SafeLoad<uint32_t>(in + 14), SafeLoad<uint32_t>(in + 14) >> 27 | SafeLoad<uint32_t>(in + 15) << 5 };
  shifts = simd_batch{ 0, 0, 2, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 25-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 15) >> 20 | SafeLoad<uint32_t>(in + 16) << 12, SafeLoad<uint32_t>(in + 16) >> 13 | SafeLoad<uint32_t>(in + 17) << 19, SafeLoad<uint32_t>(in + 17), SafeLoad<uint32_t>(in + 17) >> 31 | SafeLoad<uint32_t>(in + 18) << 1 };
  shifts = simd_batch{ 0, 0, 6, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 25-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 18) >> 24 | SafeLoad<uint32_t>(in + 19) << 8, SafeLoad<uint32_t>(in + 19) >> 17 | SafeLoad<uint32_t>(in + 20) << 15, SafeLoad<uint32_t>(in + 20) >> 10 | SafeLoad<uint32_t>(in + 21) << 22, SafeLoad<uint32_t>(in + 21) };
  shifts = simd_batch{ 0, 0, 0, 3 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  // extract 25-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 21) >> 28 | SafeLoad<uint32_t>(in + 22) << 4, SafeLoad<uint32_t>(in + 22) >> 21 | SafeLoad<uint32_t>(in + 23) << 11, SafeLoad<uint32_t>(in + 23) >> 14 | SafeLoad<uint32_t>(in + 24) << 18, SafeLoad<uint32_t>(in + 24) };
  shifts = simd_batch{ 0, 0, 0, 7 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;
  in += 25;  // 32 values * 25 bits == 25 words consumed
  return in;
}
// Unpack 32 values stored at 26 bits each from `in` into 32 full uint32_t
// slots in `out`.  Values straddling a word boundary are reassembled by OR-ing
// the high bits of one input word with the low bits of the next; a per-lane
// shift then aligns word-internal values and the 0x3ffffff mask isolates the
// 26 payload bits.  Returns `in` advanced past the 26 consumed input words.
inline static const uint32_t* unpack26_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x3ffffff;

  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;

  // extract 26-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 26 | SafeLoad<uint32_t>(in + 1) << 6, SafeLoad<uint32_t>(in + 1) >> 20 | SafeLoad<uint32_t>(in + 2) << 12, SafeLoad<uint32_t>(in + 2) >> 14 | SafeLoad<uint32_t>(in + 3) << 18 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 26-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 3) >> 8 | SafeLoad<uint32_t>(in + 4) << 24, SafeLoad<uint32_t>(in + 4), SafeLoad<uint32_t>(in + 4) >> 28 | SafeLoad<uint32_t>(in + 5) << 4, SafeLoad<uint32_t>(in + 5) >> 22 | SafeLoad<uint32_t>(in + 6) << 10 };
  shifts = simd_batch{ 0, 2, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 26-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 6) >> 16 | SafeLoad<uint32_t>(in + 7) << 16, SafeLoad<uint32_t>(in + 7) >> 10 | SafeLoad<uint32_t>(in + 8) << 22, SafeLoad<uint32_t>(in + 8), SafeLoad<uint32_t>(in + 8) >> 30 | SafeLoad<uint32_t>(in + 9) << 2 };
  shifts = simd_batch{ 0, 0, 4, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 26-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 9) >> 24 | SafeLoad<uint32_t>(in + 10) << 8, SafeLoad<uint32_t>(in + 10) >> 18 | SafeLoad<uint32_t>(in + 11) << 14, SafeLoad<uint32_t>(in + 11) >> 12 | SafeLoad<uint32_t>(in + 12) << 20, SafeLoad<uint32_t>(in + 12) };
  shifts = simd_batch{ 0, 0, 0, 6 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 26-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 13), SafeLoad<uint32_t>(in + 13) >> 26 | SafeLoad<uint32_t>(in + 14) << 6, SafeLoad<uint32_t>(in + 14) >> 20 | SafeLoad<uint32_t>(in + 15) << 12, SafeLoad<uint32_t>(in + 15) >> 14 | SafeLoad<uint32_t>(in + 16) << 18 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 26-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 16) >> 8 | SafeLoad<uint32_t>(in + 17) << 24, SafeLoad<uint32_t>(in + 17), SafeLoad<uint32_t>(in + 17) >> 28 | SafeLoad<uint32_t>(in + 18) << 4, SafeLoad<uint32_t>(in + 18) >> 22 | SafeLoad<uint32_t>(in + 19) << 10 };
  shifts = simd_batch{ 0, 2, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 26-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 19) >> 16 | SafeLoad<uint32_t>(in + 20) << 16, SafeLoad<uint32_t>(in + 20) >> 10 | SafeLoad<uint32_t>(in + 21) << 22, SafeLoad<uint32_t>(in + 21), SafeLoad<uint32_t>(in + 21) >> 30 | SafeLoad<uint32_t>(in + 22) << 2 };
  shifts = simd_batch{ 0, 0, 4, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 26-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 22) >> 24 | SafeLoad<uint32_t>(in + 23) << 8, SafeLoad<uint32_t>(in + 23) >> 18 | SafeLoad<uint32_t>(in + 24) << 14, SafeLoad<uint32_t>(in + 24) >> 12 | SafeLoad<uint32_t>(in + 25) << 20, SafeLoad<uint32_t>(in + 25) };
  shifts = simd_batch{ 0, 0, 0, 6 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  in += 26;
  return in;
}
// Unpack 32 values stored at 27 bits each from `in` into 32 full uint32_t
// slots in `out`.  Cross-word values are rebuilt by combining the tail of one
// input word with the head of the next; per-lane shifts align in-word values
// and the 0x7ffffff mask keeps only the 27 payload bits.  Returns `in`
// advanced past the 27 consumed input words.
inline static const uint32_t* unpack27_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x7ffffff;

  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;

  // extract 27-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 27 | SafeLoad<uint32_t>(in + 1) << 5, SafeLoad<uint32_t>(in + 1) >> 22 | SafeLoad<uint32_t>(in + 2) << 10, SafeLoad<uint32_t>(in + 2) >> 17 | SafeLoad<uint32_t>(in + 3) << 15 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 27-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 3) >> 12 | SafeLoad<uint32_t>(in + 4) << 20, SafeLoad<uint32_t>(in + 4) >> 7 | SafeLoad<uint32_t>(in + 5) << 25, SafeLoad<uint32_t>(in + 5), SafeLoad<uint32_t>(in + 5) >> 29 | SafeLoad<uint32_t>(in + 6) << 3 };
  shifts = simd_batch{ 0, 0, 2, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 27-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 6) >> 24 | SafeLoad<uint32_t>(in + 7) << 8, SafeLoad<uint32_t>(in + 7) >> 19 | SafeLoad<uint32_t>(in + 8) << 13, SafeLoad<uint32_t>(in + 8) >> 14 | SafeLoad<uint32_t>(in + 9) << 18, SafeLoad<uint32_t>(in + 9) >> 9 | SafeLoad<uint32_t>(in + 10) << 23 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 27-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 10), SafeLoad<uint32_t>(in + 10) >> 31 | SafeLoad<uint32_t>(in + 11) << 1, SafeLoad<uint32_t>(in + 11) >> 26 | SafeLoad<uint32_t>(in + 12) << 6, SafeLoad<uint32_t>(in + 12) >> 21 | SafeLoad<uint32_t>(in + 13) << 11 };
  shifts = simd_batch{ 4, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 27-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 13) >> 16 | SafeLoad<uint32_t>(in + 14) << 16, SafeLoad<uint32_t>(in + 14) >> 11 | SafeLoad<uint32_t>(in + 15) << 21, SafeLoad<uint32_t>(in + 15) >> 6 | SafeLoad<uint32_t>(in + 16) << 26, SafeLoad<uint32_t>(in + 16) };
  shifts = simd_batch{ 0, 0, 0, 1 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 27-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 16) >> 28 | SafeLoad<uint32_t>(in + 17) << 4, SafeLoad<uint32_t>(in + 17) >> 23 | SafeLoad<uint32_t>(in + 18) << 9, SafeLoad<uint32_t>(in + 18) >> 18 | SafeLoad<uint32_t>(in + 19) << 14, SafeLoad<uint32_t>(in + 19) >> 13 | SafeLoad<uint32_t>(in + 20) << 19 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 27-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 20) >> 8 | SafeLoad<uint32_t>(in + 21) << 24, SafeLoad<uint32_t>(in + 21), SafeLoad<uint32_t>(in + 21) >> 30 | SafeLoad<uint32_t>(in + 22) << 2, SafeLoad<uint32_t>(in + 22) >> 25 | SafeLoad<uint32_t>(in + 23) << 7 };
  shifts = simd_batch{ 0, 3, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 27-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 23) >> 20 | SafeLoad<uint32_t>(in + 24) << 12, SafeLoad<uint32_t>(in + 24) >> 15 | SafeLoad<uint32_t>(in + 25) << 17, SafeLoad<uint32_t>(in + 25) >> 10 | SafeLoad<uint32_t>(in + 26) << 22, SafeLoad<uint32_t>(in + 26) };
  shifts = simd_batch{ 0, 0, 0, 5 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  in += 27;
  return in;
}
// Unpack 32 values stored at 28 bits each from `in` into 32 full uint32_t
// slots in `out`.  Because 28 divides evenly into 8 values per 7 input words,
// the same stitch/shift pattern repeats four times.  The 0xfffffff mask keeps
// the 28 payload bits.  Returns `in` advanced past the 28 consumed words.
inline static const uint32_t* unpack28_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0xfffffff;

  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;

  // extract 28-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 28 | SafeLoad<uint32_t>(in + 1) << 4, SafeLoad<uint32_t>(in + 1) >> 24 | SafeLoad<uint32_t>(in + 2) << 8, SafeLoad<uint32_t>(in + 2) >> 20 | SafeLoad<uint32_t>(in + 3) << 12 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 28-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 3) >> 16 | SafeLoad<uint32_t>(in + 4) << 16, SafeLoad<uint32_t>(in + 4) >> 12 | SafeLoad<uint32_t>(in + 5) << 20, SafeLoad<uint32_t>(in + 5) >> 8 | SafeLoad<uint32_t>(in + 6) << 24, SafeLoad<uint32_t>(in + 6) };
  shifts = simd_batch{ 0, 0, 0, 4 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 28-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 7), SafeLoad<uint32_t>(in + 7) >> 28 | SafeLoad<uint32_t>(in + 8) << 4, SafeLoad<uint32_t>(in + 8) >> 24 | SafeLoad<uint32_t>(in + 9) << 8, SafeLoad<uint32_t>(in + 9) >> 20 | SafeLoad<uint32_t>(in + 10) << 12 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 28-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 10) >> 16 | SafeLoad<uint32_t>(in + 11) << 16, SafeLoad<uint32_t>(in + 11) >> 12 | SafeLoad<uint32_t>(in + 12) << 20, SafeLoad<uint32_t>(in + 12) >> 8 | SafeLoad<uint32_t>(in + 13) << 24, SafeLoad<uint32_t>(in + 13) };
  shifts = simd_batch{ 0, 0, 0, 4 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 28-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 14), SafeLoad<uint32_t>(in + 14) >> 28 | SafeLoad<uint32_t>(in + 15) << 4, SafeLoad<uint32_t>(in + 15) >> 24 | SafeLoad<uint32_t>(in + 16) << 8, SafeLoad<uint32_t>(in + 16) >> 20 | SafeLoad<uint32_t>(in + 17) << 12 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 28-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 17) >> 16 | SafeLoad<uint32_t>(in + 18) << 16, SafeLoad<uint32_t>(in + 18) >> 12 | SafeLoad<uint32_t>(in + 19) << 20, SafeLoad<uint32_t>(in + 19) >> 8 | SafeLoad<uint32_t>(in + 20) << 24, SafeLoad<uint32_t>(in + 20) };
  shifts = simd_batch{ 0, 0, 0, 4 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 28-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 21), SafeLoad<uint32_t>(in + 21) >> 28 | SafeLoad<uint32_t>(in + 22) << 4, SafeLoad<uint32_t>(in + 22) >> 24 | SafeLoad<uint32_t>(in + 23) << 8, SafeLoad<uint32_t>(in + 23) >> 20 | SafeLoad<uint32_t>(in + 24) << 12 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 28-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 24) >> 16 | SafeLoad<uint32_t>(in + 25) << 16, SafeLoad<uint32_t>(in + 25) >> 12 | SafeLoad<uint32_t>(in + 26) << 20, SafeLoad<uint32_t>(in + 26) >> 8 | SafeLoad<uint32_t>(in + 27) << 24, SafeLoad<uint32_t>(in + 27) };
  shifts = simd_batch{ 0, 0, 0, 4 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  in += 28;
  return in;
}
// Unpack 32 values stored at 29 bits each from `in` into 32 full uint32_t
// slots in `out`.  Word-straddling values are reassembled by OR-ing adjacent
// input words; per-lane shifts align in-word values and the 0x1fffffff mask
// keeps the 29 payload bits.  Returns `in` advanced past the 29 consumed
// input words.
inline static const uint32_t* unpack29_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x1fffffff;

  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;

  // extract 29-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 29 | SafeLoad<uint32_t>(in + 1) << 3, SafeLoad<uint32_t>(in + 1) >> 26 | SafeLoad<uint32_t>(in + 2) << 6, SafeLoad<uint32_t>(in + 2) >> 23 | SafeLoad<uint32_t>(in + 3) << 9 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 29-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 3) >> 20 | SafeLoad<uint32_t>(in + 4) << 12, SafeLoad<uint32_t>(in + 4) >> 17 | SafeLoad<uint32_t>(in + 5) << 15, SafeLoad<uint32_t>(in + 5) >> 14 | SafeLoad<uint32_t>(in + 6) << 18, SafeLoad<uint32_t>(in + 6) >> 11 | SafeLoad<uint32_t>(in + 7) << 21 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 29-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 7) >> 8 | SafeLoad<uint32_t>(in + 8) << 24, SafeLoad<uint32_t>(in + 8) >> 5 | SafeLoad<uint32_t>(in + 9) << 27, SafeLoad<uint32_t>(in + 9), SafeLoad<uint32_t>(in + 9) >> 31 | SafeLoad<uint32_t>(in + 10) << 1 };
  shifts = simd_batch{ 0, 0, 2, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 29-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 10) >> 28 | SafeLoad<uint32_t>(in + 11) << 4, SafeLoad<uint32_t>(in + 11) >> 25 | SafeLoad<uint32_t>(in + 12) << 7, SafeLoad<uint32_t>(in + 12) >> 22 | SafeLoad<uint32_t>(in + 13) << 10, SafeLoad<uint32_t>(in + 13) >> 19 | SafeLoad<uint32_t>(in + 14) << 13 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 29-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 14) >> 16 | SafeLoad<uint32_t>(in + 15) << 16, SafeLoad<uint32_t>(in + 15) >> 13 | SafeLoad<uint32_t>(in + 16) << 19, SafeLoad<uint32_t>(in + 16) >> 10 | SafeLoad<uint32_t>(in + 17) << 22, SafeLoad<uint32_t>(in + 17) >> 7 | SafeLoad<uint32_t>(in + 18) << 25 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 29-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 18) >> 4 | SafeLoad<uint32_t>(in + 19) << 28, SafeLoad<uint32_t>(in + 19), SafeLoad<uint32_t>(in + 19) >> 30 | SafeLoad<uint32_t>(in + 20) << 2, SafeLoad<uint32_t>(in + 20) >> 27 | SafeLoad<uint32_t>(in + 21) << 5 };
  shifts = simd_batch{ 0, 1, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 29-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 21) >> 24 | SafeLoad<uint32_t>(in + 22) << 8, SafeLoad<uint32_t>(in + 22) >> 21 | SafeLoad<uint32_t>(in + 23) << 11, SafeLoad<uint32_t>(in + 23) >> 18 | SafeLoad<uint32_t>(in + 24) << 14, SafeLoad<uint32_t>(in + 24) >> 15 | SafeLoad<uint32_t>(in + 25) << 17 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 29-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 25) >> 12 | SafeLoad<uint32_t>(in + 26) << 20, SafeLoad<uint32_t>(in + 26) >> 9 | SafeLoad<uint32_t>(in + 27) << 23, SafeLoad<uint32_t>(in + 27) >> 6 | SafeLoad<uint32_t>(in + 28) << 26, SafeLoad<uint32_t>(in + 28) };
  shifts = simd_batch{ 0, 0, 0, 3 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  in += 29;
  return in;
}
// Unpack 32 values stored at 30 bits each from `in` into 32 full uint32_t
// slots in `out`.  16 values fill exactly 15 input words, so the stitch/shift
// pattern repeats twice.  The 0x3fffffff mask keeps the 30 payload bits.
// Returns `in` advanced past the 30 consumed input words.
inline static const uint32_t* unpack30_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x3fffffff;

  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;

  // extract 30-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 30 | SafeLoad<uint32_t>(in + 1) << 2, SafeLoad<uint32_t>(in + 1) >> 28 | SafeLoad<uint32_t>(in + 2) << 4, SafeLoad<uint32_t>(in + 2) >> 26 | SafeLoad<uint32_t>(in + 3) << 6 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 30-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 3) >> 24 | SafeLoad<uint32_t>(in + 4) << 8, SafeLoad<uint32_t>(in + 4) >> 22 | SafeLoad<uint32_t>(in + 5) << 10, SafeLoad<uint32_t>(in + 5) >> 20 | SafeLoad<uint32_t>(in + 6) << 12, SafeLoad<uint32_t>(in + 6) >> 18 | SafeLoad<uint32_t>(in + 7) << 14 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 30-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 7) >> 16 | SafeLoad<uint32_t>(in + 8) << 16, SafeLoad<uint32_t>(in + 8) >> 14 | SafeLoad<uint32_t>(in + 9) << 18, SafeLoad<uint32_t>(in + 9) >> 12 | SafeLoad<uint32_t>(in + 10) << 20, SafeLoad<uint32_t>(in + 10) >> 10 | SafeLoad<uint32_t>(in + 11) << 22 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 30-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 11) >> 8 | SafeLoad<uint32_t>(in + 12) << 24, SafeLoad<uint32_t>(in + 12) >> 6 | SafeLoad<uint32_t>(in + 13) << 26, SafeLoad<uint32_t>(in + 13) >> 4 | SafeLoad<uint32_t>(in + 14) << 28, SafeLoad<uint32_t>(in + 14) };
  shifts = simd_batch{ 0, 0, 0, 2 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 30-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 15), SafeLoad<uint32_t>(in + 15) >> 30 | SafeLoad<uint32_t>(in + 16) << 2, SafeLoad<uint32_t>(in + 16) >> 28 | SafeLoad<uint32_t>(in + 17) << 4, SafeLoad<uint32_t>(in + 17) >> 26 | SafeLoad<uint32_t>(in + 18) << 6 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 30-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 18) >> 24 | SafeLoad<uint32_t>(in + 19) << 8, SafeLoad<uint32_t>(in + 19) >> 22 | SafeLoad<uint32_t>(in + 20) << 10, SafeLoad<uint32_t>(in + 20) >> 20 | SafeLoad<uint32_t>(in + 21) << 12, SafeLoad<uint32_t>(in + 21) >> 18 | SafeLoad<uint32_t>(in + 22) << 14 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 30-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 22) >> 16 | SafeLoad<uint32_t>(in + 23) << 16, SafeLoad<uint32_t>(in + 23) >> 14 | SafeLoad<uint32_t>(in + 24) << 18, SafeLoad<uint32_t>(in + 24) >> 12 | SafeLoad<uint32_t>(in + 25) << 20, SafeLoad<uint32_t>(in + 25) >> 10 | SafeLoad<uint32_t>(in + 26) << 22 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 30-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 26) >> 8 | SafeLoad<uint32_t>(in + 27) << 24, SafeLoad<uint32_t>(in + 27) >> 6 | SafeLoad<uint32_t>(in + 28) << 26, SafeLoad<uint32_t>(in + 28) >> 4 | SafeLoad<uint32_t>(in + 29) << 28, SafeLoad<uint32_t>(in + 29) };
  shifts = simd_batch{ 0, 0, 0, 2 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  in += 30;
  return in;
}
// Unpack 32 values stored at 31 bits each from `in` into 32 full uint32_t
// slots in `out`.  Every value except the first and last straddles a word
// boundary, so each lane stitches two adjacent input words; the 0x7fffffff
// mask keeps the 31 payload bits.  Returns `in` advanced past the 31
// consumed input words.
inline static const uint32_t* unpack31_32(const uint32_t* in, uint32_t* out) {
  uint32_t mask = 0x7fffffff;

  simd_batch masks(mask);
  simd_batch words, shifts;
  simd_batch results;

  // extract 31-bit bundles 0 to 3
  words = simd_batch{ SafeLoad<uint32_t>(in + 0), SafeLoad<uint32_t>(in + 0) >> 31 | SafeLoad<uint32_t>(in + 1) << 1, SafeLoad<uint32_t>(in + 1) >> 30 | SafeLoad<uint32_t>(in + 2) << 2, SafeLoad<uint32_t>(in + 2) >> 29 | SafeLoad<uint32_t>(in + 3) << 3 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 31-bit bundles 4 to 7
  words = simd_batch{ SafeLoad<uint32_t>(in + 3) >> 28 | SafeLoad<uint32_t>(in + 4) << 4, SafeLoad<uint32_t>(in + 4) >> 27 | SafeLoad<uint32_t>(in + 5) << 5, SafeLoad<uint32_t>(in + 5) >> 26 | SafeLoad<uint32_t>(in + 6) << 6, SafeLoad<uint32_t>(in + 6) >> 25 | SafeLoad<uint32_t>(in + 7) << 7 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 31-bit bundles 8 to 11
  words = simd_batch{ SafeLoad<uint32_t>(in + 7) >> 24 | SafeLoad<uint32_t>(in + 8) << 8, SafeLoad<uint32_t>(in + 8) >> 23 | SafeLoad<uint32_t>(in + 9) << 9, SafeLoad<uint32_t>(in + 9) >> 22 | SafeLoad<uint32_t>(in + 10) << 10, SafeLoad<uint32_t>(in + 10) >> 21 | SafeLoad<uint32_t>(in + 11) << 11 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 31-bit bundles 12 to 15
  words = simd_batch{ SafeLoad<uint32_t>(in + 11) >> 20 | SafeLoad<uint32_t>(in + 12) << 12, SafeLoad<uint32_t>(in + 12) >> 19 | SafeLoad<uint32_t>(in + 13) << 13, SafeLoad<uint32_t>(in + 13) >> 18 | SafeLoad<uint32_t>(in + 14) << 14, SafeLoad<uint32_t>(in + 14) >> 17 | SafeLoad<uint32_t>(in + 15) << 15 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 31-bit bundles 16 to 19
  words = simd_batch{ SafeLoad<uint32_t>(in + 15) >> 16 | SafeLoad<uint32_t>(in + 16) << 16, SafeLoad<uint32_t>(in + 16) >> 15 | SafeLoad<uint32_t>(in + 17) << 17, SafeLoad<uint32_t>(in + 17) >> 14 | SafeLoad<uint32_t>(in + 18) << 18, SafeLoad<uint32_t>(in + 18) >> 13 | SafeLoad<uint32_t>(in + 19) << 19 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 31-bit bundles 20 to 23
  words = simd_batch{ SafeLoad<uint32_t>(in + 19) >> 12 | SafeLoad<uint32_t>(in + 20) << 20, SafeLoad<uint32_t>(in + 20) >> 11 | SafeLoad<uint32_t>(in + 21) << 21, SafeLoad<uint32_t>(in + 21) >> 10 | SafeLoad<uint32_t>(in + 22) << 22, SafeLoad<uint32_t>(in + 22) >> 9 | SafeLoad<uint32_t>(in + 23) << 23 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 31-bit bundles 24 to 27
  words = simd_batch{ SafeLoad<uint32_t>(in + 23) >> 8 | SafeLoad<uint32_t>(in + 24) << 24, SafeLoad<uint32_t>(in + 24) >> 7 | SafeLoad<uint32_t>(in + 25) << 25, SafeLoad<uint32_t>(in + 25) >> 6 | SafeLoad<uint32_t>(in + 26) << 26, SafeLoad<uint32_t>(in + 26) >> 5 | SafeLoad<uint32_t>(in + 27) << 27 };
  shifts = simd_batch{ 0, 0, 0, 0 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  // extract 31-bit bundles 28 to 31
  words = simd_batch{ SafeLoad<uint32_t>(in + 27) >> 4 | SafeLoad<uint32_t>(in + 28) << 28, SafeLoad<uint32_t>(in + 28) >> 3 | SafeLoad<uint32_t>(in + 29) << 29, SafeLoad<uint32_t>(in + 29) >> 2 | SafeLoad<uint32_t>(in + 30) << 30, SafeLoad<uint32_t>(in + 30) };
  shifts = simd_batch{ 0, 0, 0, 1 };
  results = (words >> shifts) & masks;
  results.store_unaligned(out);
  out += 4;

  in += 31;
  return in;
}
// Degenerate full-width case: 32-bit packing is an identity transform, so the
// 32 input words are copied straight through to the output.  Returns `in`
// advanced past the 32 consumed input words, matching the other unpackers.
inline static const uint32_t* unpack32_32(const uint32_t* in, uint32_t* out) {
  for (int i = 0; i < 32; ++i) {
    out[i] = in[i];
  }
  return in + 32;
}
}; // struct UnpackBits128
} // namespace
} // namespace internal
} // namespace arrow
| cpcloud/arrow | cpp/src/arrow/util/bpacking_simd128_generated.h | C | apache-2.0 | 98,418 |
/*
* Copyright (c) 2008 - 2012, Andy Bierman, All Rights Reserved.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/* FILE: mgr_hello.c
Handle the NETCONF <hello> (top-level) element.
*********************************************************************
* *
* C H A N G E H I S T O R Y *
* *
*********************************************************************
date init comment
----------------------------------------------------------------------
15jan07 abb begun
*********************************************************************
* *
* I N C L U D E F I L E S *
* *
*********************************************************************/
#include <stdio.h>
#include <stdlib.h>
#ifndef _H_procdefs
#include "procdefs.h"
#endif
#ifndef _H_cap
#include "cap.h"
#endif
#ifndef _H_log
#include "log.h"
#endif
#ifndef _H_mgr
#include "mgr.h"
#endif
#ifndef _H_mgr_cap
#include "mgr_cap.h"
#endif
#ifndef _H_mgr_hello
#include "mgr_hello.h"
#endif
#ifndef _H_mgr_ses
#include "mgr_ses.h"
#endif
#ifndef _H_mgr_val_parse
#include "mgr_val_parse.h"
#endif
#ifndef _H_ncx
#include "ncx.h"
#endif
#ifndef _H_op
#include "op.h"
#endif
#ifndef _H_ses
#include "ses.h"
#endif
#ifndef _H_status
#include "status.h"
#endif
#ifndef _H_top
#include "top.h"
#endif
#ifndef _H_val
#include "val.h"
#endif
#ifndef _H_xml_util
#include "xml_util.h"
#endif
#ifndef _H_xml_wr
#include "xml_wr.h"
#endif
/********************************************************************
* *
* C O N S T A N T S *
* *
*********************************************************************/
#ifdef DEBUG
#define MGR_HELLO_DEBUG 1
#endif
#define MGR_SERVER_HELLO_OBJ ((const xmlChar *)"server-hello")
/********************************************************************
* *
* V A R I A B L E S *
* *
*********************************************************************/
static boolean mgr_hello_init_done = FALSE;
/********************************************************************
* FUNCTION process_server_hello
*
* Process the NETCONF server <hello> contents
*
* 1) Protocol capabilities
* 2) Module capabilities
* 3) Unrecognized capabilities
*
* INPUTS:
* scb == session control block to set
* hello == value struct for the hello message to check
*
* OUTPUTS:
* server caps in the scb->mgrcb is set
*
* RETURNS:
* status
*********************************************************************/
static status_t
    process_server_hello (ses_cb_t *scb,
                          val_value_t *hello)
{
    val_value_t  *caps, *sidval, *cap;
    mgr_scb_t    *mscb;
    boolean       c1, c2;
    status_t      res;

    mscb = mgr_ses_get_mscb(scb);

    /* make sure the capabilities element is present
     * This should not fail, since already parsed this far
     */
    caps = val_find_child(hello, NC_MODULE, NCX_EL_CAPABILITIES);
    if (!caps || caps->res != NO_ERR) {
        log_error("\nError: no <capabilities> found in server <hello>");
        return ERR_NCX_MISSING_VAL_INST;
    }

    /* make sure the session-id element is present
     * This should not fail, since already parsed this far
     */
    sidval = val_find_child(hello, NC_MODULE, NCX_EL_SESSION_ID);
    if (!sidval || sidval->res != NO_ERR) {
        log_error("\nError: no <session-id> found in server <hello>");
        return ERR_NCX_MISSING_VAL_INST;
    } else {
        /* remember the session-id the server assigned to this session */
        mscb->agtsid = VAL_UINT(sidval);
    }

    /* go through the capability nodes and construct a caplist
     * each URI is tried first as a standard cap, then as a module
     * cap, and finally recorded as an enterprise (unknown) cap;
     * note that errors other than ERR_NCX_SKIPPED from the first
     * two attempts are not propagated here
     */
    for (cap = val_find_child(caps, NC_MODULE, NCX_EL_CAPABILITY);
         cap != NULL;
         cap = val_find_next_child(caps,
                                   NC_MODULE,
                                   NCX_EL_CAPABILITY,
                                   cap)) {

        if (cap->res != NO_ERR) {
            continue;
        }

        res = cap_add_std_string(&mscb->caplist, VAL_STR(cap));
        if (res == ERR_NCX_SKIPPED) {
            res = cap_add_module_string(&mscb->caplist, VAL_STR(cap));
            if (res == ERR_NCX_SKIPPED) {
                /*
                 * if (ncx_warning_enabled(ERR_NCX_RCV_UNKNOWN_CAP)) {
                 *    log_warn("\nWarning: received unknown capability '%s'",
                 *             VAL_STR(cap));
                 * }
                 */
                if (LOGDEBUG2) {
                    log_debug2("\nmgr: Got enterprise capability %s",
                               VAL_STR(cap));
                }

                /* hack: check for juniper 1.0 server
                 * change the useprefix mode to TRUE to get
                 * <rpc> operations to work with this server
                 */
                if (!xml_strcmp(VAL_STR(cap), CAP_JUNOS)) {
                    if (LOGDEBUG) {
                        log_debug("\nUsing XML prefixes to work "
                                  "with Junos 1.0 server\n");
                    }
                    ncx_set_useprefix(TRUE);
                }

                res = cap_add_ent(&mscb->caplist, VAL_STR(cap));
                if (res != NO_ERR) {
                    return res;
                }
            }
        }
    }

    /* check if the mandatory base protocol capability was set;
     * choose the newest protocol version that both sides support,
     * or fail the session if there is no overlap with the versions
     * requested for this session
     */
    res = NO_ERR;
    c1 = cap_std_set(&mscb->caplist, CAP_STDID_V1);
    c2 = cap_std_set(&mscb->caplist, CAP_STDID_V11);
    if (c1 && c2) {
        /* server supports both versions; prefer base:1.1 */
        if (LOGDEBUG2) {
            log_debug2("\nmgr_hello: server supports "
                       "base:1.0 and base:1.1");
        }
        if (ses_protocol_requested(scb, NCX_PROTO_NETCONF11)) {
            if (LOGDEBUG2) {
                log_debug2("\nmgr_hello: set protocol to base:1.1 "
                           "for session '%d'",
                           scb->sid);
            }
            ses_set_protocol(scb, NCX_PROTO_NETCONF11);
        } else if (ses_protocol_requested(scb, NCX_PROTO_NETCONF10)) {
            if (LOGDEBUG2) {
                log_debug2("\nmgr_hello: set protocol to base:1.0 "
                           "for session '%d'",
                           scb->sid);
            }
            ses_set_protocol(scb, NCX_PROTO_NETCONF10);
        } else {
            log_error("\nError: Internal: no protocols requested, "
                      "dropping session '%d'",
                      scb->sid);
            res = ERR_NCX_MISSING_VAL_INST;
        }
    } else if (c1) {
        /* server supports base:1.0 only */
        if (LOGDEBUG2) {
            log_debug2("\nmgr_hello: server supports "
                       "base:1.0 only");
        }
        if (ses_protocol_requested(scb, NCX_PROTO_NETCONF10)) {
            if (LOGDEBUG2) {
                log_debug2("\nmgr_hello: set protocol to base:1.0 "
                           "for session '%d'",
                           scb->sid);
            }
            ses_set_protocol(scb, NCX_PROTO_NETCONF10);
        } else {
            log_error("\nError: Server supports base:1.0 only;"
                      "\n   Protocol 'netconf1.0' not enabled, "
                      "dropping session '%d'",
                      scb->sid);
            res = ERR_NCX_MISSING_VAL_INST;
        }
    } else if (c2) {
        /* server supports base:1.1 only */
        if (LOGDEBUG2) {
            log_debug2("\nmgr_hello: server supports "
                       "base:1.1 only");
        }
        if (ses_protocol_requested(scb, NCX_PROTO_NETCONF11)) {
            if (LOGDEBUG2) {
                log_debug2("\nmgr_hello: set protocol to base:1.1 "
                           "for session '%d'",
                           scb->sid);
            }
            ses_set_protocol(scb, NCX_PROTO_NETCONF11);
        } else {
            log_error("\nError: Server supports base:1.1 only;"
                      "\n   Protocol 'netconf1.1' not enabled, "
                      "dropping session '%d'",
                      scb->sid);
            res = ERR_NCX_MISSING_VAL_INST;
        }
    } else {
        /* neither base capability advertised: not a usable server */
        log_error("\nError: no support for base:1.0 "
                  "or base:1.1 found in server <hello>;"
                  "\n   dropping session '%d'",
                  scb->sid);
        return ERR_NCX_MISSING_VAL_INST;
    }

    /* set target type var in the manager session control block
     * based on which writable-datastore capabilities were advertised
     */
    c1 = cap_std_set(&mscb->caplist, CAP_STDID_WRITE_RUNNING);
    c2 = cap_std_set(&mscb->caplist, CAP_STDID_CANDIDATE);
    if (c1 && c2) {
        mscb->targtyp = NCX_AGT_TARG_CAND_RUNNING;
    } else if (c1) {
        mscb->targtyp = NCX_AGT_TARG_RUNNING;
    } else if (c2) {
        mscb->targtyp = NCX_AGT_TARG_CANDIDATE;
    } else {
        mscb->targtyp = NCX_AGT_TARG_NONE;
        if (LOGINFO) {
            log_info("\nmgr_hello: no writable target found for"
                     " session %u (a:%u)",
                     scb->sid,
                     mscb->agtsid);
        }
    }

    /* set the startup type in the mscb */
    if (cap_std_set(&mscb->caplist, CAP_STDID_STARTUP)) {
        mscb->starttyp = NCX_AGT_START_DISTINCT;
    } else {
        mscb->starttyp = NCX_AGT_START_MIRROR;
    }

    /* NOTE(review): 'res' may still hold ERR_NCX_MISSING_VAL_INST from
     * the protocol-version selection above and is returned implicitly
     * as NO_ERR here only when that selection succeeded */
    return NO_ERR;

} /* process_server_hello */
/************** E X T E R N A L F U N C T I O N S **********/
/********************************************************************
* FUNCTION mgr_hello_init
*
* Initialize the mgr_hello module
* Adds the mgr_hello_dispatch function as the handler
* for the NETCONF <hello> top-level element.
*
* INPUTS:
* none
* RETURNS:
* NO_ERR if all okay, the minimum spare requests will be malloced
*********************************************************************/
status_t
    mgr_hello_init (void)
{
    status_t  res;

    /* nothing to do if the <hello> handler is already registered */
    if (mgr_hello_init_done) {
        return NO_ERR;
    }

    /* register mgr_hello_dispatch for the top-level NETCONF <hello> */
    res = top_register_node(NC_MODULE, NCX_EL_HELLO, mgr_hello_dispatch);
    if (res == NO_ERR) {
        mgr_hello_init_done = TRUE;
    }
    return res;

}  /* mgr_hello_init */
/********************************************************************
* FUNCTION mgr_hello_cleanup
*
* Cleanup the mgr_hello module.
* Unregister the top-level NETCONF <hello> element
*
*********************************************************************/
void
    mgr_hello_cleanup (void)
{
    /* guard: nothing to undo unless init registered the handler */
    if (!mgr_hello_init_done) {
        return;
    }

    /* remove the top-level NETCONF <hello> handler */
    top_unregister_node(NC_MODULE, NCX_EL_HELLO);
    mgr_hello_init_done = FALSE;

}  /* mgr_hello_cleanup */
/********************************************************************
* FUNCTION mgr_hello_dispatch
*
* Handle an incoming <hello> message from the client
*
* INPUTS:
* scb == session control block
* top == top element descriptor
*********************************************************************/
void
    mgr_hello_dispatch (ses_cb_t *scb,
                        xml_node_t *top)
{
    val_value_t     *val;
    ncx_module_t    *mod;
    obj_template_t  *obj;
    mgr_scb_t       *mscb;
    xml_msg_hdr_t    msg;
    status_t         res;

#ifdef DEBUG
    if (!scb || !top) {
        SET_ERROR(ERR_INTERNAL_PTR);
        return;
    }
#endif

#ifdef MGR_HELLO_DEBUG
    if (LOGDEBUG) {
        log_debug("\nmgr_hello got node");
    }
    if (LOGDEBUG2) {
        xml_dump_node(top);
    }
#endif

    mscb = mgr_ses_get_mscb(scb);

    /* only process this message in hello wait state */
    if (scb->state != SES_ST_HELLO_WAIT) {
        /* TBD: stats update */
        if (LOGINFO) {
            log_info("\nmgr_hello dropped, wrong state for session %d",
                     scb->sid);
        }
        return;
    }

    /* init local vars */
    res = NO_ERR;
    val = NULL;
    obj = NULL;
    xml_msg_init_hdr(&msg);

    /* get a value struct to hold the server hello msg */
    val = val_new_value();
    if (!val) {
        res = ERR_INTERNAL_MEM;
    }

    /* get the type definition from the registry */
    if (res == NO_ERR) {
        mod = ncx_find_module(NC_MODULE, NULL);
        if (mod) {
            obj = ncx_find_object(mod, MGR_SERVER_HELLO_OBJ);
        }
        if (!obj) {
            /* netconf module should have loaded this definition */
            res = SET_ERROR(ERR_INTERNAL_PTR);
        }
    }

    /* parse an server hello message */
    if (res == NO_ERR) {
        res = mgr_val_parse(scb, obj, top, val);
    }

    /* examine the server capability list
     * and it matches the server protocol version
     */
    if (res == NO_ERR) {
        res = process_server_hello(scb, val);
    }

    /* report first error and close session
     * bugfix: the format string has exactly three conversions
     * (%s, %u, %u); the original call passed a fourth argument
     * (res) that was never consumed by the format
     */
    if (res != NO_ERR) {
        if (LOGINFO) {
            log_info("\nmgr_connect error (%s)\n  dropping session %u (a:%u)",
                     get_error_string(res),
                     scb->sid,
                     mscb->agtsid);
        }
    } else {
        /* hello exchange complete; session is ready for <rpc> traffic */
        scb->state = SES_ST_IDLE;
        if (LOGDEBUG) {
            log_debug("\nmgr_hello manager hello ok");
        }
    }

    if (val) {
        val_free_value(val);
    }

}  /* mgr_hello_dispatch */
/********************************************************************
* FUNCTION mgr_hello_send
*
* Send the manager <hello> message to the server on the
* specified session
*
* INPUTS:
* scb == session control block
*
* RETURNS:
* status
*********************************************************************/
status_t
    mgr_hello_send (ses_cb_t *scb)
{
    val_value_t  *mycaps;
    xml_msg_hdr_t msg;
    status_t      res;
    xml_attrs_t   attrs;
    boolean       anyout;
    xmlns_id_t    nc_id;

#ifdef DEBUG
    if (!scb) {
        return SET_ERROR(ERR_INTERNAL_PTR);
    }
#endif

#ifdef MGR_HELLO_DEBUG
    if (LOGDEBUG2) {
        log_debug2("\nmgr sending hello on session %d", scb->sid);
    }
#endif

    res = NO_ERR;
    anyout = FALSE;    /* tracks whether any output was started */
    xml_msg_init_hdr(&msg);
    xml_init_attrs(&attrs);
    nc_id = xmlns_nc_id();

    /* get my client caps, custom made for this session */
    mycaps = mgr_cap_get_ses_capsval(scb);
    if (!mycaps) {
        res = SET_ERROR(ERR_INTERNAL_PTR);
    }

    /* setup the prefix map with the NETCONF namespace */
    if (res == NO_ERR) {
        res = xml_msg_build_prefix_map(&msg, &attrs, TRUE, FALSE);
    }

    /* send the <?xml?> directive
     * each of the following steps runs only if all previous
     * steps succeeded (res still NO_ERR)
     */
    if (res == NO_ERR) {
        res = ses_start_msg(scb);
    }

    /* start the hello element */
    if (res == NO_ERR) {
        anyout = TRUE;
        xml_wr_begin_elem_ex(scb,
                             &msg,
                             0,
                             nc_id,
                             NCX_EL_HELLO,
                             &attrs,
                             ATTRQ,
                             0,
                             START);
    }

    /* send the capabilities list */
    if (res == NO_ERR) {
        xml_wr_full_val(scb, &msg, mycaps, NCX_DEF_INDENT);
    }

    /* finish the hello element */
    if (res == NO_ERR) {
        xml_wr_end_elem(scb, &msg, nc_id, NCX_EL_HELLO, 0);
    }

    /* finish the message; only if a message was actually started */
    if (anyout) {
        ses_finish_msg(scb);
    }

    /* release all locally-acquired resources regardless of outcome */
    xml_clean_attrs(&attrs);
    xml_msg_clean_hdr(&msg);
    if (mycaps != NULL) {
        val_free_value(mycaps);
    }
    return res;

}  /* mgr_hello_send */
/* END file mgr_hello.c */
| hsnlab/escape | OpenYuma/netconf/src/mgr/mgr_hello.c | C | apache-2.0 | 16,228 |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.model;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.keycloak.common.util.Time;
import org.keycloak.models.ClientModel;
import org.keycloak.models.ClientSessionModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserLoginFailureModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.UserSessionModel;
import org.keycloak.protocol.oidc.OIDCLoginProtocol;
import org.keycloak.services.managers.UserManager;
import org.keycloak.testsuite.rule.KeycloakRule;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
* @author <a href="mailto:sthorger@redhat.com">Stian Thorgersen</a>
*/
public class UserSessionProviderTest {
    // Boots an embedded Keycloak server shared by all tests in this class.
    @ClassRule
    public static KeycloakRule kc = new KeycloakRule();
    // Active transaction-scoped session/realm; recycled between steps via resetSession().
    private KeycloakSession session;
    private RealmModel realm;
    @Before
    public void before() {
        // Open a session on the "test" realm and create the two users the tests operate on.
        session = kc.startSession();
        realm = session.realms().getRealm("test");
        session.users().addUser(realm, "user1").setEmail("user1@localhost");
        session.users().addUser(realm, "user2").setEmail("user2@localhost");
    }
    @After
    public void after() {
        // Commit pending work, then remove all user sessions and the users created in before().
        resetSession();
        session.sessions().removeUserSessions(realm);
        UserModel user1 = session.users().getUserByUsername("user1", realm);
        UserModel user2 = session.users().getUserByUsername("user2", realm);
        UserManager um = new UserManager(session);
        um.removeUser(realm, user1);
        um.removeUser(realm, user2);
        kc.stopSession(session, true);
    }
    // Sessions created by createSessions() should be retrievable with all their attributes intact.
    @Test
    public void testCreateSessions() {
        int started = Time.currentTime();
        UserSessionModel[] sessions = createSessions();
        assertSession(session.sessions().getUserSession(realm, sessions[0].getId()), session.users().getUserByUsername("user1", realm), "127.0.0.1", started, started, "test-app", "third-party");
        assertSession(session.sessions().getUserSession(realm, sessions[1].getId()), session.users().getUserByUsername("user1", realm), "127.0.0.2", started, started, "test-app");
        assertSession(session.sessions().getUserSession(realm, sessions[2].getId()), session.users().getUserByUsername("user2", realm), "127.0.0.3", started, started, "test-app");
    }
    // A change to lastSessionRefresh must survive a transaction boundary.
    @Test
    public void testUpdateSession() {
        UserSessionModel[] sessions = createSessions();
        session.sessions().getUserSession(realm, sessions[0].getId()).setLastSessionRefresh(1000);
        resetSession();
        assertEquals(1000, session.sessions().getUserSession(realm, sessions[0].getId()).getLastSessionRefresh());
    }
    @Test
    public void testCreateClientSession() {
        UserSessionModel[] sessions = createSessions();
        List<ClientSessionModel> clientSessions = session.sessions().getUserSession(realm, sessions[0].getId()).getClientSessions();
        assertEquals(2, clientSessions.size());
        String client1 = realm.getClientByClientId("test-app").getId();
        ClientSessionModel session1;
        // Client-session ordering is not guaranteed; locate the "test-app" one explicitly.
        if (clientSessions.get(0).getClient().getId().equals(client1)) {
            session1 = clientSessions.get(0);
        } else {
            session1 = clientSessions.get(1);
        }
        assertEquals(null, session1.getAction());
        assertEquals(realm.getClientByClientId("test-app").getClientId(), session1.getClient().getClientId());
        assertEquals(sessions[0].getId(), session1.getUserSession().getId());
        assertEquals("http://redirect", session1.getRedirectUri());
        assertEquals("state", session1.getNote(OIDCLoginProtocol.STATE_PARAM));
        assertEquals(2, session1.getRoles().size());
        assertTrue(session1.getRoles().contains("one"));
        assertTrue(session1.getRoles().contains("two"));
        assertEquals(2, session1.getProtocolMappers().size());
        assertTrue(session1.getProtocolMappers().contains("mapper-one"));
        assertTrue(session1.getProtocolMappers().contains("mapper-two"));
    }
    // Action and timestamp updates on a client session must survive a transaction boundary.
    @Test
    public void testUpdateClientSession() {
        UserSessionModel[] sessions = createSessions();
        String id = sessions[0].getClientSessions().get(0).getId();
        ClientSessionModel clientSession = session.sessions().getClientSession(realm, id);
        int time = clientSession.getTimestamp();
        assertEquals(null, clientSession.getAction());
        clientSession.setAction(ClientSessionModel.Action.CODE_TO_TOKEN.name());
        clientSession.setTimestamp(time + 10);
        kc.stopSession(session, true);
        session = kc.startSession();
        ClientSessionModel updated = session.sessions().getClientSession(realm, id);
        assertEquals(ClientSessionModel.Action.CODE_TO_TOKEN.name(), updated.getAction());
        assertEquals(time + 10, updated.getTimestamp());
    }
    @Test
    public void testGetUserSessions() {
        UserSessionModel[] sessions = createSessions();
        assertSessions(session.sessions().getUserSessions(realm, session.users().getUserByUsername("user1", realm)), sessions[0], sessions[1]);
        assertSessions(session.sessions().getUserSessions(realm, session.users().getUserByUsername("user2", realm)), sessions[2]);
    }
    // Removing user1's sessions must cascade to user1's client sessions while leaving user2's intact.
    @Test
    public void testRemoveUserSessionsByUser() {
        UserSessionModel[] sessions = createSessions();
        List<String> clientSessionsRemoved = new LinkedList<String>();
        List<String> clientSessionsKept = new LinkedList<String>();
        for (UserSessionModel s : sessions) {
            // Re-read the session so client sessions are loaded in the current transaction.
            s = session.sessions().getUserSession(realm, s.getId());
            for (ClientSessionModel c : s.getClientSessions()) {
                if (c.getUserSession().getUser().getUsername().equals("user1")) {
                    clientSessionsRemoved.add(c.getId());
                } else {
                    clientSessionsKept.add(c.getId());
                }
            }
        }
        session.sessions().removeUserSessions(realm, session.users().getUserByUsername("user1", realm));
        resetSession();
        assertTrue(session.sessions().getUserSessions(realm, session.users().getUserByUsername("user1", realm)).isEmpty());
        assertFalse(session.sessions().getUserSessions(realm, session.users().getUserByUsername("user2", realm)).isEmpty());
        for (String c : clientSessionsRemoved) {
            assertNull(session.sessions().getClientSession(realm, c));
        }
        for (String c : clientSessionsKept) {
            assertNotNull(session.sessions().getClientSession(realm, c));
        }
    }
    // Removing a single user session must cascade to its client sessions.
    @Test
    public void testRemoveUserSession() {
        UserSessionModel userSession = createSessions()[0];
        List<String> clientSessionsRemoved = new LinkedList<String>();
        for (ClientSessionModel c : userSession.getClientSessions()) {
            clientSessionsRemoved.add(c.getId());
        }
        session.sessions().removeUserSession(realm, userSession);
        resetSession();
        assertNull(session.sessions().getUserSession(realm, userSession.getId()));
        for (String c : clientSessionsRemoved) {
            assertNull(session.sessions().getClientSession(realm, c));
        }
    }
    // Realm-wide removal must wipe sessions of every user and all their client sessions.
    @Test
    public void testRemoveUserSessionsByRealm() {
        UserSessionModel[] sessions = createSessions();
        List<ClientSessionModel> clientSessions = new LinkedList<ClientSessionModel>();
        for (UserSessionModel s : sessions) {
            clientSessions.addAll(s.getClientSessions());
        }
        session.sessions().removeUserSessions(realm);
        resetSession();
        assertTrue(session.sessions().getUserSessions(realm, session.users().getUserByUsername("user1", realm)).isEmpty());
        assertTrue(session.sessions().getUserSessions(realm, session.users().getUserByUsername("user2", realm)).isEmpty());
        for (ClientSessionModel c : clientSessions) {
            assertNull(session.sessions().getClientSession(realm, c.getId()));
        }
    }
    // Deleting a client must remove only that client's sessions, per removal invocation.
    @Test
    public void testOnClientRemoved() {
        UserSessionModel[] sessions = createSessions();
        List<String> clientSessionsRemoved = new LinkedList<String>();
        List<String> clientSessionsKept = new LinkedList<String>();
        for (UserSessionModel s : sessions) {
            s = session.sessions().getUserSession(realm, s.getId());
            for (ClientSessionModel c : s.getClientSessions()) {
                if (c.getClient().getClientId().equals("third-party")) {
                    clientSessionsRemoved.add(c.getId());
                } else {
                    clientSessionsKept.add(c.getId());
                }
            }
        }
        // First remove "third-party": only its client sessions disappear.
        session.sessions().onClientRemoved(realm, realm.getClientByClientId("third-party"));
        resetSession();
        for (String c : clientSessionsRemoved) {
            assertNull(session.sessions().getClientSession(realm, c));
        }
        for (String c : clientSessionsKept) {
            assertNotNull(session.sessions().getClientSession(realm, c));
        }
        // Then remove "test-app": the remaining client sessions disappear as well.
        session.sessions().onClientRemoved(realm, realm.getClientByClientId("test-app"));
        resetSession();
        for (String c : clientSessionsRemoved) {
            assertNull(session.sessions().getClientSession(realm, c));
        }
        for (String c : clientSessionsKept) {
            assertNull(session.sessions().getClientSession(realm, c));
        }
    }
    // removeExpired must drop sessions past max lifespan or idle timeout, keeping valid ones.
    @Test
    public void testRemoveUserSessionsByExpired() {
        session.sessions().getUserSessions(realm, session.users().getUserByUsername("user1", realm));
        ClientModel client = realm.getClientByClientId("test-app");
        try {
            Set<String> expired = new HashSet<String>();
            Set<String> expiredClientSessions = new HashSet<String>();
            // Shift the clock back beyond the SSO max lifespan so this session is born expired.
            Time.setOffset(-(realm.getSsoSessionMaxLifespan() + 1));
            expired.add(session.sessions().createUserSession(realm, session.users().getUserByUsername("user1", realm), "user1", "127.0.0.1", "form", true, null, null).getId());
            expiredClientSessions.add(session.sessions().createClientSession(realm, client).getId());
            Time.setOffset(0);
            UserSessionModel s = session.sessions().createUserSession(realm, session.users().getUserByUsername("user2", realm), "user2", "127.0.0.1", "form", true, null, null);
            //s.setLastSessionRefresh(Time.currentTime() - (realm.getSsoSessionIdleTimeout() + 1));
            // A refresh time of 0 (epoch) is long past the idle timeout, so this one expires too.
            s.setLastSessionRefresh(0);
            expired.add(s.getId());
            ClientSessionModel clSession = session.sessions().createClientSession(realm, client);
            clSession.setUserSession(s);
            expiredClientSessions.add(clSession.getId());
            Set<String> valid = new HashSet<String>();
            Set<String> validClientSessions = new HashSet<String>();
            valid.add(session.sessions().createUserSession(realm, session.users().getUserByUsername("user1", realm), "user1", "127.0.0.1", "form", true, null, null).getId());
            validClientSessions.add(session.sessions().createClientSession(realm, client).getId());
            resetSession();
            session.sessions().removeExpired(realm);
            resetSession();
            for (String e : expired) {
                assertNull(session.sessions().getUserSession(realm, e));
            }
            for (String e : expiredClientSessions) {
                assertNull(session.sessions().getClientSession(realm, e));
            }
            for (String v : valid) {
                assertNotNull(session.sessions().getUserSession(realm, v));
            }
            for (String e : validClientSessions) {
                assertNotNull(session.sessions().getClientSession(realm, e));
            }
        } finally {
            // Always restore the clock so other tests are not affected.
            Time.setOffset(0);
        }
    }
    // Detached client sessions (no user session) expire after the LARGEST of the three
    // access-code lifespans; each sub-case makes a different lifespan the largest one.
    @Test
    public void testExpireDetachedClientSessions() {
        try {
            realm.setAccessCodeLifespan(10);
            realm.setAccessCodeLifespanUserAction(10);
            realm.setAccessCodeLifespanLogin(30);
            // Login lifespan is largest
            String clientSessionId = session.sessions().createClientSession(realm, realm.getClientByClientId("test-app")).getId();
            resetSession();
            Time.setOffset(25);
            session.sessions().removeExpired(realm);
            resetSession();
            assertNotNull(session.sessions().getClientSession(clientSessionId));
            Time.setOffset(35);
            session.sessions().removeExpired(realm);
            resetSession();
            assertNull(session.sessions().getClientSession(clientSessionId));
            // User action is largest
            realm.setAccessCodeLifespanUserAction(40);
            Time.setOffset(0);
            clientSessionId = session.sessions().createClientSession(realm, realm.getClientByClientId("test-app")).getId();
            resetSession();
            Time.setOffset(35);
            session.sessions().removeExpired(realm);
            resetSession();
            assertNotNull(session.sessions().getClientSession(clientSessionId));
            Time.setOffset(45);
            session.sessions().removeExpired(realm);
            resetSession();
            assertNull(session.sessions().getClientSession(clientSessionId));
            // Access code is largest
            realm.setAccessCodeLifespan(50);
            Time.setOffset(0);
            clientSessionId = session.sessions().createClientSession(realm, realm.getClientByClientId("test-app")).getId();
            resetSession();
            Time.setOffset(45);
            session.sessions().removeExpired(realm);
            resetSession();
            assertNotNull(session.sessions().getClientSession(clientSessionId));
            Time.setOffset(55);
            session.sessions().removeExpired(realm);
            resetSession();
            assertNull(session.sessions().getClientSession(clientSessionId));
        } finally {
            // Restore the realm's default lifespans and the clock.
            Time.setOffset(0);
            realm.setAccessCodeLifespan(60);
            realm.setAccessCodeLifespanUserAction(300);
            realm.setAccessCodeLifespanLogin(1800);
        }
    }
    // KEYCLOAK-2508
    @Test
    public void testRemovingExpiredSession() {
        UserSessionModel[] sessions = createSessions();
        try {
            Time.setOffset(3600000);
            UserSessionModel userSession = sessions[0];
            RealmModel realm = userSession.getRealm();
            session.sessions().removeExpired(realm);
            resetSession();
            // Assert no exception is thrown here
            session.sessions().removeUserSession(realm, userSession);
        } finally {
            Time.setOffset(0);
        }
    }
    @Test
    public void testGetByClient() {
        UserSessionModel[] sessions = createSessions();
        assertSessions(session.sessions().getUserSessions(realm, realm.getClientByClientId("test-app")), sessions[0], sessions[1], sessions[2]);
        assertSessions(session.sessions().getUserSessions(realm, realm.getClientByClientId("third-party")), sessions[0]);
    }
    // Creates 25 sessions with distinct timestamps/IPs, then checks pagination windows.
    @Test
    public void testGetByClientPaginated() {
        try {
            for (int i = 0; i < 25; i++) {
                // Distinct Time offsets give each session a unique, ordered start time.
                Time.setOffset(i);
                UserSessionModel userSession = session.sessions().createUserSession(realm, session.users().getUserByUsername("user1", realm), "user1", "127.0.0." + i, "form", false, null, null);
                ClientSessionModel clientSession = session.sessions().createClientSession(realm, realm.getClientByClientId("test-app"));
                clientSession.setUserSession(userSession);
                clientSession.setRedirectUri("http://redirect");
                clientSession.setRoles(new HashSet<String>());
                clientSession.setNote(OIDCLoginProtocol.STATE_PARAM, "state");
                clientSession.setTimestamp(userSession.getStarted());
            }
        } finally {
            Time.setOffset(0);
        }
        resetSession();
        assertPaginatedSession(realm, realm.getClientByClientId("test-app"), 0, 1, 1);
        assertPaginatedSession(realm, realm.getClientByClientId("test-app"), 0, 10, 10);
        assertPaginatedSession(realm, realm.getClientByClientId("test-app"), 10, 10, 10);
        assertPaginatedSession(realm, realm.getClientByClientId("test-app"), 20, 10, 5);
        assertPaginatedSession(realm, realm.getClientByClientId("test-app"), 30, 10, 0);
    }
    // Sessions must be visible within the same (uncommitted) transaction that created them.
    @Test
    public void testCreateAndGetInSameTransaction() {
        UserSessionModel userSession = session.sessions().createUserSession(realm, session.users().getUserByUsername("user1", realm), "user1", "127.0.0.2", "form", true, null, null);
        ClientSessionModel clientSession = createClientSession(realm.getClientByClientId("test-app"), userSession, "http://redirect", "state", new HashSet<String>(), new HashSet<String>());
        Assert.assertNotNull(session.sessions().getUserSession(realm, userSession.getId()));
        Assert.assertNotNull(session.sessions().getClientSession(realm, clientSession.getId()));
        Assert.assertEquals(userSession.getId(), clientSession.getUserSession().getId());
        Assert.assertEquals(1, userSession.getClientSessions().size());
        Assert.assertEquals(clientSession.getId(), userSession.getClientSessions().get(0).getId());
    }
    // Asserts a pagination window by comparing the (start-offset derived) IP addresses.
    private void assertPaginatedSession(RealmModel realm, ClientModel client, int start, int max, int expectedSize) {
        List<UserSessionModel> sessions = session.sessions().getUserSessions(realm, client, start, max);
        String[] actualIps = new String[sessions.size()];
        for (int i = 0; i < actualIps.length; i++) {
            actualIps[i] = sessions.get(i).getIpAddress();
        }
        String[] expectedIps = new String[expectedSize];
        for (int i = 0; i < expectedSize; i++) {
            expectedIps[i] = "127.0.0." + (i + start);
        }
        assertArrayEquals(expectedIps, actualIps);
    }
    @Test
    public void testGetCountByClient() {
        createSessions();
        assertEquals(3, session.sessions().getActiveUserSessions(realm, realm.getClientByClientId("test-app")));
        assertEquals(1, session.sessions().getActiveUserSessions(realm, realm.getClientByClientId("third-party")));
    }
    // Exercises the login-failure counter lifecycle: increment, clear, remove, remove-all.
    @Test
    public void loginFailures() {
        UserLoginFailureModel failure1 = session.sessions().addUserLoginFailure(realm, "user1");
        failure1.incrementFailures();
        UserLoginFailureModel failure2 = session.sessions().addUserLoginFailure(realm, "user2");
        failure2.incrementFailures();
        failure2.incrementFailures();
        resetSession();
        failure1 = session.sessions().getUserLoginFailure(realm, "user1");
        assertEquals(1, failure1.getNumFailures());
        failure2 = session.sessions().getUserLoginFailure(realm, "user2");
        assertEquals(2, failure2.getNumFailures());
        resetSession();
        failure1 = session.sessions().getUserLoginFailure(realm, "user1");
        failure1.clearFailures();
        resetSession();
        failure1 = session.sessions().getUserLoginFailure(realm, "user1");
        assertEquals(0, failure1.getNumFailures());
        session.sessions().removeUserLoginFailure(realm, "user1");
        resetSession();
        assertNull(session.sessions().getUserLoginFailure(realm, "user1"));
        session.sessions().removeAllUserLoginFailures(realm);
        resetSession();
        assertNull(session.sessions().getUserLoginFailure(realm, "user2"));
    }
    // Removing a user must drop that user's sessions and login failures (by username AND email).
    @Test
    public void testOnUserRemoved() {
        createSessions();
        session.sessions().addUserLoginFailure(realm, "user1");
        session.sessions().addUserLoginFailure(realm, "user1@localhost");
        session.sessions().addUserLoginFailure(realm, "user2");
        resetSession();
        session.sessions().onUserRemoved(realm, session.users().getUserByUsername("user1", realm));
        resetSession();
        assertTrue(session.sessions().getUserSessions(realm, session.users().getUserByUsername("user1", realm)).isEmpty());
        assertFalse(session.sessions().getUserSessions(realm, session.users().getUserByUsername("user2", realm)).isEmpty());
        assertNull(session.sessions().getUserLoginFailure(realm, "user1"));
        assertNull(session.sessions().getUserLoginFailure(realm, "user1@localhost"));
        assertNotNull(session.sessions().getUserLoginFailure(realm, "user2"));
    }
    // Helper: creates a client session and applies only the non-null pieces of state.
    private ClientSessionModel createClientSession(ClientModel client, UserSessionModel userSession, String redirect, String state, Set<String> roles, Set<String> protocolMappers) {
        ClientSessionModel clientSession = session.sessions().createClientSession(realm, client);
        if (userSession != null) clientSession.setUserSession(userSession);
        clientSession.setRedirectUri(redirect);
        if (state != null) clientSession.setNote(OIDCLoginProtocol.STATE_PARAM, state);
        if (roles != null) clientSession.setRoles(roles);
        if (protocolMappers != null) clientSession.setProtocolMappers(protocolMappers);
        return clientSession;
    }
    // Fixture: two sessions for user1 (the first with two clients) and one for user2.
    private UserSessionModel[] createSessions() {
        UserSessionModel[] sessions = new UserSessionModel[3];
        sessions[0] = session.sessions().createUserSession(realm, session.users().getUserByUsername("user1", realm), "user1", "127.0.0.1", "form", true, null, null);
        Set<String> roles = new HashSet<String>();
        roles.add("one");
        roles.add("two");
        Set<String> protocolMappers = new HashSet<String>();
        protocolMappers.add("mapper-one");
        protocolMappers.add("mapper-two");
        createClientSession(realm.getClientByClientId("test-app"), sessions[0], "http://redirect", "state", roles, protocolMappers);
        createClientSession(realm.getClientByClientId("third-party"), sessions[0], "http://redirect", "state", new HashSet<String>(), new HashSet<String>());
        sessions[1] = session.sessions().createUserSession(realm, session.users().getUserByUsername("user1", realm), "user1", "127.0.0.2", "form", true, null, null);
        createClientSession(realm.getClientByClientId("test-app"), sessions[1], "http://redirect", "state", new HashSet<String>(), new HashSet<String>());
        sessions[2] = session.sessions().createUserSession(realm, session.users().getUserByUsername("user2", realm), "user2", "127.0.0.3", "form", true, null, null);
        createClientSession(realm.getClientByClientId("test-app"), sessions[2], "http://redirect", "state", new HashSet<String>(), new HashSet<String>());
        resetSession();
        return sessions;
    }
    // Commits the current session and opens a fresh one, re-resolving the realm.
    private void resetSession() {
        kc.stopSession(session, true);
        session = kc.startSession();
        realm = session.realms().getRealm("test");
    }
    // Asserts the two session lists contain exactly the same IDs, order-insensitively.
    public static void assertSessions(List<UserSessionModel> actualSessions, UserSessionModel... expectedSessions) {
        String[] expected = new String[expectedSessions.length];
        for (int i = 0; i < expected.length; i++) {
            expected[i] = expectedSessions[i].getId();
        }
        String[] actual = new String[actualSessions.size()];
        for (int i = 0; i < actual.length; i++) {
            actual[i] = actualSessions.get(i).getId();
        }
        Arrays.sort(expected);
        Arrays.sort(actual);
        assertArrayEquals(expected, actual);
    }
    // Asserts all observable attributes of a session; timestamps tolerate +/- 1 second of skew.
    public static void assertSession(UserSessionModel session, UserModel user, String ipAddress, int started, int lastRefresh, String... clients) {
        assertEquals(user.getId(), session.getUser().getId());
        assertEquals(ipAddress, session.getIpAddress());
        assertEquals(user.getUsername(), session.getLoginUsername());
        assertEquals("form", session.getAuthMethod());
        assertEquals(true, session.isRememberMe());
        assertTrue(session.getStarted() >= started - 1 && session.getStarted() <= started + 1);
        assertTrue(session.getLastSessionRefresh() >= lastRefresh - 1 && session.getLastSessionRefresh() <= lastRefresh + 1);
        String[] actualClients = new String[session.getClientSessions().size()];
        for (int i = 0; i < actualClients.length; i++) {
            actualClients[i] = session.getClientSessions().get(i).getClient().getClientId();
        }
        Arrays.sort(clients);
        Arrays.sort(actualClients);
        assertArrayEquals(clients, actualClients);
    }
}
| iperdomo/keycloak | testsuite/integration/src/test/java/org/keycloak/testsuite/model/UserSessionProviderTest.java | Java | apache-2.0 | 25,703 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import org.apache.spark.annotation.{Evolving, Stable}
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.parseColumnPath
////////////////////////////////////////////////////////////////////////////////////////////////////
// This file defines all the filters that we can push down to the data sources.
////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* A filter predicate for data sources. Mapping between Spark SQL types and filter value
* types follow the convention for return type of [[org.apache.spark.sql.Row#get(int)]].
*
* @since 1.3.0
*/
@Stable
sealed abstract class Filter {
  /**
   * List of columns that are referenced by this filter.
   *
   * Note that, each element in `references` represents a column; `dots` are used as separators
   * for nested columns. If any part of the names contains `dots`, it is quoted to avoid confusion.
   *
   * @since 2.1.0
   */
  def references: Array[String]

  // Helper for subclasses: a comparison value contributes references only when it is
  // itself a Filter; plain values reference no columns.
  protected def findReferences(value: Any): Array[String] = value match {
    case f: Filter => f.references
    case _ => Array.empty[String]
  }

  /**
   * List of columns that are referenced by this filter.
   *
   * @return each element is a column name as an array of string multi-identifier
   * @since 3.0.0
   */
  def v2references: Array[Array[String]] = {
    references.map(name => parseColumnPath(name).toArray)
  }

  /**
   * If any of the references of this filter contains nested column
   */
  private[sql] def containsNestedColumn: Boolean = {
    v2references.exists(path => path.length > 1)
  }
}
/**
* A filter that evaluates to `true` iff the column evaluates to a value
* equal to `value`.
*
* @param attribute of the column to be evaluated; `dots` are used as separators
* for nested columns. If any part of the names contains `dots`,
* it is quoted to avoid confusion.
* @since 1.3.0
*/
@Stable
case class EqualTo(attribute: String, value: Any) extends Filter {
  // The target column, followed by any columns referenced by the comparison value.
  override def references: Array[String] = attribute +: findReferences(value)
}
/**
* Performs equality comparison, similar to [[EqualTo]]. However, this differs from [[EqualTo]]
* in that it returns `true` (rather than NULL) if both inputs are NULL, and `false`
* (rather than NULL) if one of the input is NULL and the other is not NULL.
*
* @param attribute of the column to be evaluated; `dots` are used as separators
* for nested columns. If any part of the names contains `dots`,
* it is quoted to avoid confusion.
* @since 1.5.0
*/
@Stable
case class EqualNullSafe(attribute: String, value: Any) extends Filter {
  // The target column, followed by any columns referenced by the comparison value.
  override def references: Array[String] = attribute +: findReferences(value)
}
/**
* A filter that evaluates to `true` iff the attribute evaluates to a value
* greater than `value`.
*
* @param attribute of the column to be evaluated; `dots` are used as separators
* for nested columns. If any part of the names contains `dots`,
* it is quoted to avoid confusion.
* @since 1.3.0
*/
@Stable
case class GreaterThan(attribute: String, value: Any) extends Filter {
  // The target column, followed by any columns referenced by the comparison value.
  override def references: Array[String] = attribute +: findReferences(value)
}
/**
* A filter that evaluates to `true` iff the attribute evaluates to a value
* greater than or equal to `value`.
*
* @param attribute of the column to be evaluated; `dots` are used as separators
* for nested columns. If any part of the names contains `dots`,
* it is quoted to avoid confusion.
* @since 1.3.0
*/
@Stable
case class GreaterThanOrEqual(attribute: String, value: Any) extends Filter {
override def references: Array[String] = Array(attribute) ++ findReferences(value)
}
/**
* A filter that evaluates to `true` iff the attribute evaluates to a value
* less than `value`.
*
* @param attribute of the column to be evaluated; `dots` are used as separators
* for nested columns. If any part of the names contains `dots`,
* it is quoted to avoid confusion.
* @since 1.3.0
*/
@Stable
case class LessThan(attribute: String, value: Any) extends Filter {
  // The target column, followed by any columns referenced by the comparison value.
  override def references: Array[String] = attribute +: findReferences(value)
}
/**
* A filter that evaluates to `true` iff the attribute evaluates to a value
* less than or equal to `value`.
*
* @param attribute of the column to be evaluated; `dots` are used as separators
* for nested columns. If any part of the names contains `dots`,
* it is quoted to avoid confusion.
* @since 1.3.0
*/
@Stable
case class LessThanOrEqual(attribute: String, value: Any) extends Filter {
  // The target column, followed by any columns referenced by the comparison value.
  override def references: Array[String] = attribute +: findReferences(value)
}
/**
* A filter that evaluates to `true` iff the attribute evaluates to one of the values in the array.
*
* @param attribute of the column to be evaluated; `dots` are used as separators
* for nested columns. If any part of the names contains `dots`,
* it is quoted to avoid confusion.
* @since 1.3.0
*/
@Stable
case class In(attribute: String, values: Array[Any]) extends Filter {
  // Arrays compare by identity, so hashCode/equals are defined manually to give
  // this case class element-wise value semantics.
  override def hashCode(): Int = {
    values.foldLeft(attribute.hashCode) { (acc, v) =>
      acc * 41 + (if (v != null) v.hashCode() else 0)
    }
  }
  override def equals(o: Any): Boolean = o match {
    case In(a, vs) =>
      // `corresponds` checks equal length and pairwise equality in one pass.
      a == attribute && vs.corresponds(values)(_ == _)
    case _ => false
  }
  override def toString: String = {
    s"In($attribute, [${values.mkString(",")}])"
  }
  override def references: Array[String] = attribute +: values.flatMap(findReferences)
}
/**
* A filter that evaluates to `true` iff the attribute evaluates to null.
*
* @param attribute of the column to be evaluated; `dots` are used as separators
* for nested columns. If any part of the names contains `dots`,
* it is quoted to avoid confusion.
* @since 1.3.0
*/
@Stable
case class IsNull(attribute: String) extends Filter {
  // A null check references only its single target column.
  override def references: Array[String] = Array[String](attribute)
}
/**
* A filter that evaluates to `true` iff the attribute evaluates to a non-null value.
*
* @param attribute of the column to be evaluated; `dots` are used as separators
* for nested columns. If any part of the names contains `dots`,
* it is quoted to avoid confusion.
* @since 1.3.0
*/
@Stable
case class IsNotNull(attribute: String) extends Filter {
  // A non-null check references only its single target column.
  override def references: Array[String] = Array[String](attribute)
}
/**
 * A filter that evaluates to `true` iff both `left` and `right` evaluate to `true`.
*
* @since 1.3.0
*/
@Stable
case class And(left: Filter, right: Filter) extends Filter {
  // Concatenation (duplicates preserved) of the columns referenced on either side.
  override def references: Array[String] = Array.concat(left.references, right.references)
}
/**
* A filter that evaluates to `true` iff at least one of `left` or `right` evaluates to `true`.
*
* @since 1.3.0
*/
@Stable
case class Or(left: Filter, right: Filter) extends Filter {
  // Concatenation (duplicates preserved) of the columns referenced on either side.
  override def references: Array[String] = Array.concat(left.references, right.references)
}
/**
* A filter that evaluates to `true` iff `child` is evaluated to `false`.
*
* @since 1.3.0
*/
@Stable
case class Not(child: Filter) extends Filter {
  // Negation introduces no columns of its own; expose the child's references unchanged.
  override def references: Array[String] = child.references
}
/**
* A filter that evaluates to `true` iff the attribute evaluates to
* a string that starts with `value`.
*
* @param attribute of the column to be evaluated; `dots` are used as separators
* for nested columns. If any part of the names contains `dots`,
* it is quoted to avoid confusion.
* @since 1.3.1
*/
@Stable
case class StringStartsWith(attribute: String, value: String) extends Filter {
  // A prefix match references only its single target column.
  override def references: Array[String] = Array[String](attribute)
}
/**
* A filter that evaluates to `true` iff the attribute evaluates to
* a string that ends with `value`.
*
* @param attribute of the column to be evaluated; `dots` are used as separators
* for nested columns. If any part of the names contains `dots`,
* it is quoted to avoid confusion.
* @since 1.3.1
*/
@Stable
case class StringEndsWith(attribute: String, value: String) extends Filter {
  // A suffix match references only its single target column.
  override def references: Array[String] = Array[String](attribute)
}
/**
 * A filter that evaluates to `true` iff the attribute evaluates to
 * a string that contains the string `value`.
 *
 * @param attribute of the column to be evaluated; `dots` are used as separators
 *                  for nested columns. If any part of the names contains `dots`,
 *                  it is quoted to avoid confusion.
 * @since 1.3.1
 */
@Stable
case class StringContains(attribute: String, value: String) extends Filter {
  // Only the single target column is referenced.
  override def references: Array[String] = Array(attribute)
}
/**
 * A filter that always evaluates to `true`.
 *
 * @since 3.0.0
 */
@Evolving
case class AlwaysTrue() extends Filter {
  // Matches every row, so no attributes are referenced.
  override def references: Array[String] = Array.empty
}
// Singleton instance so `AlwaysTrue` can be used without constructing it.
@Evolving
object AlwaysTrue extends AlwaysTrue {
}
/**
 * A filter that always evaluates to `false`.
 *
 * @since 3.0.0
 */
@Evolving
case class AlwaysFalse() extends Filter {
  // Matches no rows, so no attributes are referenced.
  override def references: Array[String] = Array.empty
}
// Singleton instance so `AlwaysFalse` can be used without constructing it.
@Evolving
object AlwaysFalse extends AlwaysFalse {
}
| witgo/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/sources/filters.scala | Scala | apache-2.0 | 10,259 |
/*
* Copyright 2011 <a href="mailto:lincolnbaxter@gmail.com">Lincoln Baxter, III</a>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ocpsoft.rewrite.showcase.bookstore.web.list;
import java.util.List;
import javax.ejb.EJB;
import javax.enterprise.context.RequestScoped;
import javax.inject.Named;
import org.ocpsoft.rewrite.annotation.Join;
import org.ocpsoft.rewrite.annotation.Parameter;
import org.ocpsoft.rewrite.annotation.RequestAction;
import org.ocpsoft.rewrite.faces.annotation.Deferred;
import org.ocpsoft.rewrite.showcase.bookstore.dao.BookDao;
import org.ocpsoft.rewrite.showcase.bookstore.dao.CategoryDao;
import org.ocpsoft.rewrite.showcase.bookstore.model.Book;
import org.ocpsoft.rewrite.showcase.bookstore.model.Category;
import org.ocpsoft.rewrite.showcase.bookstore.web.utils.ResponseUtils;
/**
 * Backing bean for the category listing page. Rewrite's {@code @Join} maps the
 * pretty URL {@code /category/{seoKey}} to {@code /faces/category.xhtml} and
 * binds the path segment to {@link #seoKey}.
 */
@Named
@RequestScoped
@Join(path = "/category/{seoKey}", to = "/faces/category.xhtml")
public class CategoryBean
{
   /** SEO key taken from the {seoKey} URL path parameter. */
   @Parameter
   private String seoKey;
   @EJB
   private CategoryDao categoryDao;
   @EJB
   private BookDao bookDao;
   // Populated by loadData(); null until the request action has run.
   private List<Book> books;
   /**
    * Deferred request action: looks up the category matching {@link #seoKey}
    * and loads its books. Sends an HTTP 404 when no such category exists.
    *
    * @return always {@code null} — no JSF navigation is triggered.
    */
   @RequestAction
   @Deferred
   public String loadData()
   {
      Category category = categoryDao.getBySeoKey(seoKey);
      if (category == null) {
         ResponseUtils.sendError(404);
         return null;
      }
      books = bookDao.findByCategory(category);
      return null;
   }
   public List<Book> getBooks()
   {
      return books;
   }
   public String getSeoKey()
   {
      return seoKey;
   }
   public void setSeoKey(String seoKey)
   {
      this.seoKey = seoKey;
   }
}
| chkal/rewrite | showcase/bookstore/src/main/java/org/ocpsoft/rewrite/showcase/bookstore/web/list/CategoryBean.java | Java | apache-2.0 | 2,115 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/ssm/model/ModifyDocumentPermissionRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::SSM::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
// Default constructor: every optional member starts "unset" so that
// SerializePayload() omits it from the request JSON.
ModifyDocumentPermissionRequest::ModifyDocumentPermissionRequest() : 
    m_nameHasBeenSet(false),
    m_permissionType(DocumentPermissionType::NOT_SET),
    m_permissionTypeHasBeenSet(false),
    m_accountIdsToAddHasBeenSet(false),
    m_accountIdsToRemoveHasBeenSet(false),
    m_sharedDocumentVersionHasBeenSet(false)
{
}
// Builds the JSON request body, emitting only the members whose
// corresponding *HasBeenSet flag is true.
Aws::String ModifyDocumentPermissionRequest::SerializePayload() const
{
  JsonValue payload;
  if(m_nameHasBeenSet)
  {
    payload.WithString("Name", m_name);
  }
  if(m_permissionTypeHasBeenSet)
  {
    payload.WithString("PermissionType", DocumentPermissionTypeMapper::GetNameForDocumentPermissionType(m_permissionType));
  }
  if(m_accountIdsToAddHasBeenSet)
  {
    // Serialize the account-id list into a JSON array of strings.
    Array<JsonValue> addList(m_accountIdsToAdd.size());
    for(unsigned idx = 0; idx < addList.GetLength(); ++idx)
    {
      addList[idx].AsString(m_accountIdsToAdd[idx]);
    }
    payload.WithArray("AccountIdsToAdd", std::move(addList));
  }
  if(m_accountIdsToRemoveHasBeenSet)
  {
    Array<JsonValue> removeList(m_accountIdsToRemove.size());
    for(unsigned idx = 0; idx < removeList.GetLength(); ++idx)
    {
      removeList[idx].AsString(m_accountIdsToRemove[idx]);
    }
    payload.WithArray("AccountIdsToRemove", std::move(removeList));
  }
  if(m_sharedDocumentVersionHasBeenSet)
  {
    payload.WithString("SharedDocumentVersion", m_sharedDocumentVersion);
  }
  return payload.View().WriteReadable();
}
// Adds the X-Amz-Target header identifying the SSM ModifyDocumentPermission
// operation for this request.
Aws::Http::HeaderValueCollection ModifyDocumentPermissionRequest::GetRequestSpecificHeaders() const
{
  Aws::Http::HeaderValueCollection headers;
  headers.insert(Aws::Http::HeaderValuePair("X-Amz-Target", "AmazonSSM.ModifyDocumentPermission"));
  return headers;
}
| awslabs/aws-sdk-cpp | aws-cpp-sdk-ssm/source/model/ModifyDocumentPermissionRequest.cpp | C++ | apache-2.0 | 2,340 |
/**
* Copyright 2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package wherehows.common.schemas;
import java.util.List;
/**
 * Record describing a dataset's inventory-level properties (change audit
 * stamp, native type, URI and case-sensitivity information), mapped onto the
 * database columns returned by {@link #getDbColumnNames()}.
 */
public class DatasetInventoryPropertiesRecord extends AbstractRecord {
  DatasetChangeAuditStamp changeAuditStamp;
  String nativeType;
  String uri;
  DatasetCaseSensitiveRecord caseSensitivity;
  public DatasetInventoryPropertiesRecord() {
  }
  /** Database column names, in the same order as the fields above. */
  @Override
  public String[] getDbColumnNames() {
    return new String[]{"change_audit_stamp", "native_type", "uri", "case_sensitivity"};
  }
  /** Not implemented for this record type; always returns {@code null}. */
  @Override
  public List<Object> fillAllFields() {
    return null;
  }
  public DatasetChangeAuditStamp getChangeAuditStamp() {
    return changeAuditStamp;
  }
  public void setChangeAuditStamp(DatasetChangeAuditStamp changeAuditStamp) {
    this.changeAuditStamp = changeAuditStamp;
  }
  public String getNativeType() {
    return nativeType;
  }
  public void setNativeType(String nativeType) {
    this.nativeType = nativeType;
  }
  public String getUri() {
    return uri;
  }
  public void setUri(String uri) {
    this.uri = uri;
  }
  public DatasetCaseSensitiveRecord getCaseSensitivity() {
    return caseSensitivity;
  }
  public void setCaseSensitivity(DatasetCaseSensitiveRecord caseSensitivity) {
    this.caseSensitivity = caseSensitivity;
  }
}
| alyiwang/WhereHows | wherehows-common/src/main/java/wherehows/common/schemas/DatasetInventoryPropertiesRecord.java | Java | apache-2.0 | 1,738 |
#!/usr/bin/env php
<?php
// Reads a newline-separated list of file paths on stdin, runs Exuberant
// ctags over each file (at most 8 subprocesses in parallel), and prints one
// symbol per line in the format expected by Phabricator's symbol scripts:
//   <context> <token> <type> <language> <line> </absolute/path>
$root = dirname(dirname(dirname(__FILE__)));
require_once $root.'/scripts/__init_script__.php';
if (ctags_check_executable() == false) {
  echo phutil_console_format(
    "Could not find Exuberant ctags. Make sure it is installed and\n".
    "available in executable path.\n\n".
    "Exuberant ctags project page: http://ctags.sourceforge.net/\n");
  exit(1);
}
if ($argc !== 1 || posix_isatty(STDIN)) {
  echo phutil_console_format(
    "usage: find . -type f -name '*.py' | ./generate_ctags_symbols.php\n");
  exit(1);
}
$input = file_get_contents('php://stdin');
$input = trim($input);
$input = explode("\n", $input);
// Queue one ctags future per input file; FutureIterator resolves them with
// bounded (8-way) parallelism.
$futures = array();
foreach ($input as $file) {
  $file = Filesystem::readablePath($file);
  $futures[$file] = ctags_get_parser_future($file);
}
$futures = id(new FutureIterator($futures))
  ->limit(8);
foreach ($futures as $file => $future) {
  // $tags[1] is the stdout of the ctags subprocess; one tag per line.
  $tags = $future->resolve();
  $tags = explode("\n", $tags[1]);
  foreach ($tags as $tag) {
    $parts = explode(';', $tag);
    // skip lines that we can not parse
    if (count($parts) < 2) {
      continue;
    }
    // split ctags information
    $tag_info = explode("\t", $parts[0]);
    // split exuberant ctags "extension fields" (additional information)
    $parts[1] = trim($parts[1], "\t \"");
    $extension_fields = explode("\t", $parts[1]);
    // skip lines that we can not parse
    if (count($tag_info) < 3 || count($extension_fields) < 2) {
      continue;
    }
    // default $context to empty
    $extension_fields[] = '';
    list($token, $file_path, $line_num) = $tag_info;
    list($type, $language, $context) = $extension_fields;
    // skip lines with tokens containing a space
    if (strpos($token, ' ') !== false) {
      continue;
    }
    // strip "language:"
    $language = substr($language, 9);
    // To keep consistent with "Separate with commas, for example: php, py"
    // in Arcanist Project edit form.
    $language = str_ireplace('python', 'py', $language);
    // also, "normalize" c++ and c#
    $language = str_ireplace('c++', 'cpp', $language);
    $language = str_ireplace('c#', 'cs', $language);
    // Ruby has "singleton method", for example
    $type = substr(str_replace(' ', '_', $type), 0, 12);
    // class:foo, struct:foo, union:foo, enum:foo, ...
    $context = last(explode(':', $context, 2));
    // Plain variables are too noisy to be useful as symbols.
    $ignore = array(
      'variable' => true,
    );
    if (empty($ignore[$type])) {
      print_symbol($file_path, $line_num, $type, $token, $context, $language);
    }
  }
}
/**
 * Build (without resolving) an ExecFuture that runs ctags over one file:
 * -n emits line numbers, --fields=Kls adds kind/language/scope fields, and
 * "-o -" writes the tags to stdout.
 */
function ctags_get_parser_future($file_path) {
  $future = new ExecFuture('ctags -n --fields=Kls -o - %s',
    $file_path);
  return $future;
}
/**
 * Check whether an Exuberant ctags binary can be executed.
 *
 * Uses the exit status of `ctags --version` rather than inspecting stdout:
 * the exit status is the reliable success signal and also covers ctags
 * variants that print version information to stderr.
 *
 * @return bool True if `ctags` ran successfully.
 */
function ctags_check_executable() {
  $future = new ExecFuture('ctags --version');
  list($err) = $future->resolve();
  return $err === 0;
}
/**
 * Emit one symbol line: "<context> <token> <type> <language> <line> <path>".
 * The path is normalized by dropping a single leading "." or ".." component
 * and prefixing a "/" so it looks repository-absolute.
 */
function print_symbol($file, $line_num, $type, $token, $context, $language) {
  $segments = explode('/', $file);
  $first = $segments[0];
  if ($first == '.' || $first == '..') {
    array_shift($segments);
  }
  $normalized = '/'.implode('/', $segments);
  $fields = array(
    $context,
    $token,
    $type,
    strtolower($language),
    $line_num,
    $normalized,
  );
  echo implode(' ', $fields)."\n";
}
| hach-que/unearth-phabricator | scripts/symbols/generate_ctags_symbols.php | PHP | apache-2.0 | 3,305 |
/*
* Copyright 2015 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
goog.module('$jscomp_object_test');
goog.setTestOnly();
const jsunit = goog.require('goog.testing.jsunit');
const testSuite = goog.require('goog.testing.testSuite');
const testing = goog.require('testing');
const assertDeepEquals = testing.assertDeepEquals;
testSuite({
testAssertDeepEquals() {
// Quick sanity check, since we don't unit test assertDeepEquals
assertDeepEquals({a: 4}, {a: 4});
assertThrowsJsUnitException(() => assertDeepEquals({}, {a: 4}));
assertThrowsJsUnitException(() => assertDeepEquals({a: 4}, {}));
},
testAssign_simple() {
const obj = {a: 2, z: 3};
assertEquals(obj, Object.assign(obj, {a: 4, b: 5}, null, {c: 6, b: 7}));
assertDeepEquals({a: 4, b: 7, c: 6, z: 3}, obj);
},
testAssign_skipsPrototypeProperties() {
if (!Object.create) return;
const proto = {a: 4, b: 5};
const from = Object.create(proto);
from.a = 6;
from.c = 7;
assertDeepEquals({a: 6, c: 7}, Object.assign({}, from));
assertDeepEquals({a: 6, b: 1, c: 7}, Object.assign({b: 1}, from));
},
testAssign_skipsNonEnumerableProperties() {
const from = {'b': 23};
try {
Object.defineProperty(from, 'a', {enumerable: false, value: 42});
} catch (err) {
return; // Object.defineProperty in IE8 test harness exists, always fails
}
assertDeepEquals({'b': 23}, Object.assign({}, from));
assertDeepEquals({'a': 1, 'b': 23}, Object.assign({'a': 1}, from));
},
testIs() {
assertTrue(Object.is(4, 4));
assertTrue(Object.is(0, 0));
assertTrue(Object.is('4', '4'));
assertTrue(Object.is('', ''));
assertTrue(Object.is(true, true));
assertTrue(Object.is(false, false));
assertTrue(Object.is(null, null));
assertTrue(Object.is(undefined, undefined));
assertTrue(Object.is(NaN, NaN));
const obj = {};
assertTrue(Object.is(obj, obj));
assertFalse(Object.is(0, -0));
assertFalse(Object.is({}, {}));
assertFalse(Object.is(4, '4'));
assertFalse(Object.is(null, void 0));
assertFalse(Object.is(1, true));
assertFalse(Object.is(0, false));
assertFalse(Object.is('', false));
}
});
| superkonduktr/closure-compiler | test/com/google/javascript/jscomp/js/es6/object_test.js | JavaScript | apache-2.0 | 2,754 |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.dmn.client.commands.expressions.types.function.supplementary;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.kie.workbench.common.dmn.api.definition.v1_1.Context;
import org.kie.workbench.common.dmn.api.definition.v1_1.ContextEntry;
import org.kie.workbench.common.dmn.client.commands.VetoExecutionCommand;
import org.kie.workbench.common.dmn.client.commands.VetoUndoCommand;
import org.kie.workbench.common.dmn.client.commands.util.CommandUtils;
import org.kie.workbench.common.dmn.client.widgets.grid.model.DMNGridData;
import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler;
import org.kie.workbench.common.stunner.core.client.canvas.command.AbstractCanvasCommand;
import org.kie.workbench.common.stunner.core.client.canvas.command.AbstractCanvasGraphCommand;
import org.kie.workbench.common.stunner.core.client.command.CanvasCommandResultBuilder;
import org.kie.workbench.common.stunner.core.client.command.CanvasViolation;
import org.kie.workbench.common.stunner.core.command.Command;
import org.kie.workbench.common.stunner.core.command.CommandResult;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandExecutionContext;
import org.kie.workbench.common.stunner.core.graph.command.GraphCommandResultBuilder;
import org.kie.workbench.common.stunner.core.graph.command.impl.AbstractGraphCommand;
import org.kie.workbench.common.stunner.core.rule.RuleViolation;
import org.uberfire.ext.wires.core.grids.client.model.GridRow;
/**
 * Command that moves one or more rows of a Context expression's grid to a new
 * index, keeping the {@link Context} model and the UI {@link DMNGridData}
 * model in sync, with full undo support.
 */
public class MoveRowsCommand extends AbstractCanvasGraphCommand implements VetoExecutionCommand,
                                                                           VetoUndoCommand {
    private final Context context;
    private final DMNGridData uiModel;
    // Target row index of the move.
    private final int index;
    // Defensive copy of the UI rows being moved.
    private final List<GridRow> rows;
    // Callback used to refresh the canvas after execute/undo.
    private final org.uberfire.mvp.Command canvasOperation;
    // Index of the first moved row before the move, captured for undo.
    private final int oldIndex;
    public MoveRowsCommand(final Context context,
                           final DMNGridData uiModel,
                           final int index,
                           final List<GridRow> rows,
                           final org.uberfire.mvp.Command canvasOperation) {
        this.context = context;
        this.uiModel = uiModel;
        this.index = index;
        this.rows = new ArrayList<>(rows);
        this.canvasOperation = canvasOperation;
        this.oldIndex = uiModel.getRows().indexOf(rows.get(0));
    }
    /**
     * Graph-side command: moves the corresponding {@link ContextEntry} rows
     * within the {@link Context} model. Undo moves them back to oldIndex.
     */
    @Override
    protected Command<GraphCommandExecutionContext, RuleViolation> newGraphCommand(final AbstractCanvasHandler ach) {
        return new AbstractGraphCommand() {
            @Override
            protected CommandResult<RuleViolation> check(final GraphCommandExecutionContext gcec) {
                // No pre-conditions on the model side.
                return GraphCommandResultBuilder.SUCCESS;
            }
            @Override
            public CommandResult<RuleViolation> execute(final GraphCommandExecutionContext gcec) {
                moveRows(index);
                return GraphCommandResultBuilder.SUCCESS;
            }
            @Override
            public CommandResult<RuleViolation> undo(final GraphCommandExecutionContext gcec) {
                moveRows(oldIndex);
                return GraphCommandResultBuilder.SUCCESS;
            }
            // Maps each UI row to its ContextEntry (by current UI index) and
            // delegates the reordering to CommandUtils.
            private void moveRows(final int index) {
                final List<ContextEntry> rowsToMove = rows
                        .stream()
                        .map(r -> uiModel.getRows().indexOf(r))
                        .map(i -> context.getContextEntry().get(i))
                        .collect(Collectors.toList());
                final List<ContextEntry> rows = context.getContextEntry();
                CommandUtils.moveRows(rows,
                                      rowsToMove,
                                      index);
            }
        };
    }
    /**
     * Canvas-side command: moves the UI rows, renumbers them, refreshes
     * parent information and triggers the canvas refresh callback.
     */
    @Override
    protected Command<AbstractCanvasHandler, CanvasViolation> newCanvasCommand(final AbstractCanvasHandler ach) {
        return new AbstractCanvasCommand() {
            @Override
            public CommandResult<CanvasViolation> allow(AbstractCanvasHandler context) {
                // Vetoes moving rows to the last row position.
                // NOTE(review): presumably the last row is reserved (e.g. the
                // result row) — confirm against the grid's layout.
                if (index == uiModel.getRowCount() - 1) {
                    return CanvasCommandResultBuilder.FAILED;
                }
                return CanvasCommandResultBuilder.SUCCESS;
            }
            @Override
            public CommandResult<CanvasViolation> execute(final AbstractCanvasHandler ach) {
                uiModel.moveRowsTo(index,
                                   rows);
                updateRowNumbers();
                updateParentInformation();
                canvasOperation.execute();
                return CanvasCommandResultBuilder.SUCCESS;
            }
            @Override
            public CommandResult<CanvasViolation> undo(final AbstractCanvasHandler ach) {
                uiModel.moveRowsTo(oldIndex,
                                   rows);
                updateRowNumbers();
                updateParentInformation();
                canvasOperation.execute();
                return CanvasCommandResultBuilder.SUCCESS;
            }
        };
    }
    /** Renumbers every row of the UI model after a move. */
    public void updateRowNumbers() {
        CommandUtils.updateRowNumbers(uiModel,
                                      IntStream.range(0,
                                                      uiModel.getRowCount()));
    }
    /** Refreshes the parent linkage of the UI model's cells after a move. */
    public void updateParentInformation() {
        CommandUtils.updateParentInformation(uiModel);
    }
}
| jhrcek/kie-wb-common | kie-wb-common-dmn/kie-wb-common-dmn-client/src/main/java/org/kie/workbench/common/dmn/client/commands/expressions/types/function/supplementary/MoveRowsCommand.java | Java | apache-2.0 | 6,228 |
//---------------------------------------------------------------------------
#ifndef du_boxH
#define du_boxH
//---------------------------------------------------------------------------
// Debug-utility box primitive: shared geometry tables for drawing a box as
// an indexed triangle mesh, an indexed line (wireframe) list, or a raw
// non-indexed triangle list.
#define DU_BOX_NUMVERTEX 8
#define DU_BOX_NUMFACES 12
#define DU_BOX_NUMLINES 12
#define DU_BOX_NUMVERTEX2 36
// Corner positions of the indexed box (DU_BOX_NUMVERTEX entries).
extern ECORE_API Fvector du_box_vertices[];
// Triangle indices (DU_BOX_NUMFACES triangles, 3 indices each).
extern ECORE_API WORD du_box_faces[];
// Edge indices for the wireframe form (DU_BOX_NUMLINES edges, 2 each).
extern ECORE_API WORD du_box_lines[];
// Expanded, non-indexed vertex list (DU_BOX_NUMVERTEX2 = 12 * 3 entries).
extern ECORE_API Fvector du_box_vertices2[];
#endif
| OLR-xray/OLR-3.0 | src/xray/xr_3da/du_box.h | C | apache-2.0 | 475 |
package org.zstack.network.l3;
import org.zstack.core.GlobalProperty;
import org.zstack.core.GlobalPropertyDefinition;
/**
 * Global property flags for the L3 network service, bound from the
 * corresponding keys in the deployment's property configuration.
 */
@GlobalPropertyDefinition
public class NetworkGlobalProperty {
    // When true, IPv6 handling is presumably skipped — confirm against users
    // of this flag; defaults to false.
    @GlobalProperty(name = "skip.ipv6", defaultValue = "false")
    public static boolean SKIP_IPV6;
    // Chassis asset tag value, default "www.zstack.io".
    // NOTE(review): the property key is spelled "chssis.asset.tag" (sic);
    // renaming it would break existing configurations, so it is kept as-is.
    @GlobalProperty(name = "chssis.asset.tag", defaultValue = "www.zstack.io")
    public static String CHASSIS_ASSET_TAG;
    // When true, iptables setup on bridges is presumably disabled — confirm
    // against users of this flag; defaults to false.
    @GlobalProperty(name = "bridge.disable.iptables", defaultValue = "false")
    public static boolean BRIDGE_DISABLE_IPTABLES;
}
| zstackio/zstack | network/src/main/java/org/zstack/network/l3/NetworkGlobalProperty.java | Java | apache-2.0 | 549 |
/*##########################################################################
_##
_## $Id$
_##
_##########################################################################*/
package jrds.probe.snmp;
import java.util.Set;
import org.snmp4j.smi.OID;
/**
 * This probe is used to do simple mapping from oid to datastore
 * @author Fabrice Bacchella
 * @version $Revision$, $Date$
 */
public class RdsSnmpSimple extends SnmpProbe {
    /**
     * Returns the OIDs to collect: every OID key of this probe's
     * OID-to-datastore name map.
     */
    public Set<OID> getOidSet() {
        return getOidNameMap().keySet();
    }
}
| springlin2012/Mycat-Web | src/main/java/jrds/probe/snmp/RdsSnmpSimple.java | Java | apache-2.0 | 592 |
// Re-export the `search-facet-source` component from the ember-osf addon so
// it resolves inside the consuming application's namespace.
export { default } from 'ember-osf/components/search-facet-source/component';
| binoculars/ember-osf | app/components/search-facet-source/component.js | JavaScript | apache-2.0 | 78 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing.allocation.decider;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
/**
* An allocation decider that prevents multiple instances of the same shard to
* be allocated on the same <tt>node</tt>.
*
* The {@link #CLUSTER_ROUTING_ALLOCATION_SAME_HOST_SETTING} setting allows to perform a check to prevent
* allocation of multiple instances of the same shard on a single <tt>host</tt>,
* based on host name and host address. Defaults to `false`, meaning that no
* check is performed by default.
*
* <p>
* Note: this setting only applies if multiple nodes are started on the same
* <tt>host</tt>. Allocations of multiple copies of the same shard on the same
* <tt>node</tt> are not allowed independently of this setting.
* </p>
*/
public class SameShardAllocationDecider extends AllocationDecider {
    public static final String NAME = "same_shard";
    /**
     * When {@code true}, extends the same-shard check from "same node" to
     * "same host", where hosts are matched by address (preferred) or name.
     * Node-scoped; read once at construction time.
     */
    public static final Setting<Boolean> CLUSTER_ROUTING_ALLOCATION_SAME_HOST_SETTING =
        Setting.boolSetting("cluster.routing.allocation.same_shard.host", false, Setting.Property.NodeScope);
    // Cached value of CLUSTER_ROUTING_ALLOCATION_SAME_HOST_SETTING.
    private final boolean sameHost;
    public SameShardAllocationDecider(Settings settings) {
        super(settings);
        this.sameHost = CLUSTER_ROUTING_ALLOCATION_SAME_HOST_SETTING.get(settings);
    }
    /**
     * Returns NO when a copy of the shard is already assigned to {@code node},
     * and — if the same-host check is enabled — when a copy is assigned to any
     * node sharing the target node's host address or host name.
     */
    @Override
    public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
        Iterable<ShardRouting> assignedShards = allocation.routingNodes().assignedShards(shardRouting.shardId());
        Decision decision = decideSameNode(shardRouting, node, allocation, assignedShards);
        if (decision.type() == Decision.Type.NO || sameHost == false) {
            // if its already a NO decision looking at the node, or we aren't configured to look at the host, return the decision
            return decision;
        }
        if (node.node() != null) {
            for (RoutingNode checkNode : allocation.routingNodes()) {
                if (checkNode.node() == null) {
                    continue;
                }
                // check if its on the same host as the one we want to allocate to
                // (host address takes precedence over host name when both are set)
                boolean checkNodeOnSameHostName = false;
                boolean checkNodeOnSameHostAddress = false;
                if (Strings.hasLength(checkNode.node().getHostAddress()) && Strings.hasLength(node.node().getHostAddress())) {
                    if (checkNode.node().getHostAddress().equals(node.node().getHostAddress())) {
                        checkNodeOnSameHostAddress = true;
                    }
                } else if (Strings.hasLength(checkNode.node().getHostName()) && Strings.hasLength(node.node().getHostName())) {
                    if (checkNode.node().getHostName().equals(node.node().getHostName())) {
                        checkNodeOnSameHostName = true;
                    }
                }
                if (checkNodeOnSameHostAddress || checkNodeOnSameHostName) {
                    // A co-hosted node was found; refuse if it holds any copy
                    // of the shard being allocated.
                    for (ShardRouting assignedShard : assignedShards) {
                        if (checkNode.nodeId().equals(assignedShard.currentNodeId())) {
                            String hostType = checkNodeOnSameHostAddress ? "address" : "name";
                            String host = checkNodeOnSameHostAddress ? node.node().getHostAddress() : node.node().getHostName();
                            return allocation.decision(Decision.NO, NAME,
                                "the shard cannot be allocated on host %s [%s], where it already exists on node [%s]; " +
                                    "set [%s] to false to allow multiple nodes on the same host to hold the same shard copies",
                                hostType, host, node.nodeId(), CLUSTER_ROUTING_ALLOCATION_SAME_HOST_SETTING.getKey());
                        }
                    }
                }
            }
        }
        return allocation.decision(Decision.YES, NAME, "the shard does not exist on the same host");
    }
    /**
     * Forced primary allocation only applies the same-node constraint; the
     * optional same-host constraint is not evaluated here.
     */
    @Override
    public Decision canForceAllocatePrimary(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
        assert shardRouting.primary() : "must not call force allocate on a non-primary shard";
        Iterable<ShardRouting> assignedShards = allocation.routingNodes().assignedShards(shardRouting.shardId());
        return decideSameNode(shardRouting, node, allocation, assignedShards);
    }
    // Returns NO when any copy of the shard (the same allocation or a
    // different one) is already assigned to the given node; YES otherwise.
    private Decision decideSameNode(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation,
                                    Iterable<ShardRouting> assignedShards) {
        for (ShardRouting assignedShard : assignedShards) {
            if (node.nodeId().equals(assignedShard.currentNodeId())) {
                if (assignedShard.isSameAllocation(shardRouting)) {
                    return allocation.decision(Decision.NO, NAME,
                        "the shard cannot be allocated to the node on which it already exists [%s]",
                        shardRouting.toString());
                } else {
                    return allocation.decision(Decision.NO, NAME,
                        "the shard cannot be allocated to the same node on which a copy of the shard already exists [%s]",
                        assignedShard.toString());
                }
            }
        }
        return allocation.decision(Decision.YES, NAME, "the shard does not exist on the same node");
    }
}
| MaineC/elasticsearch | core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java | Java | apache-2.0 | 6,524 |
/**
* Copyright 2015 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as dom from '../../src/dom';
import {BaseElement} from '../../src/base-element';
import {createAmpElementForTesting} from '../../src/custom-element';
import {loadPromise} from '../../src/event-helper';
import {toArray} from '../../src/types';
describes.sandboxed('DOM', {}, env => {
let sandbox;
beforeEach(() => {
sandbox = env.sandbox;
});
afterEach(() => {
dom.setScopeSelectorSupportedForTesting(undefined);
sandbox.restore();
});
it('should remove all children', () => {
const element = document.createElement('div');
element.appendChild(document.createElement('div'));
element.appendChild(document.createTextNode('ABC'));
expect(element.children.length).to.equal(1);
expect(element.firstChild).to.not.equal(null);
expect(element.textContent).to.equal('ABC');
dom.removeChildren(element);
expect(element.children.length).to.equal(0);
expect(element.firstChild).to.equal(null);
expect(element.textContent).to.equal('');
});
it('should copy all children', () => {
const element = document.createElement('div');
element.appendChild(document.createElement('div'));
element.appendChild(document.createTextNode('ABC'));
const other = document.createElement('div');
dom.copyChildren(element, other);
expect(element.children.length).to.equal(1);
expect(element.firstChild).to.not.equal(null);
expect(element.textContent).to.equal('ABC');
expect(other.children.length).to.equal(1);
expect(other.firstChild).to.not.equal(null);
expect(other.firstChild.tagName).to.equal('DIV');
expect(other.textContent).to.equal('ABC');
});
it('isConnectedNode', () => {
expect(dom.isConnectedNode(document)).to.be.true;
const a = document.createElement('div');
expect(dom.isConnectedNode(a)).to.be.false;
const b = document.createElement('div');
b.appendChild(a);
document.body.appendChild(b);
expect(dom.isConnectedNode(a)).to.be.true;
const shadow = a.attachShadow({mode: 'open'});
const c = document.createElement('div');
shadow.appendChild(c);
expect(dom.isConnectedNode(c)).to.be.true;
document.body.removeChild(b);
expect(dom.isConnectedNode(c)).to.be.false;
});
it('isConnectedNode (no Node.p.isConnected)', () => {
if (!Object.hasOwnProperty.call(Node.prototype, 'isConnected')) {
return;
}
const desc = Object.getOwnPropertyDescriptor(Node.prototype,
'isConnected');
try {
delete Node.prototype.isConnected;
expect(dom.isConnectedNode(document)).to.be.true;
const a = document.createElement('div');
expect(dom.isConnectedNode(a)).to.be.false;
const b = document.createElement('div');
b.appendChild(a);
document.body.appendChild(b);
expect(dom.isConnectedNode(a)).to.be.true;
const shadow = a.attachShadow({mode: 'open'});
const c = document.createElement('div');
shadow.appendChild(c);
expect(dom.isConnectedNode(c)).to.be.true;
document.body.removeChild(b);
expect(dom.isConnectedNode(c)).to.be.false;
} finally {
Object.defineProperty(Node.prototype, 'isConnected', desc);
}
});
it('rootNodeFor', () => {
const a = document.createElement('div');
expect(dom.rootNodeFor(a)).to.equal(a);
const b = document.createElement('div');
a.appendChild(b);
expect(dom.rootNodeFor(b)).to.equal(a);
const c = document.createElement('div');
b.appendChild(c);
expect(dom.rootNodeFor(c)).to.equal(a);
});
it('rootNodeFor (no Node.p.getRootNode)', () => {
if (!Object.hasOwnProperty.call(Node.prototype, 'getRootNode')) {
return;
}
const desc = Object.getOwnPropertyDescriptor(Node.prototype,
'getRootNode');
try {
delete Node.prototype.getRootNode;
const a = document.createElement('div');
expect(dom.rootNodeFor(a)).to.equal(a);
const b = document.createElement('div');
a.appendChild(b);
expect(dom.rootNodeFor(b)).to.equal(a);
const c = document.createElement('div');
b.appendChild(c);
expect(dom.rootNodeFor(c)).to.equal(a);
} finally {
Object.defineProperty(Node.prototype, 'getRootNode', desc);
}
});
it('closest should find itself', () => {
const element = document.createElement('div');
const child = document.createElement('div');
element.appendChild(child);
expect(dom.closest(child, () => true)).to.equal(child);
expect(dom.closestNode(child, () => true)).to.equal(child);
expect(dom.closestByTag(child, 'div')).to.equal(child);
expect(dom.closestByTag(child, 'DIV')).to.equal(child);
});
// closest() must stop ascending once it reaches the opt_stopAt node.
it('closest should stop search at opt_stopAt', () => {
  const cbSpy = sandbox.spy();
  const cb = el => {
    cbSpy();
    return el.tagName == 'DIV';
  };
  const element = document.createElement('div');
  const child = document.createElement('p');
  const grandchild = document.createElement('img');
  child.appendChild(grandchild);
  element.appendChild(child);
  // Unbounded search reaches the matching <div> after 3 predicate calls.
  expect(dom.closest(grandchild, cb)).to.equal(element);
  expect(cbSpy).to.be.calledThrice;
  // Bounded search gives up at `child`: only one additional predicate call.
  expect(dom.closest(grandchild, cb, child)).to.be.null;
  expect(cbSpy).to.have.callCount(4);
});
// All three closest variants return the starting element when it matches.
it('closest should find first match', () => {
  const parent = document.createElement('parent');
  const element = document.createElement('element');
  parent.appendChild(element);
  const child = document.createElement('child');
  element.appendChild(child);
  expect(dom.closest(child, e => e.tagName == 'CHILD')).to.equal(child);
  expect(dom.closestNode(child, e => e.tagName == 'CHILD')).to.equal(child);
  expect(dom.closestByTag(child, 'child')).to.equal(child);
  expect(dom.closest(child, e => e.tagName == 'ELEMENT')).to.equal(element);
  expect(dom.closestNode(child, e => e.tagName == 'ELEMENT'))
      .to.equal(element);
  expect(dom.closestByTag(child, 'element')).to.equal(element);
  expect(dom.closest(child, e => e.tagName == 'PARENT')).to.equal(parent);
  expect(dom.closestNode(child, e => e.tagName == 'PARENT')).to.equal(parent);
  expect(dom.closestByTag(child, 'parent')).to.equal(parent);
});
// closestNode() also traverses non-element nodes (text, fragments).
it('closestNode should find nodes as well as elements', () => {
  const fragment = document.createDocumentFragment();
  const element = document.createElement('div');
  fragment.appendChild(element);
  const text = document.createTextNode('abc');
  element.appendChild(text);
  expect(dom.closestNode(text, () => true)).to.equal(text);
  // nodeType 1 = ELEMENT_NODE, 11 = DOCUMENT_FRAGMENT_NODE.
  expect(dom.closestNode(text, n => n.nodeType == 1)).to.equal(element);
  expect(dom.closestNode(text, n => n.nodeType == 11)).to.equal(fragment);
});
// closestBySelector() accepts tag, class, and id selectors alike.
it('closestBySelector should find first match', () => {
  const parent = document.createElement('parent');
  parent.className = 'parent';
  parent.id = 'parent';
  const element = document.createElement('element');
  element.id = 'element';
  element.className = 'element';
  parent.appendChild(element);
  const child = document.createElement('child');
  child.id = 'child';
  child.className = 'child';
  element.appendChild(child);
  expect(dom.closestBySelector(child, 'child')).to.equal(child);
  expect(dom.closestBySelector(child, '.child')).to.equal(child);
  expect(dom.closestBySelector(child, '#child')).to.equal(child);
  expect(dom.closestBySelector(child, 'element')).to.equal(element);
  expect(dom.closestBySelector(child, '.element')).to.equal(element);
  expect(dom.closestBySelector(child, '#element')).to.equal(element);
  expect(dom.closestBySelector(child, 'parent')).to.equal(parent);
  expect(dom.closestBySelector(child, '.parent')).to.equal(parent);
  expect(dom.closestBySelector(child, '#parent')).to.equal(parent);
});
// elementByTag() is case-insensitive and returns the first match.
it('elementByTag should find first match', () => {
  const parent = document.createElement('parent');
  const element1 = document.createElement('element');
  parent.appendChild(element1);
  const element2 = document.createElement('element');
  parent.appendChild(element2);
  expect(dom.elementByTag(parent, 'element')).to.equal(element1);
  expect(dom.elementByTag(parent, 'ELEMENT')).to.equal(element1);
});
// childElement() returns the first direct child satisfying the predicate.
it('childElement should find first match', () => {
  const parent = document.createElement('parent');
  const element1 = document.createElement('element1');
  parent.appendChild(element1);
  const element2 = document.createElement('element2');
  parent.appendChild(element2);
  expect(dom.childElement(parent, () => true)).to.equal(element1);
  expect(dom.childElement(parent, e => e.tagName == 'ELEMENT1'))
      .to.equal(element1);
  expect(dom.childElement(parent, e => e.tagName == 'ELEMENT2'))
      .to.equal(element2);
  expect(dom.childElement(parent, e => e.tagName == 'ELEMENT3'))
      .to.be.null;
});
// childElements() returns every direct child satisfying the predicate.
it('childElements should find all matches', () => {
  const parent = document.createElement('parent');
  const element1 = document.createElement('element1');
  parent.appendChild(element1);
  const element2 = document.createElement('element2');
  parent.appendChild(element2);
  expect(dom.childElements(parent, () => true).length).to.equal(2);
  expect(dom.childElements(parent, e => e.tagName == 'ELEMENT1').length)
      .to.equal(1);
  expect(dom.childElements(parent, e => e.tagName == 'ELEMENT2').length)
      .to.equal(1);
  expect(dom.childElements(parent, e => e.tagName == 'ELEMENT3').length)
      .to.be.equal(0);
});
// childNodes() matches any node type among direct children, not just elements.
it('childNodes should find all matches', () => {
  const parent = document.createElement('parent');
  parent.appendChild(document.createTextNode('text1'));
  parent.appendChild(document.createTextNode('text2'));
  parent.appendChild(document.createElement('element'));
  expect(dom.childNodes(parent, () => true).length).to.equal(3);
  expect(dom.childNodes(parent, node => node.textContent == 'text1').length)
      .to.equal(1);
  expect(dom.childNodes(parent, node => node.textContent == 'text2').length)
      .to.equal(1);
  expect(dom.childNodes(parent, node => node.textContent == 'text3').length)
      .to.equal(0);
  expect(dom.childNodes(parent, node => node.tagName == 'ELEMENT').length)
      .to.equal(1);
  expect(dom.childNodes(parent, node => node.tagName == 'ELEMENT2').length)
      .to.equal(0);
});
// Shared body for the native and polyfill childElementByTag tests below.
function testChildElementByTag() {
  const parent = document.createElement('parent');
  const element1 = document.createElement('element1');
  parent.appendChild(element1);
  const element2 = document.createElement('element2');
  parent.appendChild(element2);
  const element3 = document.createElement('element3');
  element1.appendChild(element3);
  expect(dom.childElementByTag(parent, 'element1')).to.equal(element1);
  expect(dom.childElementByTag(parent, 'element2')).to.equal(element2);
  // Grandchildren must not match: only direct children are considered.
  expect(dom.childElementByTag(parent, 'element3')).to.be.null;
  expect(dom.childElementByTag(parent, 'element4')).to.be.null;
}
it('childElementByTag should find first match', testChildElementByTag);
// Same checks with native :scope selector support disabled.
it('childElementByTag should find first match (polyfill)', () => {
  dom.setScopeSelectorSupportedForTesting(false);
  testChildElementByTag();
});
// Shared body for the native and polyfill childElementsByTag tests below.
function testChildElementsByTag() {
  const parent = document.createElement('parent');
  const element1 = document.createElement('element1');
  parent.appendChild(element1);
  const element2 = document.createElement('element23');
  parent.appendChild(element2);
  const element3 = document.createElement('element23');
  parent.appendChild(element3);
  expect(toArray(dom.childElementsByTag(parent, 'element1')))
      .to.deep.equal([element1]);
  // Multiple children with the same tag are all returned, in order.
  expect(toArray(dom.childElementsByTag(parent, 'element23')))
      .to.deep.equal([element2, element3]);
  expect(toArray(dom.childElementsByTag(parent, 'element3')))
      .to.deep.equal([]);
}
it('childElementsByTag should find first match', testChildElementsByTag);
// Same checks with native :scope selector support disabled.
it('childElementsByTag should find first match (polyfill)', () => {
  dom.setScopeSelectorSupportedForTesting(false);
  testChildElementsByTag();
});
// Shared body for the native and polyfill childElementByAttr tests below.
function testChildElementByAttr() {
  const parent = document.createElement('parent');
  const element1 = document.createElement('element1');
  element1.setAttribute('attr1', '1');
  element1.setAttribute('attr12', '1');
  parent.appendChild(element1);
  const element2 = document.createElement('element2');
  element2.setAttribute('attr2', '2');
  element2.setAttribute('attr12', '2');
  parent.appendChild(element2);
  const element3 = document.createElement('element2');
  element3.setAttribute('on-child', '');
  element2.appendChild(element3);
  expect(dom.childElementByAttr(parent, 'attr1')).to.equal(element1);
  expect(dom.childElementByAttr(parent, 'attr2')).to.equal(element2);
  // When several children carry the attribute, the first one wins.
  expect(dom.childElementByAttr(parent, 'attr12')).to.equal(element1);
  expect(dom.childElementByAttr(parent, 'attr3')).to.be.null;
  // Attributes on grandchildren are not considered.
  expect(dom.childElementByAttr(parent, 'on-child')).to.be.null;
}
it('childElementByAttr should find first match', testChildElementByAttr);
// Re-runs the childElementByAttr checks with native :scope selector support
// disabled, exercising the polyfill code path.
// Fix: the test title duplicated the native test's title exactly, making
// failure reports ambiguous; suffixed with "(polyfill)" to match the other
// polyfill variants in this file.
it('childElementByAttr should find first match (polyfill)', () => {
  dom.setScopeSelectorSupportedForTesting(false);
  testChildElementByAttr();
});
// Shared body for the native and polyfill childElementsByAttr tests below.
function testChildElementsByAttr() {
  const parent = document.createElement('parent');
  const element1 = document.createElement('element1');
  element1.setAttribute('attr1', '1');
  element1.setAttribute('attr12', '1');
  parent.appendChild(element1);
  const element2 = document.createElement('element2');
  element2.setAttribute('attr2', '2');
  element2.setAttribute('attr12', '2');
  parent.appendChild(element2);
  const element3 = document.createElement('element2');
  element3.setAttribute('on-child', '');
  element2.appendChild(element3);
  expect(dom.childElementsByAttr(parent, 'attr1').length).to.equal(1);
  expect(dom.childElementsByAttr(parent, 'attr2').length).to.equal(1);
  // Both direct children carry attr12.
  expect(dom.childElementsByAttr(parent, 'attr12').length).to.equal(2);
  expect(dom.childElementsByAttr(parent, 'attr3').length).to.be.equal(0);
  // Attributes on grandchildren are not considered.
  expect(dom.childElementsByAttr(parent, 'on-child').length).to.be.equal(0);
}
it('childElementsByAttr should find all matches', testChildElementsByAttr);
// Re-runs the childElementsByAttr checks with native :scope selector support
// disabled, exercising the polyfill code path.
// Fix: the test title duplicated the native test's title exactly, making
// failure reports ambiguous; suffixed with "(polyfill)" to match the other
// polyfill variants in this file.
it('childElementsByAttr should find all matches (polyfill)', () => {
  dom.setScopeSelectorSupportedForTesting(false);
  testChildElementsByAttr();
});
// lastChildElementByAttr() scans direct children in reverse document order.
it('lastChildElementByAttr should find last match', () => {
  const parent = document.createElement('parent');
  const element1 = document.createElement('element1');
  element1.setAttribute('attr1', '1');
  element1.setAttribute('attr12', '1');
  parent.appendChild(element1);
  const element2 = document.createElement('element2');
  element2.setAttribute('attr2', '2');
  element2.setAttribute('attr12', '2');
  parent.appendChild(element2);
  const element3 = document.createElement('element2');
  element3.setAttribute('on-child', '');
  element2.appendChild(element3);
  expect(dom.lastChildElementByAttr(parent, 'attr1')).to.equal(element1);
  expect(dom.lastChildElementByAttr(parent, 'attr2')).to.equal(element2);
  // Both children have attr12; the later one is returned.
  expect(dom.lastChildElementByAttr(parent, 'attr12')).to.equal(element2);
  expect(dom.lastChildElementByAttr(parent, 'attr3')).to.be.null;
  expect(dom.lastChildElementByAttr(parent, 'on-child')).to.be.null;
});
// ancestorElements() collects every ancestor matching the predicate.
it('ancestorElements should find all matches', () => {
  const parent = document.createElement('parent');
  const element1 = document.createElement('element1');
  parent.appendChild(element1);
  const element2 = document.createElement('element2');
  element1.appendChild(element2);
  expect(dom.ancestorElements(element2, () => true).length).to.equal(2);
  expect(dom.ancestorElements(element2, e => e.tagName == 'ELEMENT1').length)
      .to.equal(1);
  expect(dom.ancestorElements(element1, e => e.tagName == 'PARENT').length)
      .to.equal(1);
  expect(dom.ancestorElements(parent, e => e.tagName == 'ELEMENT3').length)
      .to.be.equal(0);
});
// Tag-name convenience wrapper over ancestorElements().
it('ancestorElementsByTag should find all matches', () => {
  const parent = document.createElement('parent');
  const element1 = document.createElement('element1');
  parent.appendChild(element1);
  const element2 = document.createElement('element2');
  element1.appendChild(element2);
  expect(dom.ancestorElementsByTag(element2, 'ELEMENT1').length)
      .to.equal(1);
  expect(dom.ancestorElementsByTag(element1, 'PARENT').length)
      .to.equal(1);
  expect(dom.ancestorElementsByTag(element2, 'ELEMENT3').length)
      .to.be.equal(0);
});
// iterateCursor() visits every entry of a NodeList exactly once.
it('iterateCursor should loop through every element in a NodeList', () => {
  const fragment = document.createDocumentFragment();
  [0, 1, 2].forEach(() => fragment.appendChild(document.createElement('i')));
  const iSpy = sandbox.spy();
  dom.iterateCursor(fragment.querySelectorAll('i'), iSpy);
  expect(iSpy).to.be.calledThrice;
  const bSpy = sandbox.spy();
  dom.iterateCursor(fragment.querySelectorAll('b'), bSpy);
  expect(bSpy).to.have.not.been.called;
});
// Null entries are passed through to the callback, not skipped.
it('iterateCursor should allow null elements in a list', () => {
  const list = ['wow', null, 'cool'];
  const spy = sandbox.spy();
  dom.iterateCursor(list, spy);
  expect(spy).to.be.calledThrice;
});
// Shared body for the native and polyfill scopedQuerySelector tests below.
function testScopedQuerySelector() {
  const grandparent = document.createElement('div');
  const parent = document.createElement('div');
  grandparent.appendChild(parent);
  const element1 = document.createElement('div');
  parent.appendChild(element1);
  expect(dom.scopedQuerySelector(parent, 'div')).to.equal(element1);
  expect(dom.scopedQuerySelector(grandparent, 'div div')).to.equal(element1);
}
it('scopedQuerySelector should find first match', testScopedQuerySelector);
it('scopedQuerySelector should find first match (polyfill)', () => {
  dom.setScopeSelectorSupportedForTesting(false);
  testScopedQuerySelector();
});
// Shared body for the native and polyfill scopedQuerySelectorAll tests below.
function testScopedQuerySelectorAll() {
  const grandparent = document.createElement('div');
  const parent = document.createElement('div');
  grandparent.appendChild(parent);
  const element1 = document.createElement('div');
  parent.appendChild(element1);
  const element2 = document.createElement('div');
  parent.appendChild(element2);
  expect(toArray(dom.scopedQuerySelectorAll(parent, 'div')))
      .to.deep.equal([element1, element2]);
  expect(toArray(dom.scopedQuerySelectorAll(grandparent, 'div div')))
      .to.deep.equal([element1, element2]);
}
it('scopedQuerySelectorAll should find all matches',
    testScopedQuerySelectorAll);
it('scopedQuerySelectorAll should find all matches (polyfill)', () => {
  dom.setScopeSelectorSupportedForTesting(false);
  testScopedQuerySelectorAll();
});
// Tests for dom.waitForChild / dom.waitForBodyPromise: waiting until a DOM
// condition holds, via MutationObserver when available or polling otherwise.
describe('waitFor', () => {
  let parent;
  let child;
  beforeEach(() => {
    parent = document.createElement('div');
    child = document.createElement('div');
  });
  // Condition used by the first two tests.
  function contains() {
    return parent.contains(child);
  }
  it('should immediately return if child is available', () => {
    parent.appendChild(child);
    const spy = sandbox.spy();
    dom.waitForChild(parent, contains, spy);
    expect(spy).to.be.calledOnce;
  });
  it('should wait until child is available', () => {
    const spy = sandbox.spy();
    dom.waitForChild(parent, contains, spy);
    expect(spy).to.have.not.been.called;
    // Poll until the callback fires after the child is attached.
    return new Promise(resolve => {
      const interval = setInterval(() => {
        if (spy.callCount > 0) {
          clearInterval(interval);
          resolve();
        }
      }, 10);
      parent.appendChild(child);
    }).then(() => {
      expect(spy).to.be.calledOnce;
    });
  });
  it('should prefer MutationObserver and disconnect when done', () => {
    // Fake window exposing a stub MutationObserver so the test can drive
    // the mutation callback manually.
    let mutationCallback;
    const mutationObserver = {
      observe: sandbox.spy(),
      disconnect: sandbox.spy(),
    };
    const parent = {
      ownerDocument: {
        defaultView: {
          MutationObserver: callback => {
            mutationCallback = callback;
            return mutationObserver;
          },
        },
      },
    };
    let checkFuncValue = false;
    const checkFunc = () => checkFuncValue;
    const spy = sandbox.spy();
    dom.waitForChild(parent, checkFunc, spy);
    expect(spy).to.have.not.been.called;
    expect(mutationObserver.observe).to.be.calledOnce;
    expect(mutationObserver.observe.firstCall.args[0]).to.equal(parent);
    expect(mutationObserver.observe.firstCall.args[1])
        .to.deep.equal({childList: true});
    expect(mutationCallback).to.exist;
    // False callback.
    mutationCallback();
    expect(spy).to.have.not.been.called;
    expect(mutationObserver.disconnect).to.have.not.been.called;
    // True callback.
    checkFuncValue = true;
    mutationCallback();
    expect(spy).to.be.calledOnce;
    expect(mutationObserver.disconnect).to.be.calledOnce;
  });
  it('should fallback to polling without MutationObserver', () => {
    // Fake window with no MutationObserver; setInterval is stubbed so the
    // poll tick can be triggered synchronously.
    let intervalCallback;
    const win = {
      setInterval: callback => {
        intervalCallback = callback;
        return 123;
      },
      clearInterval: sandbox.spy(),
    };
    const parent = {
      ownerDocument: {
        defaultView: win,
      },
    };
    let checkFuncValue = false;
    const checkFunc = () => checkFuncValue;
    const spy = sandbox.spy();
    dom.waitForChild(parent, checkFunc, spy);
    expect(spy).to.have.not.been.called;
    expect(intervalCallback).to.exist;
    // False callback.
    intervalCallback();
    expect(spy).to.have.not.been.called;
    expect(win.clearInterval).to.have.not.been.called;
    // True callback.
    checkFuncValue = true;
    intervalCallback();
    expect(spy).to.be.calledOnce;
    expect(win.clearInterval).to.be.calledOnce;
  });
  it('should wait for body', () => {
    return dom.waitForBodyPromise(document).then(() => {
      expect(document.body).to.exist;
    });
  });
});
// data-param-* attributes are collected into a camelCased key/value map.
describe('getDataParamsFromAttributes', () => {
  it('should return key-value for data-param- attributes', () => {
    const element = document.createElement('element');
    element.setAttribute('attr1', '1');
    element.setAttribute('data-param-hello', '2');
    element.setAttribute('data-param-from-the-other-side', '3');
    const params = dom.getDataParamsFromAttributes(element);
    expect(params.hello).to.be.equal('2');
    // Dashed names are camelCased.
    expect(params.fromTheOtherSide).to.be.equal('3');
    // Attributes without the data-param- prefix are ignored.
    expect(params.attr1).to.be.undefined;
  });
  it('should return key-value for custom data attributes', () => {
    const element = document.createElement('element');
    element.setAttribute('data-vars-event-name', 'click');
    // A custom prefix pattern selects data-vars-* instead.
    const params = dom.getDataParamsFromAttributes(element, null,
        /^vars(.+)/);
    expect(params.eventName).to.be.equal('click');
  });
});
// hasNextNodeInDocumentOrder(): whether the node or any ancestor (up to an
// optional stop node) has a nextSibling.
describe('hasNextNodeInDocumentOrder', () => {
  it('should return true when the element has a nextSibling', () => {
    const element = document.createElement('div');
    const parent = document.createElement('div');
    const sibling = document.createElement('div');
    expect(dom.hasNextNodeInDocumentOrder(element)).to.be.false;
    parent.appendChild(element);
    expect(dom.hasNextNodeInDocumentOrder(element)).to.be.false;
    parent.appendChild(sibling);
    expect(dom.hasNextNodeInDocumentOrder(element)).to.be.true;
  });
  it('should return true when element ancestor has nextSibling', () => {
    const element = document.createElement('div');
    const parent = document.createElement('div');
    const uncle = document.createElement('div');
    const ancestor = document.createElement('div');
    expect(dom.hasNextNodeInDocumentOrder(element)).to.be.false;
    ancestor.appendChild(parent);
    ancestor.appendChild(uncle);
    parent.appendChild(element);
    expect(dom.hasNextNodeInDocumentOrder(element)).to.be.true;
  });
  it('should return false when ancestor with sibling with stop node', () => {
    const element = document.createElement('div');
    const parent = document.createElement('div');
    const uncle = document.createElement('div');
    const ancestor = document.createElement('div');
    ancestor.appendChild(parent);
    ancestor.appendChild(uncle);
    parent.appendChild(element);
    expect(dom.hasNextNodeInDocumentOrder(element)).to.be.true;
    // The search must not ascend past the `parent` stop node.
    expect(dom.hasNextNodeInDocumentOrder(element, parent)).to.be.false;
  });
});
// openWindowDialog(): tries window.open with the requested target/features
// and falls back to a plain '_top' navigation when the popup is blocked
// (null/undefined return or exception).
describe('openWindowDialog', () => {
  let windowApi;
  let windowMock;
  beforeEach(() => {
    windowApi = {
      open: () => {throw new Error('not mocked');},
    };
    windowMock = sandbox.mock(windowApi);
  });
  afterEach(() => {
    windowMock.verify();
  });
  it('should return on first success', () => {
    const dialog = {};
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_blank', 'width=1')
        .returns(dialog)
        .once();
    const res = dom.openWindowDialog(windowApi, 'https://example.com/',
        '_blank', 'width=1');
    expect(res).to.equal(dialog);
  });
  it('should retry on first null', () => {
    const dialog = {};
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_blank', 'width=1')
        .returns(null)
        .once();
    // Fallback drops the features string and targets '_top'.
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_top')
        .returns(dialog)
        .once();
    const res = dom.openWindowDialog(windowApi, 'https://example.com/',
        '_blank', 'width=1');
    expect(res).to.equal(dialog);
  });
  it('should retry on first undefined', () => {
    const dialog = {};
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_blank', 'width=1')
        .returns(undefined)
        .once();
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_top')
        .returns(dialog)
        .once();
    const res = dom.openWindowDialog(windowApi, 'https://example.com/',
        '_blank', 'width=1');
    expect(res).to.equal(dialog);
  });
  it('should retry on first exception', () => {
    const dialog = {};
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_blank', 'width=1')
        .throws(new Error('intentional'))
        .once();
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_top')
        .returns(dialog)
        .once();
    const res = dom.openWindowDialog(windowApi, 'https://example.com/',
        '_blank', 'width=1');
    expect(res).to.equal(dialog);
  });
  it('should return the final result', () => {
    // Both attempts fail: the fallback's result is returned as-is.
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_blank', 'width=1')
        .returns(undefined)
        .once();
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_top')
        .returns(null)
        .once();
    const res = dom.openWindowDialog(windowApi, 'https://example.com/',
        '_blank', 'width=1');
    expect(res).to.be.null;
  });
  it('should return the final exception', () => {
    // Both attempts throw: the second exception propagates.
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_blank', 'width=1')
        .throws(new Error('intentional1'))
        .once();
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_top')
        .throws(new Error('intentional2'))
        .once();
    allowConsoleError(() => { expect(() => {
      dom.openWindowDialog(windowApi, 'https://example.com/',
          '_blank', 'width=1');
    }).to.throw(/intentional2/); });
  });
  it('should retry only non-top target', () => {
    // A '_top' request that fails is not retried.
    windowMock.expects('open')
        .withExactArgs('https://example.com/', '_top', 'width=1')
        .returns(null)
        .once();
    const res = dom.openWindowDialog(windowApi, 'https://example.com/',
        '_top', 'width=1');
    expect(res).to.be.null;
  });
});
// isJsonScriptTag(): true only for <script> elements with a
// (case-insensitive) application/json type attribute.
describe('isJsonScriptTag', () => {
  it('should return true for <script type="application/json">', () => {
    const element = document.createElement('script');
    element.setAttribute('type', 'application/json');
    expect(dom.isJsonScriptTag(element)).to.be.true;
  });
  it('should return true for <script type="aPPLication/jSon">', () => {
    const element = document.createElement('script');
    element.setAttribute('type', 'aPPLication/jSon');
    expect(dom.isJsonScriptTag(element)).to.be.true;
  });
  it('should return false for <script type="text/javascript">', () => {
    const element = document.createElement('script');
    element.setAttribute('type', 'text/javascript');
    expect(dom.isJsonScriptTag(element)).to.be.false;
  });
  it('should return false for <div type="application/json">', () => {
    const element = document.createElement('div');
    element.setAttribute('type', 'application/json');
    expect(dom.isJsonScriptTag(element)).to.be.false;
  });
});
// escapeCssSelectorIdent(): backslash-escapes characters that are special
// in CSS selector identifiers (here the space).
describe('escapeCssSelectorIdent', () => {
  it('should escape', () => {
    expect(dom.escapeCssSelectorIdent('a b')).to.equal('a\\ b');
  });
});
// Tests for dom.escapeHtml(), which entity-escapes &, <, >, ", ', and `.
describe('escapeHtml', () => {
  it('should tolerate empty string', () => {
    expect(dom.escapeHtml('')).to.equal('');
  });
  it('should ignore non-escapes', () => {
    expect(dom.escapeHtml('abc')).to.equal('abc');
  });
  // Fixes: (1) typo "subsctitute" -> "substitute" in the test title;
  // (2) the expected string had been corrupted by HTML-entity decoding
  // (it contained raw <, ", ' and ` characters — an unterminated string
  // literal that could not even parse); restored the entity-escaped form.
  it('should substitute escapes', () => {
    expect(dom.escapeHtml('a<b>&c"d\'e\`f')).to.equal(
        'a&lt;b&gt;&amp;c&quot;d&#x27;e&#x60;f');
  });
});
// tryFocus(): focuses an element, swallowing any exception focus() throws.
describe('tryFocus', () => {
  it('should call focus on the element', () => {
    const element = {
      focus() {},
    };
    const focusSpy = sandbox.spy(element, 'focus');
    dom.tryFocus(element);
    expect(focusSpy).to.have.been.called;
  });
  it('should not throw exception if element focus throws exception', () => {
    const element = {
      focus() {
        throw new Error('Cannot focus');
      },
    };
    const focusSpy = sandbox.spy(element, 'focus');
    dom.tryFocus(element);
    expect(focusSpy).to.have.been.called;
    expect(focusSpy).to.not.throw;
  });
});
// dom.matches(): selector matching against a single element, including
// elements that were serialized into an iframe's srcdoc document.
describe('matches', () => {
  let div, img1, iframe, ampEl;
  beforeEach(() => {
    ampEl = document.createElement('amp-ad');
    ampEl.className = 'i-amphtml-element';
    ampEl.id = 'ampEl';
    iframe = document.createElement('iframe');
    div = document.createElement('div');
    div.id = 'div';
    img1 = document.createElement('amp-img');
    img1.id = 'img1';
    div.appendChild(img1);
    iframe.srcdoc = div.outerHTML;
    document.body.appendChild(ampEl);
    const loaded = loadPromise(iframe);
    ampEl.appendChild(iframe);
    return loaded;
  });
  afterEach(() => {
    document.body.removeChild(ampEl);
  });
  it('finds element by id', () => {
    expect(dom.matches(ampEl, '#ampEl')).to.be.true;
    [div, img1, iframe].map(el => {
      expect(dom.matches(el, '#ampEl')).to.be.false;
    });
  });
  it('finds element by tagname', () => {
    expect(dom.matches(div, 'div')).to.be.true;
    [ampEl, img1, iframe].map(el => {
      expect(dom.matches(el, 'div')).to.be.false;
    });
  });
});
// isEnabled(): form-control enabled state, honoring disabled <fieldset>
// ancestors and the <legend> exemption.
it('isEnabled', () => {
  expect(dom.isEnabled(document)).to.be.true;
  const a = document.createElement('button');
  expect(dom.isEnabled(a)).to.be.true;
  a.disabled = true;
  expect(dom.isEnabled(a)).to.be.false;
  a.disabled = false;
  expect(dom.isEnabled(a)).to.be.true;
  const b = document.createElement('fieldset');
  b.appendChild(a);
  expect(dom.isEnabled(a)).to.be.true;
  b.disabled = true;
  expect(dom.isEnabled(a)).to.be.false;
  b.removeChild(a);
  const c = document.createElement('legend');
  c.appendChild(a);
  b.appendChild(c);
  // Controls inside a disabled fieldset's <legend> stay enabled.
  expect(dom.isEnabled(a)).to.be.true;
});
it('templateContentClone on a <template> element (browser supports' +
    ' HTMLTemplateElement)', () => {
  const template = document.createElement('template');
  template.innerHTML = '<span>123</span><span>456<em>789</em></span>';
  const content = dom.templateContentClone(template);
  const spans = content.querySelectorAll('span');
  expect(spans.length).to.equal(2);
  expect(spans[0].innerHTML).to.equal('123');
  expect(spans[1].innerHTML).to.equal('456<em>789</em>');
});
it('templateContentClone on a <template> element (simulate a browser' +
    ' that does not support HTMLTemplateElement)', () => {
  // A plain <div> stands in for a template without a .content property.
  const template = document.createElement('div');
  template.innerHTML = '<span>123</span><span>456<em>789</em></span>';
  const content = dom.templateContentClone(template);
  const spans = content.querySelectorAll('span');
  expect(spans.length).to.equal(2);
  expect(spans[0].innerHTML).to.equal('123');
  expect(spans[1].innerHTML).to.equal('456<em>789</em>');
});
});
// Real-window tests for whenUpgradedToCustomElement(): resolves once an AMP
// element's custom-element class has been defined and the element upgraded.
describes.realWin('DOM', {
  amp: { /* amp spec */
    ampdoc: 'single',
  },
}, env => {
  let doc;
  class TestElement extends BaseElement {}
  describe('whenUpgradeToCustomElement function', () => {
    beforeEach(() => {
      doc = env.win.document;
    });
    it('should not continue if element is not AMP element', () => {
      const element = doc.createElement('div');
      allowConsoleError(() => {
        expect(() => dom.whenUpgradedToCustomElement(element)).to.throw(
            'element is not AmpElement');
      });
    });
    it('should resolve if element has already upgrade', () => {
      const element = doc.createElement('amp-img');
      doc.body.appendChild(element);
      return dom.whenUpgradedToCustomElement(element).then(element => {
        expect(element.whenBuilt).to.exist;
      });
    });
    it('should resolve when element upgrade', () => {
      const element = doc.createElement('amp-test');
      doc.body.appendChild(element);
      // Define the custom element after a delay to exercise the wait path.
      env.win.setTimeout(() => {
        env.win.customElements.define('amp-test', createAmpElementForTesting(
            env.win, 'amp-test', TestElement));
      }, 100);
      return dom.whenUpgradedToCustomElement(element).then(element => {
        expect(element.whenBuilt).to.exist;
      });
    });
  });
});
| chaveznvg/amphtml | test/functional/test-dom.js | JavaScript | apache-2.0 | 35,505 |
// Directed graph of nodes keyed by id, plus a flat list of every edge for
// layout bookkeeping. Node objects are expected to carry _inEdges/_outEdges
// arrays and the _edgeKeys/_edgeSources/_edgeTargets maps used below.
function Graph() {
  this._nodes = {};  // id -> node object
  this._edges = [];  // every Edge currently in the graph
};
// Registers (or replaces) the node stored under `id`.
Graph.prototype.setNode = function(id, value) {
  this._nodes[id] = value;
};
// Removes the node stored under `id` from the graph: every incoming edge is
// detached from its source node's out-edge list, every outgoing edge from
// its target node's in-edge list, and all of those edges are dropped from
// the graph's global edge list. The node's own edge arrays are not cleaned
// up because the node itself is deleted.
//
// Fix: the loops previously used for-in over arrays, which iterates string
// keys and any inherited enumerable properties; replaced with plain index
// loops (behavior otherwise unchanged — an unknown id still throws a
// TypeError, as before).
Graph.prototype.removeNode = function(id) {
  var node = this._nodes[id];

  // Detach incoming edges from their source nodes and the graph.
  for (var i = 0; i < node._inEdges.length; i++) {
    var inEdge = node._inEdges[i];
    var sourceNode = inEdge.source.node;
    sourceNode._outEdges.splice(sourceNode._outEdges.indexOf(inEdge), 1);
    this._edges.splice(this._edges.indexOf(inEdge), 1);
  }

  // Detach outgoing edges from their target nodes and the graph.
  for (var j = 0; j < node._outEdges.length; j++) {
    var outEdge = node._outEdges[j];
    var targetNode = outEdge.target.node;
    targetNode._inEdges.splice(targetNode._inEdges.indexOf(outEdge), 1);
    this._edges.splice(this._edges.indexOf(outEdge), 1);
  }

  delete this._nodes[id];
}
// Connects the nodes stored under sourceId/targetId with a new Edge tagged
// with `key`, registering the key on both endpoints and creating the shared
// per-key EdgeSource/EdgeTarget endpoints on demand.
//
// Fixes: (1) unknown ids previously threw bare strings, which carry no
// stack trace and are not `instanceof Error` — now throws Error objects;
// (2) for-in over the in-edge array replaced with an index loop.
//
// NOTE(review): the duplicate check ignores `key` — a second edge between
// the same pair of nodes is skipped even if its key differs. This matches
// the original behavior; confirm it is intentional before changing.
Graph.prototype.addEdge = function(sourceId, targetId, key) {
  var source = this._nodes[sourceId];
  if (source === undefined) {
    throw new Error("source node does not exist: " + sourceId);
  }

  var target = this._nodes[targetId];
  if (target === undefined) {
    throw new Error("target node does not exist: " + targetId);
  }

  for (var i = 0; i < target._inEdges.length; i++) {
    if (target._inEdges[i].source.node.id == source.id) {
      // edge already exists; skip
      return;
    }
  }

  // Record the key on both endpoints (each key listed at most once).
  if (source._edgeKeys.indexOf(key) == -1) {
    source._edgeKeys.push(key);
  }
  if (target._edgeKeys.indexOf(key) == -1) {
    target._edgeKeys.push(key);
  }

  // Reuse (or lazily create) the per-key connection points.
  var edgeSource = source._edgeSources[key];
  if (!edgeSource) {
    edgeSource = new EdgeSource(source, key);
    source._edgeSources[key] = edgeSource;
  }

  var edgeTarget = target._edgeTargets[key];
  if (!edgeTarget) {
    edgeTarget = new EdgeTarget(target, key);
    target._edgeTargets[key] = edgeTarget;
  }

  var edge = new Edge(edgeSource, edgeTarget, key);
  target._inEdges.push(edge);
  source._outEdges.push(edge);
  this._edges.push(edge);
}
// Detaches `edge` from both of its endpoint nodes and drops it from the
// graph's master edge list. The edge object itself is left untouched.
Graph.prototype.removeEdge = function(edge) {
  var targetEdges = edge.target.node._inEdges;
  targetEdges.splice(targetEdges.indexOf(edge), 1);

  var sourceEdges = edge.source.node._outEdges;
  sourceEdges.splice(sourceEdges.indexOf(edge), 1);

  this._edges.splice(this._edges.indexOf(edge), 1);
}
// Looks up a node by id; returns undefined when no such node exists.
Graph.prototype.node = function(id) {
  return this._nodes[id];
};
// Returns all node objects in the graph as a fresh array. Iteration order
// follows the node map's own key order, matching the original for-in loop.
Graph.prototype.nodes = function() {
  var nodesById = this._nodes;
  return Object.keys(nodesById).map(function(id) {
    return nodesById[id];
  });
};
// Returns the graph's internal edge list (not a copy — callers must not
// mutate it directly; use addEdge/removeEdge).
Graph.prototype.edges = function() {
  return this._edges;
};
// Assigns (x, y) coordinates to every node: nodes are bucketed into columns
// by their column() index, columns are placed left-to-right with a fixed
// 50-unit gutter, each node is centered horizontally within its column, and
// each column is then sorted and vertically laid out (twice, to detangle).
Graph.prototype.layout = function() {
  var columns = [];

  // Bucket nodes into Column objects by their column() index.
  for (var i in this._nodes) {
    var node = this._nodes[i];

    var columnIdx = node.column();
    var column = columns[columnIdx];
    if (!column) {
      column = new Column(columnIdx);
      columns[columnIdx] = column;
    }

    column.nodes.push(node);
  }

  for (var i in this._nodes) {
    var node = this._nodes[i];

    var column = node.column();

    // Horizontal offset: total width (plus gutters) of all columns to the
    // left. NOTE(review): `c` is a string key here, so `c < column` relies
    // on string-vs-number coercion — confirm column indices stay numeric.
    var columnOffset = 0;
    for (var c in columns) {
      if (c < column) {
        columnOffset += columns[c].width() + 50;
      }
    }

    // Center the node within its column's width.
    node._position.x = columnOffset + ((columns[column].width() - node.width()) / 2);

    // Order this node's edge keys: keys with an edge target sort first, by
    // the rank where that target first appears; ties fall back to name.
    node._edgeKeys.sort(function(a, b) {
      var targetA = node._edgeTargets[a];
      var targetB = node._edgeTargets[b];

      if (targetA && !targetB) {
        return -1;
      } else if (!targetA && targetB) {
        return 1;
      } else if (targetA && targetB) {
        var introRankA = targetA.rankOfFirstAppearance();
        var introRankB = targetB.rankOfFirstAppearance();
        if(introRankA < introRankB) {
          return -1;
        } else if (introRankA > introRankB) {
          return 1;
        }
      }

      return compareNames(a, b);
    });
  }

  // first pass: initial rough sorting and layout
  // second pass: detangle now that we know downstream positioning
  for (var repeat = 0; repeat < 2; repeat++) {
    for (var c in columns) {
      columns[c].sortNodes();
      columns[c].layout();
    }
  }
}
// Computes and caches a rank (column index) for every node.
//
// Forward pass: nodes with no inputs start at rank 0, and each edge pushes
// its target at least one rank past its source. Backward pass: starting
// from the sink nodes, upstream nodes are pulled as far right as their
// downstream neighbors allow, so edges do not needlessly span ranks.
//
// NOTE(review): the breadth-first walks assume an acyclic graph — a cycle
// would keep re-adding its nodes and never terminate. Confirm callers only
// build DAGs.
Graph.prototype.computeRanks = function() {
  var forwardNodes = {};

  // Seed the forward walk with the source nodes (no incoming edges).
  for (var n in this._nodes) {
    var node = this._nodes[n];

    if (node._inEdges.length == 0) {
      node._cachedRank = 0;
      forwardNodes[node.id] = node;
    }
  }

  var bottomNodes = {};

  // walk over all nodes from left to right and determine their rank
  while (!objectIsEmpty(forwardNodes)) {
    var nextNodes = {};

    for (var n in forwardNodes) {
      var node = forwardNodes[n];

      // Sinks (no outgoing edges) seed the backward pass below.
      if (node._outEdges.length == 0) {
        bottomNodes[node.id] = node;
      }

      for (var e in node._outEdges) {
        var nextNode = node._outEdges[e].target.node;

        // careful: two edges may go to the same node but be from different
        // ranks, so always destination nodes as far to the right as possible
        nextNode._cachedRank = Math.max(nextNode._cachedRank, node._cachedRank + 1);

        nextNodes[nextNode.id] = nextNode;
      }
    }

    forwardNodes = nextNodes;
  }

  var backwardNodes = bottomNodes;

  // walk over all nodes from right to left and bring upstream nodes as far
  // to the right as possible, so that edges aren't passing through ranks
  while (!objectIsEmpty(backwardNodes)) {
    var prevNodes = {};

    for (var n in backwardNodes) {
      var node = backwardNodes[n];

      // for all upstream nodes, determine rightmost possible column by taking
      // the minimum rank of all downstream nodes and placing it in the rank
      // immediately preceding it
      for (var e in node._inEdges) {
        var prevNode = node._inEdges[e].source.node;

        var rightmostRank = prevNode.rightmostPossibleRank();
        if (rightmostRank !== undefined) {
          prevNode._cachedRank = rightmostRank;
        }

        prevNodes[prevNode.id] = prevNode;
      }
    }

    backwardNodes = prevNodes;
  }
};
// Merges nodes that share both a rank and an `equivalentBy` value: one
// representative survives, every other node's edges are re-pointed at it
// (addEdge de-duplicates), and the duplicates are removed from the graph.
//
// Fix: `byEqv` in the bucketing loop was assigned without `var`, leaking an
// accidental global variable (and a ReferenceError under strict mode).
Graph.prototype.collapseEquivalentNodes = function() {
  // rank -> (equivalentBy value -> [nodes]).
  var nodesByRank = [];

  for (var n in this._nodes) {
    var node = this._nodes[n];

    var byRank = nodesByRank[node.rank()];
    if (byRank === undefined) {
      byRank = {};
      nodesByRank[node.rank()] = byRank;
    }

    // Nodes without an equivalence key are never collapsed.
    if (node.equivalentBy === undefined) {
      continue;
    }

    var byEqv = byRank[node.equivalentBy];
    if (byEqv === undefined) {
      byEqv = [];
      byRank[node.equivalentBy] = byEqv;
    }

    byEqv.push(node);
  }

  for (var r in nodesByRank) {
    var byEqv = nodesByRank[r];
    for (var e in byEqv) {
      var nodes = byEqv[e];
      if (nodes.length == 1) {
        continue;
      }

      // Keep the first node; fold every other node's edges into it.
      var chosenOne = nodes[0];
      for (var i = 1; i < nodes.length; i++) {
        var loser = nodes[i];

        for (var ie in loser._inEdges) {
          var edge = loser._inEdges[ie];
          this.addEdge(edge.source.node.id, chosenOne.id, edge.key);
        }

        for (var oe in loser._outEdges) {
          var edge = loser._outEdges[oe];
          this.addEdge(chosenOne.id, edge.target.node.id, edge.key);
        }

        this.removeNode(loser.id);
      }
    }
  }
}
// Splits edges spanning more than one rank by inserting spacing nodes at
// each intermediate rank, so rendered edges only ever connect adjacent
// columns; the original long edges are removed afterwards.
Graph.prototype.addSpacingNodes = function() {
  var edgesToRemove = [];

  for (var e in this._edges) {
    var edge = this._edges[e];
    // Number of ranks this edge spans.
    var delta = edge.target.node.rank() - edge.source.node.rank();
    if (delta > 1) {
      var upstreamNode = edge.source.node;
      var downstreamNode = edge.target.node;

      // The spacer copies whichever endpoint is marked repeatable.
      var repeatedNode;
      if (edge.source.node.repeatable) {
        repeatedNode = upstreamNode;
      } else {
        repeatedNode = downstreamNode;
      }

      for (var i = 0; i < (delta - 1); i++) {
        // Spacer ids derive from the source id, so parallel long edges from
        // the same source share their spacing nodes.
        var spacerID = edge.source.node.id + "-spacing-" + i;

        var spacingNode = this.node(spacerID);
        if (!spacingNode) {
          spacingNode = repeatedNode.copy();
          spacingNode.id = spacerID;
          spacingNode._cachedRank = upstreamNode.rank() + 1;
          this.setNode(spacingNode.id, spacingNode);
        }

        // Chain: previous hop -> spacer; the spacer becomes the new hop.
        this.addEdge(upstreamNode.id, spacingNode.id, edge.key);
        upstreamNode = spacingNode;
      }

      // Final hop from the last spacer to the real target.
      this.addEdge(upstreamNode.id, edge.target.node.id, edge.key);

      edgesToRemove.push(edge);
    }
  }

  // Remove after the scan so this._edges is not mutated mid-iteration.
  for (var e in edgesToRemove) {
    this.removeEdge(edgesToRemove[e]);
  }
}
// A vertical column holding all nodes that share one rank index.
function Column(idx) {
  this.index = idx;
  this.nodes = [];

  this._spacing = 10;  // vertical gap between stacked nodes
}
// Orders this column's nodes with a chain of crossing-reduction heuristics
// (proximity to upstream sources / downstream targets, pass-through edges,
// out-edge count, then name). Returns whether the order changed.
//
// NOTE(review): the comparator is not fully antisymmetric — e.g. the
// `aPassedThrough && !bPassedThrough` branch returns -1 with no mirrored
// early return, and the pass-through/out-edge checks interleave — so the
// result can depend on the engine's sort algorithm. Confirm before relying
// on identical ordering across browsers.
Column.prototype.sortNodes = function() {
  var nodes = this.nodes;

  // Snapshot for detecting whether sorting changed anything.
  var before = this.nodes.slice();

  nodes.sort(function(a, b) {
    if (a._inEdges.length && b._inEdges.length) {
      // position nodes closer to their upstream sources
      var compare = a.highestUpstreamSource() - b.highestUpstreamSource();
      if (compare != 0) {
        return compare;
      }
    }

    if (a._outEdges.length && b._outEdges.length) {
      // position nodes closer to their downstream targets
      var compare = a.highestDownstreamTarget() - b.highestDownstreamTarget();
      if (compare != 0) {
        return compare;
      }
    }

    if (a._inEdges.length && b._outEdges.length) {
      // position nodes closer to their sources than others that are just
      // closer to their destinations
      var compare = a.highestUpstreamSource() - b.highestDownstreamTarget();
      if (compare != 0) {
        return compare;
      }
    }

    if (a._outEdges.length && b._inEdges.length) {
      // position nodes closer to their sources than others that are just
      // closer to their destinations
      var compare = a.highestDownstreamTarget() - b.highestUpstreamSource();
      if (compare != 0) {
        return compare;
      }
    }

    // place nodes that threaded through upstream nodes higher
    var aPassedThrough = a.passedThroughAnyPreviousNode();
    var bPassedThrough = b.passedThroughAnyPreviousNode();
    if (aPassedThrough && !bPassedThrough) {
      return -1;
    }

    // place nodes that thread through downstream nodes higher
    var aPassesThrough = a.passesThroughAnyNextNode();
    var bPassesThrough = b.passesThroughAnyNextNode();
    if (aPassesThrough && !bPassesThrough) {
      return -1;
    }

    // place nodes with more out edges higher
    var byOutEdges = b._outEdges.length - a._outEdges.length;
    if (byOutEdges != 0) {
      return byOutEdges;
    }

    if (!aPassesThrough && bPassesThrough) {
      return 1;
    }

    // both are of equivalent; compare names so it's at least deterministic
    a.debugMarked = true; // to aid in debugging (adds .marked css class)
    b.debugMarked = true;
    return compareNames(a.name, b.name);
  });

  // Report whether any node moved.
  var changed = false;

  for (var c in nodes) {
    if (nodes[c] !== before[c]) {
      changed = true;
    }
  }

  return changed;
}
// Flag every node in this column as having been column-marked.
Column.prototype.mark = function() {
  for (var i = 0; i < this.nodes.length; i++) {
    this.nodes[i].columnMarked = true;
  }
}
// The column's rendered width: that of its widest node (0 when empty).
Column.prototype.width = function() {
  var widest = 0;
  for (var i = 0; i < this.nodes.length; i++) {
    widest = Math.max(widest, this.nodes[i].width());
  }
  return widest;
}
// Stack this column's nodes vertically, assigning each node's y position
// and leaving `_spacing` pixels between consecutive nodes.
Column.prototype.layout = function() {
  var offsetY = 0;
  for (var i = 0; i < this.nodes.length; i++) {
    var current = this.nodes[i];
    current._position.y = offsetY;
    offsetY += current.height() + this._spacing;
  }
}
// A vertex in the pipeline graph. `opts` supplies identity and
// presentation data; edge/layout state starts empty and is filled in by
// the graph's ranking and layout passes.
function Node(opts) {
  // Graph node ID
  this.id = opts.id;
  this.name = opts.name;
  this.class = opts.class;
  this.status = opts.status;
  // Whether this node may be cloned to fill rank gaps (see addSpacingNodes).
  this.repeatable = opts.repeatable;
  this.key = opts.key;
  this.url = opts.url;
  // d3 selection of the rendering <svg>; used by width() to measure text.
  this.svg = opts.svg;
  this.equivalentBy = opts.equivalentBy;
  // DOM element
  this.label = undefined;
  // [EdgeTarget]
  this._edgeTargets = {};
  // [EdgeSource]
  this._edgeSources = {};
  this._edgeKeys = [];
  this._inEdges = [];
  this._outEdges = [];
  // Rank is computed once then cached; -1 means "not yet ranked".
  this._cachedRank = -1;
  // Measured label width; 0 means "not yet measured" (see width()).
  this._cachedWidth = 0;
  // position (determined by graph.layout())
  this._position = {
    x: 0,
    y: 0
  };
};
// Create a detached copy of this node carrying its identity and
// presentation fields, but none of its edge or layout state. Used by
// addSpacingNodes() to clone a node into the gap between ranks.
Node.prototype.copy = function() {
  return new Node({
    id: this.id,
    name: this.name,
    class: this.class,
    status: this.status,
    // FIX: `repeatable` was previously dropped by copy(), so a spacing
    // node cloned from a repeatable node silently lost the flag.
    repeatable: this.repeatable,
    key: this.key,
    url: this.url,
    svg: this.svg,
    equivalentBy: this.equivalentBy
  });
};
// The node's rendered width in pixels: the measured width of its <text>
// label plus 10px of padding. The measurement is cached after the first
// successful lookup; if the text element is not in the DOM yet, 0 is
// returned without caching so a later call can retry.
Node.prototype.width = function() {
  if (this._cachedWidth == 0) {
    var id = this.id;
    // Locate this node's <g class="node"> element by matching datum id.
    var svgNode = this.svg.selectAll("g.node").filter(function(node) {
      return node.id == id;
    })
    var textNode = svgNode.select("text").node();
    if (textNode) {
      this._cachedWidth = textNode.getBBox().width;
    } else {
      return 0;
    }
  }
  return this._cachedWidth + 10;
}
// The node's rendered height: 20px per edge key plus a 10px gap between
// consecutive keys; a node with no keys is sized as if it had one.
Node.prototype.height = function() {
  var keyCount = Math.max(this._edgeKeys.length, 1);
  // (20 * n) + (10 * (n - 1)), simplified.
  return 30 * keyCount - 10;
}
// Current layout position, as assigned by graph.layout().
Node.prototype.position = function() {
  return this._position;
}
// A node's column index is simply its rank.
Node.prototype.column = function() {
  return this.rank();
};
// The cached rank from the graph's ranking pass (-1 if unranked).
Node.prototype.rank = function() {
  return this._cachedRank;
}
// The furthest-right rank this node could occupy without overtaking any
// of its targets: one less than the minimum rank among its out-edge
// targets. Returns undefined when the node has no out edges.
Node.prototype.rightmostPossibleRank = function() {
  var best;
  for (var i = 0; i < this._outEdges.length; i++) {
    var candidate = this._outEdges[i].target.node.rank() - 1;
    best = (best === undefined) ? candidate : Math.min(best, candidate);
  }
  return best;
}
// Reports whether this node transitively depends on `node`, i.e. whether
// `node` is reachable by walking in-edges upstream. `stack` accumulates
// nodes already visited on this walk and serves as cycle protection.
//
// FIX: the original pushed `this` onto the stack inside the edge loop and
// `continue`d whenever `this` was already present, which meant only the
// FIRST in-edge was ever recursed into — dependencies reachable through
// the second and later sources were silently missed. The visited check
// now happens once, up front, and every in-edge is explored.
Node.prototype.dependsOn = function(node, stack) {
  if (stack.indexOf(this) != -1) {
    // Already fully explored (or being explored) on this walk.
    return false;
  }
  stack.push(this);
  for (var i in this._inEdges) {
    var source = this._inEdges[i].source.node;
    if (source == node) {
      return true;
    }
    if (source.dependsOn(node, stack)) {
      return true;
    }
  }
  return false;
}
// The smallest (highest on screen) y position among this node's upstream
// edge sources; undefined when the node has no in edges.
Node.prototype.highestUpstreamSource = function() {
  var best;
  for (var i = 0; i < this._inEdges.length; i++) {
    var candidate = this._inEdges[i].source.position().y;
    if (best === undefined || candidate < best) {
      best = candidate;
    }
  }
  return best;
};
// The smallest (highest on screen) y position among this node's
// downstream edge targets; undefined when the node has no out edges.
Node.prototype.highestDownstreamTarget = function() {
  var best;
  for (var i = 0; i < this._outEdges.length; i++) {
    var candidate = this._outEdges[i].target.position().y;
    if (best === undefined || candidate < best) {
      best = candidate;
    }
  }
  return best;
};
// True when any in-edge's key also appears as an edge target on its
// source node — i.e. the edge threaded through that upstream node.
Node.prototype.passedThroughAnyPreviousNode = function() {
  for (var i = 0; i < this._inEdges.length; i++) {
    var inEdge = this._inEdges[i];
    if (inEdge.key in inEdge.source.node._edgeTargets) {
      return true;
    }
  }
  return false;
};
// True when any out-edge's key also appears as an edge source on its
// target node — i.e. the edge threads onward through that downstream node.
Node.prototype.passesThroughAnyNextNode = function() {
  for (var i = 0; i < this._outEdges.length; i++) {
    var outEdge = this._outEdges[i];
    if (outEdge.key in outEdge.target.node._edgeSources) {
      return true;
    }
  }
  return false;
};
// A directed edge from an EdgeSource to an EdgeTarget, labelled by `key`.
function Edge(source, target, key) {
  this.source = source;
  this.target = target;
  this.key = key;
}
// Stable DOM-friendly identifier derived from both endpoint ids.
Edge.prototype.id = function() {
  return this.source.id() + "-to-" + this.target.id();
}
// The SVG path string ("M ... C ...") drawing this edge as a cubic Bézier
// from the source position to the target position.
Edge.prototype.path = function() {
  var sourcePosition = this.source.position();
  var targetPosition = this.target.position();
  var curvature = 0.5;
  var x0 = sourcePosition.x,
      x1 = targetPosition.x,
      y0 = sourcePosition.y,
      y1 = targetPosition.y;
  var intermediatePoints = [];
  if (sourcePosition.x > targetPosition.x) {
    // Backwards edge: swing the control points 100px past each end and
    // 100px below both nodes so the curve loops around instead of cutting
    // straight back across the graph.
    var belowSourceNode = this.source.node.position().y + this.source.node.height(),
        belowTargetNode = this.target.node.position().y + this.target.node.height();
    intermediatePoints = [
      (sourcePosition.x + 100) + "," + (belowSourceNode + 100),
      (targetPosition.x - 100) + "," + (belowTargetNode + 100),
    ]
  } else {
    // Forwards edge: standard horizontal S-curve; control points sit at
    // `curvature` fractions of the horizontal span, at each endpoint's y.
    var xi = d3.interpolateNumber(x0, x1),
        x2 = xi(curvature),
        x3 = xi(1 - curvature),
        intermediatePoints = [x2+","+y0, x3+","+y1]
  }
  return "M" + x0 + "," + y0 +" "
       + "C" + intermediatePoints.join(" ")
       + " " + x1 + "," + y1;
}
// The outgoing attachment point of an edge on a node; one per edge key.
function EdgeSource(node, key) {
  // spacing between edge sources
  this._spacing = 30;
  // Node
  this.node = node;
  // Key
  this.key = key;
};
// Edge sources are drawn as points; they occupy no area of their own.
EdgeSource.prototype.width = function() {
  return 0;
}
EdgeSource.prototype.height = function() {
  return 0;
}
// Stable DOM-friendly identifier for this attachment point.
EdgeSource.prototype.id = function() {
  return this.node.id + "-" + this.key + "-source";
}
// Attachment position: the node's right-hand edge, at this key's slot.
EdgeSource.prototype.position = function() {
  return {
    x: this.node.position().x + this.node.width(),
    y: this.y()
  }
};
// Vertical slot within the node, determined by the key's index in the
// node's ordered edge-key list.
EdgeSource.prototype.y = function() {
  var nodePosition = this.node.position();
  var index = this.node._edgeKeys.indexOf(this.key);
  return nodePosition.y + 10 + ((this.height() + this._spacing) * index)
}
// The incoming attachment point of an edge on a node; one per edge key.
function EdgeTarget(node, key) {
  // spacing between edge targets
  this._spacing = 30;
  // Node
  this.node = node;
  // Key
  this.key = key;
};
// Edge targets are drawn as points; they occupy no area of their own.
EdgeTarget.prototype.width = function() {
  return 0;
}
EdgeTarget.prototype.height = function() {
  return 0;
}
// The earliest rank at which this target's key first appears, found by
// recursively walking same-key in-edges upstream. A source node with no
// in-edges, or none carrying this key, ends the walk at that node's own
// rank (returned immediately, without considering remaining in-edges).
// Returns Infinity if no in-edge carries this key at all.
EdgeTarget.prototype.rankOfFirstAppearance = function() {
  var inEdges = this.node._inEdges;
  var minRank = Infinity;
  for (var i in inEdges) {
    var inEdge = inEdges[i];
    if (inEdge.source.key == this.key) {
      var upstreamNodeInEdges = inEdge.source.node._inEdges;
      if (upstreamNodeInEdges.length == 0) {
        // Root node: the key originates here.
        return inEdge.source.node.rank();
      }
      var foundUpstreamInEdge = false;
      for (var j in upstreamNodeInEdges) {
        var upstreamEdge = upstreamNodeInEdges[j];
        if (upstreamEdge.target.key == this.key) {
          foundUpstreamInEdge = true;
          var rank = upstreamEdge.target.rankOfFirstAppearance()
          if (rank < minRank) {
            minRank = rank;
          }
        }
      }
      if (!foundUpstreamInEdge) {
        // Upstream node has in-edges, but none carrying this key: the key
        // first appears at the upstream node itself.
        return inEdge.source.node.rank();
      }
    }
  }
  return minRank;
}
// Stable DOM-friendly identifier for this attachment point.
EdgeTarget.prototype.id = function() {
  return this.node.id + "-" + this.key + "-target";
}
// Attachment position: the node's left-hand edge, at this key's slot.
EdgeTarget.prototype.position = function() {
  return {
    x: this.node.position().x,
    y: this.y()
  }
};
// Vertical slot within the node, determined by the key's index in the
// node's ordered edge-key list.
EdgeTarget.prototype.y = function() {
  var nodePosition = this.node.position();
  var index = this.node._edgeKeys.indexOf(this.key);
  return nodePosition.y + 10 + ((this.height() + this._spacing) * index)
}
// Deterministic name ordering for layout ties: shorter names sort first
// (purely aesthetic); equal lengths fall back to locale comparison.
function compareNames(a, b) {
  if (a.length !== b.length) {
    return a.length - b.length;
  }
  return a.localeCompare(b);
}
| utako/atc | web/public/graph.js | JavaScript | apache-2.0 | 18,105 |
// Copyright 2015 The Cockroach Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
//
// Author: Marc berhault (marc@cockroachlabs.com)
package cli
import (
"bytes"
"database/sql/driver"
"fmt"
"io"
"net/url"
"strings"
"text/tabwriter"
"time"
"unicode"
"unicode/utf8"
"github.com/lib/pq"
"golang.org/x/net/context"
"github.com/cockroachdb/cockroach/pkg/security"
"github.com/cockroachdb/cockroach/pkg/sql/parser"
"github.com/cockroachdb/cockroach/pkg/util/log"
)
// sqlConnI is the set of driver interfaces the lib/pq connection is
// expected to implement: a plain connection plus direct Exec/Query.
type sqlConnI interface {
	driver.Conn
	driver.Execer
	driver.Queryer
}
// sqlConn lazily opens and owns a single driver connection to `url`.
// After a driver.ErrBadConn, `reconnecting` is set so the connection is
// transparently re-established on next use (see ensureConn).
type sqlConn struct {
	url string
	conn sqlConnI
	reconnecting bool
}
// ensureConn opens the connection if none is currently established. When
// re-establishing after a lost connection in an interactive session, a
// notice is printed to stderr first.
func (c *sqlConn) ensureConn() error {
	if c.conn == nil {
		if c.reconnecting && isInteractive {
			fmt.Fprintf(stderr, "connection lost; opening new connection and resetting session parameters...\n")
		}
		conn, err := pq.Open(c.url)
		if err != nil {
			return err
		}
		c.reconnecting = false
		c.conn = conn.(sqlConnI)
	}
	return nil
}
// Exec runs a statement that returns no rows, opening the connection
// first if needed.
//
// FIX: consistently with Query, a driver.ErrBadConn result now tears down
// the connection and flags it for transparent re-establishment; the
// original left the dead connection in place, so the next call would fail
// again instead of reconnecting.
func (c *sqlConn) Exec(query string, args []driver.Value) error {
	if err := c.ensureConn(); err != nil {
		return err
	}
	_, err := c.conn.Exec(query, args)
	if err == driver.ErrBadConn {
		c.reconnecting = true
		c.Close()
	}
	return err
}
// Query runs a query, opening the connection first if needed. On
// driver.ErrBadConn the connection is closed and flagged so the next call
// re-establishes it transparently.
func (c *sqlConn) Query(query string, args []driver.Value) (*sqlRows, error) {
	if err := c.ensureConn(); err != nil {
		return nil, err
	}
	rows, err := c.conn.Query(query, args)
	if err == driver.ErrBadConn {
		c.reconnecting = true
		c.Close()
	}
	if err != nil {
		return nil, err
	}
	return &sqlRows{rows: rows.(sqlRowsI), conn: c}, nil
}
// QueryRow runs a query and returns just the first row of values. The
// error is io.EOF when the query produced no rows at all.
func (c *sqlConn) QueryRow(query string, args []driver.Value) ([]driver.Value, error) {
	rows, err := makeQuery(query, args...)(c)
	if err != nil {
		return nil, err
	}
	defer func() { _ = rows.Close() }()
	vals := make([]driver.Value, len(rows.Columns()))
	err = rows.Next(vals)
	return vals, err
}
// Close tears down the current connection, if any. ErrBadConn is expected
// when the server side already dropped the connection, so it is not
// logged; other close errors are.
func (c *sqlConn) Close() {
	if c.conn != nil {
		err := c.conn.Close()
		if err != nil && err != driver.ErrBadConn {
			log.Info(context.TODO(), err)
		}
		c.conn = nil
	}
}
// sqlRowsI is the result-set interface expected from the lib/pq driver:
// standard driver.Rows plus pq's Result/Tag extensions and the Go 1.8
// multi-result-set methods.
type sqlRowsI interface {
	driver.Rows
	Result() driver.Result
	Tag() string
	// Go 1.8 multiple result set interfaces.
	// TODO(mjibson): clean this up after 1.8 is released.
	HasNextResultSet() bool
	NextResultSet() error
}
// sqlRows pairs a driver result set with its owning connection so that
// connection errors surfaced during iteration can tear the conn down.
type sqlRows struct {
	rows sqlRowsI
	conn *sqlConn
}
// Columns returns the result's column names.
func (r *sqlRows) Columns() []string {
	return r.rows.Columns()
}
// Result exposes the driver-level result (e.g. for RowsAffected).
func (r *sqlRows) Result() driver.Result {
	return r.rows.Result()
}
// Tag returns the command tag reported by the server (e.g. "INSERT").
func (r *sqlRows) Tag() string {
	return r.rows.Tag()
}
// Close closes the result set. If the connection turned out to be bad,
// the owning connection is torn down as well.
//
// FIX: consistently with Next and sqlConn.Query, ErrBadConn now also sets
// the reconnecting flag; the original omitted it here, so the subsequent
// reconnect skipped the "connection lost" notice/session-reset path that
// ensureConn keys off that flag.
func (r *sqlRows) Close() error {
	err := r.rows.Close()
	if err == driver.ErrBadConn {
		r.conn.reconnecting = true
		r.conn.Close()
	}
	return err
}
// Next populates values with the next row of results. []byte values are copied
// so that subsequent calls to Next and Close do not mutate values. This
// makes it slower than theoretically possible but the safety concerns
// (since this is unobvious and unexpected behavior) outweigh.
//
// Returns io.EOF when the result set is exhausted. A bad connection tears
// down the owning sqlConn and flags it for re-establishment.
func (r *sqlRows) Next(values []driver.Value) error {
	err := r.rows.Next(values)
	if err == driver.ErrBadConn {
		r.conn.reconnecting = true
		r.conn.Close()
	}
	for i, v := range values {
		if b, ok := v.([]byte); ok {
			// Defensive copy: the driver may reuse the backing array.
			values[i] = append([]byte{}, b...)
		}
	}
	return err
}
// NextResultSet prepares the next result set for reading. It returns
// false (with a nil error) when no further result set is available.
func (r *sqlRows) NextResultSet() (bool, error) {
	if r.rows.HasNextResultSet() {
		return true, r.rows.NextResultSet()
	}
	return false, nil
}
// makeSQLConn returns a lazy connection to the given URL; nothing is
// dialed until the connection is first used.
func makeSQLConn(url string) *sqlConn {
	return &sqlConn{
		url: url,
	}
}
// getPasswordAndMakeSQLClient prompts for a password if running in secure mode
// and no certificates have been supplied. security.RootUser won't be prompted
// for a password as the only authentication method available for this user is
// certificate authentication.
//
// If a connection URL was given explicitly (connURL), it is used verbatim
// and no prompting occurs.
func getPasswordAndMakeSQLClient() (*sqlConn, error) {
	if len(connURL) != 0 {
		return makeSQLConn(connURL), nil
	}
	var user *url.Userinfo
	if !baseCfg.Insecure && connUser != security.RootUser && baseCfg.SSLCert == "" && baseCfg.SSLCertKey == "" {
		pwd, err := security.PromptForPassword()
		if err != nil {
			return nil, err
		}
		user = url.UserPassword(connUser, pwd)
	} else {
		user = url.User(connUser)
	}
	return makeSQLClient(user)
}
// makeSQLClient connects as the given user, deriving the connection URL
// from the SQL context (and connDBName) unless one was supplied on the
// command line via connURL.
func makeSQLClient(user *url.Userinfo) (*sqlConn, error) {
	sqlURL := connURL
	if len(connURL) == 0 {
		u, err := sqlCtx.PGURL(user)
		if err != nil {
			return nil, err
		}
		u.Path = connDBName
		sqlURL = u.String()
	}
	return makeSQLConn(sqlURL), nil
}
// queryFunc runs a query against a connection and returns its rows.
type queryFunc func(conn *sqlConn) (*sqlRows, error)
// makeQuery binds a query string and its parameters into a queryFunc,
// converting the parameters into driver-acceptable values on invocation.
func makeQuery(query string, parameters ...driver.Value) queryFunc {
	return func(conn *sqlConn) (*sqlRows, error) {
		// driver.Value is an alias for interface{}, but must adhere to a restricted
		// set of types when being passed to driver.Queryer.Query (see
		// driver.IsValue). We use driver.DefaultParameterConverter to perform the
		// necessary conversion. This is usually taken care of by the sql package,
		// but we have to do so manually because we're talking directly to the
		// driver.
		for i := range parameters {
			var err error
			parameters[i], err = driver.DefaultParameterConverter.ConvertValue(parameters[i])
			if err != nil {
				return nil, err
			}
		}
		return conn.Query(query, parameters)
	}
}
// runQuery takes a 'query' with optional 'parameters'.
// It runs the sql query and returns a list of columns names and a list of rows.
// The third return value is the command tag (see sqlRowsToStrings).
func runQuery(
	conn *sqlConn, fn queryFunc, showMoreChars bool,
) ([]string, [][]string, string, error) {
	rows, err := fn(conn)
	if err != nil {
		return nil, nil, "", err
	}
	defer func() { _ = rows.Close() }()
	return sqlRowsToStrings(rows, showMoreChars)
}
// runQueryAndFormatResults takes a 'query' with optional 'parameters'.
// It runs the sql query and writes output to 'w', iterating over every
// result set the statement produced.
func runQueryAndFormatResults(
	conn *sqlConn, w io.Writer, fn queryFunc, displayFormat tableDisplayFormat,
) error {
	rows, err := fn(conn)
	if err != nil {
		return err
	}
	defer func() { _ = rows.Close() }()
	for {
		cols, allRows, result, err := sqlRowsToStrings(rows, true)
		if err != nil {
			return err
		}
		printQueryOutput(w, cols, allRows, result, displayFormat)
		// Advance to the next result set, stopping after the last one.
		if more, err := rows.NextResultSet(); err != nil {
			return err
		} else if !more {
			return nil
		}
	}
}
// sqlRowsToStrings turns 'rows' into a list of rows, each of which
// is a list of column values.
// 'rows' should be closed by the caller.
// It returns the header row followed by all data rows.
// If both the header row and list of rows are empty, it means no row
// information was returned (eg: statement was not a query).
// If showMoreChars is true, then more characters are not escaped.
func sqlRowsToStrings(rows *sqlRows, showMoreChars bool) ([]string, [][]string, string, error) {
	srcCols := rows.Columns()
	cols := make([]string, len(srcCols))
	for i, c := range srcCols {
		cols[i] = formatVal(c, showMoreChars, false)
	}
	var allRows [][]string
	var vals []driver.Value
	if len(cols) > 0 {
		vals = make([]driver.Value, len(cols))
	}
	// Drain the result set; io.EOF marks normal exhaustion.
	for {
		err := rows.Next(vals)
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, nil, "", err
		}
		rowStrings := make([]string, len(cols))
		for i, v := range vals {
			rowStrings[i] = formatVal(v, showMoreChars, showMoreChars)
		}
		allRows = append(allRows, rowStrings)
	}
	// Build a human-readable command tag, appending the affected-row count
	// for DML statements when the driver reports one.
	result := rows.Result()
	tag := rows.Tag()
	switch tag {
	case "":
		tag = "OK"
	case "DELETE", "INSERT", "UPDATE":
		if n, err := result.RowsAffected(); err == nil {
			tag = fmt.Sprintf("%s %d", tag, n)
		}
	}
	return cols, allRows, tag, nil
}
// expandTabsAndNewLines ensures that multi-line row strings that may
// contain tabs are properly formatted: tabs are expanded to spaces,
// and newline characters are marked visually. Marking newline
// characters is especially important in single-column results where
// the underlying TableWriter would not otherwise show the difference
// between one multi-line row and two one-line rows.
func expandTabsAndNewLines(s string) string {
var buf bytes.Buffer
// 4-wide columns, 1 character minimum width.
w := tabwriter.NewWriter(&buf, 4, 0, 1, ' ', 0)
fmt.Fprint(w, strings.Replace(s, "\n", "\n", -1))
_ = w.Flush()
return buf.String()
}
// isNotPrintableASCII reports whether r must be escaped in ASCII-only
// output: control characters, non-ASCII, double quote, or backslash.
func isNotPrintableASCII(r rune) bool { return r < 0x20 || r > 0x7e || r == '"' || r == '\\' }
// isNotGraphicUnicode reports whether r is not a graphic Unicode rune.
func isNotGraphicUnicode(r rune) bool { return !unicode.IsGraphic(r) }
// isNotGraphicUnicodeOrTabOrNewline is like isNotGraphicUnicode but also
// admits raw tabs and newlines as printable.
func isNotGraphicUnicodeOrTabOrNewline(r rune) bool {
	return r != '\t' && r != '\n' && !unicode.IsGraphic(r)
}
// formatVal renders a single driver value for display. Strings and byte
// slices are shown verbatim when every rune is printable under the chosen
// policy and %+q-quoted otherwise; nil renders as "NULL"; time.Time uses
// the SQL parser's timestamp format; anything else falls back to
// fmt.Sprint.
//
// showPrintableUnicode widens "printable" from ASCII to graphic Unicode;
// showNewLinesAndTabs additionally lets raw tabs and newlines through.
func formatVal(val driver.Value, showPrintableUnicode bool, showNewLinesAndTabs bool) string {
	switch t := val.(type) {
	case nil:
		return "NULL"
	case string:
		if showPrintableUnicode {
			pred := isNotGraphicUnicode
			if showNewLinesAndTabs {
				pred = isNotGraphicUnicodeOrTabOrNewline
			}
			// Only show raw text when it is valid UTF-8 AND fully printable.
			if utf8.ValidString(t) && strings.IndexFunc(t, pred) == -1 {
				return t
			}
		} else {
			if strings.IndexFunc(t, isNotPrintableASCII) == -1 {
				return t
			}
		}
		return fmt.Sprintf("%+q", t)
	case []byte:
		if showPrintableUnicode {
			pred := isNotGraphicUnicode
			if showNewLinesAndTabs {
				pred = isNotGraphicUnicodeOrTabOrNewline
			}
			if utf8.Valid(t) && bytes.IndexFunc(t, pred) == -1 {
				return string(t)
			}
		} else {
			if bytes.IndexFunc(t, isNotPrintableASCII) == -1 {
				return string(t)
			}
		}
		return fmt.Sprintf("%+q", t)
	case time.Time:
		return t.Format(parser.TimestampNodeFormat)
	}
	return fmt.Sprint(val)
}
| paperstreet/cockroach | pkg/cli/sql_util.go | GO | apache-2.0 | 9,891 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>com.google.zxing.aztec.decoder (ZXing 3.4.1 API)</title>
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<h1 class="bar"><a href="../../../../../com/google/zxing/aztec/decoder/package-summary.html" target="classFrame">com.google.zxing.aztec.decoder</a></h1>
<div class="indexContainer">
<h2 title="Classes">Classes</h2>
<ul title="Classes">
<li><a href="Decoder.html" title="class in com.google.zxing.aztec.decoder" target="classFrame">Decoder</a></li>
</ul>
</div>
</body>
</html>
| tanelihuuskonen/zxing | docs/apidocs/com/google/zxing/aztec/decoder/package-frame.html | HTML | apache-2.0 | 861 |
// Copyright 2019 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by go generate in expression/generator; DO NOT EDIT.
package expression
import (
"math"
"testing"
. "github.com/pingcap/check"
"github.com/pingcap/parser/ast"
"github.com/pingcap/parser/mysql"
"github.com/pingcap/tidb/types"
)
// gener wraps defaultGener so that any string the underlying generator
// produces is replaced by a random duration string. This feeds parseable
// time values into the string-typed arguments of time builtins under test.
type gener struct {
	defaultGener
}
// gen returns the embedded defaultGener's value, rewriting string results
// into duration strings with an fsp of 0 or 1 (picked from the duration's
// low bits so both fsp values get exercised).
func (g gener) gen() interface{} {
	result := g.defaultGener.gen()
	if _, ok := result.(string); ok {
		dg := newDefaultGener(0, types.ETDuration)
		d := dg.gen().(types.Duration)
		if int8(d.Duration)%2 == 0 {
			d.Fsp = 0
		} else {
			d.Fsp = 1
		}
		result = d.String()
	}
	return result
}
var vecBuiltinTimeGeneratedCases = map[string][]vecExprBenchCase{
ast.AddTime: {
// builtinAddDatetimeAndDurationSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDuration},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETDuration)},
},
},
// builtinAddDatetimeAndStringSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETString)},
},
},
// builtinAddDurationAndDurationSig
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDuration},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDuration)},
gener{*newDefaultGener(0.2, types.ETDuration)},
},
},
// builtinAddDurationAndStringSig
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDuration)},
gener{*newDefaultGener(0.2, types.ETString)},
},
},
// builtinAddStringAndDurationSig
{
retEvalType: types.ETString,
childrenTypes: []types.EvalType{types.ETString, types.ETDuration},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETString)},
gener{*newDefaultGener(0.2, types.ETDuration)},
},
},
// builtinAddStringAndStringSig
{
retEvalType: types.ETString,
childrenTypes: []types.EvalType{types.ETString, types.ETString},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETString)},
gener{*newDefaultGener(0.2, types.ETString)},
},
},
// builtinAddDateAndDurationSig
{
retEvalType: types.ETString,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDuration},
childrenFieldTypes: []*types.FieldType{types.NewFieldType(mysql.TypeDate), types.NewFieldType(mysql.TypeDuration)},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETDuration)},
},
},
// builtinAddDateAndStringSig
{
retEvalType: types.ETString,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString},
childrenFieldTypes: []*types.FieldType{types.NewFieldType(mysql.TypeDate), types.NewFieldType(mysql.TypeString)},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETString)},
},
},
// builtinAddTimeDateTimeNullSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDatetime},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETDatetime)},
},
},
// builtinAddTimeStringNullSig
{
retEvalType: types.ETString,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDatetime},
childrenFieldTypes: []*types.FieldType{types.NewFieldType(mysql.TypeDate), types.NewFieldType(mysql.TypeDatetime)},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETDatetime)},
},
},
// builtinAddTimeDurationNullSig
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDatetime},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDuration)},
gener{*newDefaultGener(0.2, types.ETDatetime)},
},
},
},
ast.SubTime: {
// builtinSubDatetimeAndDurationSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDuration},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETDuration)},
},
},
// builtinSubDatetimeAndStringSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETString)},
},
},
// builtinSubDurationAndDurationSig
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDuration},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDuration)},
gener{*newDefaultGener(0.2, types.ETDuration)},
},
},
// builtinSubDurationAndStringSig
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDuration)},
gener{*newDefaultGener(0.2, types.ETString)},
},
},
// builtinSubStringAndDurationSig
{
retEvalType: types.ETString,
childrenTypes: []types.EvalType{types.ETString, types.ETDuration},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETString)},
gener{*newDefaultGener(0.2, types.ETDuration)},
},
},
// builtinSubStringAndStringSig
{
retEvalType: types.ETString,
childrenTypes: []types.EvalType{types.ETString, types.ETString},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETString)},
gener{*newDefaultGener(0.2, types.ETString)},
},
},
// builtinSubDateAndDurationSig
{
retEvalType: types.ETString,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDuration},
childrenFieldTypes: []*types.FieldType{types.NewFieldType(mysql.TypeDate), types.NewFieldType(mysql.TypeDuration)},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETDuration)},
},
},
// builtinSubDateAndStringSig
{
retEvalType: types.ETString,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString},
childrenFieldTypes: []*types.FieldType{types.NewFieldType(mysql.TypeDate), types.NewFieldType(mysql.TypeString)},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETString)},
},
},
// builtinSubTimeDateTimeNullSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDatetime},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETDatetime)},
},
},
// builtinSubTimeStringNullSig
{
retEvalType: types.ETString,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDatetime},
childrenFieldTypes: []*types.FieldType{types.NewFieldType(mysql.TypeDate), types.NewFieldType(mysql.TypeDatetime)},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDatetime)},
gener{*newDefaultGener(0.2, types.ETDatetime)},
},
},
// builtinSubTimeDurationNullSig
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDatetime},
geners: []dataGenerator{
gener{*newDefaultGener(0.2, types.ETDuration)},
gener{*newDefaultGener(0.2, types.ETDatetime)},
},
},
},
ast.TimeDiff: {
// builtinNullTimeDiffSig
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETDuration, types.ETDatetime}},
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETDuration, types.ETTimestamp}},
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETDatetime, types.ETDuration}},
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETTimestamp, types.ETDuration}},
// builtinDurationDurationTimeDiffSig
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETDuration, types.ETDuration}},
// builtinDurationStringTimeDiffSig
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETDuration, types.ETString}, geners: []dataGenerator{nil, &dateTimeStrGener{Year: 2019, Month: 11, randGen: newDefaultRandGen()}}},
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETDuration, types.ETString}, geners: []dataGenerator{nil, &dateTimeStrGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}}},
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETDuration, types.ETString}, geners: []dataGenerator{nil, &dateTimeStrGener{Year: 2019, Month: 10, Fsp: 4, randGen: newDefaultRandGen()}}},
// builtinTimeTimeTimeDiffSig
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETDatetime, types.ETDatetime}, geners: []dataGenerator{&dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}, &dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}}},
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETDatetime, types.ETTimestamp}, geners: []dataGenerator{&dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}, &dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}}},
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETTimestamp, types.ETTimestamp}, geners: []dataGenerator{&dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}, &dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}}},
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETTimestamp, types.ETDatetime}, geners: []dataGenerator{&dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}, &dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}}},
// builtinTimeStringTimeDiffSig
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETDatetime, types.ETString}, geners: []dataGenerator{&dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}, &dateTimeStrGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}}},
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETTimestamp, types.ETString}, geners: []dataGenerator{&dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}, &dateTimeStrGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}}},
// builtinStringDurationTimeDiffSig
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETString, types.ETDuration}, geners: []dataGenerator{&dateTimeStrGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}, nil}},
// builtinStringTimeTimeDiffSig
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETString, types.ETDatetime}, geners: []dataGenerator{&dateTimeStrGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}, &dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}}},
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETString, types.ETTimestamp}, geners: []dataGenerator{&dateTimeStrGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}, &dateTimeGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}}},
// builtinStringStringTimeDiffSig
{retEvalType: types.ETDuration, childrenTypes: []types.EvalType{types.ETString, types.ETString}, geners: []dataGenerator{&dateTimeStrGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}, &dateTimeStrGener{Year: 2019, Month: 10, randGen: newDefaultRandGen()}}},
},
ast.AddDate: {
// builtinAddDateStringStringSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinAddDateStringIntSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinAddDateStringRealSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinAddDateStringDecimalSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
			// builtinAddDateIntStringSig: date arg is ETInt, interval arg is a numeric string; one case per supported interval unit (MICROSECOND through YEAR_MONTH).
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
			// builtinAddDateIntIntSig: date arg is ETInt, interval arg is ETInt; one case per supported interval unit (MICROSECOND through YEAR_MONTH).
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
			// builtinAddDateIntRealSig: date arg is ETInt, interval arg is ETReal; one case per supported interval unit (MICROSECOND through YEAR_MONTH).
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
			// builtinAddDateIntDecimalSig: date arg is ETInt, interval arg is ETDecimal; one case per supported interval unit (MICROSECOND through YEAR_MONTH).
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinAddDateDatetimeStringSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinAddDateDatetimeIntSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinAddDateDatetimeRealSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinAddDateDatetimeDecimalSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
		// builtinAddDateDurationStringSig: ADDDATE(duration, string interval) — one case per supported interval unit.
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
		// builtinAddDateDurationIntSig: ADDDATE(duration, integer interval) — one case per supported interval unit.
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
		// builtinAddDateDurationRealSig: ADDDATE(duration, real interval) — one case per supported interval unit.
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
		// builtinAddDateDurationDecimalSig: ADDDATE(duration, decimal interval) — one case per supported interval unit.
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
},
ast.SubDate: {
// builtinSubDateStringStringSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETString, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateStringIntSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateStringRealSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateStringDecimalSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETString, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateStrGener{NullRation: 0.2, randGen: newDefaultRandGen()},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateIntStringSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETString, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateIntIntSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETInt, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateIntRealSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETReal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateIntDecimalSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETInt, types.ETDecimal, types.ETString},
geners: []dataGenerator{
&dateTimeIntGener{dateTimeGener: dateTimeGener{randGen: newDefaultRandGen()}, nullRation: 0.2},
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateDatetimeStringSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateDatetimeIntSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateDatetimeRealSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateDatetimeDecimalSig
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDatetime,
childrenTypes: []types.EvalType{types.ETDatetime, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDatetime),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateDurationStringSig
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETString, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
&numStrGener{rangeInt64Gener{math.MinInt32 + 1, math.MaxInt32, newDefaultRandGen()}},
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateDurationIntSig
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETInt, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETInt),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateDurationRealSig
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETReal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETReal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
// builtinSubDateDurationDecimalSig
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("WEEK"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("QUARTER"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("SECOND_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("MINUTE_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("HOUR_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MICROSECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_SECOND"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_MINUTE"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("DAY_HOUR"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
{
retEvalType: types.ETDuration,
childrenTypes: []types.EvalType{types.ETDuration, types.ETDecimal, types.ETString},
geners: []dataGenerator{
newDefaultGener(0.2, types.ETDuration),
newDefaultGener(0.2, types.ETDecimal),
},
constants: []*Constant{nil, nil, {Value: types.NewStringDatum("YEAR_MONTH"), RetType: types.NewFieldType(mysql.TypeString)}},
chunkSize: 128,
},
},
}
// TestVectorizedBuiltinTimeEvalOneVecGenerated runs the generated time-builtin
// cases through the column-wise (EvalOneVec) evaluation helper.
func (*testVectorizeSuite1) TestVectorizedBuiltinTimeEvalOneVecGenerated(chk *C) {
	testVectorizedEvalOneVec(chk, vecBuiltinTimeGeneratedCases)
}
// TestVectorizedBuiltinTimeFuncGenerated runs the generated time-builtin
// cases through the vectorized builtin-function test helper.
func (*testVectorizeSuite1) TestVectorizedBuiltinTimeFuncGenerated(chk *C) {
	testVectorizedBuiltinFunc(chk, vecBuiltinTimeGeneratedCases)
}
// BenchmarkVectorizedBuiltinTimeEvalOneVecGenerated benchmarks the generated
// time-builtin cases via the column-wise (EvalOneVec) evaluation path.
func BenchmarkVectorizedBuiltinTimeEvalOneVecGenerated(bm *testing.B) {
	benchmarkVectorizedEvalOneVec(bm, vecBuiltinTimeGeneratedCases)
}
// BenchmarkVectorizedBuiltinTimeFuncGenerated benchmarks the generated
// time-builtin cases via the vectorized builtin-function helper.
func BenchmarkVectorizedBuiltinTimeFuncGenerated(bm *testing.B) {
	benchmarkVectorizedBuiltinFunc(bm, vecBuiltinTimeGeneratedCases)
}
| EvilMcJerkface/tidb | expression/builtin_time_vec_generated_test.go | GO | apache-2.0 | 286,854 |
package com.sanders.db.sanders.com.dbproject;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
 * Default instrumentation test case targeting the stock {@link Application} class.
 *
 * <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
 */
public class ApplicationTest extends ApplicationTestCase<Application> {
    /** Binds the test case to the plain {@link Application} implementation. */
    public ApplicationTest() {
        super(Application.class);
    }
} | SSOOnline/android-orm | sample/src/androidTest/java/com/sanders/db/sanders/com/dbproject/ApplicationTest.java | Java | apache-2.0 | 367 |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.eclipse.flow.common.editor.editpart;
import org.eclipse.draw2d.AbstractLayout;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.geometry.Dimension;
public class GraphLayoutManager extends AbstractLayout {

    /** Edit part of the process diagram passed to the layout visitor. */
    private final ProcessEditPart diagram;

    public GraphLayoutManager(ProcessEditPart diagram) {
        this.diagram = diagram;
    }

    /**
     * Validates the container so its bounds are up to date, then reports the
     * container's current size as its preferred size. The width/height hints
     * are intentionally ignored.
     */
    protected Dimension calculatePreferredSize(IFigure container, int wHint, int hHint) {
        container.validate();
        return container.getSize();
    }

    /** Delegates the actual layout work to a fresh directed-graph layout visitor. */
    public void layout(IFigure container) {
        new DirectedGraphLayoutVisitor().layoutDiagram(diagram);
        // diagram.setTableModelBounds();
    }
}
| psiroky/droolsjbpm-tools | drools-eclipse/org.drools.eclipse/src/main/java/org/drools/eclipse/flow/common/editor/editpart/GraphLayoutManager.java | Java | apache-2.0 | 1,292 |
<!-- Dojo widget template for the Legend widget's settings panel.
     ${nls.*} placeholders are substituted with localized strings, and
     data-dojo-attach-point nodes become fields on the widget instance. -->
<div style="width:100%;height:100%;">
<div class="settings-section" data-dojo-attach-point="searchesSection">
<table class="setting-table input-table" cellspacing="0">
<tbody>
<!-- Arrangement: dock the legend on the left or right side. -->
<tr>
<td class="first">${nls.arrangement}</td>
<td class="second">
<select style="margin-left: 10px;" data-dojo-attach-point="selectArrangement" data-dojo-type="dijit/form/Select">
<option value="0">${nls.left}</option>
<option value="1">${nls.right}</option>
</select>
</td>
</tr>
<!-- Auto-update toggle. -->
<tr>
<td class="first">${nls.autoUpdate}</td>
<td class="second">
<input style="margin-left: 10px;" data-dojo-attach-point="autoUpdate"
data-dojo-type="dijit/form/CheckBox"/>
</td>
</tr>
<!-- Whether the legend content honours the current map scale. -->
<tr>
<td class="first">${nls.respectCurrentMapScale}</td>
<td class="second">
<input style="margin-left: 10px;" data-dojo-attach-point="respectCurrentMapScale"
data-dojo-type="dijit/form/CheckBox"/>
</td>
</tr>
</tbody>
</table>
</div>
</div>
/*
Copyright 2016 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package validation
import (
"github.com/robfig/cron"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/unversioned"
unversionedvalidation "k8s.io/kubernetes/pkg/api/unversioned/validation"
apivalidation "k8s.io/kubernetes/pkg/api/validation"
"k8s.io/kubernetes/pkg/apis/batch"
"k8s.io/kubernetes/pkg/labels"
"k8s.io/kubernetes/pkg/util/validation/field"
)
// TODO: generalize for other controller objects that will follow the same pattern, such as ReplicaSet and DaemonSet, and
// move to new location. Replace batch.Job with an interface.
//
// ValidateGeneratedSelector validates that the generated selector on a controller object match the controller object
// metadata, and the labels on the pod template are as generated.
func ValidateGeneratedSelector(obj *batch.Job) field.ErrorList {
	allErrs := field.ErrorList{}
	// Nothing to validate when the user manages the selector themselves.
	if obj.Spec.ManualSelector != nil && *obj.Spec.ManualSelector {
		return allErrs
	}
	if obj.Spec.Selector == nil {
		return allErrs // This case should already have been checked in caller. No need for more errors.
	}
	// If somehow uid was unset then we would get "controller-uid=" as the selector
	// which is bad.
	// NOTE: this check used to appear twice back-to-back, appending the same
	// Required error twice; the duplicate has been removed.
	if obj.ObjectMeta.UID == "" {
		allErrs = append(allErrs, field.Required(field.NewPath("metadata").Child("uid"), ""))
	}
	// If selector generation was requested, then expected labels must be
	// present on pod template, and must match job's uid and name. The
	// generated (not-manual) selectors/labels ensure no overlap with other
	// controllers. The manual mode allows orphaning, adoption,
	// backward-compatibility, and experimentation with new
	// labeling/selection schemes. Automatic selector generation should
	// have placed certain labels on the pod, but this could have failed if
	// the user added conflicting labels. Validate that the expected
	// generated ones are there.
	allErrs = append(allErrs, apivalidation.ValidateHasLabel(obj.Spec.Template.ObjectMeta, field.NewPath("spec").Child("template").Child("metadata"), "controller-uid", string(obj.UID))...)
	allErrs = append(allErrs, apivalidation.ValidateHasLabel(obj.Spec.Template.ObjectMeta, field.NewPath("spec").Child("template").Child("metadata"), "job-name", string(obj.Name))...)
	expectedLabels := make(map[string]string)
	expectedLabels["controller-uid"] = string(obj.UID)
	expectedLabels["job-name"] = string(obj.Name)
	// Whether manually or automatically generated, the selector of the job must match the pods it will produce.
	if selector, err := unversioned.LabelSelectorAsSelector(obj.Spec.Selector); err == nil {
		if !selector.Matches(labels.Set(expectedLabels)) {
			allErrs = append(allErrs, field.Invalid(field.NewPath("spec").Child("selector"), obj.Spec.Selector, "`selector` not auto-generated"))
		}
	}
	return allErrs
}
// ValidateJob validates a complete Job object: its metadata, the generated
// selector/labels, and the spec.
func ValidateJob(job *batch.Job) field.ErrorList {
	// Jobs and rcs have the same name validation.
	errs := apivalidation.ValidateObjectMeta(&job.ObjectMeta, true, apivalidation.ValidateReplicationControllerName, field.NewPath("metadata"))
	errs = append(errs, ValidateGeneratedSelector(job)...)
	return append(errs, ValidateJobSpec(&job.Spec, field.NewPath("spec"))...)
}
// ValidateJobSpec validates a Job spec: optional numeric fields must be
// non-negative, a selector must be present and must match the pod template's
// labels, and the template's restart policy must be OnFailure or Never.
func ValidateJobSpec(spec *batch.JobSpec, fldPath *field.Path) field.ErrorList {
	allErrs := field.ErrorList{}
	// Parallelism, completions and the active deadline are optional, but when
	// set they must not be negative.
	if spec.Parallelism != nil {
		allErrs = append(allErrs, apivalidation.ValidateNonnegativeField(int64(*spec.Parallelism), fldPath.Child("parallelism"))...)
	}
	if spec.Completions != nil {
		allErrs = append(allErrs, apivalidation.ValidateNonnegativeField(int64(*spec.Completions), fldPath.Child("completions"))...)
	}
	if spec.ActiveDeadlineSeconds != nil {
		allErrs = append(allErrs, apivalidation.ValidateNonnegativeField(int64(*spec.ActiveDeadlineSeconds), fldPath.Child("activeDeadlineSeconds"))...)
	}
	if spec.Selector == nil {
		allErrs = append(allErrs, field.Required(fldPath.Child("selector"), ""))
	} else {
		allErrs = append(allErrs, unversionedvalidation.ValidateLabelSelector(spec.Selector, fldPath.Child("selector"))...)
	}
	// Whether manually or automatically generated, the selector of the job must match the pods it will produce.
	// Note: a selector that fails to convert is ignored here; the conversion
	// error surfaces via ValidateLabelSelector above.
	if selector, err := unversioned.LabelSelectorAsSelector(spec.Selector); err == nil {
		labels := labels.Set(spec.Template.Labels)
		if !selector.Matches(labels) {
			allErrs = append(allErrs, field.Invalid(fldPath.Child("template", "metadata", "labels"), spec.Template.Labels, "`selector` does not match template `labels`"))
		}
	}
	allErrs = append(allErrs, apivalidation.ValidatePodTemplateSpec(&spec.Template, fldPath.Child("template"))...)
	// RestartPolicyAlways is rejected: only OnFailure and Never are accepted.
	if spec.Template.Spec.RestartPolicy != api.RestartPolicyOnFailure &&
		spec.Template.Spec.RestartPolicy != api.RestartPolicyNever {
		allErrs = append(allErrs, field.NotSupported(fldPath.Child("template", "spec", "restartPolicy"),
			spec.Template.Spec.RestartPolicy, []string{string(api.RestartPolicyOnFailure), string(api.RestartPolicyNever)}))
	}
	return allErrs
}
// ValidateJobStatus checks that all pod counters in a Job status are non-negative.
func ValidateJobStatus(status *batch.JobStatus, fldPath *field.Path) field.ErrorList {
	errs := field.ErrorList{}
	counters := []struct {
		value int64
		name  string
	}{
		{int64(status.Active), "active"},
		{int64(status.Succeeded), "succeeded"},
		{int64(status.Failed), "failed"},
	}
	for _, c := range counters {
		errs = append(errs, apivalidation.ValidateNonnegativeField(c.value, fldPath.Child(c.name))...)
	}
	return errs
}
// ValidateJobUpdate validates an update to a Job: standard metadata update
// rules plus the (largely immutable) spec update rules.
func ValidateJobUpdate(job, oldJob *batch.Job) field.ErrorList {
	errs := apivalidation.ValidateObjectMetaUpdate(&oldJob.ObjectMeta, &job.ObjectMeta, field.NewPath("metadata"))
	return append(errs, ValidateJobSpecUpdate(job.Spec, oldJob.Spec, field.NewPath("spec"))...)
}
// ValidateJobUpdateStatus validates a status-only update to a Job: metadata
// update rules plus the status counter checks.
func ValidateJobUpdateStatus(job, oldJob *batch.Job) field.ErrorList {
	errs := apivalidation.ValidateObjectMetaUpdate(&oldJob.ObjectMeta, &job.ObjectMeta, field.NewPath("metadata"))
	return append(errs, ValidateJobStatusUpdate(job.Status, oldJob.Status)...)
}
// ValidateJobSpecUpdate validates an update to a Job spec: the new spec must
// be valid on its own, and completions, selector and template are immutable
// once the Job has been created.
func ValidateJobSpecUpdate(spec, oldSpec batch.JobSpec, fldPath *field.Path) field.ErrorList {
	allErrs := field.ErrorList{}
	allErrs = append(allErrs, ValidateJobSpec(&spec, fldPath)...)
	allErrs = append(allErrs, apivalidation.ValidateImmutableField(spec.Completions, oldSpec.Completions, fldPath.Child("completions"))...)
	allErrs = append(allErrs, apivalidation.ValidateImmutableField(spec.Selector, oldSpec.Selector, fldPath.Child("selector"))...)
	allErrs = append(allErrs, apivalidation.ValidateImmutableField(spec.Template, oldSpec.Template, fldPath.Child("template"))...)
	return allErrs
}
// ValidateJobStatusUpdate validates the new status of a Job; the old status
// is accepted for signature symmetry but does not constrain the update.
func ValidateJobStatusUpdate(status, oldStatus batch.JobStatus) field.ErrorList {
	return ValidateJobStatus(&status, field.NewPath("status"))
}
// ValidateScheduledJob validates a complete ScheduledJob object: its metadata
// and its spec (schedule, concurrency policy, job template).
func ValidateScheduledJob(scheduledJob *batch.ScheduledJob) field.ErrorList {
	// ScheduledJobs and rcs have the same name validation
	allErrs := apivalidation.ValidateObjectMeta(&scheduledJob.ObjectMeta, true, apivalidation.ValidateReplicationControllerName, field.NewPath("metadata"))
	allErrs = append(allErrs, ValidateScheduledJobSpec(&scheduledJob.Spec, field.NewPath("spec"))...)
	return allErrs
}
// ValidateScheduledJobSpec validates a ScheduledJob spec: the cron schedule
// must be present and parseable, the optional starting deadline must be
// non-negative, and the concurrency policy and job template must be valid.
func ValidateScheduledJobSpec(spec *batch.ScheduledJobSpec, fldPath *field.Path) field.ErrorList {
	allErrs := field.ErrorList{}
	if len(spec.Schedule) == 0 {
		allErrs = append(allErrs, field.Required(fldPath.Child("schedule"), ""))
	} else {
		allErrs = append(allErrs, validateScheduleFormat(spec.Schedule, fldPath.Child("schedule"))...)
	}
	if spec.StartingDeadlineSeconds != nil {
		allErrs = append(allErrs, apivalidation.ValidateNonnegativeField(int64(*spec.StartingDeadlineSeconds), fldPath.Child("startingDeadlineSeconds"))...)
	}
	allErrs = append(allErrs, validateConcurrencyPolicy(&spec.ConcurrencyPolicy, fldPath.Child("concurrencyPolicy"))...)
	allErrs = append(allErrs, ValidateJobTemplateSpec(&spec.JobTemplate, fldPath.Child("jobTemplate"))...)
	return allErrs
}
// validateConcurrencyPolicy checks that the policy is one of the three
// supported values; an empty string is reported as a missing required field.
func validateConcurrencyPolicy(concurrencyPolicy *batch.ConcurrencyPolicy, fldPath *field.Path) field.ErrorList {
	errs := field.ErrorList{}
	policy := *concurrencyPolicy
	if policy == "" {
		return append(errs, field.Required(fldPath, ""))
	}
	switch policy {
	case batch.AllowConcurrent, batch.ForbidConcurrent, batch.ReplaceConcurrent:
		// Supported value; nothing to report.
	default:
		supported := []string{string(batch.AllowConcurrent), string(batch.ForbidConcurrent), string(batch.ReplaceConcurrent)}
		errs = append(errs, field.NotSupported(fldPath, policy, supported))
	}
	return errs
}
// validateScheduleFormat reports an Invalid error when the schedule string
// cannot be parsed as a cron expression.
func validateScheduleFormat(schedule string, fldPath *field.Path) field.ErrorList {
	errs := field.ErrorList{}
	if _, err := cron.Parse(schedule); err != nil {
		errs = append(errs, field.Invalid(fldPath, schedule, err.Error()))
	}
	return errs
}
// ValidateJobTemplate validates a JobTemplate object; it mirrors ValidateJob
// and should be kept in sync with it.
func ValidateJobTemplate(job *batch.JobTemplate) field.ErrorList {
	errs := apivalidation.ValidateObjectMeta(&job.ObjectMeta, true, apivalidation.ValidateReplicationControllerName, field.NewPath("metadata"))
	return append(errs, ValidateJobTemplateSpec(&job.Template, field.NewPath("template"))...)
}
// ValidateJobTemplateSpec validates a job template's spec exactly like a Job
// spec; it mirrors ValidateJob and should be kept in sync with it.
func ValidateJobTemplateSpec(spec *batch.JobTemplateSpec, fldPath *field.Path) field.ErrorList {
	return ValidateJobSpec(&spec.Spec, fldPath.Child("spec"))
}
| tnachen/kubernetes | pkg/apis/batch/validation/validation.go | GO | apache-2.0 | 10,298 |
/* (c) 2014 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package com.linkedin.cubert.block;
import java.io.IOException;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.codehaus.jackson.JsonNode;
import com.linkedin.cubert.utils.JsonUtils;
/**
* Creates tuples by merging two ByteArrayTuples.
*
* This creator is needed in the reducer stage, where the Mapper key and values are merged
* to create a single tuple.
*
* @author Maneesh Varshney
*
*/
public class MergedTupleCreator implements TupleCreator
{
    // Schema of the pivot (key) columns, a subset of the full output schema.
    private BlockSchema keySchema;
    // True when every output column is a pivot column (the value tuple is empty).
    private boolean valueIsNull;
    // Reused output tuple; create() overwrites its fields on every call.
    private Tuple outputTuple;
    // keyFieldIndex[i] = position of the i-th key column in the full schema.
    private int[] keyFieldIndex;
    // valueFieldIndex[i] = position of the i-th value column in the full schema.
    private int[] valueFieldIndex;

    /**
     * Reads "pivotKeys" and "schema" from the JSON config and precomputes the
     * index maps that place key and value fields into the merged output tuple.
     */
    @Override
    public void setup(JsonNode json) throws IOException
    {
        String[] pivotColumns = JsonUtils.asArray(json.get("pivotKeys"));
        BlockSchema fullSchema = new BlockSchema(json.get("schema"));
        keySchema = fullSchema.getSubset(pivotColumns);
        // Value columns are everything in the full schema that is not a pivot key.
        BlockSchema valueSchema = fullSchema.getComplementSubset(pivotColumns);

        keyFieldIndex = new int[keySchema.getNumColumns()];
        valueFieldIndex = new int[valueSchema.getNumColumns()];

        for (int i = 0; i < keyFieldIndex.length; i++)
        {
            keyFieldIndex[i] = fullSchema.getIndex(keySchema.getName(i));
        }

        for (int i = 0; i < valueFieldIndex.length; i++)
        {
            valueFieldIndex[i] = fullSchema.getIndex(valueSchema.getName(i));
        }

        outputTuple = TupleFactory.getInstance().newTuple(fullSchema.getNumColumns());
        valueIsNull = (valueSchema.getNumColumns() == 0);
    }

    /**
     * Merges the key tuple and (when present) the value tuple into the single
     * reused output tuple, using the precomputed index maps.
     * NOTE(review): callers must consume the returned tuple before the next
     * call, since the same instance is reused.
     */
    @Override
    public Tuple create(Object key, Object value) throws IOException
    {
        Tuple keyTuple = (Tuple) key;
        for (int i = 0; i < keyTuple.size(); i++)
        {
            outputTuple.set(keyFieldIndex[i], keyTuple.get(i));
        }

        if (!valueIsNull)
        {
            Tuple valueTuple = (Tuple) value;
            for (int i = 0; i < valueTuple.size(); i++)
            {
                outputTuple.set(valueFieldIndex[i], valueTuple.get(i));
            }
        }

        return outputTuple;
    }
}
| linkedin/Cubert | src/main/java/com/linkedin/cubert/block/MergedTupleCreator.java | Java | apache-2.0 | 2,655 |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.module.impl.scopes;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.ProjectScope;
import com.intellij.util.containers.Queue;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.util.HashSet;
import java.util.Set;
/**
* @author max
*/
public class ModuleWithDependentsScope extends GlobalSearchScope {
  // Root module; the scope covers it plus all modules depending on it.
  private final Module myModule;
  // When true, contains() additionally requires files to be in test sources.
  private final boolean myOnlyTests;
  private final ProjectFileIndex myProjectFileIndex;
  // Root module plus every (transitively-via-exported) dependent module.
  private final Set<Module> myModules;
  private final GlobalSearchScope myProjectScope;

  public ModuleWithDependentsScope(Module module, boolean onlyTests) {
    super(module.getProject());
    myModule = module;
    myOnlyTests = onlyTests;

    myProjectFileIndex = ProjectRootManager.getInstance(myModule.getProject()).getFileIndex();
    myProjectScope = ProjectScope.getProjectScope(myModule.getProject());

    myModules = new HashSet<Module>();
    myModules.add(myModule);

    fillModules();
  }

  // Breadth-first walk over reverse dependencies: every direct dependent of a
  // visited module is added to the scope, but the walk only continues through
  // dependents whose module order entry is marked "exported".
  private void fillModules() {
    Queue<Module> walkingQueue = new Queue<Module>(10);
    walkingQueue.addLast(myModule);
    Module[] allModules = ModuleManager.getInstance(myModule.getProject()).getModules();
    Set<Module> processed = new THashSet<Module>();

    while (!walkingQueue.isEmpty()) {
      Module current = walkingQueue.pullFirst();
      processed.add(current);
      for (Module dependent : allModules) {
        for (OrderEntry orderEntry : ModuleRootManager.getInstance(dependent).getOrderEntries()) {
          if (orderEntry instanceof ModuleOrderEntry && current.equals(((ModuleOrderEntry)orderEntry).getModule())) {
            myModules.add(dependent);
            if (!processed.contains(dependent) && ((ModuleOrderEntry)orderEntry).isExported()) {
              walkingQueue.addLast(dependent);
            }
          }
        }
      }
    }
  }

  // A file is in scope when it belongs to one of the collected modules,
  // (optionally) lies in test sources, and is inside the project scope.
  public boolean contains(VirtualFile file) {
    Module moduleOfFile = myProjectFileIndex.getModuleForFile(file);
    if (moduleOfFile == null) return false;
    if (!myModules.contains(moduleOfFile)) return false;
    if (myOnlyTests && !myProjectFileIndex.isInTestSourceContent(file)) return false;
    return myProjectScope.contains(file);
  }

  // No ordering preference between files in this scope.
  public int compare(VirtualFile file1, VirtualFile file2) {
    return 0;
  }

  public boolean isSearchInModuleContent(@NotNull Module aModule) {
    return myModules.contains(aModule);
  }

  public boolean isSearchInLibraries() {
    return false;
  }

  @NonNls
  public String toString() {
    return "Module with dependents:" + myModule.getName();
  }

  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof ModuleWithDependentsScope)) return false;

    final ModuleWithDependentsScope moduleWithDependentsScope = (ModuleWithDependentsScope)o;

    if (myOnlyTests != moduleWithDependentsScope.myOnlyTests) return false;
    if (!myModule.equals(moduleWithDependentsScope.myModule)) return false;

    return true;
  }

  // NOTE(review): hashCode ignores myOnlyTests while equals compares it; equal
  // objects still hash equally, so the contract holds, but the two flag
  // variants of one module collide in hash-based collections.
  public int hashCode() {
    return myModule.hashCode();
  }
}
| android-ia/platform_tools_idea | platform/indexing-impl/src/com/intellij/openapi/module/impl/scopes/ModuleWithDependentsScope.java | Java | apache-2.0 | 3,906 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.cdi;
import java.util.Set;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.inject.spi.Bean;
import javax.enterprise.inject.spi.BeanManager;
/**
* To make looking up beans in CDI easier
*/
public final class CdiBeanManagerHelper {

    /**
     * Looks up a bean by its type, resolving ambiguity through the bean
     * manager.
     *
     * @return the bean instance cast to {@code type}, or {@code null} when no
     *         matching bean exists or no reference could be created
     */
    public static <T> T lookupBeanByType(BeanManager beanManager, Class<T> type) {
        return resolveReference(beanManager, beanManager.getBeans(type), type);
    }

    /**
     * Looks up a bean by its EL name, without constraining the type.
     *
     * @return the bean instance, or {@code null} when not found
     */
    public static Object lookupBeanByName(BeanManager beanManager, String name) {
        return lookupBeanByNameAndType(beanManager, name, Object.class);
    }

    /**
     * Looks up a bean by its EL name and casts it to the given type.
     *
     * @return the bean instance cast to {@code type}, or {@code null} when not
     *         found
     */
    public static <T> T lookupBeanByNameAndType(BeanManager beanManager, String name, Class<T> type) {
        return resolveReference(beanManager, beanManager.getBeans(name), type);
    }

    /**
     * Shared resolution step: picks one bean from the candidate set, obtains a
     * contextual reference for it and casts the result. Previously this logic
     * was duplicated in both type- and name-based lookups.
     */
    private static <T> T resolveReference(BeanManager beanManager, Set<Bean<?>> beans, Class<T> type) {
        if (!beans.isEmpty()) {
            Bean<?> bean = beanManager.resolve(beans);
            CreationalContext<?> creationalContext = beanManager.createCreationalContext(bean);
            Object result = beanManager.getReference(bean, type, creationalContext);
            if (result != null) {
                return type.cast(result);
            }
        }
        return null;
    }
}
| grgrzybek/camel | components/camel-cdi/src/main/java/org/apache/camel/cdi/CdiBeanManagerHelper.java | Java | apache-2.0 | 2,510 |
/*
Copyright (c) 2013 Timon Wong
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
Copyright 2012 Uladzimir Pylinski aka barthess.
You may use this work without restrictions, as long as this notice is included.
The work is provided "as is" without warranty of any kind, neither express nor implied.
*/
/*****************************************************************************
* DATASHEET NOTES
*****************************************************************************
Write cycle time (byte or page) - 5 ms
Note:
Page write operations are limited to writing bytes within a single physical
page, regardless of the number of bytes actually being written. Physical page
boundaries start at addresses that are integer multiples of the page buffer
size (or page size and end at addresses that are integer multiples of
[page size]. If a Page Write command attempts to write across a physical
page boundary, the result is that the data wraps around to the beginning of
the current page (overwriting data previously stored there), instead of
being written to the next page as might be expected.
*********************************************************************/
#include "ee25xx.h"
#include <string.h>
#if (defined(HAL_USE_EEPROM) && HAL_USE_EEPROM && EEPROM_USE_EE25XX) || defined(__DOXYGEN__)
/**
* @name Commands of 25XX chip.
* @{
*/
#define CMD_READ 0x03 /**< @brief Read data from memory array beginning at
selected address. */
#define CMD_WRITE 0x02 /**< @brief Write data to memory array beginning at
selected address. */
#define CMD_WRDI 0x04 /**< Reset the write enable latch (disable write
operations). */
#define CMD_WREN 0x06 /**< Set the write enable latch (enable write
operations). */
#define CMD_RDSR 0x05 /**< Read STATUS register. */
#define CMD_WRSR 0x01 /**< Write STATUS register. */
/** @} */
/**
* @name Status of 25XX chip.
* @{}
*/
#define STAT_BP1 0x08 /**< @brief Block protection (high). */
#define STAT_BP0 0x04 /**< @brief Block protection (low). */
#define STAT_WEL 0x02 /**< @brief Write enable latch. */
#define STAT_WIP 0x01 /**< @brief Write-In-Progress. */
/** @} */
/**
* @brief 25XX low level write then read rountine.
*
* @param[in] eepcfg pointer to configuration structure of eeprom file.
* @param[in] txbuf pointer to buffer to be transfered.
* @param[in] txlen number of bytes to be transfered.
* @param[out] rxbuf pointer to buffer to be received.
* @param[in] rxlen number of bytes to be received.
*/
static void ll_25xx_transmit_receive(const SPIEepromFileConfig *eepcfg,
                                     const uint8_t *txbuf, size_t txlen,
                                     uint8_t *rxbuf, size_t rxlen) {

#if SPI_USE_MUTUAL_EXCLUSION
  /* Serialize access when other drivers share this SPI bus. */
  spiAcquireBus(eepcfg->spip);
#endif

  /* One chip-select frame: command/address out, optional data in. */
  spiSelect(eepcfg->spip);
  spiSend(eepcfg->spip, txlen, txbuf);
  if (rxlen) /* Check if receive is needed. */
    spiReceive(eepcfg->spip, rxlen, rxbuf);
  spiUnselect(eepcfg->spip);

#if SPI_USE_MUTUAL_EXCLUSION
  spiReleaseBus(eepcfg->spip);
#endif
}
/**
* @brief Check whether the device is busy (writing in progress).
*
* @param[in] eepcfg pointer to configuration structure of eeprom file.
* @return @p true on busy.
*/
/*
 * Reads the STATUS register and reports the Write-In-Progress bit.
 */
static bool ll_eeprom_is_busy(const SPIEepromFileConfig *eepcfg) {

  uint8_t rdsr_cmd = CMD_RDSR;
  uint8_t status_reg;

  ll_25xx_transmit_receive(eepcfg, &rdsr_cmd, 1, &status_reg, 1);
  return (status_reg & STAT_WIP) ? TRUE : FALSE;
}
/**
* @brief Lock device.
*
* @param[in] eepcfg pointer to configuration structure of eeprom file.
*/
/*
 * Sends WRDI to reset the write-enable latch (command only, no response).
 */
static void ll_eeprom_lock(const SPIEepromFileConfig *eepcfg) {

  uint8_t wrdi_cmd = CMD_WRDI;

  ll_25xx_transmit_receive(eepcfg, &wrdi_cmd, 1, NULL, 0);
}
/**
* @brief Unlock device.
*
* @param[in] eepcfg pointer to configuration structure of eeprom file.
*/
/*
 * Sends WREN to set the write-enable latch (command only, no response).
 */
static void ll_eeprom_unlock(const SPIEepromFileConfig *eepcfg) {

  uint8_t wren_cmd = CMD_WREN;

  ll_25xx_transmit_receive(eepcfg, &wren_cmd, 1, NULL, 0);
}
/**
* @brief Prepare byte sequence for command and address
*
* @param[in] seq pointer to first 3byte sequence
* @param[in] size size of the eeprom device
* @param[in] cmd command
* @param[in] addr address
* @return number of bytes of this sequence
*/
/*
 * Fills seq with the command byte followed by a big-endian address whose
 * width (1, 2 or 3 bytes) is chosen from the device capacity, and returns
 * the total sequence length (2, 3 or 4).
 */
static uint8_t ll_eeprom_prepare_seq(uint8_t *seq, uint32_t size, uint8_t cmd,
                                     uint32_t addr) {

  uint8_t len;

  seq[0] = cmd;
  if (size > 0xffffUL) {
    /* High density part: 24-bit address. */
    seq[1] = (uint8_t)((addr >> 16) & 0xff);
    seq[2] = (uint8_t)((addr >> 8) & 0xff);
    seq[3] = (uint8_t)(addr & 0xff);
    len = 4;
  }
  else if (size > 0x00ffUL) {
    /* Medium density part: 16-bit address. */
    seq[1] = (uint8_t)((addr >> 8) & 0xff);
    seq[2] = (uint8_t)(addr & 0xff);
    len = 3;
  }
  else {
    /* Low density part: 8-bit address. */
    seq[1] = (uint8_t)(addr & 0xff);
    len = 2;
  }
  return len;
}
/**
* @brief EEPROM read routine.
*
* @param[in] eepcfg pointer to configuration structure of eeprom file.
* @param[in] offset addres of 1-st byte to be read.
* @param[out] data pointer to buffer with data to be written.
* @param[in] len number of bytes to be red.
*/
static msg_t ll_eeprom_read(const SPIEepromFileConfig *eepcfg, uint32_t offset,
                            uint8_t *data, size_t len) {
  uint8_t txbuff[4];
  uint8_t txlen;

  osalDbgAssert(((len <= eepcfg->size) && ((offset + len) <= eepcfg->size)),
                "out of device bounds");

  /* Refuse to start a transaction while the SPI driver is not ready. */
  if (eepcfg->spip->state != SPI_READY)
    return MSG_RESET;

  /* Build READ command + address; barrier_low shifts the file's logical
     offset into the physical address space of the chip. */
  txlen = ll_eeprom_prepare_seq(txbuff, eepcfg->size, CMD_READ,
                                (offset + eepcfg->barrier_low));

  ll_25xx_transmit_receive(eepcfg, txbuff, txlen, data, len);

  return MSG_OK;
}
/**
* @brief EEPROM write routine.
* @details Function writes data to EEPROM.
* @pre Data must be fit to single EEPROM page.
*
* @param[in] eepcfg pointer to configuration structure of eeprom file.
* @param[in] offset addres of 1-st byte to be writen.
* @param[in] data pointer to buffer with data to be written.
* @param[in] len number of bytes to be written.
*/
static msg_t ll_eeprom_write(const SPIEepromFileConfig *eepcfg, uint32_t offset,
                             const uint8_t *data, size_t len) {
  uint8_t txbuff[4];
  uint8_t txlen;
  systime_t now;

  osalDbgAssert(((len <= eepcfg->size) && ((offset + len) <= eepcfg->size)),
                "out of device bounds");
  /* Page writes wrap inside a physical page (see datasheet notes at the top
     of this file), so the chunk must not cross a page boundary. */
  osalDbgAssert((((offset + eepcfg->barrier_low) / eepcfg->pagesize) ==
                 (((offset + eepcfg->barrier_low) + len - 1) / eepcfg->pagesize)),
                "data can not be fitted in single page");

  if (eepcfg->spip->state != SPI_READY)
    return MSG_RESET;

  /* Unlock array for writting. */
  ll_eeprom_unlock(eepcfg);

#if SPI_USE_MUTUAL_EXCLUSION
  spiAcquireBus(eepcfg->spip);
#endif

  /* Single chip-select frame: WRITE command + address, then the payload. */
  spiSelect(eepcfg->spip);
  txlen = ll_eeprom_prepare_seq(txbuff, eepcfg->size, CMD_WRITE,
                                (offset + eepcfg->barrier_low));
  spiSend(eepcfg->spip, txlen, txbuff);
  spiSend(eepcfg->spip, len, data);
  spiUnselect(eepcfg->spip);

#if SPI_USE_MUTUAL_EXCLUSION
  spiReleaseBus(eepcfg->spip);
#endif

  /* Wait until EEPROM process data. Poll the busy flag, yielding between
     polls, and give up after the configured write_time. */
  now = chVTGetSystemTimeX();
  while (ll_eeprom_is_busy(eepcfg)) {
    if ((chVTGetSystemTimeX() - now) > eepcfg->write_time) {
      return MSG_TIMEOUT;
    }
    chThdYield();
  }

  /* Lock array preventing unexpected access */
  ll_eeprom_lock(eepcfg);
  return MSG_OK;
}
/**
* @brief Determines and returns size of data that can be processed
*/
/*
 * Clamps a requested transfer length so it never runs past the end of the
 * EEPROM file; returns the number of bytes that may actually be processed.
 */
static size_t __clamp_size(void *ip, size_t n) {

  size_t pos = (size_t)eepfs_getposition(ip);
  size_t end = (size_t)eepfs_getsize(ip);

  return ((pos + n) > end) ? (end - pos) : n;
}
/**
* @brief Write data that can be fitted in one page boundary
*/
static msg_t __fitted_write(void *ip, const uint8_t *data, size_t len, uint32_t *written) {
  msg_t status = MSG_RESET;

  osalDbgAssert(len != 0, "something broken in hi level part");

  status = ll_eeprom_write(((SPIEepromFileStream *)ip)->cfg,
                           eepfs_getposition(ip), data, len);
  if (status == MSG_OK) {
    /* Advance the byte count and file position only on a confirmed write. */
    *written += len;
    eepfs_lseek(ip, eepfs_getposition(ip) + len);
  }
  return status;
}
/**
* @brief Write data to EEPROM.
* @details Only one EEPROM page can be written at once. So fucntion
* splits large data chunks in small EEPROM transactions if needed.
* @note To achieve the maximum effectivity use write operations
* aligned to EEPROM page boundaries.
*/
static size_t write(void *ip, const uint8_t *bp, size_t n) {

  size_t len = 0;     /* bytes to be written at one trasaction */
  uint32_t written;   /* total bytes successfully written */
  uint16_t pagesize;
  uint32_t firstpage;
  uint32_t lastpage;

  volatile const SPIEepromFileConfig *cfg = ((SPIEepromFileStream *)ip)->cfg;

  osalDbgCheck((ip != NULL) && (((SPIEepromFileStream *)ip)->vmt != NULL));

  if (n == 0)
    return 0;

  /* Clamp to the file size; nothing to do at EOF. */
  n = __clamp_size(ip, n);
  if (n == 0)
    return 0;

  /* Physical page numbers covered by the transfer; barrier_low maps the
     file-relative position onto the chip's address space. */
  pagesize  = cfg->pagesize;
  firstpage = (cfg->barrier_low + eepfs_getposition(ip)) / pagesize;
  lastpage  = ((cfg->barrier_low + eepfs_getposition(ip) + n) - 1) / pagesize;

  written = 0;
  /* data fitted in single page */
  if (firstpage == lastpage) {
    len = n;
    __fitted_write(ip, bp, len, &written);
    bp += len;
    return written;
  }
  else {
    /* write first piece of data to first page boundary */
    len = ((firstpage + 1) * pagesize) - eepfs_getposition(ip);
    len -= cfg->barrier_low;
    __fitted_write(ip, bp, len, &written);
    bp += len;

    /* now writes blocks at a size of pages (may be no one) */
    while ((n - written) > pagesize) {
      len = pagesize;
      if (__fitted_write(ip, bp, len, &written) != MSG_OK) // Fixed: Would increase bp forever and crash in case of timeouts...
        return written;
      bp += len;
    }

    /* wrtie tail */
    len = n - written;
    if (len == 0)
      return written;
    else {
      __fitted_write(ip, bp, len, &written);
    }
  }

  return written;
}
/**
* Read some bytes from current position in file. After successful
* read operation the position pointer will be increased by the number
* of read bytes.
*/
static size_t read(void *ip, uint8_t *bp, size_t n) {
  msg_t status = MSG_OK;

  osalDbgCheck((ip != NULL) && (((EepromFileStream *)ip)->vmt != NULL));

  if (n == 0)
    return 0;

  /* Clamp to the file size; nothing to do at EOF. */
  n = __clamp_size(ip, n);
  if (n == 0)
    return 0;

  /* call low level function */
  status = ll_eeprom_read(((SPIEepromFileStream *)ip)->cfg,
                          eepfs_getposition(ip), bp, n);
  if (status != MSG_OK)
    return 0;
  else {
    /* Advance the position only after a successful read. */
    eepfs_lseek(ip, (eepfs_getposition(ip) + n));
    return n;
  }
}
/* Virtual methods table: binds the generic EEPROM file-stream interface to
   the 25xx-specific read/write implementations above; position, seek and
   byte-level helpers come from the shared eepfs layer. */
static const struct EepromFileStreamVMT vmt = {
  write,
  read,
  eepfs_put,
  eepfs_get,
  eepfs_close,
  eepfs_geterror,
  eepfs_getsize,
  eepfs_getposition,
  eepfs_lseek,
};

/* Device descriptor exported to the EEPROM driver framework. */
EepromDevice eepdev_25xx = {
  EEPROM_DEV_25XX,
  &vmt
};
#endif /* EEPROM_USE_EE25XX */
| raphaelchang/quadthingy-software | quadrotor/ChibiOS_16.1.4/community/os/hal/src/ee25xx.c | C | apache-2.0 | 12,242 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.analytics.mapper;
import com.carrotsearch.hppc.DoubleArrayList;
import com.carrotsearch.hppc.IntArrayList;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SortField;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentSubParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.LeafHistogramFieldData;
import org.elasticsearch.index.fielddata.HistogramValue;
import org.elasticsearch.index.fielddata.HistogramValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexHistogramFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.TypeParsers;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.analytics.aggregations.support.AnalyticsValuesSourceType;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
/**
 * Field Mapper for pre-aggregated histograms.
 *
 * <p>A histogram is supplied per document as an object with two parallel
 * arrays: {@code values}, which must be in increasing order, and
 * {@code counts}, which must contain non-negative integers and have the same
 * length as {@code values}. The pairs are encoded into a single binary doc
 * value (pairs with a zero count are not stored). The field is not
 * searchable; it is consumed by the dedicated analytics aggregations.</p>
 */
public class HistogramFieldMapper extends FieldMapper {
    public static final String CONTENT_TYPE = "histogram";

    /** Names of the mapping parameters supported by this field type. */
    public static class Names {
        public static final String IGNORE_MALFORMED = "ignore_malformed";
    }

    /** Defaults applied when mapping parameters are not given explicitly. */
    public static class Defaults {
        public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false);
        public static final HistogramFieldType FIELD_TYPE = new HistogramFieldType();

        static {
            FIELD_TYPE.setTokenized(false);
            FIELD_TYPE.setHasDocValues(true);
            FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
            FIELD_TYPE.freeze();
        }
    }

    public static final ParseField COUNTS_FIELD = new ParseField("counts");
    public static final ParseField VALUES_FIELD = new ParseField("values");

    /**
     * Builder for {@link HistogramFieldMapper}; supports
     * {@code ignore_malformed} on top of the common field parameters.
     */
    public static class Builder extends FieldMapper.Builder<Builder, HistogramFieldMapper> {
        protected Boolean ignoreMalformed;

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
            builder = this;
        }

        public Builder ignoreMalformed(boolean ignoreMalformed) {
            this.ignoreMalformed = ignoreMalformed;
            return builder;
        }

        /**
         * Resolves {@code ignore_malformed}: an explicit builder value wins,
         * then the index-level setting, then the static default.
         */
        protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
            if (ignoreMalformed != null) {
                return new Explicit<>(ignoreMalformed, true);
            }
            if (context.indexSettings() != null) {
                return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false);
            }
            return HistogramFieldMapper.Defaults.IGNORE_MALFORMED;
        }

        public HistogramFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType,
                                          MappedFieldType defaultFieldType, Settings indexSettings,
                                          MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo) {
            setupFieldType(context);
            return new HistogramFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, multiFields,
                ignoreMalformed, copyTo);
        }

        @Override
        public HistogramFieldMapper build(BuilderContext context) {
            return build(context, name, fieldType, defaultFieldType, context.indexSettings(),
                multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo);
        }
    }

    /** Parses the mapping definition of a {@code histogram} field. */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder<Builder, HistogramFieldMapper> parse(String name,
                                                                   Map<String, Object> node, ParserContext parserContext)
                throws MapperParsingException {
            Builder builder = new HistogramFieldMapper.Builder(name);
            TypeParsers.parseMeta(builder, name, node);
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String propName = entry.getKey();
                Object propNode = entry.getValue();
                if (propName.equals(Names.IGNORE_MALFORMED)) {
                    builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode, name + "." + Names.IGNORE_MALFORMED));
                    iterator.remove();
                }
            }
            return builder;
        }
    }

    protected Explicit<Boolean> ignoreMalformed;

    public HistogramFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
                                Settings indexSettings, MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo) {
        super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
        this.ignoreMalformed = ignoreMalformed;
    }

    @Override
    protected void doMerge(Mapper mergeWith) {
        super.doMerge(mergeWith);
        HistogramFieldMapper gpfmMergeWith = (HistogramFieldMapper) mergeWith;
        // Only an explicitly configured ignore_malformed overrides ours.
        if (gpfmMergeWith.ignoreMalformed.explicit()) {
            this.ignoreMalformed = gpfmMergeWith.ignoreMalformed;
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
        throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
    }

    /** Field type for {@code histogram}: doc-values only, no search support. */
    public static class HistogramFieldType extends MappedFieldType {
        public HistogramFieldType() {
        }

        HistogramFieldType(HistogramFieldType ref) {
            super(ref);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public MappedFieldType clone() {
            return new HistogramFieldType(this);
        }

        @Override
        public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
            failIfNoDocValues();
            return new IndexFieldData.Builder() {
                @Override
                public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
                                               CircuitBreakerService breakerService, MapperService mapperService) {
                    return new IndexHistogramFieldData(indexSettings.getIndex(), fieldType.name()) {
                        @Override
                        public LeafHistogramFieldData load(LeafReaderContext context) {
                            return new LeafHistogramFieldData() {
                                @Override
                                public HistogramValues getHistogramValues() throws IOException {
                                    try {
                                        final BinaryDocValues values = DocValues.getBinary(context.reader(), fieldName);
                                        // Re-used across documents to avoid per-doc allocation.
                                        final InternalHistogramValue value = new InternalHistogramValue();
                                        return new HistogramValues() {
                                            @Override
                                            public boolean advanceExact(int doc) throws IOException {
                                                return values.advanceExact(doc);
                                            }

                                            @Override
                                            public HistogramValue histogram() throws IOException {
                                                try {
                                                    value.reset(values.binaryValue());
                                                    return value;
                                                } catch (IOException e) {
                                                    throw new IOException("Cannot load doc value", e);
                                                }
                                            }
                                        };
                                    } catch (IOException e) {
                                        throw new IOException("Cannot load doc values", e);
                                    }
                                }

                                @Override
                                public ScriptDocValues<?> getScriptValues() {
                                    throw new UnsupportedOperationException("The [" + CONTENT_TYPE + "] field does not " +
                                        "support scripts");
                                }

                                @Override
                                public SortedBinaryDocValues getBytesValues() {
                                    throw new UnsupportedOperationException("String representation of doc values " +
                                        "for [" + CONTENT_TYPE + "] fields is not supported");
                                }

                                @Override
                                public long ramBytesUsed() {
                                    return 0; // Unknown
                                }

                                @Override
                                public void close() {

                                }
                            };
                        }

                        @Override
                        public LeafHistogramFieldData loadDirect(LeafReaderContext context) throws Exception {
                            return load(context);
                        }

                        @Override
                        public SortField sortField(Object missingValue, MultiValueMode sortMode,
                                                   XFieldComparatorSource.Nested nested, boolean reverse) {
                            throw new UnsupportedOperationException("can't sort on the [" + CONTENT_TYPE + "] field");
                        }

                        @Override
                        public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode,
                                Nested nested, SortOrder sortOrder, DocValueFormat format, int bucketSize, BucketedSort.ExtraData extra) {
                            throw new IllegalArgumentException("can't sort on the [" + CONTENT_TYPE + "] field");
                        }
                    };
                }
            };
        }

        @Override
        public ValuesSourceType getValuesSourceType() {
            return AnalyticsValuesSourceType.HISTOGRAM;
        }

        @Override
        public Query existsQuery(QueryShardContext context) {
            if (hasDocValues()) {
                return new DocValuesFieldExistsQuery(name());
            } else {
                throw new QueryShardException(context, "field " + name() + " of type [" + CONTENT_TYPE + "] " +
                    "has no doc values and cannot be searched");
            }
        }

        @Override
        public Query termQuery(Object value, QueryShardContext context) {
            throw new QueryShardException(context, "[" + CONTENT_TYPE + "] field do not support searching, " +
                "use dedicated aggregations instead: ["
                + name() + "]");
        }
    }

    /**
     * Parses a histogram object of the form
     * {@code {"values": [...], "counts": [...]}} and indexes it as a single
     * binary doc value. Malformed input raises a
     * {@link MapperParsingException} unless {@code ignore_malformed} is set,
     * in which case the document is accepted and the field recorded as
     * ignored.
     */
    @Override
    public void parse(ParseContext context) throws IOException {
        if (context.externalValueSet()) {
            throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] can't be used in multi-fields");
        }
        context.path().add(simpleName());
        XContentParser.Token token = null;
        XContentSubParser subParser = null;
        try {
            token = context.parser().currentToken();
            if (token == XContentParser.Token.VALUE_NULL) {
                context.path().remove();
                return;
            }
            DoubleArrayList values = null;
            IntArrayList counts = null;
            // should be an object
            ensureExpectedToken(XContentParser.Token.START_OBJECT, token, context.parser()::getTokenLocation);
            subParser = new XContentSubParser(context.parser());
            token = subParser.nextToken();
            while (token != XContentParser.Token.END_OBJECT) {
                // should be a field
                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, subParser::getTokenLocation);
                String fieldName = subParser.currentName();
                if (fieldName.equals(VALUES_FIELD.getPreferredName())) {
                    token = subParser.nextToken();
                    // should be an array
                    ensureExpectedToken(XContentParser.Token.START_ARRAY, token, subParser::getTokenLocation);
                    values = new DoubleArrayList();
                    token = subParser.nextToken();
                    double previousVal = -Double.MAX_VALUE;
                    while (token != XContentParser.Token.END_ARRAY) {
                        // should be a number
                        ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, subParser::getTokenLocation);
                        double val = subParser.doubleValue();
                        if (val < previousVal) {
                            // values must be in increasing order
                            // (message references the values field, not counts)
                            throw new MapperParsingException("error parsing field ["
                                + name() + "], ["+ VALUES_FIELD + "] values must be in increasing order, got [" + val +
                                "] but previous value was [" + previousVal +"]");
                        }
                        values.add(val);
                        previousVal = val;
                        token = subParser.nextToken();
                    }
                } else if (fieldName.equals(COUNTS_FIELD.getPreferredName())) {
                    token = subParser.nextToken();
                    // should be an array
                    ensureExpectedToken(XContentParser.Token.START_ARRAY, token, subParser::getTokenLocation);
                    counts = new IntArrayList();
                    token = subParser.nextToken();
                    while (token != XContentParser.Token.END_ARRAY) {
                        // should be a number
                        ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, subParser::getTokenLocation);
                        counts.add(subParser.intValue());
                        token = subParser.nextToken();
                    }
                } else {
                    throw new MapperParsingException("error parsing field [" +
                        name() + "], with unknown parameter [" + fieldName + "]");
                }
                token = subParser.nextToken();
            }
            if (values == null) {
                throw new MapperParsingException("error parsing field ["
                    + name() + "], expected field called [" + VALUES_FIELD.getPreferredName() + "]");
            }
            if (counts == null) {
                throw new MapperParsingException("error parsing field ["
                    + name() + "], expected field called [" + COUNTS_FIELD.getPreferredName() + "]");
            }
            if (values.size() != counts.size()) {
                throw new MapperParsingException("error parsing field ["
                    + name() + "], expected same length from [" + VALUES_FIELD.getPreferredName() +"] and " +
                    "[" + COUNTS_FIELD.getPreferredName() +"] but got [" + values.size() + " != " + counts.size() +"]");
            }
            if (fieldType().hasDocValues()) {
                // Encode (count, value) pairs: VInt count followed by the raw
                // long bits of the double value.
                ByteBuffersDataOutput dataOutput = new ByteBuffersDataOutput();
                for (int i = 0; i < values.size(); i++) {
                    int count = counts.get(i);
                    if (count < 0) {
                        throw new MapperParsingException("error parsing field ["
                            + name() + "], ["+ COUNTS_FIELD + "] elements must be >= 0 but got " + count);
                    } else if (count > 0) {
                        // we do not add elements with count == 0
                        dataOutput.writeVInt(count);
                        dataOutput.writeLong(Double.doubleToRawLongBits(values.get(i)));
                    }
                }
                BytesRef docValue = new BytesRef(dataOutput.toArrayCopy(), 0, Math.toIntExact(dataOutput.size()));
                Field field = new BinaryDocValuesField(name(), docValue);
                if (context.doc().getByKey(fieldType().name()) != null) {
                    throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() +
                        "] doesn't support indexing multiple values for the same field in the same document");
                }
                context.doc().addWithKey(fieldType().name(), field);
            }
        } catch (Exception ex) {
            if (ignoreMalformed.value() == false) {
                throw new MapperParsingException("failed to parse field [{}] of type [{}]",
                    ex, fieldType().name(), fieldType().typeName());
            }
            if (subParser != null) {
                // close the subParser so we advance to the end of the object
                subParser.close();
            }
            context.addIgnoredField(fieldType().name());
        }
        context.path().remove();
    }

    @Override
    protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
        super.doXContentBody(builder, includeDefaults, params);
        if (includeDefaults || ignoreMalformed.explicit()) {
            builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value());
        }
    }

    /** re-usable {@link HistogramValue} implementation */
    private static class InternalHistogramValue extends HistogramValue {
        double value;
        int count;
        boolean isExhausted;
        ByteArrayDataInput dataInput;

        InternalHistogramValue() {
            dataInput = new ByteArrayDataInput();
        }

        /** reset the value for the histogram */
        void reset(BytesRef bytesRef) {
            dataInput.reset(bytesRef.bytes, bytesRef.offset, bytesRef.length);
            isExhausted = false;
            value = 0;
            count = 0;
        }

        @Override
        public boolean next() {
            if (dataInput.eof() == false) {
                count = dataInput.readVInt();
                value = Double.longBitsToDouble(dataInput.readLong());
                return true;
            }
            isExhausted = true;
            return false;
        }

        @Override
        public double value() {
            if (isExhausted) {
                throw new IllegalArgumentException("histogram already exhausted");
            }
            return value;
        }

        @Override
        public int count() {
            if (isExhausted) {
                throw new IllegalArgumentException("histogram already exhausted");
            }
            return count;
        }
    }
}
| HonzaKral/elasticsearch | x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java | Java | apache-2.0 | 21,691 |
package io.katharsis.repository.exception;
import io.katharsis.errorhandling.exception.KatharsisMatchingException;
/**
 * Indicates that an expected repository annotation could not be found.
 */
public class RepositoryAnnotationNotFoundException extends KatharsisMatchingException {

    /**
     * @param message description of the missing annotation
     */
    public RepositoryAnnotationNotFoundException(final String message) {
        super(message);
    }
}
| zachncst/katharsis-core | src/main/java/io/katharsis/repository/exception/RepositoryAnnotationNotFoundException.java | Java | apache-2.0 | 305 |
/*******************************************************************************
* Code contributed to the webinos project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright 2011-2012 Paddy Byers
*
******************************************************************************/
package org.webinos.wrt.renderer;
import org.webinos.wrt.ui.RendererActivity;
import android.app.AlertDialog;
import android.util.Log;
import android.webkit.ConsoleMessage;
import android.webkit.JsResult;
import android.webkit.WebView;
/**
 * Chrome client for the widget runtime WebView: shows JavaScript alerts as
 * native dialogs, forwards page titles to the hosting activity and routes
 * console output to the Android log.
 */
public class WebChromeClient extends android.webkit.WebChromeClient {

    private static final String TAG = "org.webinos.wrt.renderer.WebChromeClient";

    /** Hosting activity; receives document titles. */
    private RendererActivity activity;

    public WebChromeClient(RendererActivity activity) {
        this.activity = activity;
    }

    @Override
    public boolean onJsAlert(WebView view, String url, String message, JsResult result) {
        Log.d(TAG, message);
        AlertDialog.Builder dialog = new AlertDialog.Builder(view.getContext());
        dialog.setMessage(message);
        dialog.setCancelable(true);
        dialog.show();
        // Confirm immediately; the dialog is purely informational.
        result.confirm();
        return true;
    }

    @Override
    public void onReceivedTitle(WebView webView, String title) {
        // Ignore empty titles so the activity keeps its current one.
        if (title == null || title.isEmpty()) {
            return;
        }
        activity.setTitle(title);
    }

    @Override
    public boolean onConsoleMessage(ConsoleMessage msg) {
        String origin = msg.sourceId() + ':' + msg.lineNumber();
        Log.v(TAG, origin + " " + msg.message());
        return true;
    }
}
| krishnabangalore/Webinos-Platform | webinos/platform/android/wrt/src/org/webinos/wrt/renderer/WebChromeClient.java | Java | apache-2.0 | 1,937 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_21) on Wed May 04 07:58:35 PDT 2011 -->
<TITLE>
Uses of Package org.apache.hadoop.util (Hadoop 0.20.203.0 API)
</TITLE>
<META NAME="date" CONTENT="2011-05-04">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Package org.apache.hadoop.util (Hadoop 0.20.203.0 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../index.html?org/apache/hadoop/util/package-use.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-use.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Package<br>org.apache.hadoop.util</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.contrib.index.mapred"><B>org.apache.hadoop.contrib.index.mapred</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.examples"><B>org.apache.hadoop.examples</B></A></TD>
<TD>Hadoop example code. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.examples.dancing"><B>org.apache.hadoop.examples.dancing</B></A></TD>
<TD>This package is a distributed implementation of Knuth's <a
href="http://en.wikipedia.org/wiki/Dancing_Links">dancing links</a>
algorithm that can run under Hadoop. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.examples.terasort"><B>org.apache.hadoop.examples.terasort</B></A></TD>
<TD>This package consists of 3 map/reduce applications for Hadoop to
compete in the annual <a
href="http://www.hpl.hp.com/hosted/sortbenchmark" target="_top">terabyte sort</a>
competition. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.fs"><B>org.apache.hadoop.fs</B></A></TD>
<TD>An abstract file system API. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.fs.ftp"><B>org.apache.hadoop.fs.ftp</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.fs.kfs"><B>org.apache.hadoop.fs.kfs</B></A></TD>
<TD>A client for the Kosmos filesystem (KFS) </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.fs.s3"><B>org.apache.hadoop.fs.s3</B></A></TD>
<TD>A distributed, block-based implementation of <A HREF="../../../../org/apache/hadoop/fs/FileSystem.html" title="class in org.apache.hadoop.fs"><CODE>FileSystem</CODE></A> that uses <a href="http://aws.amazon.com/s3">Amazon S3</a>
as a backing store. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.fs.s3native"><B>org.apache.hadoop.fs.s3native</B></A></TD>
<TD>
A distributed implementation of <A HREF="../../../../org/apache/hadoop/fs/FileSystem.html" title="class in org.apache.hadoop.fs"><CODE>FileSystem</CODE></A> for reading and writing files on
<a href="http://aws.amazon.com/s3">Amazon S3</a>. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.io"><B>org.apache.hadoop.io</B></A></TD>
<TD>Generic i/o code for use when reading and writing data to the network,
to databases, and to files. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.mapred"><B>org.apache.hadoop.mapred</B></A></TD>
<TD>A software framework for easily writing applications which process vast
amounts of data (multi-terabyte data-sets) in parallel on large clusters
(thousands of nodes) built of commodity hardware in a reliable, fault-tolerant
manner. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.mapred.lib"><B>org.apache.hadoop.mapred.lib</B></A></TD>
<TD>Library of generally useful mappers, reducers, and partitioners. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.mapred.lib.db"><B>org.apache.hadoop.mapred.lib.db</B></A></TD>
<TD>org.apache.hadoop.mapred.lib.db Package </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.mapred.pipes"><B>org.apache.hadoop.mapred.pipes</B></A></TD>
<TD>Hadoop Pipes allows C++ code to use Hadoop DFS and map/reduce. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.mapred.tools"><B>org.apache.hadoop.mapred.tools</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.mapreduce"><B>org.apache.hadoop.mapreduce</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.streaming"><B>org.apache.hadoop.streaming</B></A></TD>
<TD><tt>Hadoop Streaming</tt> is a utility which allows users to create and run
Map-Reduce jobs with any executables (e.g. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.util"><B>org.apache.hadoop.util</B></A></TD>
<TD>Common utilities. </TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.contrib.index.mapred"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/contrib/index/mapred/package-summary.html">org.apache.hadoop.contrib.index.mapred</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.contrib.index.mapred"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.examples"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/examples/package-summary.html">org.apache.hadoop.examples</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Tool.html#org.apache.hadoop.examples"><B>Tool</B></A></B>
<BR>
A tool interface that supports handling of generic command-line options.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.examples.dancing"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/examples/dancing/package-summary.html">org.apache.hadoop.examples.dancing</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Tool.html#org.apache.hadoop.examples.dancing"><B>Tool</B></A></B>
<BR>
A tool interface that supports handling of generic command-line options.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.examples.terasort"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/examples/terasort/package-summary.html">org.apache.hadoop.examples.terasort</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.examples.terasort"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Tool.html#org.apache.hadoop.examples.terasort"><B>Tool</B></A></B>
<BR>
A tool interface that supports handling of generic command-line options.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.fs"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/fs/package-summary.html">org.apache.hadoop.fs</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.fs"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Shell.html#org.apache.hadoop.fs"><B>Shell</B></A></B>
<BR>
A base class for running a Unix command.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Tool.html#org.apache.hadoop.fs"><B>Tool</B></A></B>
<BR>
A tool interface that supports handling of generic command-line options.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.fs.ftp"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/fs/ftp/package-summary.html">org.apache.hadoop.fs.ftp</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.fs.ftp"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.fs.kfs"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/fs/kfs/package-summary.html">org.apache.hadoop.fs.kfs</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.fs.kfs"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.fs.s3"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/fs/s3/package-summary.html">org.apache.hadoop.fs.s3</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.fs.s3"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Tool.html#org.apache.hadoop.fs.s3"><B>Tool</B></A></B>
<BR>
A tool interface that supports handling of generic command-line options.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.fs.s3native"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/fs/s3native/package-summary.html">org.apache.hadoop.fs.s3native</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.fs.s3native"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.io"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/io/package-summary.html">org.apache.hadoop.io</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progress.html#org.apache.hadoop.io"><B>Progress</B></A></B>
<BR>
Utility to assist with generation of progress reports.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.io"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.mapred"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/mapred/package-summary.html">org.apache.hadoop.mapred</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/LineReader.html#org.apache.hadoop.mapred"><B>LineReader</B></A></B>
<BR>
A class that provides a line reader from an input stream.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/ProcessTree.Signal.html#org.apache.hadoop.mapred"><B>ProcessTree.Signal</B></A></B>
<BR>
The constants for the signals.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progress.html#org.apache.hadoop.mapred"><B>Progress</B></A></B>
<BR>
Utility to assist with generation of progress reports.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.mapred"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Tool.html#org.apache.hadoop.mapred"><B>Tool</B></A></B>
<BR>
A tool interface that supports handling of generic command-line options.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.mapred.lib"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/mapred/lib/package-summary.html">org.apache.hadoop.mapred.lib</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.mapred.lib"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Tool.html#org.apache.hadoop.mapred.lib"><B>Tool</B></A></B>
<BR>
A tool interface that supports handling of generic command-line options.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.mapred.lib.db"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/mapred/lib/db/package-summary.html">org.apache.hadoop.mapred.lib.db</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.mapred.lib.db"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.mapred.pipes"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/mapred/pipes/package-summary.html">org.apache.hadoop.mapred.pipes</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Tool.html#org.apache.hadoop.mapred.pipes"><B>Tool</B></A></B>
<BR>
A tool interface that supports handling of generic command-line options.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.mapred.tools"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/mapred/tools/package-summary.html">org.apache.hadoop.mapred.tools</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Tool.html#org.apache.hadoop.mapred.tools"><B>Tool</B></A></B>
<BR>
A tool interface that supports handling of generic command-line options.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.mapreduce"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/mapreduce/package-summary.html">org.apache.hadoop.mapreduce</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.mapreduce"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.streaming"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/streaming/package-summary.html">org.apache.hadoop.streaming</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/LineReader.html#org.apache.hadoop.streaming"><B>LineReader</B></A></B>
<BR>
A class that provides a line reader from an input stream.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Tool.html#org.apache.hadoop.streaming"><B>Tool</B></A></B>
<BR>
A tool interface that supports handling of generic command-line options.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.util"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Classes in <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A> used by <A HREF="../../../../org/apache/hadoop/util/package-summary.html">org.apache.hadoop.util</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/DataChecksum.html#org.apache.hadoop.util"><B>DataChecksum</B></A></B>
<BR>
 This class provides an interface and utilities for processing checksums for
DFS data transfers.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/DiskChecker.DiskErrorException.html#org.apache.hadoop.util"><B>DiskChecker.DiskErrorException</B></A></B>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/IndexedSortable.html#org.apache.hadoop.util"><B>IndexedSortable</B></A></B>
<BR>
Interface for collections capable of being sorted by <A HREF="../../../../org/apache/hadoop/util/IndexedSorter.html" title="interface in org.apache.hadoop.util"><CODE>IndexedSorter</CODE></A>
algorithms.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/IndexedSorter.html#org.apache.hadoop.util"><B>IndexedSorter</B></A></B>
<BR>
Interface for sort algorithms accepting <A HREF="../../../../org/apache/hadoop/util/IndexedSortable.html" title="interface in org.apache.hadoop.util"><CODE>IndexedSortable</CODE></A> items.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/MemoryCalculatorPlugin.html#org.apache.hadoop.util"><B>MemoryCalculatorPlugin</B></A></B>
<BR>
Plugin to calculate virtual and physical memories on the system.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/ProcessTree.html#org.apache.hadoop.util"><B>ProcessTree</B></A></B>
<BR>
Process tree related operations</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/ProcessTree.Signal.html#org.apache.hadoop.util"><B>ProcessTree.Signal</B></A></B>
<BR>
The constants for the signals.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/ProcfsBasedProcessTree.html#org.apache.hadoop.util"><B>ProcfsBasedProcessTree</B></A></B>
<BR>
A Proc file-system based ProcessTree.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progress.html#org.apache.hadoop.util"><B>Progress</B></A></B>
<BR>
Utility to assist with generation of progress reports.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Progressable.html#org.apache.hadoop.util"><B>Progressable</B></A></B>
<BR>
A facility for reporting progress.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/QueueProcessingStatistics.State.html#org.apache.hadoop.util"><B>QueueProcessingStatistics.State</B></A></B>
<BR>
This enum provides the "states" of a state machine for
<A HREF="../../../../org/apache/hadoop/util/QueueProcessingStatistics.html" title="class in org.apache.hadoop.util"><CODE>QueueProcessingStatistics</CODE></A>.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Shell.html#org.apache.hadoop.util"><B>Shell</B></A></B>
<BR>
A base class for running a Unix command.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/StringUtils.TraditionalBinaryPrefix.html#org.apache.hadoop.util"><B>StringUtils.TraditionalBinaryPrefix</B></A></B>
<BR>
The traditional binary prefixes, kilo, mega, ..., exa,
which can be represented by a 64-bit integer.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><B><A HREF="../../../../org/apache/hadoop/util/class-use/Tool.html#org.apache.hadoop.util"><B>Tool</B></A></B>
<BR>
A tool interface that supports handling of generic command-line options.</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../index.html?org/apache/hadoop/util/package-use.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-use.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2009 The Apache Software Foundation
</BODY>
</HTML>
| zincumyx/Mammoth | mammoth-src/docs/api/org/apache/hadoop/util/package-use.html | HTML | apache-2.0 | 32,022 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/quicksight/model/ListDashboardsRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/core/http/URI.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>
#include <utility>
using namespace Aws::QuickSight::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
using namespace Aws::Http;
ListDashboardsRequest::ListDashboardsRequest() :
m_awsAccountIdHasBeenSet(false),
m_nextTokenHasBeenSet(false),
m_maxResults(0),
m_maxResultsHasBeenSet(false)
{
}
// This operation carries no request body; all inputs travel in the URI
// (path and query string), so the serialized payload is an empty string.
Aws::String ListDashboardsRequest::SerializePayload() const
{
  return Aws::String();
}
// Appends every optional member that has been set to the request URI as a
// query-string parameter. A single stream buffer is reused for stringifying
// each value and cleared between parameters.
void ListDashboardsRequest::AddQueryStringParameters(URI& uri) const
{
    Aws::StringStream buffer;
    // Attach the buffered text under `key`, then reset the buffer for reuse.
    const auto appendParam = [&](const char* key)
    {
      uri.AddQueryStringParameter(key, buffer.str());
      buffer.str("");
    };

    if(m_nextTokenHasBeenSet)
    {
      buffer << m_nextToken;
      appendParam("next-token");
    }

    if(m_maxResultsHasBeenSet)
    {
      buffer << m_maxResults;
      appendParam("max-results");
    }
}
| awslabs/aws-sdk-cpp | aws-cpp-sdk-quicksight/source/model/ListDashboardsRequest.cpp | C++ | apache-2.0 | 1,117 |
#include <Eigen/Eigen>
#include <iostream>
#ifndef M_PI
#define M_PI 3.1415926535897932384626433832795
#endif
using namespace Eigen;
using namespace std;
// Documentation snippet for MatrixBase::setOnes() on a row expression:
// fill row 1 of a random 4x4 integer matrix with ones, then print it.
int main(int, char**)
{
  // Set stream precision; has no visible effect for an integer matrix,
  // but is part of the shared snippet boilerplate.
  cout.precision(3);
  Matrix4i m = Matrix4i::Random();
  m.row(1).setOnes();  // overwrite the second row with all ones
  cout << m << endl;
  return 0;
}
| shishaochen/TensorFlow-0.8-Win | third_party/eigen-eigen-50812b426b7c/build_dir/doc/snippets/compile_MatrixBase_setOnes.cpp | C++ | apache-2.0 | 292 |
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
// Test for the /_purge endpoint. Verifies that purging removes documents
// (and their edit history) from the database, advances update_seq and
// purge_seq, and that dependent views are rebuilt without the purged docs.
couchTests.purge = function(debug) {
  var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"false"});
  db.deleteDb();
  db.createDb();
  if (debug) debugger;
  /*
  purge is not to be confused with a document deletion. It removes the
  document and all edit history from the local instance of the database.
  */
  var numDocs = 10;
  // Two views: one that emits every doc twice, one keyed to doc "1" only,
  // so both multi-row and single-row view updates are exercised.
  var designDoc = {
    _id:"_design/test",
    language: "javascript",
    views: {
      all_docs_twice: {map: "function(doc) { emit(doc.integer, null); emit(doc.integer, null) }"},
      single_doc: {map: "function(doc) { if (doc._id == \"1\") { emit(1, null) }}"}
    }
  };
  T(db.save(designDoc).ok);
  db.bulkSave(makeDocs(1, numDocs + 1));
  // go ahead and validate the views before purging
  var rows = db.view("test/all_docs_twice").rows;
  for (var i = 0; i < numDocs; i++) {
    T(rows[2*i].key == i+1);
    T(rows[(2*i)+1].key == i+1);
  }
  T(db.view("test/single_doc").total_rows == 1);
  // Snapshot db info and the two victim docs so their _revs can be purged
  // and the sequence numbers compared afterwards.
  var info = db.info();
  var doc1 = db.open("1");
  var doc2 = db.open("2");
  // purge the documents
  var xhr = CouchDB.request("POST", "/test_suite_db/_purge", {
    body: JSON.stringify({"1":[doc1._rev], "2":[doc2._rev]})
  });
  T(xhr.status == 200);
  var result = JSON.parse(xhr.responseText);
  var newInfo = db.info();
  // purging increments the update sequence
  T(info.update_seq+1 == newInfo.update_seq);
  // and it increments the purge_seq
  T(info.purge_seq+1 == newInfo.purge_seq);
  T(result.purge_seq == newInfo.purge_seq);
  // The response lists exactly the revisions that were purged.
  T(result.purged["1"][0] == doc1._rev);
  T(result.purged["2"][0] == doc2._rev);
  // Purged docs are gone entirely (open returns null, not a tombstone).
  T(db.open("1") == null);
  T(db.open("2") == null);
  // Views no longer contain rows for docs 1 and 2; remaining keys shift down.
  var rows = db.view("test/all_docs_twice").rows;
  for (var i = 2; i < numDocs; i++) {
    T(rows[2*(i-2)].key == i+1);
    T(rows[(2*(i-2))+1].key == i+1);
  }
  T(db.view("test/single_doc").total_rows == 0);
  // purge documents twice in a row without loading views
  // (causes full view rebuilds)
  var doc3 = db.open("3");
  var doc4 = db.open("4");
  xhr = CouchDB.request("POST", "/test_suite_db/_purge", {
    body: JSON.stringify({"3":[doc3._rev]})
  });
  T(xhr.status == 200);
  xhr = CouchDB.request("POST", "/test_suite_db/_purge", {
    body: JSON.stringify({"4":[doc4._rev]})
  });
  T(xhr.status == 200);
  result = JSON.parse(xhr.responseText);
  // The last purge's sequence must match the db's current purge_seq.
  T(result.purge_seq == db.info().purge_seq);
  // After the back-to-back purges, views reflect only docs 5..numDocs.
  var rows = db.view("test/all_docs_twice").rows;
  for (var i = 4; i < numDocs; i++) {
    T(rows[2*(i-4)].key == i+1);
    T(rows[(2*(i-4))+1].key == i+1);
  }
  T(db.view("test/single_doc").total_rows == 0);
};
| Odyl/couchdb | share/www/script/test/purge.js | JavaScript | apache-2.0 | 3,127 |
// Copyright (C) 2015 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
description: >
The value of the [[Prototype]] internal slot of the GeneratorFunction
prototype object is the FunctionPrototype intrinsic object.
es6id: 25.2.2.2
---*/
// An ordinary function: its constructor's prototype is %FunctionPrototype%.
function f() {}
// A generator function: its [[Prototype]] is %GeneratorFunction.prototype%.
var g = function*() {};

// Per ES6 25.2.2.2, the [[Prototype]] of %GeneratorFunction.prototype% must
// be the same %FunctionPrototype% object that ordinary functions inherit from.
assert.sameValue(
  Object.getPrototypeOf(Object.getPrototypeOf(g)),
  Object.getPrototypeOf(f)
);
| m0ppers/arangodb | 3rdParty/V8/V8-5.0.71.39/test/test262/data/test/language/expressions/generators/prototype-relation-to-function.js | JavaScript | apache-2.0 | 461 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/ec2/EC2_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
namespace Aws
{
namespace EC2
{
namespace Model
{
  // State of the EC2 instance metadata service (IMDS) HTTP endpoint,
  // as reported/requested via the EC2 API. NOT_SET means no value was
  // provided or recognized.
  enum class InstanceMetadataEndpointState
  {
    NOT_SET,
    disabled,
    enabled
  };

namespace InstanceMetadataEndpointStateMapper
{
// Parses the service's string form (e.g. "enabled") into the enum.
// NOTE(review): behavior for unrecognized names is defined in the .cpp,
// not visible here — presumably returns NOT_SET; confirm in implementation.
AWS_EC2_API InstanceMetadataEndpointState GetInstanceMetadataEndpointStateForName(const Aws::String& name);

// Converts the enum back to the string form expected on the wire.
AWS_EC2_API Aws::String GetNameForInstanceMetadataEndpointState(InstanceMetadataEndpointState value);
} // namespace InstanceMetadataEndpointStateMapper
} // namespace Model
} // namespace EC2
} // namespace Aws
| jt70471/aws-sdk-cpp | aws-cpp-sdk-ec2/include/aws/ec2/model/InstanceMetadataEndpointState.h | C | apache-2.0 | 729 |
@(account: gitbucket.core.model.Account, groupNames: List[String], active: String,
isGroupManager: Boolean = false)(body: Html)(implicit context: gitbucket.core.controller.Context)
@import context._
@import gitbucket.core.view.helpers._
@* Account profile page layout. Left column: avatar, full name, user name,
   optional homepage URL, join date, and the groups the account belongs to.
   Right column: a tab bar whose active tab is selected by the `active`
   parameter ("repositories", "activity", or "members" for group accounts),
   edit buttons shown only to the profile owner / group managers, followed
   by the tab content supplied by the caller in the `body` argument. *@
@html.main(account.userName){
  <div class="container">
    <div class="container-fluid">
      <div class="row">
        <div class="col-md-4">
          <div class="block">
            <div class="account-image">@avatar(account.userName, 270)</div>
            <div class="account-fullname">@account.fullName</div>
            <div class="account-username">@account.userName</div>
          </div>
          <div class="block">
            @if(account.url.isDefined){
              <div><i class="octicon octicon-home"></i> <a href="@account.url">@account.url</a></div>
            }
            <div><i class="octicon octicon-clock"></i> <span class="muted">Joined on</span> @date(account.registeredDate)</div>
          </div>
          @if(groupNames.nonEmpty){
            <div>
              <div>Groups</div>
              @groupNames.map { groupName =>
                <a href="@url(groupName)">@avatar(groupName, 36, tooltip = true)</a>
              }
            </div>
          }
        </div>
        <div class="col-md-8">
          <ul class="nav nav-tabs" style="margin-bottom: 5px;">
            <li@if(active == "repositories"){ class="active"}><a href="@url(account.userName)?tab=repositories">Repositories</a></li>
            @if(account.isGroupAccount){
              <li@if(active == "members"){ class="active"}><a href="@url(account.userName)?tab=members">Members</a></li>
            } else {
              <li@if(active == "activity"){ class="active"}><a href="@url(account.userName)?tab=activity">Public Activity</a></li>
            }
            @if(loginAccount.isDefined && loginAccount.get.userName == account.userName){
              <li class="pull-right">
                <div class="button-group">
                  <a href="@url(account.userName)/_edit" class="btn btn-default">Edit Your Profile</a>
                </div>
              </li>
            }
            @if(loginAccount.isDefined && account.isGroupAccount && isGroupManager){
              <li class="pull-right">
                <div class="button-group">
                  <a href="@url(account.userName)/_editgroup" class="btn btn-default">Edit Group</a>
                </div>
              </li>
            }
          </ul>
          @body
        </div>
      </div>
    </div>
  </div>
}
| noc06140728/gitbucket | src/main/twirl/gitbucket/core/account/main.scala.html | HTML | apache-2.0 | 2,543 |
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.utils;
import java.util.Collection;
/**
 * Represents a filter which ORs together a collection of filters,
 * returning true if any of the filters are true
 */
public class CompositeFilter<T> implements Filter<T> {
    private final Collection<Filter<T>> filters;

    /**
     * @param filters the filters to OR together; evaluated in iteration order
     */
    public CompositeFilter(Collection<Filter<T>> filters) {
        this.filters = filters;
    }

    @Override
    public String toString() {
        // Bug fix: the label previously read "CompsiteFilter" (typo).
        return "CompositeFilter" + filters;
    }

    /**
     * Returns true as soon as any composed filter matches {@code t};
     * false if the collection is empty or no filter matches.
     */
    @Override
    public boolean matches(T t) {
        for (Filter<T> filter : filters) {
            if (filter.matches(t)) {
                return true;
            }
        }
        return false;
    }

    /**
     * @return true if this composite contains no filters
     *         (in which case {@link #matches} always returns false)
     */
    public boolean isEmpty() {
        return filters.isEmpty();
    }
}
| hekonsek/fabric8 | components/fabric8-utils/src/main/java/io/fabric8/utils/CompositeFilter.java | Java | apache-2.0 | 1,403 |
/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.google.mail;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.ExtendedPropertyConfigurerGetter;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.spi.ConfigurerStrategy;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.support.component.PropertyConfigurerSupport;
/**
* Generated by camel build tools - do NOT edit this file!
*/
@SuppressWarnings("unchecked")
public class GoogleMailEndpointConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("apiName", org.apache.camel.component.google.mail.internal.GoogleMailApiName.class);
map.put("methodName", java.lang.String.class);
map.put("applicationName", java.lang.String.class);
map.put("clientId", java.lang.String.class);
map.put("delegate", java.lang.String.class);
map.put("inBody", java.lang.String.class);
map.put("scopes", java.util.Collection.class);
map.put("bridgeErrorHandler", boolean.class);
map.put("sendEmptyMessageWhenIdle", boolean.class);
map.put("exceptionHandler", org.apache.camel.spi.ExceptionHandler.class);
map.put("exchangePattern", org.apache.camel.ExchangePattern.class);
map.put("pollStrategy", org.apache.camel.spi.PollingConsumerPollStrategy.class);
map.put("lazyStartProducer", boolean.class);
map.put("backoffErrorThreshold", int.class);
map.put("backoffIdleThreshold", int.class);
map.put("backoffMultiplier", int.class);
map.put("delay", long.class);
map.put("greedy", boolean.class);
map.put("initialDelay", long.class);
map.put("repeatCount", long.class);
map.put("runLoggingLevel", org.apache.camel.LoggingLevel.class);
map.put("scheduledExecutorService", java.util.concurrent.ScheduledExecutorService.class);
map.put("scheduler", java.lang.Object.class);
map.put("schedulerProperties", java.util.Map.class);
map.put("startScheduler", boolean.class);
map.put("timeUnit", java.util.concurrent.TimeUnit.class);
map.put("useFixedDelay", boolean.class);
map.put("accessToken", java.lang.String.class);
map.put("clientSecret", java.lang.String.class);
map.put("keyResource", java.lang.String.class);
map.put("refreshToken", java.lang.String.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
GoogleMailEndpoint target = (GoogleMailEndpoint) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesstoken":
case "accessToken": target.getConfiguration().setAccessToken(property(camelContext, java.lang.String.class, value)); return true;
case "applicationname":
case "applicationName": target.getConfiguration().setApplicationName(property(camelContext, java.lang.String.class, value)); return true;
case "backofferrorthreshold":
case "backoffErrorThreshold": target.setBackoffErrorThreshold(property(camelContext, int.class, value)); return true;
case "backoffidlethreshold":
case "backoffIdleThreshold": target.setBackoffIdleThreshold(property(camelContext, int.class, value)); return true;
case "backoffmultiplier":
case "backoffMultiplier": target.setBackoffMultiplier(property(camelContext, int.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "clientid":
case "clientId": target.getConfiguration().setClientId(property(camelContext, java.lang.String.class, value)); return true;
case "clientsecret":
case "clientSecret": target.getConfiguration().setClientSecret(property(camelContext, java.lang.String.class, value)); return true;
case "delay": target.setDelay(property(camelContext, long.class, value)); return true;
case "delegate": target.getConfiguration().setDelegate(property(camelContext, java.lang.String.class, value)); return true;
case "exceptionhandler":
case "exceptionHandler": target.setExceptionHandler(property(camelContext, org.apache.camel.spi.ExceptionHandler.class, value)); return true;
case "exchangepattern":
case "exchangePattern": target.setExchangePattern(property(camelContext, org.apache.camel.ExchangePattern.class, value)); return true;
case "greedy": target.setGreedy(property(camelContext, boolean.class, value)); return true;
case "inbody":
case "inBody": target.setInBody(property(camelContext, java.lang.String.class, value)); return true;
case "initialdelay":
case "initialDelay": target.setInitialDelay(property(camelContext, long.class, value)); return true;
case "keyresource":
case "keyResource": target.getConfiguration().setKeyResource(property(camelContext, java.lang.String.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "pollstrategy":
case "pollStrategy": target.setPollStrategy(property(camelContext, org.apache.camel.spi.PollingConsumerPollStrategy.class, value)); return true;
case "refreshtoken":
case "refreshToken": target.getConfiguration().setRefreshToken(property(camelContext, java.lang.String.class, value)); return true;
case "repeatcount":
case "repeatCount": target.setRepeatCount(property(camelContext, long.class, value)); return true;
case "runlogginglevel":
case "runLoggingLevel": target.setRunLoggingLevel(property(camelContext, org.apache.camel.LoggingLevel.class, value)); return true;
case "scheduledexecutorservice":
case "scheduledExecutorService": target.setScheduledExecutorService(property(camelContext, java.util.concurrent.ScheduledExecutorService.class, value)); return true;
case "scheduler": target.setScheduler(property(camelContext, java.lang.Object.class, value)); return true;
case "schedulerproperties":
case "schedulerProperties": target.setSchedulerProperties(property(camelContext, java.util.Map.class, value)); return true;
case "scopes": target.getConfiguration().setScopes(property(camelContext, java.util.Collection.class, value)); return true;
case "sendemptymessagewhenidle":
case "sendEmptyMessageWhenIdle": target.setSendEmptyMessageWhenIdle(property(camelContext, boolean.class, value)); return true;
case "startscheduler":
case "startScheduler": target.setStartScheduler(property(camelContext, boolean.class, value)); return true;
case "timeunit":
case "timeUnit": target.setTimeUnit(property(camelContext, java.util.concurrent.TimeUnit.class, value)); return true;
case "usefixeddelay":
case "useFixedDelay": target.setUseFixedDelay(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
    /**
     * Maps an endpoint option name to the Java type expected when converting
     * a configured value. Each option is matched by both its all-lowercase
     * spelling (used when {@code ignoreCase} is true and the name has been
     * lower-cased) and its camelCase spelling. Returns {@code null} for
     * unknown option names.
     */
    @Override
    public Class<?> getOptionType(String name, boolean ignoreCase) {
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "accesstoken":
        case "accessToken": return java.lang.String.class;
        case "applicationname":
        case "applicationName": return java.lang.String.class;
        case "backofferrorthreshold":
        case "backoffErrorThreshold": return int.class;
        case "backoffidlethreshold":
        case "backoffIdleThreshold": return int.class;
        case "backoffmultiplier":
        case "backoffMultiplier": return int.class;
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": return boolean.class;
        case "clientid":
        case "clientId": return java.lang.String.class;
        case "clientsecret":
        case "clientSecret": return java.lang.String.class;
        case "delay": return long.class;
        case "delegate": return java.lang.String.class;
        case "exceptionhandler":
        case "exceptionHandler": return org.apache.camel.spi.ExceptionHandler.class;
        case "exchangepattern":
        case "exchangePattern": return org.apache.camel.ExchangePattern.class;
        case "greedy": return boolean.class;
        case "inbody":
        case "inBody": return java.lang.String.class;
        case "initialdelay":
        case "initialDelay": return long.class;
        case "keyresource":
        case "keyResource": return java.lang.String.class;
        case "lazystartproducer":
        case "lazyStartProducer": return boolean.class;
        case "pollstrategy":
        case "pollStrategy": return org.apache.camel.spi.PollingConsumerPollStrategy.class;
        case "refreshtoken":
        case "refreshToken": return java.lang.String.class;
        case "repeatcount":
        case "repeatCount": return long.class;
        case "runlogginglevel":
        case "runLoggingLevel": return org.apache.camel.LoggingLevel.class;
        case "scheduledexecutorservice":
        case "scheduledExecutorService": return java.util.concurrent.ScheduledExecutorService.class;
        case "scheduler": return java.lang.Object.class;
        case "schedulerproperties":
        case "schedulerProperties": return java.util.Map.class;
        case "scopes": return java.util.Collection.class;
        case "sendemptymessagewhenidle":
        case "sendEmptyMessageWhenIdle": return boolean.class;
        case "startscheduler":
        case "startScheduler": return boolean.class;
        case "timeunit":
        case "timeUnit": return java.util.concurrent.TimeUnit.class;
        case "usefixeddelay":
        case "useFixedDelay": return boolean.class;
        default: return null;
        }
    }
    /**
     * Reads the current value of an endpoint option from the given
     * {@code GoogleMailEndpoint}. OAuth/configuration options are delegated
     * to the endpoint's configuration object; scheduler/consumer options are
     * read from the endpoint itself. Returns {@code null} for unknown names.
     */
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        GoogleMailEndpoint target = (GoogleMailEndpoint) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "accesstoken":
        case "accessToken": return target.getConfiguration().getAccessToken();
        case "applicationname":
        case "applicationName": return target.getConfiguration().getApplicationName();
        case "backofferrorthreshold":
        case "backoffErrorThreshold": return target.getBackoffErrorThreshold();
        case "backoffidlethreshold":
        case "backoffIdleThreshold": return target.getBackoffIdleThreshold();
        case "backoffmultiplier":
        case "backoffMultiplier": return target.getBackoffMultiplier();
        case "bridgeerrorhandler":
        case "bridgeErrorHandler": return target.isBridgeErrorHandler();
        case "clientid":
        case "clientId": return target.getConfiguration().getClientId();
        case "clientsecret":
        case "clientSecret": return target.getConfiguration().getClientSecret();
        case "delay": return target.getDelay();
        case "delegate": return target.getConfiguration().getDelegate();
        case "exceptionhandler":
        case "exceptionHandler": return target.getExceptionHandler();
        case "exchangepattern":
        case "exchangePattern": return target.getExchangePattern();
        case "greedy": return target.isGreedy();
        case "inbody":
        case "inBody": return target.getInBody();
        case "initialdelay":
        case "initialDelay": return target.getInitialDelay();
        case "keyresource":
        case "keyResource": return target.getConfiguration().getKeyResource();
        case "lazystartproducer":
        case "lazyStartProducer": return target.isLazyStartProducer();
        case "pollstrategy":
        case "pollStrategy": return target.getPollStrategy();
        case "refreshtoken":
        case "refreshToken": return target.getConfiguration().getRefreshToken();
        case "repeatcount":
        case "repeatCount": return target.getRepeatCount();
        case "runlogginglevel":
        case "runLoggingLevel": return target.getRunLoggingLevel();
        case "scheduledexecutorservice":
        case "scheduledExecutorService": return target.getScheduledExecutorService();
        case "scheduler": return target.getScheduler();
        case "schedulerproperties":
        case "schedulerProperties": return target.getSchedulerProperties();
        case "scopes": return target.getConfiguration().getScopes();
        case "sendemptymessagewhenidle":
        case "sendEmptyMessageWhenIdle": return target.isSendEmptyMessageWhenIdle();
        case "startscheduler":
        case "startScheduler": return target.isStartScheduler();
        case "timeunit":
        case "timeUnit": return target.getTimeUnit();
        case "usefixeddelay":
        case "useFixedDelay": return target.isUseFixedDelay();
        default: return null;
        }
    }
    /**
     * Identifies the element type of multi-valued options so values can be
     * converted element-by-element: {@code schedulerProperties} map values
     * are plain Objects and {@code scopes} holds Strings. Returns
     * {@code null} for single-valued or unknown options.
     */
    @Override
    public Object getCollectionValueType(Object target, String name, boolean ignoreCase) {
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "schedulerproperties":
        case "schedulerProperties": return java.lang.Object.class;
        case "scopes": return java.lang.String.class;
        default: return null;
        }
    }
}
| apache/camel | components/camel-google/camel-google-mail/src/generated/java/org/apache/camel/component/google/mail/GoogleMailEndpointConfigurer.java | Java | apache-2.0 | 13,796 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.virtual;
import org.apache.druid.segment.ColumnInspector;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.ColumnCapabilities;
import javax.annotation.Nullable;
/**
 * A {@link ColumnInspector} that answers capability queries for virtual and
 * non-virtual columns alike.
 *
 * Queries for names registered in {@link VirtualColumns} are routed to the
 * virtual column machinery (which receives the base inspector so it can infer
 * output types); everything else falls through to the base inspector.
 */
public class VirtualizedColumnInspector implements ColumnInspector
{
  protected final VirtualColumns virtualColumns;
  protected final ColumnInspector baseInspector;

  public VirtualizedColumnInspector(
      ColumnInspector baseInspector,
      VirtualColumns virtualColumns
  )
  {
    this.baseInspector = baseInspector;
    this.virtualColumns = virtualColumns;
  }

  @Nullable
  @Override
  public ColumnCapabilities getColumnCapabilities(String columnName)
  {
    // Virtual columns take precedence over physical columns of the same name.
    return virtualColumns.exists(columnName)
           ? virtualColumns.getColumnCapabilities(baseInspector, columnName)
           : baseInspector.getColumnCapabilities(columnName);
  }
}
| nishantmonu51/druid | processing/src/main/java/org/apache/druid/segment/virtual/VirtualizedColumnInspector.java | Java | apache-2.0 | 2,143 |
/******************************************************************************
* Copyright 2019 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "modules/data/tools/smart_recorder/hard_brake_trigger.h"
#include <cmath>
#include <memory>
#include "cyber/common/log.h"
#include "modules/common/adapters/adapter_gflags.h"
#include "modules/control/proto/control_cmd.pb.h"
namespace apollo {
namespace data {
using apollo::canbus::Chassis;
HardBrakeTrigger::HardBrakeTrigger() {
  // Name used to identify this trigger in configuration and logs.
  trigger_name_ = "HardBrakeTrigger";
}
// Feeds one recorded message into the trigger. Only chassis messages are
// considered: the vehicle speed is extracted, noisy samples are discarded,
// and the remaining samples are pushed into the sliding windows. When the
// windows indicate a hard brake, the trigger fires at the message timestamp.
void HardBrakeTrigger::Pull(const cyber::record::RecordMessage& msg) {
  if (!trigger_obj_->enabled()) {
    return;
  }
  if (msg.channel_name != FLAGS_chassis_topic) {
    return;
  }
  Chassis chassis_msg;
  if (!chassis_msg.ParseFromString(msg.content)) {
    // A malformed payload would otherwise feed a default-constructed speed
    // (0 m/s) into the sliding windows and skew the brake detection.
    AERROR << "failed to parse chassis message on " << msg.channel_name;
    return;
  }
  const float speed = chassis_msg.speed_mps();
  if (IsNoisy(speed)) {
    return;
  }
  EnqueueMessage(speed);
  if (IsHardBrake()) {
    // NOTE: log text fixed from "hard break" to "hard brake".
    AINFO << "hard brake trigger is pulled: " << msg.time << " - "
          << msg.channel_name;
    TriggerIt(msg.time);
  }
}
// Returns true when `speed` jumps more than noisy_diff_ away from the most
// recent accepted sample, i.e. the reading is treated as sensor noise and
// must not enter the sliding windows. With an empty window the previous
// sample is taken to be 0 m/s, matching the original behavior.
bool HardBrakeTrigger::IsNoisy(const float speed) const {
  const float pre_speed_mps =
      current_speed_queue_.empty() ? 0.0f : current_speed_queue_.back();
  // std::fabs instead of unqualified fabs: with only <cmath> included, the
  // global-namespace ::fabs is not guaranteed to be declared.
  return std::fabs(pre_speed_mps - speed) > noisy_diff_;
}
// A hard brake is declared once both sliding windows are full and the average
// speed drop from the older (history) window to the newer (current) window
// exceeds max_delta_.
bool HardBrakeTrigger::IsHardBrake() const {
  const bool windows_full = current_speed_queue_.size() >= queue_size_ &&
                            history_speed_queue_.size() >= queue_size_;
  if (!windows_full) {
    return false;
  }
  const float avg_drop =
      (history_total_ - current_total_) / static_cast<float>(queue_size_);
  return avg_drop > max_delta_;
}
// Pushes one accepted speed sample into the "current" sliding window and, on
// overflow, cascades the evicted sample into the "history" window. Both
// windows are capped at queue_size_ elements and their running sums
// (current_total_ / history_total_) are kept in step with the queue contents.
void HardBrakeTrigger::EnqueueMessage(const float speed) {
  current_speed_queue_.emplace_back(speed);
  current_total_ += speed;
  if (current_speed_queue_.size() > queue_size_) {
    // The oldest "current" sample graduates into the history window.
    const float current_front = current_speed_queue_.front();
    current_speed_queue_.pop_front();
    current_total_ -= current_front;
    history_speed_queue_.emplace_back(current_front);
    history_total_ += current_front;
    if (history_speed_queue_.size() > queue_size_) {
      // Drop the oldest history sample once that window is full as well.
      history_total_ -= history_speed_queue_.front();
      history_speed_queue_.pop_front();
    }
  }
}
} // namespace data
} // namespace apollo
| xiaoxq/apollo | modules/data/tools/smart_recorder/hard_brake_trigger.cc | C++ | apache-2.0 | 2,821 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.envisage.school.domain.person.assembly;
import org.apache.polygene.bootstrap.Assembler;
import org.apache.polygene.bootstrap.AssemblyException;
import org.apache.polygene.bootstrap.ModuleAssembly;
import org.apache.polygene.envisage.school.domain.person.initialdata.SamplePersonInitialData;
import static org.apache.polygene.api.common.Visibility.application;
import static org.apache.polygene.api.common.Visibility.layer;
/**
 * Assembles the person domain model: registers the {@code PersonEntity}
 * (shared across the layer) and the sample-data loader service (visible to
 * the whole application and populated eagerly on startup).
 */
public final class PersonModelAssembler
    implements Assembler
{
    @Override
    public final void assemble( ModuleAssembly module )
        throws AssemblyException
    {
        // Entities are usable by other modules within the same layer.
        module.entities( PersonEntity.class ).visibleIn( layer );

        // The initial-data service is application-wide and instantiated
        // as soon as the application starts.
        module.services( SamplePersonInitialData.class )
              .visibleIn( application )
              .instantiateOnStartup();
    }
}
| apache/zest-qi4j | tools/envisage/src/test/java/org/apache/polygene/envisage/school/domain/person/assembly/PersonModelAssembler.java | Java | apache-2.0 | 1,677 |
package org.apache.hadoop.dfs;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetworkTopology;
import junit.framework.TestCase;
/**
 * Tests for the default replica placement policy implemented by
 * {@code FSNamesystem.Replicator.chooseTarget}: the first replica is placed
 * on (or near) the writer, the second on a remote rack, and further replicas
 * are spread across racks. The fixture builds a six-node topology spanning
 * three racks.
 */
public class TestReplicationPolicy extends TestCase {
  private static final int BLOCK_SIZE = 1024;
  private static final int NUM_OF_DATANODES = 6;
  private static final Configuration CONF = new Configuration();
  private static final NetworkTopology cluster;
  private static NameNode namenode;
  private static FSNamesystem.Replicator replicator;
  // Six datanodes on three racks: h1/h2 on /d1/r1, h3/h4 on /d1/r2,
  // h5/h6 on /d2/r3.
  private static DatanodeDescriptor dataNodes[] =
    new DatanodeDescriptor[] {
      new DatanodeDescriptor(new DatanodeID("h1:5020", "0", -1), "/d1/r1"),
      new DatanodeDescriptor(new DatanodeID("h2:5020", "0", -1), "/d1/r1"),
      new DatanodeDescriptor(new DatanodeID("h3:5020", "0", -1), "/d1/r2"),
      new DatanodeDescriptor(new DatanodeID("h4:5020", "0", -1), "/d1/r2"),
      new DatanodeDescriptor(new DatanodeID("h5:5020", "0", -1), "/d2/r3"),
      new DatanodeDescriptor(new DatanodeID("h6:5020", "0", -1), "/d2/r3")
    };
  // A client node that is NOT part of the cluster (lives on a fourth rack).
  private final static DatanodeDescriptor NODE =
    new DatanodeDescriptor(new DatanodeID("h7:5020", "0", -1), "/d2/r4");
  static {
    try {
      CONF.set("fs.default.name", "localhost:8020");
      namenode = new NameNode(CONF);
    } catch (IOException e) {
      // NameNode construction failed; surface the stack trace — the tests
      // below will then fail when they touch the replicator.
      e.printStackTrace();
    }
    FSNamesystem fsNamesystem = FSNamesystem.getFSNamesystem();
    replicator = fsNamesystem.replicator;
    cluster = fsNamesystem.clusterMap;
    // construct network topology
    for( int i=0; i<NUM_OF_DATANODES; i++) {
      cluster.add( dataNodes[i] );
    }
    // Give every node enough capacity/remaining space to be a valid target.
    for( int i=0; i<NUM_OF_DATANODES; i++) {
      dataNodes[i].updateHeartbeat(
          2*FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE,
          2*FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0);
    }
  }

  /**
   * In this testcase, client is dataNodes[0]. So the 1st replica should be
   * placed on dataNodes[0], the 2nd replica should be placed on dataNodes[1],
   * and the rest should be placed on different racks.
   * The only exception is when the <i>numOfReplicas</i> is 2, the 1st is on
   * dataNodes[0] and the 2nd is on a different rack.
   * @throws Exception
   */
  public void testChooseTarget1() throws Exception {
    DatanodeDescriptor[] targets;
    targets = replicator.chooseTarget(
        0, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 0);

    targets = replicator.chooseTarget(
        1, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 1);
    assertEquals(targets[0], dataNodes[0]);

    targets = replicator.chooseTarget(
        2, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 2);
    assertEquals(targets[0], dataNodes[0]);
    assertFalse(cluster.isOnSameRack(targets[0], targets[1]));

    targets = replicator.chooseTarget(
        3, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 3);
    assertEquals(targets[0], dataNodes[0]);
    assertTrue(cluster.isOnSameRack(targets[0], targets[1]));
    assertFalse(cluster.isOnSameRack(targets[0], targets[2]));

    targets = replicator.chooseTarget(
        4, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 4);
    assertEquals(targets[0], dataNodes[0]);
    assertTrue(cluster.isOnSameRack(targets[0], targets[1]));
    assertFalse(cluster.isOnSameRack(targets[0], targets[2]));
    assertFalse(cluster.isOnSameRack(targets[0], targets[3]));
  }

  /**
   * In this testcase, client is dataNodes[0], but the dataNodes[1] is
   * not allowed to be chosen. So the 1st replica should be
   * placed on dataNodes[0], the 2nd replica should be placed on a different
   * rack, the 3rd should be on the same rack as the 2nd replica, and the rest
   * should be placed on a third rack.
   * @throws Exception
   */
  public void testChooseTarget2() throws Exception {
    List<DatanodeDescriptor> excludedNodes;
    DatanodeDescriptor[] targets;

    excludedNodes = new ArrayList<DatanodeDescriptor>();
    excludedNodes.add(dataNodes[1]);
    targets = replicator.chooseTarget(
        0, dataNodes[0], excludedNodes, BLOCK_SIZE);
    assertEquals(targets.length, 0);

    excludedNodes = new ArrayList<DatanodeDescriptor>();
    excludedNodes.add(dataNodes[1]);
    targets = replicator.chooseTarget(
        1, dataNodes[0], excludedNodes, BLOCK_SIZE);
    assertEquals(targets.length, 1);
    assertEquals(targets[0], dataNodes[0]);

    excludedNodes = new ArrayList<DatanodeDescriptor>();
    excludedNodes.add(dataNodes[1]);
    targets = replicator.chooseTarget(
        2, dataNodes[0], excludedNodes, BLOCK_SIZE);
    assertEquals(targets.length, 2);
    assertEquals(targets[0], dataNodes[0]);
    assertFalse(cluster.isOnSameRack(targets[0], targets[1]));

    excludedNodes = new ArrayList<DatanodeDescriptor>();
    excludedNodes.add(dataNodes[1]);
    targets = replicator.chooseTarget(
        3, dataNodes[0], excludedNodes, BLOCK_SIZE);
    assertEquals(targets.length, 3);
    assertEquals(targets[0], dataNodes[0]);
    assertFalse(cluster.isOnSameRack(targets[0], targets[1]));
    assertTrue(cluster.isOnSameRack(targets[1], targets[2]));

    excludedNodes = new ArrayList<DatanodeDescriptor>();
    excludedNodes.add(dataNodes[1]);
    targets = replicator.chooseTarget(
        4, dataNodes[0], excludedNodes, BLOCK_SIZE);
    assertEquals(targets.length, 4);
    assertEquals(targets[0], dataNodes[0]);
    for(int i=1; i<4; i++) {
      assertFalse(cluster.isOnSameRack(targets[0], targets[i]));
    }
    assertTrue(cluster.isOnSameRack(targets[1], targets[2]) ||
               cluster.isOnSameRack(targets[2], targets[3]));
    assertFalse(cluster.isOnSameRack(targets[1], targets[3]));
  }

  /**
   * In this testcase, client is dataNodes[0], but dataNodes[0] is not qualified
   * to be chosen. So the 1st replica should be placed on dataNodes[1],
   * the 2nd replica should be placed on a different rack,
   * the 3rd replica should be placed on the same rack as the 2nd replica,
   * and the rest should be placed on the third rack.
   * @throws Exception
   */
  public void testChooseTarget3() throws Exception {
    // make data node 0 to be not qualified to choose
    dataNodes[0].updateHeartbeat(
        2*FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE,
        FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 4); // overloaded

    DatanodeDescriptor[] targets;
    targets = replicator.chooseTarget(
        0, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 0);

    targets = replicator.chooseTarget(
        1, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 1);
    assertEquals(targets[0], dataNodes[1]);

    targets = replicator.chooseTarget(
        2, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 2);
    assertEquals(targets[0], dataNodes[1]);
    assertFalse(cluster.isOnSameRack(targets[0], targets[1]));

    targets = replicator.chooseTarget(
        3, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 3);
    assertEquals(targets[0], dataNodes[1]);
    assertTrue(cluster.isOnSameRack(targets[1], targets[2]));
    assertFalse(cluster.isOnSameRack(targets[0], targets[1]));

    targets = replicator.chooseTarget(
        4, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 4);
    assertEquals(targets[0], dataNodes[1]);
    for(int i=1; i<4; i++) {
      assertFalse(cluster.isOnSameRack(targets[0], targets[i]));
    }
    assertTrue(cluster.isOnSameRack(targets[1], targets[2]) ||
               cluster.isOnSameRack(targets[2], targets[3]));
    assertFalse(cluster.isOnSameRack(targets[1], targets[3]));

    // restore dataNodes[0] to a healthy state
    dataNodes[0].updateHeartbeat(
        2*FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE,
        FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0);
  }

  /**
   * In this testcase, client is dataNodes[0], but none of the nodes on rack 1
   * is qualified to be chosen. So the 1st replica should be placed on either
   * rack 2 or rack 3.
   * the 2nd replica should be placed on a different rack,
   * the 3rd replica should be placed on the same rack as the 1st replica,
   * @throws Exception
   */
  public void testChoooseTarget4() throws Exception {
    // make data node 0 & 1 to be not qualified to choose: not enough disk space
    for(int i=0; i<2; i++) {
      dataNodes[i].updateHeartbeat(
          2*FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE,
          (FSConstants.MIN_BLOCKS_FOR_WRITE-1)*BLOCK_SIZE, 0);
    }

    DatanodeDescriptor[] targets;
    targets = replicator.chooseTarget(
        0, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 0);

    targets = replicator.chooseTarget(
        1, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 1);
    assertFalse(cluster.isOnSameRack(targets[0], dataNodes[0]));

    targets = replicator.chooseTarget(
        2, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 2);
    assertFalse(cluster.isOnSameRack(targets[0], dataNodes[0]));
    assertFalse(cluster.isOnSameRack(targets[0], targets[1]));

    targets = replicator.chooseTarget(
        3, dataNodes[0], null, BLOCK_SIZE);
    assertEquals(targets.length, 3);
    for(int i=0; i<3; i++) {
      assertFalse(cluster.isOnSameRack(targets[i], dataNodes[0]));
    }
    assertTrue(cluster.isOnSameRack(targets[0], targets[1]) ||
               cluster.isOnSameRack(targets[1], targets[2]));
    assertFalse(cluster.isOnSameRack(targets[0], targets[2]));

    // restore dataNodes[0] and dataNodes[1] to a healthy state
    for(int i=0; i<2; i++) {
      dataNodes[i].updateHeartbeat(
          2*FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE,
          FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0);
    }
  }

  /**
   * In this testcase, client is a node outside of file system.
   * So the 1st replica can be placed on any node.
   * the 2nd replica should be placed on a different rack,
   * the 3rd replica should be placed on the same rack as the 1st replica,
   * @throws Exception
   */
  public void testChooseTarget5() throws Exception {
    DatanodeDescriptor[] targets;
    targets = replicator.chooseTarget(
        0, NODE, null, BLOCK_SIZE);
    assertEquals(targets.length, 0);

    targets = replicator.chooseTarget(
        1, NODE, null, BLOCK_SIZE);
    assertEquals(targets.length, 1);

    targets = replicator.chooseTarget(
        2, NODE, null, BLOCK_SIZE);
    assertEquals(targets.length, 2);
    assertFalse(cluster.isOnSameRack(targets[0], targets[1]));

    targets = replicator.chooseTarget(
        3, NODE, null, BLOCK_SIZE);
    assertEquals(targets.length, 3);
    assertTrue(cluster.isOnSameRack(targets[0], targets[1]));
    assertFalse(cluster.isOnSameRack(targets[0], targets[2]));
  }

  /**
   * This testcase tests re-replication, when dataNodes[0] is already chosen.
   * So the 1st replica can be placed on rack 1.
   * the 2nd replica should be placed on a different rack,
   * the 3rd replica can be placed randomly,
   * @throws Exception
   */
  public void testRereplicate1() throws Exception {
    List<DatanodeDescriptor> choosenNodes = new ArrayList<DatanodeDescriptor>();
    choosenNodes.add(dataNodes[0]);
    DatanodeDescriptor[] targets;

    targets = replicator.chooseTarget(
        0, dataNodes[0], choosenNodes, null, BLOCK_SIZE);
    assertEquals(targets.length, 0);

    targets = replicator.chooseTarget(
        1, dataNodes[0], choosenNodes, null, BLOCK_SIZE);
    assertEquals(targets.length, 1);
    assertFalse(cluster.isOnSameRack(dataNodes[0], targets[0]));

    targets = replicator.chooseTarget(
        2, dataNodes[0], choosenNodes, null, BLOCK_SIZE);
    assertEquals(targets.length, 2);
    assertTrue(cluster.isOnSameRack(dataNodes[0], targets[0]));
    assertFalse(cluster.isOnSameRack(dataNodes[0], targets[1]));

    targets = replicator.chooseTarget(
        3, dataNodes[0], choosenNodes, null, BLOCK_SIZE);
    assertEquals(targets.length, 3);
    assertTrue(cluster.isOnSameRack(dataNodes[0], targets[0]));
    assertFalse(cluster.isOnSameRack(dataNodes[0], targets[1]));
    assertFalse(cluster.isOnSameRack(dataNodes[0], targets[2]));
  }

  /**
   * This testcase tests re-replication,
   * when dataNodes[0] and dataNodes[1] are already chosen.
   * So the 1st replica should be placed on a different rack than rack 1.
   * the rest replicas can be placed randomly,
   * @throws Exception
   */
  public void testRereplicate2() throws Exception {
    List<DatanodeDescriptor> choosenNodes = new ArrayList<DatanodeDescriptor>();
    choosenNodes.add(dataNodes[0]);
    choosenNodes.add(dataNodes[1]);

    DatanodeDescriptor[] targets;
    targets = replicator.chooseTarget(
        0, dataNodes[0], choosenNodes, null, BLOCK_SIZE);
    assertEquals(targets.length, 0);

    targets = replicator.chooseTarget(
        1, dataNodes[0], choosenNodes, null, BLOCK_SIZE);
    assertEquals(targets.length, 1);
    assertFalse(cluster.isOnSameRack(dataNodes[0], targets[0]));

    targets = replicator.chooseTarget(
        2, dataNodes[0], choosenNodes, null, BLOCK_SIZE);
    assertEquals(targets.length, 2);
    assertFalse(cluster.isOnSameRack(dataNodes[0], targets[0]));
    assertFalse(cluster.isOnSameRack(dataNodes[0], targets[1]));
  }

  /**
   * This testcase tests re-replication,
   * when dataNodes[0] and dataNodes[2] are already chosen.
   * So the 1st replica should be placed on rack 1.
   * the rest replicas can be placed randomly,
   * @throws Exception
   */
  public void testRereplicate3() throws Exception {
    List<DatanodeDescriptor> choosenNodes = new ArrayList<DatanodeDescriptor>();
    choosenNodes.add(dataNodes[0]);
    choosenNodes.add(dataNodes[2]);

    DatanodeDescriptor[] targets;
    targets = replicator.chooseTarget(
        0, dataNodes[0], choosenNodes, null, BLOCK_SIZE);
    assertEquals(targets.length, 0);

    targets = replicator.chooseTarget(
        1, dataNodes[0], choosenNodes, null, BLOCK_SIZE);
    assertEquals(targets.length, 1);
    assertTrue(cluster.isOnSameRack(dataNodes[0], targets[0]));

    targets = replicator.chooseTarget(
        2, dataNodes[0], choosenNodes, null, BLOCK_SIZE);
    assertEquals(targets.length, 2);
    assertTrue(cluster.isOnSameRack(dataNodes[0], targets[0]));
    assertFalse(cluster.isOnSameRack(dataNodes[0], targets[1]));
  }
}
| moreus/hadoop | hadoop-0.11.2/src/test/org/apache/hadoop/dfs/TestReplicationPolicy.java | Java | apache-2.0 | 14,551 |
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.tasks.testing.junit.result;
/**
 * How multiple executions (reruns) of the same test case are represented in
 * the recorded JUnit test results.
 */
public enum TestCaseRerunHandling {
    /** Each execution is reported as its own discrete test case. */
    DISCRETE,
    /**
     * Reruns are grouped using the Surefire rerun reporting format.
     * NOTE(review): semantics inferred from the name — confirm against the
     * result writer that consumes this value.
     */
    SUREFIRE_FORMAT
}
| gradle/gradle | subprojects/testing-base/src/main/java/org/gradle/api/internal/tasks/testing/junit/result/TestCaseRerunHandling.java | Java | apache-2.0 | 750 |
from Node import error
# Maps each syntax node kind to the stable integer code used when
# (de)serializing syntax trees. Uniqueness and coverage are checked by
# verify_syntax_node_serialization_codes below.
SYNTAX_NODE_SERIALIZATION_CODES = {
    # 0 is 'Token'. Needs to be defined manually
    # 1 is 'Unknown'. Needs to be defined manually
    'UnknownDecl': 2,
    'TypealiasDecl': 3,
    'AssociatedtypeDecl': 4,
    'IfConfigDecl': 5,
    'PoundErrorDecl': 6,
    'PoundWarningDecl': 7,
    'PoundSourceLocation': 8,
    'ClassDecl': 9,
    'StructDecl': 10,
    'ProtocolDecl': 11,
    'ExtensionDecl': 12,
    'FunctionDecl': 13,
    'InitializerDecl': 14,
    'DeinitializerDecl': 15,
    'SubscriptDecl': 16,
    'ImportDecl': 17,
    'AccessorDecl': 18,
    'VariableDecl': 19,
    'EnumCaseDecl': 20,
    'EnumDecl': 21,
    'OperatorDecl': 22,
    'PrecedenceGroupDecl': 23,
    'UnknownExpr': 24,
    'InOutExpr': 25,
    'PoundColumnExpr': 26,
    'TryExpr': 27,
    'IdentifierExpr': 28,
    'SuperRefExpr': 29,
    'NilLiteralExpr': 30,
    'DiscardAssignmentExpr': 31,
    'AssignmentExpr': 32,
    'SequenceExpr': 33,
    'PoundLineExpr': 34,
    'PoundFileExpr': 35,
    'PoundFunctionExpr': 36,
    'PoundDsohandleExpr': 37,
    'SymbolicReferenceExpr': 38,
    'PrefixOperatorExpr': 39,
    'BinaryOperatorExpr': 40,
    'ArrowExpr': 41,
    'FloatLiteralExpr': 42,
    'TupleExpr': 43,
    'ArrayExpr': 44,
    'DictionaryExpr': 45,
    'ImplicitMemberExpr': 46,
    'IntegerLiteralExpr': 47,
    'StringLiteralExpr': 48,
    'BooleanLiteralExpr': 49,
    'TernaryExpr': 50,
    'MemberAccessExpr': 51,
    'DotSelfExpr': 52,
    'IsExpr': 53,
    'AsExpr': 54,
    'TypeExpr': 55,
    'ClosureExpr': 56,
    'UnresolvedPatternExpr': 57,
    'FunctionCallExpr': 58,
    'SubscriptExpr': 59,
    'OptionalChainingExpr': 60,
    'ForcedValueExpr': 61,
    'PostfixUnaryExpr': 62,
    'SpecializeExpr': 63,
    'StringInterpolationExpr': 64,
    'KeyPathExpr': 65,
    'KeyPathBaseExpr': 66,
    'ObjcKeyPathExpr': 67,
    'ObjcSelectorExpr': 68,
    'EditorPlaceholderExpr': 69,
    'ObjectLiteralExpr': 70,
    'UnknownStmt': 71,
    'ContinueStmt': 72,
    'WhileStmt': 73,
    'DeferStmt': 74,
    'ExpressionStmt': 75,
    'RepeatWhileStmt': 76,
    'GuardStmt': 77,
    'ForInStmt': 78,
    'SwitchStmt': 79,
    'DoStmt': 80,
    'ReturnStmt': 81,
    'FallthroughStmt': 82,
    'BreakStmt': 83,
    'DeclarationStmt': 84,
    'ThrowStmt': 85,
    'IfStmt': 86,
    'Decl': 87,
    'Expr': 88,
    'Stmt': 89,
    'Type': 90,
    'Pattern': 91,
    'CodeBlockItem': 92,
    'CodeBlock': 93,
    'DeclNameArgument': 94,
    'DeclNameArguments': 95,
    'FunctionCallArgument': 96,
    'TupleElement': 97,
    'ArrayElement': 98,
    'DictionaryElement': 99,
    'ClosureCaptureItem': 100,
    'ClosureCaptureSignature': 101,
    'ClosureParam': 102,
    'ClosureSignature': 103,
    'StringSegment': 104,
    'ExpressionSegment': 105,
    'ObjcNamePiece': 106,
    'TypeInitializerClause': 107,
    'ParameterClause': 108,
    'ReturnClause': 109,
    'FunctionSignature': 110,
    'IfConfigClause': 111,
    'PoundSourceLocationArgs': 112,
    'DeclModifier': 113,
    'InheritedType': 114,
    'TypeInheritanceClause': 115,
    'MemberDeclBlock': 116,
    'MemberDeclListItem': 117,
    'SourceFile': 118,
    'InitializerClause': 119,
    'FunctionParameter': 120,
    'AccessLevelModifier': 121,
    'AccessPathComponent': 122,
    'AccessorParameter': 123,
    'AccessorBlock': 124,
    'PatternBinding': 125,
    'EnumCaseElement': 126,
    'OperatorPrecedenceAndTypes': 127,
    'PrecedenceGroupRelation': 128,
    'PrecedenceGroupNameElement': 129,
    'PrecedenceGroupAssignment': 130,
    'PrecedenceGroupAssociativity': 131,
    'Attribute': 132,
    'LabeledSpecializeEntry': 133,
    'ImplementsAttributeArguments': 134,
    'ObjCSelectorPiece': 135,
    'WhereClause': 136,
    'ConditionElement': 137,
    'AvailabilityCondition': 138,
    'MatchingPatternCondition': 139,
    'OptionalBindingCondition': 140,
    'ElseIfContinuation': 141,
    'ElseBlock': 142,
    'SwitchCase': 143,
    'SwitchDefaultLabel': 144,
    'CaseItem': 145,
    'SwitchCaseLabel': 146,
    'CatchClause': 147,
    'GenericWhereClause': 148,
    'SameTypeRequirement': 149,
    'GenericParameter': 150,
    'GenericParameterClause': 151,
    'ConformanceRequirement': 152,
    'CompositionTypeElement': 153,
    'TupleTypeElement': 154,
    'GenericArgument': 155,
    'GenericArgumentClause': 156,
    'TypeAnnotation': 157,
    'TuplePatternElement': 158,
    'AvailabilityArgument': 159,
    'AvailabilityLabeledArgument': 160,
    'AvailabilityVersionRestriction': 161,
    'VersionTuple': 162,
    'CodeBlockItemList': 163,
    'FunctionCallArgumentList': 164,
    'TupleElementList': 165,
    'ArrayElementList': 166,
    'DictionaryElementList': 167,
    'StringInterpolationSegments': 168,
    'DeclNameArgumentList': 169,
    'ExprList': 170,
    'ClosureCaptureItemList': 171,
    'ClosureParamList': 172,
    'ObjcName': 173,
    'FunctionParameterList': 174,
    'IfConfigClauseList': 175,
    'InheritedTypeList': 176,
    'MemberDeclList': 177,
    'ModifierList': 178,
    'AccessPath': 179,
    'AccessorList': 180,
    'PatternBindingList': 181,
    'EnumCaseElementList': 182,
    'PrecedenceGroupAttributeList': 183,
    'PrecedenceGroupNameList': 184,
    'TokenList': 185,
    'NonEmptyTokenList': 186,
    'AttributeList': 187,
    'SpecializeAttributeSpecList': 188,
    'ObjCSelector': 189,
    'SwitchCaseList': 190,
    'CatchClauseList': 191,
    'CaseItemList': 192,
    'ConditionElementList': 193,
    'GenericRequirementList': 194,
    'GenericParameterList': 195,
    'CompositionTypeElementList': 196,
    'TupleTypeElementList': 197,
    'GenericArgumentList': 198,
    'TuplePatternElementList': 199,
    'AvailabilitySpecList': 200,
    'UnknownPattern': 201,
    'EnumCasePattern': 202,
    'IsTypePattern': 203,
    'OptionalPattern': 204,
    'IdentifierPattern': 205,
    'AsTypePattern': 206,
    'TuplePattern': 207,
    'WildcardPattern': 208,
    'ExpressionPattern': 209,
    'ValueBindingPattern': 210,
    'UnknownType': 211,
    'SimpleTypeIdentifier': 212,
    'MemberTypeIdentifier': 213,
    'ClassRestrictionType': 214,
    'ArrayType': 215,
    'DictionaryType': 216,
    'MetatypeType': 217,
    'OptionalType': 218,
    'ImplicitlyUnwrappedOptionalType': 219,
    'CompositionType': 220,
    'TupleType': 221,
    'FunctionType': 222,
    'AttributedType': 223,
    'YieldStmt': 224,
    'YieldList': 225,
    'IdentifierList': 226,
    'NamedAttributeStringArgument': 227,
    'DeclName': 228,
    'PoundAssertStmt': 229,
}
def verify_syntax_node_serialization_codes(nodes, serialization_codes):
    """Sanity-check the serialization-code table against the node list.

    Reports (via ``error``) every concrete node kind that lacks a code, and
    every code value that is assigned more than once.
    """
    # Every concrete (non-base) node must have an assigned code.
    for node in nodes:
        is_concrete = not node.is_base()
        if is_concrete and node.syntax_kind not in serialization_codes:
            error('Node %s has no serialization code' % node.syntax_kind)
    # No code value may be assigned to two different node kinds.
    seen = set()
    for code in serialization_codes.values():
        if code in seen:
            error("Serialization code %d used twice" % code)
        seen.add(code)
def get_serialization_code(syntax_kind):
    """Return the stable serialization code assigned to *syntax_kind*.

    Raises ``KeyError`` if the kind has no entry in the table.
    """
    codes = SYNTAX_NODE_SERIALIZATION_CODES
    return codes[syntax_kind]
| amraboelela/swift | utils/gyb_syntax_support/NodeSerializationCodes.py | Python | apache-2.0 | 7,221 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlTransient;
import org.apache.camel.Endpoint;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.spi.Metadata;
/**
 * Sends the message to an endpoint.
 */
@XmlAccessorType(XmlAccessType.FIELD)
public abstract class SendDefinition<Type extends ProcessorDefinition<Type>> extends NoOutputDefinition<Type>
        implements EndpointRequiredDefinition {
    @XmlTransient
    protected Endpoint endpoint;
    @XmlTransient
    protected EndpointProducerBuilder endpointProducerBuilder;
    @XmlAttribute
    @Metadata(required = true)
    protected String uri;

    public SendDefinition() {
    }

    public SendDefinition(String uri) {
        this.uri = uri;
    }

    @Override
    public String getEndpointUri() {
        // Resolution precedence: producer builder first, then the resolved
        // endpoint instance, and finally the raw uri string.
        if (endpointProducerBuilder != null) {
            return endpointProducerBuilder.getUri();
        }
        return endpoint != null ? endpoint.getEndpointUri() : uri;
    }

    public String getUri() {
        return uri;
    }

    /**
     * Sets the uri of the endpoint to send to.
     *
     * @param uri the uri of the endpoint
     */
    public void setUri(String uri) {
        clear();
        this.uri = uri;
    }

    /**
     * Gets the endpoint if an {@link Endpoint} instance was set.
     * <p/>
     * This implementation may return <tt>null</tt> which means you need to use {@link #getEndpointUri()} to get
     * information about the endpoint.
     *
     * @return the endpoint instance, or <tt>null</tt>
     */
    public Endpoint getEndpoint() {
        return endpoint;
    }

    public void setEndpoint(Endpoint endpoint) {
        clear();
        this.endpoint = endpoint;
        this.uri = endpoint == null ? null : endpoint.getEndpointUri();
    }

    public EndpointProducerBuilder getEndpointProducerBuilder() {
        return endpointProducerBuilder;
    }

    public void setEndpointProducerBuilder(EndpointProducerBuilder endpointProducerBuilder) {
        clear();
        this.endpointProducerBuilder = endpointProducerBuilder;
    }

    public String getPattern() {
        return null;
    }

    @Override
    public String getLabel() {
        String label = getEndpointUri();
        if (label == null) {
            label = "no uri supplied";
        }
        return label;
    }

    /**
     * Resets all endpoint state (builder, endpoint instance and uri) before a new
     * one is assigned, so the three representations never get out of sync.
     */
    protected void clear() {
        this.endpointProducerBuilder = null;
        this.endpoint = null;
        this.uri = null;
    }
}
| christophd/camel | core/camel-core-model/src/main/java/org/apache/camel/model/SendDefinition.java | Java | apache-2.0 | 3,458 |
// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
info: >
    The String.prototype.toLocaleLowerCase.length property has the attribute
    ReadOnly
es5id: 15.5.4.17_A10
description: >
    Checking if varying the String.prototype.toLocaleLowerCase.length
    property fails
includes: [propertyHelper.js]
---*/
//////////////////////////////////////////////////////////////////////////////
//CHECK#1
// Precondition: the function must actually own a 'length' property.
if (!(String.prototype.toLocaleLowerCase.hasOwnProperty('length'))) {
  $ERROR('#1: String.prototype.toLocaleLowerCase.hasOwnProperty(\'length\') return true. Actual: '+String.prototype.toLocaleLowerCase.hasOwnProperty('length'));
}
//
//////////////////////////////////////////////////////////////////////////////
// Snapshot the original value, then attempt to overwrite it; verifyNotWritable
// (from the propertyHelper.js harness include) asserts the write has no effect.
var __obj = String.prototype.toLocaleLowerCase.length;
verifyNotWritable(String.prototype.toLocaleLowerCase, "length", null, function(){return "shifted";});
//////////////////////////////////////////////////////////////////////////////
//CHECK#2
// The value observed after the attempted write must equal the snapshot.
if (String.prototype.toLocaleLowerCase.length !== __obj) {
  $ERROR('#2: __obj = String.prototype.toLocaleLowerCase.length; String.prototype.toLocaleLowerCase.length = function(){return "shifted";}; String.prototype.toLocaleLowerCase.length === __obj. Actual: '+String.prototype.toLocaleLowerCase.length );
}
//
//////////////////////////////////////////////////////////////////////////////
| m0ppers/arangodb | 3rdParty/V8/V8-5.0.71.39/test/test262/data/test/built-ins/String/prototype/toLocaleLowerCase/S15.5.4.17_A10.js | JavaScript | apache-2.0 | 1,443 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.wan;
/**
 * Factory for creating and configuring {@link GatewayReceiver} instances. All setters return this
 * factory so calls can be chained; {@link #create()} builds the receiver.
 *
 * @since GemFire 7.0
 */
public interface GatewayReceiverFactory {
  /**
   * Sets the start port for the <code>GatewayReceiver</code>. If set the GatewayReceiver will start
   * at one of the port between startPort and endPort. The default startPort 50505.
   *
   * @param startPort the lowest port number the receiver may bind to
   * @return this factory, for method chaining
   */
  GatewayReceiverFactory setStartPort(int startPort);
  /**
   * Sets the end port for the GatewayReceiver. If set the GatewayReceiver will start at one of the
   * port between startPort and endPort. The default endPort 50605.
   *
   * @param endPort the highest port number the receiver may bind to
   * @return this factory, for method chaining
   */
  GatewayReceiverFactory setEndPort(int endPort);
  /**
   * Sets the buffer size in bytes of the socket connection for this <code>GatewayReceiver</code>.
   * The default is 32768 bytes.
   *
   * @param socketBufferSize The size in bytes of the socket buffer
   * @return this factory, for method chaining
   */
  GatewayReceiverFactory setSocketBufferSize(int socketBufferSize);
  /**
   * Sets the ip address or host name that this <code>GatewayReceiver</code> is to listen on for
   * GatewaySender Connection
   *
   * @param address String representing ip address or host name
   * @return this factory, for method chaining
   */
  GatewayReceiverFactory setBindAddress(String address);
  /**
   * Adds a <code>GatewayTransportFilter</code>
   *
   * @param filter GatewayTransportFilter
   * @return this factory, for method chaining
   */
  GatewayReceiverFactory addGatewayTransportFilter(GatewayTransportFilter filter);
  /**
   * Removes a <code>GatewayTransportFilter</code>
   *
   * @param filter GatewayTransportFilter
   * @return this factory, for method chaining
   */
  GatewayReceiverFactory removeGatewayTransportFilter(GatewayTransportFilter filter);
  /**
   * Sets the maximum amount of time between client pings.The default is 60000 ms.
   *
   * @param time The maximum amount of time between client pings
   * @return this factory, for method chaining
   */
  GatewayReceiverFactory setMaximumTimeBetweenPings(int time);
  /**
   * Sets the ip address or host name that server locators will tell GatewaySenders that this
   * GatewayReceiver is listening on.
   *
   * @param address String representing ip address or host name
   * @return this factory, for method chaining
   */
  GatewayReceiverFactory setHostnameForSenders(String address);
  /**
   * Sets the manual start boolean property for this <code>GatewayReceiver</code>.
   *
   * @since GemFire 8.1 Default is true i.e. the <code>GatewayReceiver</code> will not start
   *        automatically once created. Ideal default value should be false to match with
   *        GatewaySender counterpart. But to not to break the existing functionality default value
   *        is set to true. For next major releases, default value will be changed to false.
   *
   * @param start the manual start boolean property for this <code>GatewayReceiver</code>
   * @return this factory, for method chaining
   */
  GatewayReceiverFactory setManualStart(boolean start);
  /**
   * Creates and returns an instance of <code>GatewayReceiver</code>
   *
   * @return instance of GatewayReceiver
   */
  GatewayReceiver create();
}
| deepakddixit/incubator-geode | geode-core/src/main/java/org/apache/geode/cache/wan/GatewayReceiverFactory.java | Java | apache-2.0 | 3,629 |
/*******************************************************************************
*
* Copyright (c) 2004-2010 Oracle Corporation.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
*
* Kohsuke Kawaguchi
*
*
*******************************************************************************/
package hudson;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Mutable representation of string with HTML mark up.
 *
 * <p> This class is used to put mark up on plain text. See <a
 * href="https://github.com/hudson/hudson/blob/master/hudson-core/src/test/java/hudson/MarkupTextTest.java">
 * the test code</a> for a typical usage and its result.
 *
 * @author Kohsuke Kawaguchi
 * @since 1.70
 */
public class MarkupText extends AbstractMarkupText {

    /** The plain text being marked up; never contains markup or escapes itself. */
    private final String text;
    /**
     * Added mark up tags.
     */
    private final List<Tag> tags = new ArrayList<Tag>();

    /**
     * Represents one mark up inserted into text.
     */
    private static final class Tag implements Comparable<Tag> {
        /**
         * Char position of this tag in {@link MarkupText#text}. This tag is
         * placed in front of the character of this index.
         */
        private final int pos;
        private final String markup;

        public Tag(int pos, String markup) {
            this.pos = pos;
            this.markup = markup;
        }

        public int compareTo(Tag that) {
            // pos is bounded by the text length, so plain subtraction cannot overflow.
            return this.pos - that.pos;
        }
    }

    /**
     * Represents a substring of a {@link MarkupText}.
     */
    public final class SubText extends AbstractMarkupText {
        /** Start (inclusive) and end (exclusive) offsets into the outer text. */
        private final int start, end;
        /** Flattened (start,end) offset pairs of the regexp capture groups. */
        private final int[] groups;

        public SubText(Matcher m, int textOffset) {
            start = m.start() + textOffset;
            end = m.end() + textOffset;

            int cnt = m.groupCount();
            groups = new int[cnt * 2];
            for (int i = 0; i < cnt; i++) {
                groups[i * 2] = m.start(i + 1) + textOffset;
                groups[i * 2 + 1] = m.end(i + 1) + textOffset;
            }
        }

        public SubText(int start, int end) {
            this.start = start;
            this.end = end;
            groups = new int[0];
        }

        @Override
        public SubText subText(int start, int end) {
            return MarkupText.this.subText(this.start + start,
                    end < 0 ? this.end + 1 + end : this.start + end);
        }

        @Override
        public String getText() {
            return text.substring(start, end);
        }

        @Override
        public void addMarkup(int startPos, int endPos, String startTag, String endTag) {
            MarkupText.this.addMarkup(startPos + start, endPos + start, startTag, endTag);
        }

        /**
         * Surrounds this subtext with the specified start tag and the end tag.
         *
         * <p> Start/end tag text can contain special tokens "$0", "$1", ... and
         * they will be replaced by their {@link #group(int) group match}. "\$"
         * can be used to escape characters.
         */
        public void surroundWith(String startTag, String endTag) {
            addMarkup(0, length(), replace(startTag), replace(endTag));
        }

        /**
         * Works like {@link #surroundWith(String, String)} except that the
         * token replacement is not performed on parameters.
         */
        public void surroundWithLiteral(String startTag, String endTag) {
            addMarkup(0, length(), startTag, endTag);
        }

        /**
         * Surrounds this subtext with <a>...</a>.
         */
        public void href(String url) {
            addHyperlink(0, length(), url);
        }

        /**
         * Gets the start index of the captured group within
         * {@link MarkupText#getText()}.
         *
         * @param groupIndex 0 means the start of the whole subtext. 1, 2, ...
         * are groups captured by '(...)' in the regexp.
         */
        public int start(int groupIndex) {
            if (groupIndex == 0) {
                return start;
            }
            return groups[groupIndex * 2 - 2];
        }

        /**
         * Gets the start index of this subtext within
         * {@link MarkupText#getText()}.
         */
        public int start() {
            return start;
        }

        /**
         * Gets the end index of the captured group within
         * {@link MarkupText#getText()}.
         */
        public int end(int groupIndex) {
            if (groupIndex == 0) {
                return end;
            }
            return groups[groupIndex * 2 - 1];
        }

        /**
         * Gets the end index of this subtext within
         * {@link MarkupText#getText()}.
         */
        public int end() {
            return end;
        }

        /**
         * Gets the text that represents the captured group.
         *
         * @return <tt>null</tt> if the group did not participate in the match.
         */
        public String group(int groupIndex) {
            if (start(groupIndex) == -1) {
                return null;
            }
            return text.substring(start(groupIndex), end(groupIndex));
        }

        /**
         * How many captured groups are in this subtext.
         *
         * @since 1.357
         */
        public int groupCount() {
            return groups.length / 2;
        }

        /**
         * Replaces the group tokens like "$0", "$1", and etc with their actual
         * matches.
         * <p>
         * NOTE(review): a trailing '\' or '$' at the very end of the template
         * causes a StringIndexOutOfBoundsException; callers are assumed to pass
         * well-formed templates — confirm before hardening.
         */
        public String replace(String s) {
            // StringBuilder: this buffer is method-local, so the synchronized
            // StringBuffer is unnecessary (consistent with toString(boolean)).
            StringBuilder buf = new StringBuilder();
            for (int i = 0; i < s.length(); i++) {
                char ch = s.charAt(i);
                if (ch == '\\') {// escape char
                    i++;
                    buf.append(s.charAt(i));
                } else if (ch == '$') {// replace by group
                    i++;
                    ch = s.charAt(i);
                    // get the group number
                    int groupId = ch - '0';
                    if (groupId < 0 || groupId > 9) {
                        // not a digit: emit the '$' and the char literally
                        buf.append('$').append(ch);
                    } else {
                        // add the group text (skip groups that didn't match)
                        String group = group(groupId);
                        if (group != null) {
                            buf.append(group);
                        }
                    }
                } else {
                    // other chars
                    buf.append(ch);
                }
            }
            return buf.toString();
        }

        @Override
        protected SubText createSubText(Matcher m) {
            return new SubText(m, start);
        }
    }

    /**
     *
     * @param text Plain text. This shouldn't include any markup nor escape.
     * Those are done later in {@link #toString(boolean)}.
     */
    public MarkupText(String text) {
        this.text = text;
    }

    @Override
    public String getText() {
        return text;
    }

    /**
     * Returns a subtext.
     *
     * @param end If negative, -N means "trim the last N-1 chars". That is,
     * (s,-1) is the same as (s,length)
     */
    public SubText subText(int start, int end) {
        return new SubText(start, end < 0 ? text.length() + 1 + end : end);
    }

    @Override
    public void addMarkup(int startPos, int endPos, String startTag, String endTag) {
        rangeCheck(startPos);
        rangeCheck(endPos);
        if (startPos > endPos) {
            throw new IndexOutOfBoundsException();
        }
        // when multiple tags are added to the same range, we want them to show up like
        // <b><i>abc</i></b>, not <b><i>abc</b></i>. Also, we'd like <b>abc</b><i>def</i>,
        // not <b>abc<i></b>def</i>. Do this by inserting them to different places.
        tags.add(new Tag(startPos, startTag));
        tags.add(0, new Tag(endPos, endTag));
    }

    /**
     * Inserts a single markup fragment in front of the character at {@code pos}.
     */
    public void addMarkup(int pos, String tag) {
        rangeCheck(pos);
        tags.add(new Tag(pos, tag));
    }

    /** Rejects positions outside [0, text.length()]. */
    private void rangeCheck(int pos) {
        if (pos < 0 || pos > text.length()) {
            throw new IndexOutOfBoundsException();
        }
    }

    /**
     * Returns the fully marked-up text.
     *
     * @deprecated as of 1.350. Use {@link #toString(boolean)} to be explicit
     * about the escape mode.
     */
    @Override
    public String toString() {
        return toString(false);
    }

    /**
     * Returns the fully marked-up text.
     *
     * @param preEscape If true, the escaping is for the <PRE> context. This
     * leave SP and CR/LF intact. If false, the escape is for the normal HTML,
     * thus SP becomes &nbsp; and CR/LF becomes <BR>
     */
    public String toString(boolean preEscape) {
        if (tags.isEmpty()) {
            return preEscape ? Util.xmlEscape(text) : Util.escape(text); // the most common case
        }

        // Sorting is stable, so tags inserted at the same position keep the
        // nesting order arranged by addMarkup(int,int,String,String).
        Collections.sort(tags);

        StringBuilder buf = new StringBuilder();
        int copied = 0; // # of chars already copied from text to buf
        for (Tag tag : tags) {
            if (copied < tag.pos) {
                String portion = text.substring(copied, tag.pos);
                buf.append(preEscape ? Util.xmlEscape(portion) : Util.escape(portion));
                copied = tag.pos;
            }
            buf.append(tag.markup);
        }
        if (copied < text.length()) {
            String portion = text.substring(copied);
            buf.append(preEscape ? Util.xmlEscape(portion) : Util.escape(portion));
        }

        return buf.toString();
    }

    // perhaps this method doesn't need to be here to remain binary compatible with past versions,
    // but having this seems to be safer.
    @Override
    public List<SubText> findTokens(Pattern pattern) {
        return super.findTokens(pattern);
    }

    @Override
    protected SubText createSubText(Matcher m) {
        return new SubText(m, 0);
    }
}
| sap-production/hudson-3.x | hudson-core/src/main/java/hudson/MarkupText.java | Java | apache-2.0 | 10,245 |
# dnssec
## Name
*dnssec* - enables on-the-fly DNSSEC signing of served data.
## Description
With *dnssec*, any reply that doesn't (or can't) do DNSSEC will get signed on the fly. Authenticated
denial of existence is implemented with NSEC black lies. Using ECDSA as an algorithm is preferred as
this leads to smaller signatures (compared to RSA). NSEC3 is *not* supported.
This plugin can only be used once per Server Block.
## Syntax
~~~
dnssec [ZONES... ] {
key file KEY...
cache_capacity CAPACITY
}
~~~
The signing behavior depends on the keys specified. If multiple keys are specified of which there is
at least one key with the SEP bit set and at least one key with the SEP bit unset, signing will happen
in split ZSK/KSK mode. DNSKEY records will be signed with all keys that have the SEP bit set. All other
records will be signed with all keys that do not have the SEP bit set.
In any other case, each specified key will be treated as a CSK (common signing key), forgoing the
ZSK/KSK split. All signing operations are done online.
Authenticated denial of existence is implemented with NSEC black lies. Using ECDSA as an algorithm
is preferred as this leads to smaller signatures (compared to RSA). NSEC3 is *not* supported.
As the *dnssec* plugin can't see the original TTL of the RRSets it signs, it will always use 3600s
as the value.
If multiple *dnssec* plugins are specified in the same zone, the last one specified will be
used.
* **ZONES** zones that should be signed. If empty, the zones from the configuration block
are used.
* `key file` indicates that **KEY** file(s) should be read from disk. When multiple keys are specified, RRsets
will be signed with all keys. Generating a key can be done with `dnssec-keygen`: `dnssec-keygen -a
ECDSAP256SHA256 <zonename>`. A key created for zone *A* can be safely used for zone *B*. The name of the
key file can be specified in one of the following formats
* basename of the generated key `Kexample.org+013+45330`
* generated public key `Kexample.org+013+45330.key`
* generated private key `Kexample.org+013+45330.private`
* `cache_capacity` indicates the capacity of the cache. The dnssec plugin uses a cache to store
RRSIGs. The default for **CAPACITY** is 10000.
## Metrics
If monitoring is enabled (via the *prometheus* plugin) then the following metrics are exported:
* `coredns_dnssec_cache_entries{server, type}` - total elements in the cache, type is "signature".
* `coredns_dnssec_cache_hits_total{server}` - Counter of cache hits.
* `coredns_dnssec_cache_misses_total{server}` - Counter of cache misses.
The label `server` indicates the server handling the request; see the *metrics* plugin for details.
## Examples
Sign responses for `example.org` with the key "Kexample.org.+013+45330.key".
~~~ corefile
example.org {
dnssec {
key file Kexample.org.+013+45330
}
whoami
}
~~~
Sign responses for a kubernetes zone with the key "Kcluster.local+013+45129.key".
~~~
cluster.local {
kubernetes
dnssec {
key file Kcluster.local+013+45129
}
}
~~~
| yongtang/coredns | plugin/dnssec/README.md | Markdown | apache-2.0 | 3,103 |
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apiserver
import (
"context"
"fmt"
"net/http"
"net/url"
"sync/atomic"
"github.com/golang/glog"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/httpstream"
"k8s.io/apimachinery/pkg/util/httpstream/spdy"
utilnet "k8s.io/apimachinery/pkg/util/net"
"k8s.io/apiserver/pkg/endpoints/handlers/responsewriters"
genericapirequest "k8s.io/apiserver/pkg/endpoints/request"
genericfeatures "k8s.io/apiserver/pkg/features"
genericrest "k8s.io/apiserver/pkg/registry/generic/rest"
utilfeature "k8s.io/apiserver/pkg/util/feature"
restclient "k8s.io/client-go/rest"
"k8s.io/client-go/transport"
apiregistrationapi "k8s.io/kube-aggregator/pkg/apis/apiregistration"
)
// proxyHandler provides a http.Handler which will proxy traffic to locations
// specified by items implementing Redirector.
type proxyHandler struct {
	// contextMapper recovers the per-request context (and thus the user info)
	// that earlier request filters attached to the request.
	contextMapper genericapirequest.RequestContextMapper
	// localDelegate is used to satisfy local APIServices
	localDelegate http.Handler
	// proxyClientCert/Key are the client cert used to identify this proxy. Backing APIServices use
	// this to confirm the proxy's identity
	proxyClientCert []byte
	proxyClientKey  []byte
	// proxyTransport supplies the Dial function copied onto transports built in updateAPIService.
	proxyTransport  *http.Transport
	// Endpoints based routing to map from cluster IP to routable IP
	routing ServiceResolver
	// handlingInfo atomically stores the current proxyHandlingInfo; written by
	// updateAPIService and read by ServeHTTP.
	handlingInfo atomic.Value
}
// proxyHandlingInfo is a snapshot of how to handle requests for one APIService;
// a fresh value is built by updateAPIService and swapped in atomically.
type proxyHandlingInfo struct {
	// local indicates that this APIService is locally satisfied
	local bool
	// restConfig holds the information for building a roundtripper
	restConfig *restclient.Config
	// transportBuildingError is an error produced while building the transport. If this
	// is non-nil, it will be reported to clients.
	transportBuildingError error
	// proxyRoundTripper is the re-useable portion of the transport. It does not vary with any request.
	proxyRoundTripper http.RoundTripper
	// serviceName is the name of the service this handler proxies to
	serviceName string
	// namespace is the namespace the service lives in
	serviceNamespace string
}
// ServeHTTP either forwards the request to the local delegate (for locally
// satisfied APIServices) or proxies it to the resolved backing service
// endpoint, attaching front-proxy auth headers for the authenticated user.
func (r *proxyHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
	value := r.handlingInfo.Load()
	if value == nil {
		// No APIService information recorded yet; fall back to the local delegate.
		r.localDelegate.ServeHTTP(w, req)
		return
	}
	handlingInfo := value.(proxyHandlingInfo)
	if handlingInfo.local {
		if r.localDelegate == nil {
			http.Error(w, "", http.StatusNotFound)
			return
		}
		r.localDelegate.ServeHTTP(w, req)
		return
	}
	// Surface any transport-construction failure recorded by updateAPIService.
	if handlingInfo.transportBuildingError != nil {
		http.Error(w, handlingInfo.transportBuildingError.Error(), http.StatusInternalServerError)
		return
	}
	ctx, ok := r.contextMapper.Get(req)
	if !ok {
		http.Error(w, "missing context", http.StatusInternalServerError)
		return
	}
	// The authenticated user must be present; it is forwarded via proxy headers below.
	user, ok := genericapirequest.UserFrom(ctx)
	if !ok {
		http.Error(w, "missing user", http.StatusInternalServerError)
		return
	}
	// write a new location based on the existing request pointed at the target service
	location := &url.URL{}
	location.Scheme = "https"
	rloc, err := r.routing.ResolveEndpoint(handlingInfo.serviceNamespace, handlingInfo.serviceName)
	if err != nil {
		http.Error(w, fmt.Sprintf("missing route (%s)", err.Error()), http.StatusInternalServerError)
		return
	}
	location.Host = rloc.Host
	location.Path = req.URL.Path
	location.RawQuery = req.URL.Query().Encode()
	// WithContext creates a shallow clone of the request with the new context.
	newReq := req.WithContext(context.Background())
	newReq.Header = utilnet.CloneHeader(req.Header)
	newReq.URL = location
	if handlingInfo.proxyRoundTripper == nil {
		http.Error(w, "", http.StatusNotFound)
		return
	}
	// we need to wrap the roundtripper in another roundtripper which will apply the front proxy headers
	proxyRoundTripper, upgrade, err := maybeWrapForConnectionUpgrades(handlingInfo.restConfig, handlingInfo.proxyRoundTripper, req)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	proxyRoundTripper = transport.NewAuthProxyRoundTripper(user.GetName(), user.GetGroups(), user.GetExtra(), proxyRoundTripper)
	// if we are upgrading, then the upgrade path tries to use this request with the TLS config we provide, but it does
	// NOT use the roundtripper. Its a direct call that bypasses the round tripper. This means that we have to
	// attach the "correct" user headers to the request ahead of time. After the initial upgrade, we'll be back
	// at the roundtripper flow, so we only have to muck with this request, but we do have to do it.
	if upgrade {
		transport.SetAuthProxyHeaders(newReq, user.GetName(), user.GetGroups(), user.GetExtra())
	}
	handler := genericrest.NewUpgradeAwareProxyHandler(location, proxyRoundTripper, true, upgrade, &responder{w: w})
	handler.ServeHTTP(w, newReq)
}
// maybeWrapForConnectionUpgrades wraps the roundtripper for upgrades. The bool indicates if it was wrapped
func maybeWrapForConnectionUpgrades(restConfig *restclient.Config, rt http.RoundTripper, req *http.Request) (http.RoundTripper, bool, error) {
	if !httpstream.IsUpgradeRequest(req) {
		// Ordinary requests pass through the existing roundtripper untouched.
		return rt, false, nil
	}
	tlsConfig, err := restclient.TLSConfigFor(restConfig)
	if err != nil {
		return nil, true, err
	}
	// Upgraded (streaming) connections go over SPDY; redirect following is feature-gated.
	followRedirects := utilfeature.DefaultFeatureGate.Enabled(genericfeatures.StreamingProxyRedirects)
	wrappedRT, err := restclient.HTTPWrappersForConfig(restConfig, spdy.NewRoundTripper(tlsConfig, followRedirects))
	if err != nil {
		return nil, true, err
	}
	return wrappedRT, true, nil
}
// responder implements rest.Responder for assisting a connector in writing objects or errors.
type responder struct {
	w http.ResponseWriter
}
// TODO this should properly handle content type negotiation
// if the caller asked for protobuf and you write JSON bad things happen.
func (r *responder) Object(statusCode int, obj runtime.Object) {
	responsewriters.WriteRawJSON(statusCode, obj, r.w)
}
// Error writes err as a plain 500 response.
func (r *responder) Error(err error) {
	http.Error(r.w, err.Error(), http.StatusInternalServerError)
}
// these methods provide locked access to fields
// updateAPIService rebuilds the handling info for the given APIService and
// atomically publishes it for ServeHTTP to pick up.
func (r *proxyHandler) updateAPIService(apiService *apiregistrationapi.APIService) {
	if apiService.Spec.Service == nil {
		// No backing service: this APIService is served by the local delegate.
		r.handlingInfo.Store(proxyHandlingInfo{local: true})
		return
	}
	newInfo := proxyHandlingInfo{
		restConfig: &restclient.Config{
			TLSClientConfig: restclient.TLSClientConfig{
				Insecure: apiService.Spec.InsecureSkipTLSVerify,
				// ServerName pins TLS verification to the service's cluster DNS name.
				ServerName: apiService.Spec.Service.Name + "." + apiService.Spec.Service.Namespace + ".svc",
				CertData: r.proxyClientCert,
				KeyData: r.proxyClientKey,
				CAData: apiService.Spec.CABundle,
			},
		},
		serviceName: apiService.Spec.Service.Name,
		serviceNamespace: apiService.Spec.Service.Namespace,
	}
	newInfo.proxyRoundTripper, newInfo.transportBuildingError = restclient.TransportFor(newInfo.restConfig)
	// Copy the custom dialer onto the built transport when one is configured.
	// The switch variable deliberately shadows the imported "transport" package
	// inside the case body.
	if newInfo.transportBuildingError == nil && r.proxyTransport.Dial != nil {
		switch transport := newInfo.proxyRoundTripper.(type) {
		case *http.Transport:
			transport.Dial = r.proxyTransport.Dial
		default:
			// Record (rather than drop) the failure so ServeHTTP reports it to clients.
			newInfo.transportBuildingError = fmt.Errorf("unable to set dialer for %s/%s as rest transport is of type %T", apiService.Spec.Service.Namespace, apiService.Spec.Service.Name, newInfo.proxyRoundTripper)
			glog.Warning(newInfo.transportBuildingError.Error())
		}
	}
	r.handlingInfo.Store(newInfo)
}
| huangyuqi/kubernetes | staging/src/k8s.io/kube-aggregator/pkg/apiserver/handler_proxy.go | GO | apache-2.0 | 7,873 |
#!/usr/bin/env python3
###############################################################################
# Copyright 2019 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
""" Restore record file by replacing its video frames with image frames. """
import datetime
import errno
import glob
import os
import shutil
import time
from absl import app
from absl import flags
from absl import logging
import cv2
from cyber.python.cyber_py3.record import RecordReader, RecordWriter
from modules.drivers.proto.sensor_image_pb2 import CompressedImage
# Command-line flags: source (video-compressed) record and restored destination.
flags.DEFINE_string('from_record', None, 'The source record file that needs to be restored.')
flags.DEFINE_string('to_record', None, 'The restored record file.')
# The compressed channels that have videos we need to decode
IMAGE_FRONT_6MM_CHANNEL = '/apollo/sensor/camera/front_6mm/image/compressed'
IMAGE_FRONT_12MM_CHANNEL = '/apollo/sensor/camera/front_12mm/image/compressed'
IMAGE_REAR_6MM_CHANNEL = '/apollo/sensor/camera/rear_6mm/image/compressed'
IMAGE_LEFT_FISHEYE_CHANNEL = '/apollo/sensor/camera/left_fisheye/image/compressed'
IMAGE_RIGHT_FISHEYE_CHANNEL = '/apollo/sensor/camera/right_fisheye/image/compressed'
VIDEO_FRONT_6MM_CHANNEL = '/apollo/sensor/camera/front_6mm/video/compressed'
VIDEO_FRONT_12MM_CHANNEL = '/apollo/sensor/camera/front_12mm/video/compressed'
VIDEO_REAR_6MM_CHANNEL = '/apollo/sensor/camera/rear_6mm/video/compressed'
VIDEO_LEFT_FISHEYE_CHANNEL = '/apollo/sensor/camera/left_fisheye/video/compressed'
VIDEO_RIGHT_FISHEYE_CHANNEL = '/apollo/sensor/camera/right_fisheye/video/compressed'
# Every channel whose messages are fed through the video decoding pipeline.
VIDEO_CHANNELS = [
    IMAGE_FRONT_6MM_CHANNEL,
    IMAGE_FRONT_12MM_CHANNEL,
    IMAGE_REAR_6MM_CHANNEL,
    IMAGE_LEFT_FISHEYE_CHANNEL,
    IMAGE_RIGHT_FISHEYE_CHANNEL,
    VIDEO_FRONT_6MM_CHANNEL,
    VIDEO_FRONT_12MM_CHANNEL,
    VIDEO_REAR_6MM_CHANNEL,
    VIDEO_LEFT_FISHEYE_CHANNEL,
    VIDEO_RIGHT_FISHEYE_CHANNEL,
]
# Maps each source channel to the image channel the restored message is written
# to; image channels map to themselves, video channels map to their image twin.
VIDEO_IMAGE_MAP = {
    IMAGE_FRONT_6MM_CHANNEL: IMAGE_FRONT_6MM_CHANNEL,
    IMAGE_FRONT_12MM_CHANNEL: IMAGE_FRONT_12MM_CHANNEL,
    IMAGE_REAR_6MM_CHANNEL: IMAGE_REAR_6MM_CHANNEL,
    IMAGE_LEFT_FISHEYE_CHANNEL: IMAGE_LEFT_FISHEYE_CHANNEL,
    IMAGE_RIGHT_FISHEYE_CHANNEL: IMAGE_RIGHT_FISHEYE_CHANNEL,
    VIDEO_FRONT_6MM_CHANNEL: IMAGE_FRONT_6MM_CHANNEL,
    VIDEO_FRONT_12MM_CHANNEL: IMAGE_FRONT_12MM_CHANNEL,
    VIDEO_REAR_6MM_CHANNEL: IMAGE_REAR_6MM_CHANNEL,
    VIDEO_LEFT_FISHEYE_CHANNEL: IMAGE_LEFT_FISHEYE_CHANNEL,
    VIDEO_RIGHT_FISHEYE_CHANNEL: IMAGE_RIGHT_FISHEYE_CHANNEL,
}
class VideoConverter(object):
    """Convert video into images.

    Collects the raw H.265 frames of one channel into a single file, decodes
    that file to jpg images with the Apollo video2jpg tool, and renames the
    decoded images back to the message ids of their source frames.
    """
    def __init__(self, work_dir, topic):
        # Initial type of video frames that defined in apollo video drive proto
        # The initial frame has meta data information shared by the following tens of frames
        self.initial_frame_type = 1
        # Message ids of every written frame, in arrival order; used by decode()
        # to rename the generated jpgs back to their source messages.
        self.image_ids = []
        self.first_initial_found = False
        video_dir = os.path.join(work_dir, 'videos')
        self.video_file = os.path.join(video_dir, '{}.h265'.format(topic))
        self.image_dir = '{}_images'.format(self.video_file)
        # makedirs is a file-local helper (defined elsewhere in this file).
        makedirs(video_dir)
        makedirs(self.image_dir)
        self.frame_writer = open(self.video_file, 'wb+')
    def close_writer(self):
        """Close the video frames writer"""
        self.frame_writer.close()
    def write_frame(self, py_message):
        """Write video frames into binary format file"""
        # Drop leading frames until the first initial (meta-data) frame arrives,
        # since following frames cannot be decoded without it.
        if not self.first_initial_found:
            proto = image_message_to_proto(py_message)
            if proto.frame_type != self.initial_frame_type:
                return
            self.first_initial_found = True
        self.frame_writer.write(py_message.message)
        self.image_ids.append(get_message_id(py_message.timestamp, py_message.topic))
    def decode(self):
        """Decode video file into images"""
        video_decoder_exe = '/apollo/bazel-bin/modules/drivers/video/tools/decode_video/video2jpg'
        return_code = os.system('{} --input_video={} --output_dir={}'.format(
            video_decoder_exe, self.video_file, self.image_dir))
        if return_code != 0:
            logging.error('Failed to execute video2jpg for video {}'.format(self.video_file))
            return False
        # Decoded files sort in frame order, so they pair 1:1 with image_ids.
        generated_images = sorted(glob.glob('{}/*.jpg'.format(self.image_dir)))
        if len(generated_images) != len(self.image_ids):
            logging.error('Mismatch between original {} and generated frames {}'.format(
                len(self.image_ids), len(generated_images)))
            return False
        # Rename each decoded jpg to the message id of its source frame.
        for idx in range(len(generated_images)):
            os.rename(generated_images[idx], os.path.join(self.image_dir, self.image_ids[idx]))
        return True
    def move_images(self, overall_image_dir):
        """Move self's images to overall image dir"""
        for image_file in os.listdir(self.image_dir):
            shutil.move(os.path.join(self.image_dir, image_file),
                        os.path.join(overall_image_dir, image_file))
def restore_record(input_record, output_record):
    """Entrance of processing.

    Rewrites input_record into output_record, replacing each video message
    with the decoded jpeg image of its frame, published under the matching
    image topic from VIDEO_IMAGE_MAP.
    """
    # Define working dirs that store intermediate results in the middle of processing
    work_dir = 'restore_video_work_dir_{}'.format(
        datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d-%H-%M-%S'))
    # Decode videos
    converters = {}
    for topic in VIDEO_CHANNELS:
        converters[topic] = VideoConverter(work_dir, topic)
    reader = RecordReader(input_record)
    # First pass over the record: split each video topic's frames into its
    # own .h265 elementary stream on disk.
    for message in reader.read_messages():
        if message.topic in VIDEO_CHANNELS:
            converters[message.topic].write_frame(message)
    image_dir = os.path.join(work_dir, 'images')
    makedirs(image_dir)
    for topic in VIDEO_CHANNELS:
        converters[topic].close_writer()
        # decode()'s success flag is ignored here; a failed decode surfaces
        # later as missing image files inside retrieve_image().
        converters[topic].decode()
        converters[topic].move_images(image_dir)
    # Restore target record file
    writer = RecordWriter(0, 0)
    writer.open(output_record)
    topic_descs = {}
    counter = 0
    reader = RecordReader(input_record)
    # Second pass: copy every message through, swapping video messages for
    # their decoded jpeg payloads under the corresponding image topic.
    for message in reader.read_messages():
        message_content = message.message
        message_topic = message.topic
        if message.topic in VIDEO_CHANNELS:
            message_content = retrieve_image(image_dir, message)
            message_topic = VIDEO_IMAGE_MAP[message.topic]
            # Skip frames whose image could not be retrieved/encoded.
            if not message_content:
                continue
        counter += 1
        if counter % 1000 == 0:
            logging.info('rewriting {} th message to record {}'.format(counter, output_record))
        writer.write_message(message_topic, message_content, message.timestamp)
        if message_topic not in topic_descs:
            topic_descs[message_topic] = reader.get_protodesc(message_topic)
            # NOTE(review): the channel is registered after its first message
            # is written -- presumably RecordWriter tolerates this ordering;
            # confirm against the cyber record API.
            writer.write_channel(message_topic, message.data_type, topic_descs[message_topic])
    writer.close()
    logging.info('All Done, converted record: {}'.format(output_record))
def retrieve_image(image_dir, message):
    """Replace a video message's payload with its decoded jpeg image bytes.

    Args:
        image_dir: directory holding the decoded frames, named by message id.
        message: the original video message read from the record.

    Returns:
        The serialized CompressedImage proto carrying the jpeg bytes, or
        None if the frame is missing or cannot be read/encoded.
    """
    message_id = get_message_id(message.timestamp, message.topic)
    message_path = os.path.join(image_dir, message_id)
    if not os.path.exists(message_path):
        logging.error('message {} not found in image dir'.format(message_id))
        return None
    img_bin = cv2.imread(message_path)
    # Check by comparing with None explicitly to avoid ambiguity: numpy
    # arrays do not support plain truth testing, and cv2.imread returns
    # None on failure.
    if img_bin is None:
        logging.error('failed to read original message: {}'.format(message_path))
        return None
    encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), 95]
    result, encode_img = cv2.imencode('.jpg', img_bin, encode_param)
    if not result:
        logging.error('failed to encode message {}'.format(message_id))
        return None
    message_proto = image_message_to_proto(message)
    message_proto.format = '; jpeg compressed bgr8'
    # Assign the jpeg bytes directly.  The original expression
    # data.replace(data[:], bytearray(encode_img)) was an obfuscated way of
    # doing the same full-content replacement.
    message_proto.data = encode_img.tobytes()
    return message_proto.SerializeToString()
def get_message_id(timestamp, topic):
    """Unify the way to get a unique identifier for the given message.

    The id is the timestamp concatenated with the topic, with path
    separators replaced so it is usable as a file name.
    """
    sanitized_topic = topic.replace('/', '_')
    return '{}{}'.format(timestamp, sanitized_topic)
def image_message_to_proto(py_message):
    """Deserialize a raw message payload into a CompressedImage proto."""
    proto = CompressedImage()
    proto.ParseFromString(py_message.message)
    return proto
def makedirs(dir_path):
    """Make directories recursively; an already-existing path is success.

    Args:
        dir_path: path of the (possibly nested) directory to create.

    Raises:
        OSError: on any failure other than the path already existing
            (e.g. permission denied, parent is a file).
    """
    try:
        # exist_ok makes the create race-free: another process creating the
        # directory between a stat and the mkdir cannot fail us.
        os.makedirs(dir_path, exist_ok=True)
    except FileExistsError:
        # Path already exists (raced with another creator, or is not a
        # directory); the original contract treats "already exists" as
        # success, so swallow it.
        pass
    except OSError:
        logging.error('Failed to makedir ' + dir_path)
        raise
def main(argv):
    """Main process.

    Validates the --from_record flag, derives a default --to_record when one
    is not given, and kicks off the restoration.
    """
    if not flags.FLAGS.from_record or not os.path.exists(flags.FLAGS.from_record):
        logging.error('Please provide valid source record file.')
        return
    to_record = flags.FLAGS.to_record
    if not to_record:
        to_record = '{}_restored'.format(flags.FLAGS.from_record)
        # warning() replaces the deprecated warn() alias.
        logging.warning('The default restored record file is set as {}'.format(to_record))
    restore_record(flags.FLAGS.from_record, to_record)


if __name__ == '__main__':
    app.run(main)
| xiaoxq/apollo | modules/tools/restore_video_record/restore_video_record.py | Python | apache-2.0 | 9,788 |
package test.configuration;
import java.util.Arrays;
import java.util.List;
import org.testng.Assert;
import org.testng.TestNG;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
 * Verifies that configuration-group methods run in the expected order
 * relative to data-provider-driven and invocation-count-driven test methods.
 * Each sample test class appends markers to a static list during its run;
 * the expected marker sequence is asserted after the TestNG run completes.
 */
public class GroupsTest {
    private TestNG m_testNg;

    /** Creates a fresh, quiet TestNG instance before every test method. */
    @BeforeMethod
    public void setUp() {
        m_testNg = new TestNG();
        m_testNg.setVerbose(0);
    }

    @Test
    public void verifyDataProviderAfterGroups() {
        runTest(ConfigurationGroupDataProviderSampleTest.class,
                ConfigurationGroupDataProviderSampleTest.m_list,
                Arrays.asList(1, 2, 2, 2, 3));
    }

    @Test
    public void verifyParametersAfterGroups() {
        runTest(ConfigurationGroupInvocationCountSampleTest.class,
                ConfigurationGroupInvocationCountSampleTest.m_list,
                Arrays.asList(1, 2, 2, 2, 3));
    }

    @Test
    public void verifyBothAfterGroups() {
        runTest(ConfigurationGroupBothSampleTest.class,
                ConfigurationGroupBothSampleTest.m_list,
                Arrays.asList(1, 2, 2, 2, 2, 2, 2, 3));
    }

    /**
     * Runs TestNG over {@code cls} and asserts that the markers recorded in
     * {@code list} match {@code expected}.  ({@code Class<?>} replaces the
     * raw {@code Class} of the original to avoid an unchecked warning.)
     */
    private void runTest(Class<?> cls, List<Integer> list, List<Integer> expected) {
        m_testNg.setTestClasses(new Class[] {
            cls
        });
        m_testNg.run();
        Assert.assertEquals(list, expected);
    }
}
| ludovicc/testng-debian | test/src/test/configuration/GroupsTest.java | Java | apache-2.0 | 1,353 |
/*
* Copyright 2017 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns.android.testdata;
/**
 * Positive test inputs for error-prone's CustomFragmentNotInstantiable check.
 *
 * <p>Each {@code // BUG: Diagnostic contains:} comment is an expectation
 * consumed by the error-prone test harness: the checker must report a
 * diagnostic containing that text on the following line.  Do not alter
 * those comments or the deliberately-broken code they annotate.
 *
 * @author jasonlong@google.com (Jason Long)
 */
public class CustomFragmentNotInstantiablePositiveCases {
  // BUG: Diagnostic contains: public
  static class PrivateFragment extends CustomFragment {
    public PrivateFragment() {}
  }

  public static class PrivateConstructor extends CustomFragment {
    // BUG: Diagnostic contains: public
    PrivateConstructor() {}
  }

  // BUG: Diagnostic contains: nullary constructor
  public static class NoConstructor extends CustomFragment {
    public NoConstructor(int x) {}
  }

  // BUG: Diagnostic contains: nullary constructor
  public static class NoConstructorV4 extends android.support.v4.app.Fragment {
    public NoConstructorV4(int x) {}
  }

  public static class ParentFragment extends CustomFragment {
    public ParentFragment() {}
  }

  public static class ParentFragmentV4 extends android.support.v4.app.Fragment {
    public ParentFragmentV4() {}
  }

  // BUG: Diagnostic contains: nullary constructor
  public static class DerivedFragmentNoConstructor extends ParentFragment {
    public DerivedFragmentNoConstructor(int x) {}
  }

  // BUG: Diagnostic contains: nullary constructor
  public static class DerivedFragmentNoConstructorV4 extends ParentFragmentV4 {
    public DerivedFragmentNoConstructorV4(boolean b) {}
  }

  public class EnclosingClass {
    // BUG: Diagnostic contains: static
    public class InnerFragment extends CustomFragment {
      public InnerFragment() {}
    }

    public CustomFragment create1() {
      // BUG: Diagnostic contains: public
      return new CustomFragment() {};
    }

    public CustomFragment create2() {
      // BUG: Diagnostic contains: public
      class LocalFragment extends CustomFragment {}
      return new LocalFragment();
    }
  }
}
| cushon/error-prone | core/src/test/java/com/google/errorprone/bugpatterns/android/testdata/CustomFragmentNotInstantiablePositiveCases.java | Java | apache-2.0 | 2,433 |
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <memory>
#include <string>
#include <vector>
#include "tensorflow/core/distributed_runtime/rpc/grpc_state.h"
#include "tensorflow/core/distributed_runtime/rpc/grpc_util.h"
#include "tensorflow/core/framework/op_kernel.h"
#include "tensorflow/core/framework/tensor_types.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/core/refcount.h"
#include "tensorflow/core/util/rpc/call_container.h"
#include "tensorflow/core/util/rpc/rpc_factory.h"
#include "tensorflow/core/distributed_runtime/rpc/grpc_rpc_factory.h"
namespace tensorflow {
namespace internal {
// Bookkeeping for one RPC element within a batched GrpcRPCFactory::Call().
// Holds raw, non-owning pointers into the op kernel's request/response (and,
// in try_rpc mode, per-element status) storage, so an instance must not
// outlive the kernel invocation whose CallContainer created it.
class GrpcCall {
 public:
  explicit GrpcCall(CallContainer<GrpcCall>* container, int index, bool try_rpc,
                    const string* request_msg, string* response_msg,
                    int32* status_code, string* status_message)
      : container_(container),
        index_(index),
        try_rpc_(try_rpc),
        request_msg_(request_msg),
        response_msg_(response_msg),
        status_code_(status_code),
        status_message_(status_message) {}

  // Requests cancellation of the in-flight RPC via its CallOptions.
  void StartCancel() { call_opts_.StartCancel(); }

  // Completion callback.  In try_rpc mode a failure is recorded into the
  // per-element status outputs instead of failing the whole batch; either
  // way the owning container is told that element `index_` has finished.
  void Done(const Status& s) {
    DCHECK(container_ != nullptr);
    if (!s.ok() && try_rpc_) {
      DCHECK(status_code_ != nullptr);
      DCHECK(status_message_ != nullptr);
      *status_code_ = s.code();
      *status_message_ = s.error_message();
    }
    container_->Done(s, index_);
  }

  CallOptions* call_opts() { return &call_opts_; }
  int index() { return index_; }
  const string& request() const { return *request_msg_; }
  string* response() const { return response_msg_; }

 private:
  CallContainer<GrpcCall>* const container_;  // Not owned.
  const int index_;                           // Element index within the batch.
  bool try_rpc_;
  CallOptions call_opts_;
  const string* request_msg_;   // Not owned.
  string* response_msg_;        // Not owned.
  int* status_code_;            // Not owned; null unless try_rpc.
  string* status_message_;      // Not owned; null unless try_rpc.
};
} // namespace internal
using internal::GrpcCall;
// Spawns the completion-queue polling thread that drives every RPC callback
// issued through this factory.  `fail_fast` and `timeout_in_ms` become the
// defaults applied to each call the factory starts.
GrpcRPCFactory::GrpcRPCFactory(OpKernelConstruction* ctx, bool fail_fast,
                               int64 timeout_in_ms)
    : RPCFactory(), fail_fast_(fail_fast), timeout_in_ms_(timeout_in_ms) {
  // TODO(ebrevdo): Investigate possible performance improvements by
  // replacing this thread with a threadpool.
  polling_thread_ =
      ctx->env()->StartThread(ThreadOptions(), "rpc_op_grpc_factory", [this]() {
        void* tag;
        bool ok;
        // Next() blocks until an event completes or the queue is shut down
        // (the destructor's Shutdown()), at which point it returns false
        // and the thread exits.
        while (completion_queue_.Next(&tag, &ok)) {
          GrpcClientCQTag* callback_tag = static_cast<GrpcClientCQTag*>(tag);
          callback_tag->OnCompleted(ok);
        }
      });
}
GrpcRPCFactory::~GrpcRPCFactory() {
  // The amount of time we wait depends on several parameters, including:
  //   - the value of the fail_fast attribute.
  //   - the timeout option of the rpc call in the proto declaration.
  //   - the network roundtrip time and service's execution time.
  //
  // If a connection is made but the service doesn't ever respond, and
  // there is no timeout option set for this rpc call, then it is
  // possible the RPC request will wait forever.
  //
  // Shutdown() drains the completion queue; once Next() returns false the
  // polling thread exits, and deleting the Thread object joins that exit.
  completion_queue_.Shutdown();
  delete polling_thread_;
}
// Issues one RPC per batch element against the given address/method tensors.
// Completion bookkeeping is handed to a CallContainer that deletes itself
// once every element has finished and `done` has been invoked.
void GrpcRPCFactory::Call(OpKernelContext* ctx, int64 num_elements,
                          const Tensor& address_t, const Tensor& method_t,
                          const Tensor& request_t, const bool try_rpc,
                          Tensor* response_t, Tensor* status_code_t,
                          Tensor* status_message_t,
                          AsyncOpKernel::DoneCallback done) {
  if (try_rpc) {
    // In this case status_code will never be set in the response,
    // so we just set it to OK.
    DCHECK(status_code_t != nullptr);
    status_code_t->flat<int32>().setConstant(
        static_cast<int>(errors::Code::OK));
  }

  // NOTE(review): both lambdas capture the input Tensors (and try_rpc) by
  // reference; presumably the CallContainer invokes them before this stack
  // frame unwinds -- confirm against CallContainer's contract.
  CallContainer<GrpcCall>::CreateCallFn create_call_fn =
      [this, &request_t, &try_rpc, response_t, status_code_t, status_message_t](
          CallContainer<GrpcCall>* container, int index) {
        CreateCall(request_t, try_rpc, index, container, response_t,
                   status_code_t, status_message_t);
      };

  CallContainer<GrpcCall>::StartCallFn start_call_fn =
      [this, &address_t, &method_t](GrpcCall* call) {
        StartCall(address_t, method_t, call);
      };

  // This object will delete itself when done.
  new CallContainer<GrpcCall>(ctx, num_elements, fail_fast_, try_rpc,
                              std::move(done), std::move(create_call_fn),
                              std::move(start_call_fn));
}
// Returns the cached GenericStub for `address`, creating (and caching) a new
// one over a fresh channel on first use.  Thread-safe; stubs live until the
// factory is destroyed.
::grpc::GenericStub* GrpcRPCFactory::GetOrCreateStubForAddress(
    const string& address) {
  mutex_lock lock(mu_);

  // Single hash lookup: operator[] default-constructs an empty slot on a
  // miss, which is then populated.  (The original performed a find()
  // followed by a second lookup via operator[] on the miss path.)
  auto& stub = stubs_[address];
  if (stub == nullptr) {
    stub.reset(new ::grpc::GenericStub(CreateChannelForAddress(address)));
  }
  return stub.get();
}
// Builds an insecure gRPC channel to `address` with an unbounded message
// size and a 1s cap on reconnect backoff.
GrpcRPCFactory::ChannelPtr GrpcRPCFactory::CreateChannelForAddress(
    const string& address) {
  ::grpc::ChannelArguments args;
  args.SetInt(GRPC_ARG_MAX_MESSAGE_LENGTH, std::numeric_limits<int32>::max());

  // Set a standard backoff timeout of 1s instead of the
  // (sometimes default) 20s.
  args.SetInt(GRPC_ARG_MAX_RECONNECT_BACKOFF_MS, 1000);
  return ::grpc::CreateCustomChannel(
      /*target=*/address, ::grpc::InsecureChannelCredentials(), args);
}
// Registers call `index` with `container`, wiring non-owning pointers into
// the op's request/response (and, when try_rpc is set, per-element status)
// tensor storage.
void GrpcRPCFactory::CreateCall(const Tensor& request_t, const bool try_rpc,
                                int index, CallContainer<GrpcCall>* container,
                                Tensor* response_t, Tensor* status_code_t,
                                Tensor* status_message_t) {
  auto request = request_t.flat<string>();
  // A single request string is broadcast across the batch: if only one is
  // present, every index maps to element 0.
  auto get_request_ptr = [&request](int64 ix) -> const string* {
    return (request.size() > 1) ? &(request(ix)) : &(request(0));
  };
  auto response = response_t->flat<string>();
  int32* status_code_ptr = nullptr;
  string* status_message_ptr = nullptr;
  if (try_rpc) {
    status_code_ptr = status_code_t->flat<int32>().data();
    status_message_ptr = status_message_t->flat<string>().data();
  }
  container->RegisterCall(container, index, try_rpc, get_request_ptr(index),
                          &response(index),
                          (try_rpc) ? &status_code_ptr[index] : nullptr,
                          (try_rpc) ? &status_message_ptr[index] : nullptr);
}
// Kicks off the RPC for `call`, resolving its stub and method name.  Both
// the address and method tensors support broadcast: a single element is
// shared by every call in the batch.
void GrpcRPCFactory::StartCall(const Tensor& address_t, const Tensor& method_t,
                               GrpcCall* call) {
  auto address = address_t.flat<string>();
  auto method = method_t.flat<string>();
  // Stubs are maintained by the GrpcRPCFactory class and will be
  // deleted when the class is destroyed.
  ::grpc::GenericStub* singleton_stub = nullptr;
  if (address.size() == 1) {
    singleton_stub = GetOrCreateStubForAddress(address(0));
  }
  auto get_stub = [&address, this,
                   singleton_stub](int64 ix) -> ::grpc::GenericStub* {
    return (address.size() > 1) ? GetOrCreateStubForAddress(address(ix))
                                : singleton_stub;
  };
  auto get_method_ptr = [&method](int64 ix) -> const string* {
    return (method.size() > 1) ? &(method(ix)) : &(method(0));
  };

  int index = call->index();

  // This object will delete itself when done.  Its completion event is
  // delivered on completion_queue_ and dispatched by the polling thread,
  // which ultimately invokes call->Done().
  new RPCState<string>(
      get_stub(index), &completion_queue_, *get_method_ptr(index),
      call->request(), call->response(),
      /*done=*/[call](const Status& s) { call->Done(s); }, call->call_opts(),
      nullptr /*threadpool*/, fail_fast_, timeout_in_ms_);
}
} // namespace tensorflow
| hfp/tensorflow-xsmm | tensorflow/core/distributed_runtime/rpc/grpc_rpc_factory.cc | C++ | apache-2.0 | 8,205 |
<?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/security/privateca/v1beta1/resources.proto
namespace Google\Cloud\Security\PrivateCA\V1beta1\CertificateAuthority\CertificateAuthorityPolicy;
use Google\Protobuf\Internal\GPBType;
use Google\Protobuf\Internal\RepeatedField;
use Google\Protobuf\Internal\GPBUtil;
/**
 * [AllowedSubjectAltNames][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames] specifies the allowed values for
 * [SubjectAltNames][google.cloud.security.privateca.v1beta1.SubjectAltNames] by the [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority] when issuing
 * [Certificates][google.cloud.security.privateca.v1beta1.Certificate].
 *
 * NOTE: this class is machine-generated by the protocol buffer compiler from
 * google/cloud/security/privateca/v1beta1/resources.proto.  Do not edit by
 * hand; regenerate from the proto instead.
 *
 * Generated from protobuf message <code>google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames</code>
 */
class AllowedSubjectAltNames extends \Google\Protobuf\Internal\Message
{
    /**
     * Optional. Contains valid, fully-qualified host names. Glob patterns are also
     * supported. To allow an explicit wildcard certificate, escape with
     * backslash (i.e. "\*").
     * E.g. for globbed entries: '*bar.com' will allow foo.bar.com, but not
     * *.bar.com, unless the [allow_globbing_dns_wildcards][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames.allow_globbing_dns_wildcards] field is set.
     * E.g. for wildcard entries: '\*.bar.com' will allow '*.bar.com', but not
     * 'foo.bar.com'.
     *
     * Generated from protobuf field <code>repeated string allowed_dns_names = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    private $allowed_dns_names;
    /**
     * Optional. Contains valid RFC 3986 URIs. Glob patterns are also supported. To
     * match across path separators (i.e. '/') use the double star glob
     * pattern (i.e. '**').
     *
     * Generated from protobuf field <code>repeated string allowed_uris = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    private $allowed_uris;
    /**
     * Optional. Contains valid RFC 2822 E-mail addresses. Glob patterns are also
     * supported.
     *
     * Generated from protobuf field <code>repeated string allowed_email_addresses = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    private $allowed_email_addresses;
    /**
     * Optional. Contains valid 32-bit IPv4 addresses and subnet ranges or RFC 4291 IPv6
     * addresses and subnet ranges. Subnet ranges are specified using the
     * '/' notation (e.g. 10.0.0.0/8, 2001:700:300:1800::/64). Glob patterns
     * are supported only for ip address entries (i.e. not for subnet ranges).
     *
     * Generated from protobuf field <code>repeated string allowed_ips = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    private $allowed_ips;
    /**
     * Optional. Specifies if glob patterns used for [allowed_dns_names][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames.allowed_dns_names] allows
     * wildcard certificates.
     *
     * Generated from protobuf field <code>bool allow_globbing_dns_wildcards = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    private $allow_globbing_dns_wildcards = false;
    /**
     * Optional. Specifies if to allow custom X509Extension values.
     *
     * Generated from protobuf field <code>bool allow_custom_sans = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    private $allow_custom_sans = false;

    /**
     * Constructor.
     *
     * @param array $data {
     *     Optional. Data for populating the Message object.
     *
     *     @type string[]|\Google\Protobuf\Internal\RepeatedField $allowed_dns_names
     *           Optional. Contains valid, fully-qualified host names. Glob patterns are also
     *           supported. To allow an explicit wildcard certificate, escape with
     *           backslash (i.e. "\*").
     *           E.g. for globbed entries: '*bar.com' will allow foo.bar.com, but not
     *           *.bar.com, unless the [allow_globbing_dns_wildcards][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames.allow_globbing_dns_wildcards] field is set.
     *           E.g. for wildcard entries: '\*.bar.com' will allow '*.bar.com', but not
     *           'foo.bar.com'.
     *     @type string[]|\Google\Protobuf\Internal\RepeatedField $allowed_uris
     *           Optional. Contains valid RFC 3986 URIs. Glob patterns are also supported. To
     *           match across path separators (i.e. '/') use the double star glob
     *           pattern (i.e. '**').
     *     @type string[]|\Google\Protobuf\Internal\RepeatedField $allowed_email_addresses
     *           Optional. Contains valid RFC 2822 E-mail addresses. Glob patterns are also
     *           supported.
     *     @type string[]|\Google\Protobuf\Internal\RepeatedField $allowed_ips
     *           Optional. Contains valid 32-bit IPv4 addresses and subnet ranges or RFC 4291 IPv6
     *           addresses and subnet ranges. Subnet ranges are specified using the
     *           '/' notation (e.g. 10.0.0.0/8, 2001:700:300:1800::/64). Glob patterns
     *           are supported only for ip address entries (i.e. not for subnet ranges).
     *     @type bool $allow_globbing_dns_wildcards
     *           Optional. Specifies if glob patterns used for [allowed_dns_names][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames.allowed_dns_names] allows
     *           wildcard certificates.
     *     @type bool $allow_custom_sans
     *           Optional. Specifies if to allow custom X509Extension values.
     * }
     */
    public function __construct($data = NULL) {
        \GPBMetadata\Google\Cloud\Security\Privateca\V1Beta1\Resources::initOnce();
        parent::__construct($data);
    }

    /**
     * Optional. Contains valid, fully-qualified host names. Glob patterns are also
     * supported. To allow an explicit wildcard certificate, escape with
     * backslash (i.e. "\*").
     * E.g. for globbed entries: '*bar.com' will allow foo.bar.com, but not
     * *.bar.com, unless the [allow_globbing_dns_wildcards][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames.allow_globbing_dns_wildcards] field is set.
     * E.g. for wildcard entries: '\*.bar.com' will allow '*.bar.com', but not
     * 'foo.bar.com'.
     *
     * Generated from protobuf field <code>repeated string allowed_dns_names = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return \Google\Protobuf\Internal\RepeatedField
     */
    public function getAllowedDnsNames()
    {
        return $this->allowed_dns_names;
    }

    /**
     * Optional. Contains valid, fully-qualified host names. Glob patterns are also
     * supported. To allow an explicit wildcard certificate, escape with
     * backslash (i.e. "\*").
     * E.g. for globbed entries: '*bar.com' will allow foo.bar.com, but not
     * *.bar.com, unless the [allow_globbing_dns_wildcards][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames.allow_globbing_dns_wildcards] field is set.
     * E.g. for wildcard entries: '\*.bar.com' will allow '*.bar.com', but not
     * 'foo.bar.com'.
     *
     * Generated from protobuf field <code>repeated string allowed_dns_names = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param string[]|\Google\Protobuf\Internal\RepeatedField $var
     * @return $this
     */
    public function setAllowedDnsNames($var)
    {
        $arr = GPBUtil::checkRepeatedField($var, \Google\Protobuf\Internal\GPBType::STRING);
        $this->allowed_dns_names = $arr;

        return $this;
    }

    /**
     * Optional. Contains valid RFC 3986 URIs. Glob patterns are also supported. To
     * match across path separators (i.e. '/') use the double star glob
     * pattern (i.e. '**').
     *
     * Generated from protobuf field <code>repeated string allowed_uris = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return \Google\Protobuf\Internal\RepeatedField
     */
    public function getAllowedUris()
    {
        return $this->allowed_uris;
    }

    /**
     * Optional. Contains valid RFC 3986 URIs. Glob patterns are also supported. To
     * match across path separators (i.e. '/') use the double star glob
     * pattern (i.e. '**').
     *
     * Generated from protobuf field <code>repeated string allowed_uris = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param string[]|\Google\Protobuf\Internal\RepeatedField $var
     * @return $this
     */
    public function setAllowedUris($var)
    {
        $arr = GPBUtil::checkRepeatedField($var, \Google\Protobuf\Internal\GPBType::STRING);
        $this->allowed_uris = $arr;

        return $this;
    }

    /**
     * Optional. Contains valid RFC 2822 E-mail addresses. Glob patterns are also
     * supported.
     *
     * Generated from protobuf field <code>repeated string allowed_email_addresses = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return \Google\Protobuf\Internal\RepeatedField
     */
    public function getAllowedEmailAddresses()
    {
        return $this->allowed_email_addresses;
    }

    /**
     * Optional. Contains valid RFC 2822 E-mail addresses. Glob patterns are also
     * supported.
     *
     * Generated from protobuf field <code>repeated string allowed_email_addresses = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param string[]|\Google\Protobuf\Internal\RepeatedField $var
     * @return $this
     */
    public function setAllowedEmailAddresses($var)
    {
        $arr = GPBUtil::checkRepeatedField($var, \Google\Protobuf\Internal\GPBType::STRING);
        $this->allowed_email_addresses = $arr;

        return $this;
    }

    /**
     * Optional. Contains valid 32-bit IPv4 addresses and subnet ranges or RFC 4291 IPv6
     * addresses and subnet ranges. Subnet ranges are specified using the
     * '/' notation (e.g. 10.0.0.0/8, 2001:700:300:1800::/64). Glob patterns
     * are supported only for ip address entries (i.e. not for subnet ranges).
     *
     * Generated from protobuf field <code>repeated string allowed_ips = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return \Google\Protobuf\Internal\RepeatedField
     */
    public function getAllowedIps()
    {
        return $this->allowed_ips;
    }

    /**
     * Optional. Contains valid 32-bit IPv4 addresses and subnet ranges or RFC 4291 IPv6
     * addresses and subnet ranges. Subnet ranges are specified using the
     * '/' notation (e.g. 10.0.0.0/8, 2001:700:300:1800::/64). Glob patterns
     * are supported only for ip address entries (i.e. not for subnet ranges).
     *
     * Generated from protobuf field <code>repeated string allowed_ips = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param string[]|\Google\Protobuf\Internal\RepeatedField $var
     * @return $this
     */
    public function setAllowedIps($var)
    {
        $arr = GPBUtil::checkRepeatedField($var, \Google\Protobuf\Internal\GPBType::STRING);
        $this->allowed_ips = $arr;

        return $this;
    }

    /**
     * Optional. Specifies if glob patterns used for [allowed_dns_names][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames.allowed_dns_names] allows
     * wildcard certificates.
     *
     * Generated from protobuf field <code>bool allow_globbing_dns_wildcards = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return bool
     */
    public function getAllowGlobbingDnsWildcards()
    {
        return $this->allow_globbing_dns_wildcards;
    }

    /**
     * Optional. Specifies if glob patterns used for [allowed_dns_names][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames.allowed_dns_names] allows
     * wildcard certificates.
     *
     * Generated from protobuf field <code>bool allow_globbing_dns_wildcards = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param bool $var
     * @return $this
     */
    public function setAllowGlobbingDnsWildcards($var)
    {
        GPBUtil::checkBool($var);
        $this->allow_globbing_dns_wildcards = $var;

        return $this;
    }

    /**
     * Optional. Specifies if to allow custom X509Extension values.
     *
     * Generated from protobuf field <code>bool allow_custom_sans = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @return bool
     */
    public function getAllowCustomSans()
    {
        return $this->allow_custom_sans;
    }

    /**
     * Optional. Specifies if to allow custom X509Extension values.
     *
     * Generated from protobuf field <code>bool allow_custom_sans = 6 [(.google.api.field_behavior) = OPTIONAL];</code>
     * @param bool $var
     * @return $this
     */
    public function setAllowCustomSans($var)
    {
        GPBUtil::checkBool($var);
        $this->allow_custom_sans = $var;

        return $this;
    }

}

// Adding a class alias for backwards compatibility with the previous class name.
class_alias(AllowedSubjectAltNames::class, \Google\Cloud\Security\PrivateCA\V1beta1\CertificateAuthority_CertificateAuthorityPolicy_AllowedSubjectAltNames::class);
| googleapis/google-cloud-php-security-private-ca | src/V1beta1/CertificateAuthority/CertificateAuthorityPolicy/AllowedSubjectAltNames.php | PHP | apache-2.0 | 13,584 |
package com.couchbase.lite;
import com.couchbase.lite.util.Log;
import com.couchbase.touchdb.RevCollator;
import com.couchbase.touchdb.TDCollateJSON;
import com.fasterxml.jackson.databind.ObjectMapper;
import junit.framework.Assert;
public class CollationTest extends LiteTestCase {
// Log tag used for all messages emitted by this test class.
public static String TAG = "Collation";

// Collation modes mirroring the kTDCollateJSON_* constants of the native
// TDCollateJSON implementation; passed straight through to the JNI layer.
private static final int kTDCollateJSON_Unicode = 0;
private static final int kTDCollateJSON_Raw = 1;
private static final int kTDCollateJSON_ASCII = 2;
// create the same JSON encoding used by TouchDB
// this lets us test comparisons as they would be encoded
/**
 * Serializes {@code obj} with Jackson so the bytes match what TouchDB
 * stores, and returns them as a String for use in collation comparisons.
 * Returns null (after logging) if serialization fails.
 */
public String encode(Object obj) {
    ObjectMapper mapper = new ObjectMapper();
    try {
        byte[] bytes = mapper.writeValueAsBytes(obj);
        // NOTE(review): new String(bytes) uses the platform default
        // charset; presumably UTF-8 on Android -- confirm before relying
        // on non-ASCII round-trips.
        String result = new String(bytes);
        return result;
    } catch (Exception e) {
        Log.e(TAG, "Error encoding JSON", e);
        return null;
    }
}
/**
 * Unicode-mode collation of scalar JSON values: booleans order after
 * null, numbers compare numerically (leading zeros ignored), strings
 * compare case-insensitively with lowercase first.
 */
public void testCollateScalars() {
    int mode = kTDCollateJSON_Unicode;
    Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "true", "false"));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "false", "true"));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "null", "17"));
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "1", "1"));
    Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "123", "1"));
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "123", "0123.0"));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "123", "\"123\""));
    Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "\"1234\"", "\"123\""));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "\"123\"", "\"1234\""));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "\"1234\"", "\"1235\""));
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "\"1234\"", "\"1234\""));
    // Escaped slashes collate identically to unescaped ones.
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "\"12\\/34\"", "\"12/34\""));
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "\"\\/1234\"", "\"/1234\""));
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "\"1234\\/\"", "\"1234/\""));
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "123", "00000000000000000000000000000000000000000000000000123"));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "\"a\"", "\"A\""));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "\"A\"", "\"aa\""));
    Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "\"B\"", "\"aa\""));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "\"~\"", "\"A\""));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "\"_\"", "\"A\""));
}

/**
 * ASCII-mode collation: like Unicode mode except strings compare by raw
 * ASCII code point, so uppercase sorts before lowercase.
 */
public void testCollateASCII() {
    int mode = kTDCollateJSON_ASCII;
    Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "true", "false"));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "false", "true"));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "null", "17"));
    Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "123", "1"));
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "123", "0123.0"));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "123", "\"123\""));
    Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "\"1234\"", "\"123\""));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "\"1234\"", "\"1235\""));
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "\"1234\"", "\"1234\""));
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "\"12\\/34\"", "\"12/34\""));
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "\"\\/1234\"", "\"/1234\""));
    Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "\"1234\\/\"", "\"1234/\""));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "\"A\"", "\"a\""));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "\"B\"", "\"a\""));
}

/**
 * Raw-mode collation: compares the raw JSON byte sequences with type
 * tags, giving a total order that differs from the JSON-aware modes.
 */
public void testCollateRaw() {
    int mode = kTDCollateJSON_Raw;
    Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "false", "17"));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "false", "true"));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "null", "true"));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "[\"A\"]", "\"A\""));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "\"A\"", "\"a\""));
    Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "[\"b\"]", "[\"b\",\"c\",\"a\"]"));
}
public void testCollateArrays() {
int mode = kTDCollateJSON_Unicode;
Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "[]", "\"foo\""));
Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "[]", "[]"));
Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, "[true]", "[true]"));
Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "[false]", "[null]"));
Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "[]", "[null]"));
Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "[123]", "[45]"));
Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "[123]", "[45,67]"));
Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "[123.4,\"wow\"]", "[123.40,789]"));
}
public void testCollateNestedArray() {
int mode = kTDCollateJSON_Unicode;
Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, "[[]]", "[]"));
Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, "[1,[2,3],4]", "[1,[2,3.1],4,5,6]"));
}
public void testCollateUnicodeStrings() {
int mode = kTDCollateJSON_Unicode;
Assert.assertEquals(0, TDCollateJSON.testCollateJSONWrapper(mode, encode("fr�d"), encode("fr�d")));
// Assert.assertEquals(1, TDCollateJSON.testCollateJSONWrapper(mode, encode("�m�"), encode("omo")));
Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, encode("\t"), encode(" ")));
Assert.assertEquals(-1, TDCollateJSON.testCollateJSONWrapper(mode, encode("\001"), encode(" ")));
}
    /**
     * Verifies the native escape-sequence converter: two-character JSON
     * escapes ("\\", "\t") and four-digit \\uXXXX escapes must yield the
     * corresponding single character code.
     */
    public void testConvertEscape() {
        Assert.assertEquals('\\', TDCollateJSON.testEscape("\\\\"));
        Assert.assertEquals('\t', TDCollateJSON.testEscape("\\t"));
        Assert.assertEquals('E', TDCollateJSON.testEscape("\\u0045"));
        Assert.assertEquals(1, TDCollateJSON.testEscape("\\u0001"));
        Assert.assertEquals(0, TDCollateJSON.testEscape("\\u0000"));
    }
    /**
     * Verifies the native hex-digit converter: decimal digits map to their
     * value and hex letters are case-insensitive ('c' and 'C' both -> 0xc).
     */
    public void testDigitToInt() {
        Assert.assertEquals(1, TDCollateJSON.testDigitToInt('1'));
        Assert.assertEquals(7, TDCollateJSON.testDigitToInt('7'));
        Assert.assertEquals(0xc, TDCollateJSON.testDigitToInt('c'));
        Assert.assertEquals(0xc, TDCollateJSON.testDigitToInt('C'));
    }
public void testCollateRevIds() {
Assert.assertEquals(RevCollator.testCollateRevIds("1-foo", "1-foo"), 0);
Assert.assertEquals(RevCollator.testCollateRevIds("2-bar", "1-foo"), 1);
Assert.assertEquals(RevCollator.testCollateRevIds("1-foo", "2-bar"), -1);
// Multi-digit:
Assert.assertEquals(RevCollator.testCollateRevIds("123-bar", "456-foo"), -1);
Assert.assertEquals(RevCollator.testCollateRevIds("456-foo", "123-bar"), 1);
Assert.assertEquals(RevCollator.testCollateRevIds("456-foo", "456-foo"), 0);
Assert.assertEquals(RevCollator.testCollateRevIds("456-foo", "456-foofoo"), -1);
// Different numbers of digits:
Assert.assertEquals(RevCollator.testCollateRevIds("89-foo", "123-bar"), -1);
Assert.assertEquals(RevCollator.testCollateRevIds("123-bar", "89-foo"), 1);
// Edge cases:
Assert.assertEquals(RevCollator.testCollateRevIds("123-", "89-"), 1);
Assert.assertEquals(RevCollator.testCollateRevIds("123-a", "123-a"), 0);
// Invalid rev IDs:
Assert.assertEquals(RevCollator.testCollateRevIds("-a", "-b"), -1);
Assert.assertEquals(RevCollator.testCollateRevIds("-", "-"), 0);
Assert.assertEquals(RevCollator.testCollateRevIds("", ""), 0);
Assert.assertEquals(RevCollator.testCollateRevIds("", "-b"), -1);
Assert.assertEquals(RevCollator.testCollateRevIds("bogus", "yo"), -1);
Assert.assertEquals(RevCollator.testCollateRevIds("bogus-x", "yo-y"), -1);
}
}
| cesine/couchbase-lite-android | src/androidTest/java/com/couchbase/lite/CollationTest.java | Java | apache-2.0 | 9,058 |
<?php
/**
* Attachment form.
*
* @package form
* @subpackage attachment
* @version SVN: $Id: AttachmentForm.class.php 24068 2009-11-17 06:39:35Z Kris.Wallsmith $
*/
class AttachmentForm extends BaseAttachmentForm
{
  /**
   * Registers an upload widget plus a matching file validator for "file".
   *
   * Uploads are written beneath the Symfony cache directory, and mime-type
   * guessing is disabled by supplying an empty guesser list.
   */
  public function configure()
  {
    $fileWidget = new sfWidgetFormInputFile();

    $validatorOptions = array(
      'path' => sfConfig::get('sf_cache_dir'),
      'mime_type_guessers' => array(),
    );

    $this->widgetSchema['file']    = $fileWidget;
    $this->validatorSchema['file'] = new sfValidatorFile($validatorOptions);
  }
}
| kamguir/salleSport | plugins/sfPropelORMPlugin/test/functional/fixtures/lib/form/AttachmentForm.class.php | PHP | apache-2.0 | 509 |
/*
* Copyright 2021 Apollo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.ctrip.framework.apollo.core.schedule;
/**
* @author Jason Song(song_s@ctrip.com)
*/
public class ExponentialSchedulePolicy implements SchedulePolicy {
  /** Delay used for the first failure. */
  private final long delayTimeLowerBound;
  /** Cap applied to the exponentially growing delay. */
  private final long delayTimeUpperBound;
  /** Delay returned by the most recent {@link #fail()}; 0 after success. */
  private long lastDelayTime;

  public ExponentialSchedulePolicy(long delayTimeLowerBound, long delayTimeUpperBound) {
    this.delayTimeLowerBound = delayTimeLowerBound;
    this.delayTimeUpperBound = delayTimeUpperBound;
  }

  /**
   * Records a failure and returns the next delay: the lower bound on the
   * first failure, then double the previous delay, capped at the upper bound.
   */
  @Override
  public long fail() {
    lastDelayTime = (lastDelayTime == 0)
        ? delayTimeLowerBound
        : Math.min(lastDelayTime << 1, delayTimeUpperBound);
    return lastDelayTime;
  }

  /** Resets the backoff so the next failure starts again at the lower bound. */
  @Override
  public void success() {
    lastDelayTime = 0;
  }
}
| ctripcorp/apollo | apollo-core/src/main/java/com/ctrip/framework/apollo/core/schedule/ExponentialSchedulePolicy.java | Java | apache-2.0 | 1,428 |
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2014 - 2017 Board of Regents of the University of
* Wisconsin-Madison, University of Konstanz and Brian Northan.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.ops.special;
/**
 * Abstract base class for {@link BinaryOp} implementations.
 * <p>
 * All concrete behavior is inherited from {@link AbstractUnaryOp}; the second
 * input is handled entirely through the {@link BinaryOp} interface, so no
 * additional implementation is required here.
 *
 * @param <I1> type of the first input
 * @param <I2> type of the second input
 * @param <O> type of the output
 * @author Curtis Rueden
 */
public abstract class AbstractBinaryOp<I1, I2, O> extends
	AbstractUnaryOp<I1, O> implements BinaryOp<I1, I2, O>
{
	// NB: No implementation needed.
}
| gab1one/imagej-ops | src/main/java/net/imagej/ops/special/AbstractBinaryOp.java | Java | bsd-2-clause | 1,823 |
cask "spark-ar-studio" do
  version "127"
  # The vendor URL below is unversioned and always serves the latest build,
  # so no stable checksum can be pinned.
  sha256 :no_check
  url "https://www.facebook.com/sparkarmacos/download/"
  name "Spark AR Studio"
  desc "Create and share augmented reality experiences using the Facebook family of apps"
  homepage "https://sparkar.facebook.com/ar-studio/"
  # New versions are discovered from the redirect's Location header, parsing
  # the build number out of the .dmg filename (e.g. SparkARStudio.v127.dmg).
  livecheck do
    url :url
    strategy :header_match do |headers|
      headers["location"][%r{/SparkARStudio[._-]?v?(\d+)\.dmg}i, 1]
    end
  end
  app "Spark AR Studio.app"
  # Leftover caches/preferences/saved state removed by `brew uninstall --zap`.
  zap trash: [
    "~/Library/Caches/com.facebook.arstudio.skylight",
    "~/Library/Preferences/com.facebook.arstudio.skylight.plist",
    "~/Library/Saved Application State/com.facebook.arstudio.skylight.savedState",
  ]
end
| scottsuch/homebrew-cask | Casks/spark-ar-studio.rb | Ruby | bsd-2-clause | 694 |
package tonegod.gui.core;
import tonegod.gui.style.Style;
import com.jme3.app.Application;
import com.jme3.collision.CollisionResult;
import com.jme3.collision.CollisionResults;
import com.jme3.export.JmeExporter;
import com.jme3.export.JmeImporter;
import com.jme3.font.BitmapFont;
import com.jme3.input.KeyInput;
import com.jme3.input.event.KeyInputEvent;
import com.jme3.input.event.MouseButtonEvent;
import com.jme3.input.event.MouseMotionEvent;
import com.jme3.input.event.TouchEvent;
import com.jme3.material.Material;
import com.jme3.math.Ray;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.math.Vector4f;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import com.jme3.scene.control.Control;
import com.jme3.texture.Texture;
import java.io.IOException;
import java.text.ParseException;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.logging.Level;
import java.util.logging.Logger;
import tonegod.gui.controls.form.Form;
import tonegod.gui.controls.lists.ComboBox;
import tonegod.gui.controls.menuing.AutoHide;
import tonegod.gui.controls.menuing.Menu;
import tonegod.gui.controls.text.TextField;
import tonegod.gui.controls.util.ModalBackground;
import tonegod.gui.controls.util.ToolTip;
import tonegod.gui.core.Element.Borders;
import tonegod.gui.core.utils.ScaleUtil;
import tonegod.gui.style.StyleManager.CursorType;
import tonegod.gui.core.utils.UIDUtil;
import tonegod.gui.effects.EffectManager;
import tonegod.gui.effects.cursor.CursorEffects;
import tonegod.gui.framework.core.AnimElement;
import tonegod.gui.framework.core.AnimLayer;
import tonegod.gui.framework.core.AnimManager;
import tonegod.gui.framework.core.QuadData;
import tonegod.gui.listeners.*;
/**
*
* @author t0neg0d
*/
public class SubScreen implements ElementManager, Control {
    // Identity and wiring to the parent Screen / offscreen render bridge.
    private String UID;
    private Screen screen;
    private Application app;
    private SubScreenBridge bridge;
    private Geometry geom;
    protected Spatial spatial;
    // Top-level Elements managed by this sub-screen, keyed by UID.
    private Map<String, Element> elements = new HashMap();
    // Picking helpers reused across events to avoid per-event allocation.
    private Ray elementZOrderRay = new Ray();
    private Vector3f guiRayOrigin = new Vector3f();
    private Vector2f tempElementOffset = new Vector2f();
    // Per-pointer (multi-touch) tracking maps, keyed by Android pointer id.
    private Map<Integer,Vector2f> elementOffsets = new HashMap();
    private Map<Integer,Element> contactElements = new HashMap();
    private Map<Integer,Element> eventElements = new HashMap();
    private Map<Integer,Borders> eventElementResizeDirections = new HashMap();
    // Single-pointer (desktop mouse) event state.
    private Element eventElement = null;
    private Element targetElement = null;
    private Element keyboardElement = null;
    private Element tabFocusElement = null;
    private Form focusForm = null;
    private Vector2f eventElementOriginXY = new Vector2f();
    private float eventElementOffsetX = 0;
    private float eventElementOffsetY = 0;
    private float targetElementOffsetX = 0;
    private float targetElementOffsetY = 0;
    private Borders eventElementResizeDirection = null;
    private Element mouseFocusElement = null;
    private Element contactElement = null;
    private Element previousMouseFocusElement = null;
    private boolean focusElementIsMovable = false;
    // Current button/press state.
    private boolean mousePressed = false;
    private boolean mouseLeftPressed = false;
    private boolean mouseRightPressed = false;
    private boolean mouseWheelPressed = false;
    // Z-order bookkeeping: current top value plus major (per-window) and
    // minor (per-child) increments.
    private float zOrderCurrent = .5f;
    private float zOrderStepMajor = .01f;
    private float zOrderStepMinor = 0.0001f;
    private String clipboardText = "";
    // Root node all managed Elements are attached to.
    protected Node subScreenNode = new Node("t0neg0dGUI");
    private Material mat;
    private Vector2f mouseXY = new Vector2f(0,0);
    // Keyboard modifier state tracked from key events.
    private boolean SHIFT = false;
    private boolean CTRL = false;
    private boolean ALT = false;
    private ElementQuadGrid mesh;
    // AnimLayer & 2D framework support
    private Map<String, AnimLayer> layers = new LinkedHashMap();
    private float layerZOrderCurrent = .4999f;
    private AnimElement eventAnimElement = null;
    private QuadData eventQuad = null;
    private AnimElement targetAnimElement = null;
    private QuadData targetQuad = null;
    private AnimElement mouseFocusAnimElement = null;
    private AnimElement previousMouseFocusAnimElement = null;
    // NOTE(review): declared as AnimElement although the name suggests a
    // QuadData -- confirm against the rest of the class before changing.
    private AnimElement mouseFocusQuad = null;
    private float eventAnimOffsetX = 0;
    private float eventAnimOffsetY = 0;
    private float eventQuadOffsetX = 0;
    private float eventQuadOffsetY = 0;
    /**
     * Creates an instance of the SubScreen control with a generated UID.
     *
     * @param screen The Application Screen class
     * @param geom The Geometry the sub-screen's render texture is applied to
     */
    public SubScreen(Screen screen, Geometry geom) {
        this(screen, UIDUtil.getUID(), geom);
    }
    /**
     * Creates an instance of the SubScreen control.
     *
     * @param screen The Application Screen class
     * @param UID A Unique String ID for the SubScreen
     * @param geom The Geometry the sub-screen's render texture is applied to
     */
    public SubScreen(Screen screen, String UID, Geometry geom) {
        this.UID = UID;
        this.screen = screen;
        this.app = screen.getApplication();
        this.geom = geom;
        // The z-order picking ray always points along +Z.
        this.elementZOrderRay.setDirection(Vector3f.UNIT_Z);
    }
    /** @return the unique String ID assigned to this SubScreen */
    public String getUID() {
        return this.UID;
    }
    /**
     * Builds the offscreen bridge that renders the given root Node to a
     * texture, attaches it to the parent Screen's GUI node, and applies the
     * resulting texture to this SubScreen's Geometry via an Unshaded material.
     *
     * @param width render-target width in pixels
     * @param height render-target height in pixels
     * @param root scene root to render into the offscreen texture
     */
    public void setSubScreenBridge(int width, int height, Node root) {
        root.addControl(this);
        this.bridge = new SubScreenBridge(app.getRenderManager(), width, height, root);
        screen.getGUINode().addControl(this.bridge);
        mat = new Material(screen.getApplication().getAssetManager(), "Common/MatDefs/Misc/Unshaded.j3md");
        mat.setTexture("ColorMap", this.bridge.getTexture());
        geom.setMaterial(mat);
    }
    /** @return the Unshaded material displaying the sub-screen's render texture */
    public Material getMaterial() {
        return mat;
    }
    /** @return the offscreen render texture produced by the bridge */
    public Texture getTexture() {
        return getSubScreenBridge().getTexture();
    }
    /** @return the Geometry this sub-screen is mapped onto */
    public Geometry getGeometry() {
        return this.geom;
    }
    /** @return the offscreen bridge, or null until setSubScreenBridge is called */
    public SubScreenBridge getSubScreenBridge() {
        return this.bridge;
    }
    /** @return the default ElementQuadGrid (never assigned in this class -- may be null) */
    public ElementQuadGrid getDefaultMesh() {
        return mesh;
    }
    /**
     * Returns the JME application associated with the Screen
     * @return Application app
     */
    @Override
    public Application getApplication() {
        return this.app;
    }
    /**
     * Return the width of the current Viewport
     *
     * @return float width
     */
    @Override
    public float getWidth() {
        return bridge.getCamera().getWidth();
    }
    /**
     * Returns the height of the current Viewport
     *
     * @return float height
     */
    @Override
    public float getHeight() {
        return bridge.getCamera().getHeight();
    }
    // Control callbacks: this sub-screen performs no per-frame or render work.
    @Override
    public void update(float tpf) { }
    @Override
    public void render(RenderManager rm, ViewPort vp) { }
/**
* Adds an Element to the Screen and scene graph
* @param element The Element to add
*/
@Override
public void addElement(Element element) {
if (element instanceof AutoHide)
element.hide();
if (getElementById(element.getUID()) != null) {
// try {
// throw new ConflictingIDException();
// } catch (ConflictingIDException ex) {
// Logger.getLogger(Element.class.getName()).log(Level.SEVERE, "The child element '" + element.getUID() + "' (" + element.getClass() + ") conflicts with a previously added child element in parent Screen.", ex);
// System.exit(0);
// }
} else {
elements.put(element.getUID(), element);
if (!element.getInitialized()) {
element.setY(getHeight()-element.getHeight()-element.getY());
element.orgPosition = element.getPosition().clone();
element.orgPosition.setY(element.getY());
element.setInitialized();
}
subScreenNode.attachChild(element);
// Set initla z-order
getNextZOrder(true);
// element.initZOrder(zOrderCurrent);
element.resize(element.getX()+element.getWidth(), element.getY()+element.getHeight(), Borders.SE);
}
}
    /**
     * Adds an Element to the Screen and scene graph.
     *
     * @param element The Element to add
     * @param hide true to hide the Element immediately after adding it
     */
    @Override
    public void addElement(Element element, boolean hide) {
        // AutoHide elements (menus, tool tips) always start hidden.
        if (element instanceof AutoHide)
            element.hide();
        if (getElementById(element.getUID()) != null) {
            // Duplicate UID: silently ignored.  The original hard-exit on
            // conflict is kept below for reference.
        //  try {
        //      throw new ConflictingIDException();
        //  } catch (ConflictingIDException ex) {
        //      Logger.getLogger(Element.class.getName()).log(Level.SEVERE, "The child element '" + element.getUID() + "' (" + element.getClass() + ") conflicts with a previously added child element in parent Screen.", ex);
        //      System.exit(0);
        //  }
        } else {
            elements.put(element.getUID(), element);
            if (!element.getInitialized()) {
                // Convert the element's Y from top-left screen space to the
                // scene-graph coordinate system, then record its origin.
                element.setY(getHeight()-element.getHeight()-element.getY());
                element.orgPosition = element.getPosition().clone();
                element.orgPosition.setY(element.getY());
                element.setInitialized();
            }
            subScreenNode.attachChild(element);
            // Set initial z-order
            getNextZOrder(true);
        //  element.initZOrder(zOrderCurrent);
            element.resize(element.getX()+element.getWidth(), element.getY()+element.getHeight(), Borders.SE);
            if (hide)
                element.hide();
        }
    }
    /**
     * Removes an Element from the Screen and scene graph
     * @param element The Element to remove
     */
    @Override
    public void removeElement(Element element) {
        elements.remove(element.getUID());
        // Close the z-order gap: shift every remaining non-ToolTip element
        // that sat above the removed element down by one major step, then
        // lower the current top-of-stack value to match.
        float shiftZ = element.getLocalTranslation().getZ();
        for (Element el : elements.values()) {
            if (!(el instanceof ToolTip)) {
                if (el.getLocalTranslation().getZ() > shiftZ) {
                    el.move(0,0,-zOrderStepMajor);
                }
            }
        }
        zOrderCurrent -= zOrderStepMajor;
        element.removeFromParent();
        element.cleanup();
    }
/**
* Returns the Element with the associated ID. If not found, returns null
* @param UID The String ID of Element to find
* @return Element element
*/
@Override
public Element getElementById(String UID) {
Element ret = null;
if (elements.containsKey(UID)) {
ret = elements.get(UID);
} else {
for (Element el : elements.values()) {
ret = el.getChildElementById(UID);
if (ret != null) {
break;
}
}
}
return ret;
}
    /**
     * Returns the guiNode used by the Screen
     * @return Node
     */
    @Override
    public Node getGUINode() {
        return subScreenNode;
    }
    /** Delegates to the parent Screen's texture atlas. */
    @Override
    public Texture getAtlasTexture() { return screen.getAtlasTexture(); }
    /**
     * Loads a texture configured for UI use: bilinear filtering without
     * mip-maps and Repeat wrap mode.
     *
     * @param texturePath asset path of the texture to load
     * @return the configured Texture
     */
    @Override
    public Texture createNewTexture(String texturePath) {
        Texture newTex = app.getAssetManager().loadTexture(texturePath);
        newTex.setMinFilter(Texture.MinFilter.BilinearNoMipMaps);
        newTex.setMagFilter(Texture.MagFilter.Bilinear);
        newTex.setWrap(Texture.WrapMode.Repeat);
        return newTex;
    }
@Override
public float[] parseAtlasCoords(String texturePath) {
float[] coords = new float[4];
if (texturePath != null) {
StringTokenizer st = new StringTokenizer(texturePath, "|");
if (st.countTokens() == 4) {
try {
String token = st.nextToken();
coords[0] = Float.parseFloat(token.substring(token.indexOf('=')+1));
token = st.nextToken();
coords[1] = Float.parseFloat(token.substring(token.indexOf('=')+1));
token = st.nextToken();
coords[2] = Float.parseFloat(token.substring(token.indexOf('=')+1));
token = st.nextToken();
coords[3] = Float.parseFloat(token.substring(token.indexOf('=')+1));
} catch (Exception ex) { throwParserException(); }
} else throwParserException();
}
return coords;
}
    // Logs (but deliberately does not propagate) a ParseException describing
    // the expected atlas-coordinate format.  NOTE(review): because the
    // exception is caught and only logged, callers continue with whatever
    // coordinates were parsed so far -- confirm that is intended.
    private void throwParserException() {
        try {
            throw new java.text.ParseException("The provided texture information does not conform to the expected standard of ?x=(int)&y=(int)&w=(int)&h=(int)", 0);
        } catch (ParseException ex) {
            Logger.getLogger(Screen.class.getName()).log(Level.SEVERE, "The provided texture information does not conform to the expected standard of ?x=(int)&y=(int)&w=(int)&h=(int)", ex);
        }
    }
// Z-ORDER
/**
* Returns the next available z-order
* @param stepMajor Return the z-order incremented by a major step if true, a minor step if false
* @return float zOrder
*/
public float getNextZOrder(boolean stepMajor) {
if (stepMajor)
zOrderCurrent += zOrderStepMajor;
else
zOrderCurrent += zOrderStepMinor;
return zOrderCurrent;
}
    /**
     * Brings the element specified to the front of the zOrder list shifting other below to keep all
     * Elements within the current z-order range.
     *
     * @param topMost The Element to bring to the front
     */
    @Override
    public void updateZOrder(Element topMost) {
    //  zOrderCurrent = zOrderInit;
        // NOTE(review): topMostUID is computed but never used.
        String topMostUID = topMost.getUID();
        float shiftZ = topMost.getLocalTranslation().getZ();
        for (Element el : elements.values()) {
            // Modal elements freeze the ordering: when the raised element is
            // (globally) modal nothing is shifted; otherwise only non-modal
            // elements above it move down one major step.
            if (topMost.getIsGlobalModal()) { }
            else if (topMost.getIsModal()) { }
            else {
                if (!el.getIsGlobalModal() && !el.getIsModal()) {
                    if (el.getLocalTranslation().getZ() > shiftZ) {
                        el.move(0,0,-zOrderStepMajor);
                    }
                }
            }
        }
        // Finally place the raised element at the current top-of-stack Z.
        topMost.setLocalTranslation(topMost.getLocalTranslation().setZ(Float.valueOf(zOrderCurrent)));
    }
    /**
     * Returns the zOrder major step value
     * @return float
     */
    @Override
    public float getZOrderStepMajor() {
        return this.zOrderStepMajor;
    }
    /**
     * Returns the zOrder minor step value
     * @return float
     */
    @Override
    public float getZOrderStepMinor() {
        return this.zOrderStepMinor;
    }
    /**
     * Stores the current mouse position as a Vector2f
     * @param x The mouse's current X coord
     * @param y The mouse's current Y coord
     */
    private void setMouseXY(float x, float y) {
        mouseXY.set(x, y);
    }
    /**
     * Returns a Vector2f containing the last stored mouse X/Y coords
     * @return Vector2f mouseXY
     */
    @Override
    public Vector2f getMouseXY() {
        return this.mouseXY;
    }
    /** Delegates to the parent Screen's last stored touch coordinates. */
    @Override
    public Vector2f getTouchXY() {
        return this.screen.getTouchXY();
    }
    // Raw Input handlers
    // public void beginInput() { }
    // public void endInput() { }
    // public void onJoyAxisEvent(JoyAxisEvent evt) { }
    // public void onJoyButtonEvent(JoyButtonEvent evt) { }
    /**
     * Handles mouse motion.  While no button is held, tracks which Element is
     * under the cursor, fires focus gain/loss callbacks on the change, and
     * forwards wheel and move events to it.  While a button is held, the
     * event Element captured on press is resized or dragged instead.
     *
     * @param oldEvt the raw event (consumed when a wheel listener handles it)
     * @param evt the event translated into this sub-screen's coordinates
     */
    public void onMouseMotionEvent(MouseMotionEvent oldEvt, MouseMotionEvent evt) {
        setMouseXY(evt.getX(),evt.getY());
        if (!mousePressed) {
            // Hover handling: detect focus changes and notify both elements.
            mouseFocusElement = getEventElement(evt.getX(), evt.getY());
            if (mouseFocusElement != previousMouseFocusElement) {
                if (previousMouseFocusElement instanceof MouseFocusListener) {
                    ((MouseFocusListener)previousMouseFocusElement).onLoseFocus(evt);
                }
                if (mouseFocusElement instanceof MouseFocusListener) {
                    ((MouseFocusListener)mouseFocusElement).onGetFocus(evt);
                }
                previousMouseFocusElement = mouseFocusElement;
            }
            if (mouseFocusElement != null) {
                focusElementIsMovable = mouseFocusElement.getIsMovable();
                // Wheel delta > 0 scrolls down, < 0 scrolls up.
                if (mouseFocusElement instanceof MouseWheelListener) {
                    if (evt.getDeltaWheel() > 0) {
                        ((MouseWheelListener)mouseFocusElement).onMouseWheelDown(evt);
                    } else if (evt.getDeltaWheel() < 0) {
                        ((MouseWheelListener)mouseFocusElement).onMouseWheelUp(evt);
                    }
                    oldEvt.setConsumed();
                }
            }
            if (mouseFocusElement instanceof MouseMovementListener) {
                ((MouseMovementListener)mouseFocusElement).onMouseMove(evt);
            }
        } else {
            // Drag handling: resize takes priority over move.
            if (eventElement != null) {
                if (mouseLeftPressed) {
                    focusElementIsMovable = contactElement.getIsMovable();
                    if (eventElementResizeDirection != null) {
                        eventElement.resize(evt.getX(), evt.getY(), eventElementResizeDirection);
                    } else if (focusElementIsMovable) {
                        eventElement.moveTo(evt.getX()-eventElementOffsetX, evt.getY()-eventElementOffsetY);
                    }
                }
                if (eventElement instanceof MouseMovementListener) {
                    ((MouseMovementListener)eventElement).onMouseMove(evt);
                }
            }
        }
    }
    /**
     * Handles mouse button presses and releases.  On press the Element under
     * the cursor becomes the event Element: its z-order is raised, keyboard
     * focus is transferred or cleared, and -- for resizable elements -- the
     * resize border under the cursor is resolved to a compass direction used
     * by subsequent motion events.  On release the drag/resize state is
     * cleared and the matching released callbacks fire.  Button indices:
     * 0 = left, 1 = right, 2 = wheel.
     *
     * @param oldEvt the raw event (consumed together with evt when handled)
     * @param evt the event translated into this sub-screen's coordinates
     */
    public void onMouseButtonEvent(MouseButtonEvent oldEvt, MouseButtonEvent evt) {
        if (evt.isPressed()) {
            mousePressed = true;
            eventElement = getEventElement(evt.getX(), evt.getY());
            // Clicking empty space (or an element that resets focus) drops tab focus.
            if (eventElement != null) {
                if (eventElement.getResetKeyboardFocus())
                    resetTabFocusElement();
            } else
                resetTabFocusElement();
            switch (evt.getButtonIndex()) {
                case 0:
                    mouseLeftPressed = true;
                //  eventElement = getEventElement(evt.getX(), evt.getY());
                    if (eventElement != null) {
                        if (eventElement.getEffectZOrder())
                            updateZOrder(eventElement.getAbsoluteParent());
                        if (eventElement.getResetKeyboardFocus())
                            this.setTabFocusElement(eventElement);
                        if (eventElement.getIsDragDropDragElement())
                            targetElement = null;
                        if (eventElement.getIsResizable()) {
                            // Map the cursor position within the resize
                            // borders to a compass direction (NW/N/NE/...).
                            float offsetX = evt.getX();
                            float offsetY = evt.getY();
                            Element el = eventElement;
                            if (offsetX > el.getAbsoluteX() && offsetX < el.getAbsoluteX()+el.getResizeBorderWestSize()) {
                                if (offsetY > el.getAbsoluteY() && offsetY < el.getAbsoluteY()+el.getResizeBorderNorthSize()) {
                                    eventElementResizeDirection = Borders.NW;
                                } else if (offsetY > (el.getAbsoluteHeight()-el.getResizeBorderSouthSize()) && offsetY < el.getAbsoluteHeight()) {
                                    eventElementResizeDirection = Borders.SW;
                                } else {
                                    eventElementResizeDirection = Borders.W;
                                }
                            } else if (offsetX > (el.getAbsoluteWidth()-el.getResizeBorderEastSize()) && offsetX < el.getAbsoluteWidth()) {
                                if (offsetY > el.getAbsoluteY() && offsetY < el.getAbsoluteY()+el.getResizeBorderNorthSize()) {
                                    eventElementResizeDirection = Borders.NE;
                                } else if (offsetY > (el.getAbsoluteHeight()-el.getResizeBorderSouthSize()) && offsetY < el.getAbsoluteHeight()) {
                                    eventElementResizeDirection = Borders.SE;
                                } else {
                                    eventElementResizeDirection = Borders.E;
                                }
                            } else {
                                if (offsetY > el.getAbsoluteY() && offsetY < el.getAbsoluteY()+el.getResizeBorderNorthSize()) {
                                    eventElementResizeDirection = Borders.N;
                                } else if (offsetY > (el.getAbsoluteHeight()-el.getResizeBorderSouthSize()) && offsetY < el.getAbsoluteHeight()) {
                                    eventElementResizeDirection = Borders.S;
                                }
                            }
                            if (keyboardElement != null && eventElement.getResetKeyboardFocus()) {
                                if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
                            }
                            if (eventElement.getResetKeyboardFocus())
                                keyboardElement = null;
                        } else if (eventElement.getIsMovable() && eventElementResizeDirection == null) {
                            // Movable: remember the origin for possible drag.
                            eventElementResizeDirection = null;
                            if (keyboardElement != null && eventElement.getResetKeyboardFocus()) {
                                if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
                            }
                            if (eventElement.getResetKeyboardFocus())
                                keyboardElement = null;
                            eventElementOriginXY.set(eventElement.getPosition());
                        } else if (eventElement instanceof KeyboardListener) {
                            // Keyboard-capable element: transfer key focus.
                            if (keyboardElement != null && eventElement.getResetKeyboardFocus()) {
                                if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
                            }
                            if (eventElement.getResetKeyboardFocus())
                                keyboardElement = eventElement;
                            if (keyboardElement instanceof TextField) {
                                ((TextField)keyboardElement).setTabFocus();
                                if (Screen.isAndroid()) screen.showVirtualKeyboard();
                            //  ((TextField)keyboardElement).setCaretPositionByX(evt.getX());
                            }
                            // TODO: Update target element's font shader
                        } else {
                            eventElementResizeDirection = null;
                            if (keyboardElement != null && eventElement.getResetKeyboardFocus()) {
                                if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
                            }
                            if (eventElement.getResetKeyboardFocus())
                                keyboardElement = null;
                        }
                        if (eventElement instanceof MouseButtonListener) {
                            ((MouseButtonListener)eventElement).onMouseLeftPressed(evt);
                        }
                        if (keyboardElement == null)
                            if (Screen.isAndroid()) screen.hideVirtualKeyboard();
                        evt.setConsumed();
                        oldEvt.setConsumed();
                    } else {
                        if (keyboardElement == null)
                            if (Screen.isAndroid()) screen.hideVirtualKeyboard();
                    }
                    break;
                case 1:
                    mouseRightPressed = true;
                //  eventElement = getEventElement(evt.getX(), evt.getY());
                    if (eventElement != null) {
                        if (eventElement.getEffectZOrder())
                            updateZOrder(eventElement.getAbsoluteParent());
                        if (eventElement instanceof MouseButtonListener) {
                            ((MouseButtonListener)eventElement).onMouseRightPressed(evt);
                        }
                        evt.setConsumed();
                        oldEvt.setConsumed();
                    }
                    break;
                case 2:
                    mouseWheelPressed = true;
                //  eventElement = getEventElement(evt.getX(), evt.getY());
                    if (eventElement != null) {
                        if (eventElement instanceof MouseWheelListener) {
                            ((MouseWheelListener)eventElement).onMouseWheelPressed(evt);
                        }
                        evt.setConsumed();
                        oldEvt.setConsumed();
                    }
                    break;
            }
        } else if (evt.isReleased()) {
            handleMenuState();
            switch (evt.getButtonIndex()) {
                case 0:
                    mouseLeftPressed = false;
                    eventElementResizeDirection = null;
                    // Resolve the drag-drop target under the release point.
                //  if (eventElement.getIsDragDropDragElement())
                        targetElement = getTargetElement(evt.getX(), evt.getY());
                    if (eventElement instanceof MouseButtonListener) {
                        ((MouseButtonListener)eventElement).onMouseLeftReleased(evt);
                    }
                    if (eventElement != null) {
                        evt.setConsumed();
                        oldEvt.setConsumed();
                    }
                    break;
                case 1:
                    mouseRightPressed = false;
                    if (eventElement instanceof MouseButtonListener) {
                        ((MouseButtonListener)eventElement).onMouseRightReleased(evt);
                    }
                    if (eventElement != null) {
                        evt.setConsumed();
                        oldEvt.setConsumed();
                    }
                    break;
                case 2:
                    mouseWheelPressed = false;
                    if (eventElement instanceof MouseWheelListener) {
                        ((MouseWheelListener)eventElement).onMouseWheelReleased(evt);
                    }
                    if (eventElement != null) {
                        evt.setConsumed();
                        oldEvt.setConsumed();
                    }
                    break;
            }
            mousePressed = false;
            eventElement = null;
        }
    }
@Override
public void onKeyEvent(KeyInputEvent evt) {
if (evt.getKeyCode() == KeyInput.KEY_LSHIFT || evt.getKeyCode() == KeyInput.KEY_RSHIFT) {
if (evt.isPressed()) SHIFT = true;
else SHIFT = false;
}
if (evt.getKeyCode() == KeyInput.KEY_LCONTROL || evt.getKeyCode() == KeyInput.KEY_RCONTROL) {
if (evt.isPressed()) CTRL = true;
else CTRL = false;
}
if (evt.getKeyCode() == KeyInput.KEY_LMENU || evt.getKeyCode() == KeyInput.KEY_RMENU) {
if (evt.isPressed()) ALT = true;
else ALT = false;
}
if (evt.getKeyCode() == KeyInput.KEY_TAB && evt.isPressed()) {
if (focusForm != null) {
if (!SHIFT) focusForm.tabNext();
else focusForm.tabPrev();
}
} else {
if (keyboardElement != null) {
if (keyboardElement.getParent() != null && keyboardElement.getIsVisible()) {
if (evt.isPressed()) {
((KeyboardListener)keyboardElement).onKeyPress(evt);
} else if (evt.isReleased()) {
((KeyboardListener)keyboardElement).onKeyRelease(evt);
}
}
}
}
}
    /**
     * Dispatches Android touch events to the per-pointer DOWN/MOVE/UP
     * handlers.  (The multi-touch guard is currently disabled -- all touch
     * events are processed.)
     */
    public void onTouchEvent(TouchEvent evt) {
    //  if (screen.useMultiTouch) {
            switch (evt.getType()) {
                case DOWN:
                    androidTouchDownEvent(evt);
                    break;
                case MOVE:
                    androidTouchMoveEvent(evt);
                    break;
                case UP:
                    androidTouchUpEvent(evt);
                    break;
            }
    //  }
    }
    /**
     * Handles an Android DOWN touch.  Mirrors the left-button press path of
     * onMouseButtonEvent: raises z-order, transfers/clears keyboard focus,
     * resolves a resize direction when the touch lands on a resize border,
     * fires the pressed/touch-down callbacks, then records the pointer's
     * contact element, drag offset, event element and resize direction in the
     * per-pointer maps (keyed by pointer id) for subsequent MOVE/UP events.
     */
    private void androidTouchDownEvent(TouchEvent evt) {
        mousePressed = true;
        Element contact = getContactElement(evt.getX(), evt.getY());
        Vector2f offset = tempElementOffset.clone();
        Element target = getEventElement(evt.getX(), evt.getY());
        Borders dir = null;
        if (target != null) {
            if (target.getResetKeyboardFocus())
                resetTabFocusElement();
            if (target.getEffectZOrder())
                updateZOrder(target.getAbsoluteParent());
            if (target.getResetKeyboardFocus())
                this.setTabFocusElement(target);
            if (target.getIsDragDropDragElement())
                targetElement = null;
            if (target.getIsResizable()) {
                // Resolve which resize border the touch landed on.
                float offsetX = evt.getX();
                float offsetY = evt.getY();
                Element el = target;
                if (offsetX > el.getAbsoluteX() && offsetX < el.getAbsoluteX()+el.getResizeBorderWestSize()) {
                    if (offsetY > el.getAbsoluteY() && offsetY < el.getAbsoluteY()+el.getResizeBorderNorthSize()) {
                        dir = Borders.NW;
                    } else if (offsetY > (el.getAbsoluteHeight()-el.getResizeBorderSouthSize()) && offsetY < el.getAbsoluteHeight()) {
                        dir = Borders.SW;
                    } else {
                        dir = Borders.W;
                    }
                } else if (offsetX > (el.getAbsoluteWidth()-el.getResizeBorderEastSize()) && offsetX < el.getAbsoluteWidth()) {
                    if (offsetY > el.getAbsoluteY() && offsetY < el.getAbsoluteY()+el.getResizeBorderNorthSize()) {
                        dir = Borders.NE;
                    } else if (offsetY > (el.getAbsoluteHeight()-el.getResizeBorderSouthSize()) && offsetY < el.getAbsoluteHeight()) {
                        dir = Borders.SE;
                    } else {
                        dir = Borders.E;
                    }
                } else {
                    if (offsetY > el.getAbsoluteY() && offsetY < el.getAbsoluteY()+el.getResizeBorderNorthSize()) {
                        dir = Borders.N;
                    } else if (offsetY > (el.getAbsoluteHeight()-el.getResizeBorderSouthSize()) && offsetY < el.getAbsoluteHeight()) {
                        dir = Borders.S;
                    }
                }
                if (keyboardElement != null && target.getResetKeyboardFocus()) {
                    if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
                }
                if (target.getResetKeyboardFocus())
                    keyboardElement = null;
            } else if (target.getIsMovable() && dir == null) {
                // Movable: remember origin for the drag that may follow.
                dir = null;
                if (keyboardElement != null && target.getResetKeyboardFocus()) {
                    if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
                }
                if (target.getResetKeyboardFocus())
                    keyboardElement = null;
                eventElementOriginXY.set(target.getPosition());
            } else if (target instanceof KeyboardListener) {
                // Keyboard-capable element: transfer key focus, show the IME.
                if (keyboardElement != null && target.getResetKeyboardFocus()) {
                    if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
                }
                if (target.getResetKeyboardFocus())
                    keyboardElement = target;
                if (keyboardElement instanceof TextField) {
                    ((TextField)keyboardElement).setTabFocus();
                    screen.showVirtualKeyboard();
                }
            } else {
                dir = null;
                if (keyboardElement != null && target.getResetKeyboardFocus()) {
                    if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
                }
                if (target.getResetKeyboardFocus())
                    keyboardElement = null;
            }
            if (target instanceof MouseButtonListener) {
                // Synthesize a left-press so mouse-oriented listeners work on touch.
                MouseButtonEvent mbEvt = new MouseButtonEvent(0,true,(int)evt.getX(),(int)evt.getY());
                ((MouseButtonListener)target).onMouseLeftPressed(mbEvt);
            }
            if (target instanceof TouchListener) {
                ((TouchListener)target).onTouchDown(evt);
            }
            if (keyboardElement == null)
                screen.hideVirtualKeyboard();
            evt.setConsumed();
            // Track this pointer for the MOVE/UP events that follow.
            contactElements.put(evt.getPointerId(),contact);
            elementOffsets.put(evt.getPointerId(),offset);
            eventElements.put(evt.getPointerId(),target);
            eventElementResizeDirections.put(evt.getPointerId(), dir);
        } else {
            if (keyboardElement == null)
                screen.hideVirtualKeyboard();
            resetTabFocusElement();
        }
    }
    /**
     * Handles an Android touch-move (drag) event: if the moving pointer is
     * currently tracking an Element, resizes or moves that Element and then
     * forwards the motion to its mouse-movement / touch listeners.
     * @param evt the move TouchEvent
     */
    private void androidTouchMoveEvent(TouchEvent evt) {
        // Locate the Element tracked for this pointer id, if any.
        // (Integer == int unboxes the Integer, so this compares by value.)
        for (Integer key : eventElements.keySet()) {
            if (key == evt.getPointerId()) {
                Element target = eventElements.get(key);
                if (target != null) {
                    Element contact = contactElements.get(key);
                    Vector2f offset = elementOffsets.get(key);
                    Borders dir = eventElementResizeDirections.get(key);
                    boolean movable = contact.getIsMovable();
                    // A non-null resize border means the drag started on a border:
                    // resizing takes priority over moving.
                    if (dir != null) {
                        target.resize(evt.getX(), evt.getY(), dir);
                    } else if (movable) {
                        target.moveTo(evt.getX()-offset.x, evt.getY()-offset.y);
                    }
                    // Forward the raw motion to the optional listener interfaces.
                    if (target instanceof MouseMovementListener) {
                        MouseMotionEvent mbEvt = new MouseMotionEvent((int)evt.getX(),(int)evt.getY(),(int)evt.getDeltaX(),(int)evt.getDeltaY(),0,0);
                        ((MouseMovementListener)target).onMouseMove(mbEvt);
                    }
                    if (target instanceof TouchListener) {
                        ((TouchListener)target).onTouchMove(evt);
                    }
                }
            }
        }
    }
private void androidTouchUpEvent(TouchEvent evt) {
Element target = eventElements.get(evt.getPointerId());
if (target != null) {
if (target instanceof MouseButtonListener) {
MouseButtonEvent mbEvt = new MouseButtonEvent(0, true, (int)evt.getX(), (int)evt.getY());
((MouseButtonListener)target).onMouseLeftReleased(mbEvt);
}
if (target instanceof TouchListener) {
((TouchListener)target).onTouchUp(evt);
}
if (!(target.getAbsoluteParent() instanceof AutoHide)) {
handleAndroidMenuState(target);
}
if (target != null)
evt.setConsumed();
eventElements.remove(evt.getPointerId());
contactElements.remove(evt.getPointerId());
elementOffsets.remove(evt.getPointerId());
eventElementResizeDirections.remove(evt.getPointerId());
} else
handleMenuState();
mousePressed = false;
}
    /**
     * Delegates to the parent Screen and returns the last CollisionResult it
     * recorded.
     * NOTE(review): the javadoc previously attached here described
     * getEventElement(x, y); it had drifted onto the wrong method.
     * @return the last CollisionResult from the parent Screen
     */
    @Override
    public CollisionResult getLastCollision() { return screen.getLastCollision(); }
    /**
     * Determines and returns the current mouse focus Element.
     * Casts the 2D pick ray through this subscreen's node; of all hits that are
     * not mouse-ignored and not outside their clip rectangle, the LAST one in
     * collision-iteration order wins (no z comparison is done here, unlike
     * getTargetElement).
     * Side effects: sets contactElement and eventElementOffsetX/Y.
     * @param x The current mouse X coord
     * @param y The current mouse Y coord
     * @return the picked Element, or null if nothing suitable was hit
     */
    private Element getEventElement(float x, float y) {
        guiRayOrigin.set(x, y, 0f);
        elementZOrderRay.setOrigin(guiRayOrigin);
        CollisionResults results = new CollisionResults();
        subScreenNode.collideWith(elementZOrderRay, results);
        float z = 0; // NOTE(review): unused in this method
        Element testEl = null, el = null;
        for (CollisionResult result : results) {
            boolean discard = false;
            if (result.getGeometry().getParent() instanceof Element) {
                testEl = ((Element)(result.getGeometry().getParent()));
                // Skip Elements that opt out of mouse interaction, and hits that
                // fall outside a clipped Element's clipping bounds.
                if (testEl.getIgnoreMouse()) {
                    discard = true;
                } else if (testEl.getIsClipped()) {
                    if (result.getContactPoint().getX() < testEl.getClippingBounds().getX() ||
                        result.getContactPoint().getX() > testEl.getClippingBounds().getZ() ||
                        result.getContactPoint().getY() < testEl.getClippingBounds().getY() ||
                        result.getContactPoint().getY() > testEl.getClippingBounds().getW()) {
                        discard = true;
                    }
                }
            }
            if (!discard) {
                if (result.getGeometry().getParent() instanceof Element) {
                    el = testEl;
                }
            }
        }
        if (el != null) {
            contactElement = el;
            // While the mouse is pressed, effect flags may redirect the event to
            // the Element's direct parent or its absolute (top-most) parent.
            Element parent = null;
            if (el.getEffectParent() && mousePressed) {
                parent = el.getElementParent();
            } else if (el.getEffectAbsoluteParent() && mousePressed) {
                parent = el.getAbsoluteParent();
            }
            if (parent != null) {
                el = parent;
            }
            eventElementOffsetX = x-el.getX();
            eventElementOffsetY = y-el.getY();
            return el;
        } else {
            return null;
        }
    }
    /**
     * Same pick logic as getEventElement (last non-discarded hit wins), but
     * instead of mutating the event-element fields it stores the cursor offset
     * relative to the picked Element (or its effect parent) in tempElementOffset.
     * @param x The current mouse X coord
     * @param y The current mouse Y coord
     * @return the picked Element, or null if nothing suitable was hit
     */
    private Element getContactElement(float x, float y) {
        guiRayOrigin.set(x, y, 0f);
        elementZOrderRay.setOrigin(guiRayOrigin);
        CollisionResults results = new CollisionResults();
        subScreenNode.collideWith(elementZOrderRay, results);
        float z = 0; // NOTE(review): unused in this method
        Element testEl = null, el = null;
        for (CollisionResult result : results) {
            boolean discard = false;
            if (result.getGeometry().getParent() instanceof Element) {
                testEl = ((Element)(result.getGeometry().getParent()));
                // Skip mouse-ignoring Elements and hits outside clip bounds.
                if (testEl.getIgnoreMouse()) {
                    discard = true;
                } else if (testEl.getIsClipped()) {
                    if (result.getContactPoint().getX() < testEl.getClippingBounds().getX() ||
                        result.getContactPoint().getX() > testEl.getClippingBounds().getZ() ||
                        result.getContactPoint().getY() < testEl.getClippingBounds().getY() ||
                        result.getContactPoint().getY() > testEl.getClippingBounds().getW()) {
                        discard = true;
                    }
                }
            }
            if (!discard) {
                if (result.getGeometry().getParent() instanceof Element) {
                    el = testEl;
                }
            }
        }
        if (el != null) {
            // Offsets are measured against the effect parent when one applies,
            // but the original (child) Element is still the one returned.
            Element parent = null;
            if (el.getEffectParent() && mousePressed) {
                parent = el.getElementParent();
            } else if (el.getEffectAbsoluteParent() && mousePressed) {
                parent = el.getAbsoluteParent();
            }
            if (parent != null)
                tempElementOffset.set(x-parent.getX(),y-parent.getY());
            else
                tempElementOffset.set(x-el.getX(),y-el.getY());
            return el;
        } else {
            return null;
        }
    }
public void forceEventElement(Element element) {
float x = element.getAbsoluteX()+1;
float y = element.getAbsoluteY()+1;
eventElement = getEventElement(x,y);
if (eventElement != null) {
if (eventElement.getEffectZOrder())
updateZOrder(eventElement.getAbsoluteParent());
this.setTabFocusElement(eventElement);
if (eventElement.getIsDragDropDragElement())
targetElement = null;
if (eventElement.getIsResizable()) {
float offsetX = x;
float offsetY = y;
Element el = eventElement;
if (keyboardElement != null && eventElement.getResetKeyboardFocus()) {
if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
}
if (eventElement.getResetKeyboardFocus())
keyboardElement = null;
} else if (eventElement.getIsMovable() && eventElementResizeDirection == null) {
eventElementResizeDirection = null;
if (keyboardElement != null && eventElement.getResetKeyboardFocus()) {
if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
}
if (eventElement.getResetKeyboardFocus())
keyboardElement = null;
eventElementOriginXY.set(eventElement.getPosition());
} else if (eventElement instanceof KeyboardListener) {
if (keyboardElement != null && eventElement.getResetKeyboardFocus()) {
if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
}
if (eventElement.getResetKeyboardFocus())
keyboardElement = eventElement;
if (keyboardElement instanceof TextField) {
((TextField)keyboardElement).setTabFocus();
}
// TODO: Update target element's font shader
} else {
eventElementResizeDirection = null;
if (keyboardElement != null && eventElement.getResetKeyboardFocus()) {
if (keyboardElement instanceof TextField) ((TextField)keyboardElement).resetTabFocus();
}
if (eventElement.getResetKeyboardFocus())
keyboardElement = null;
}
}
}
    /**
     * Determines and returns the drag-and-drop target (drop) Element under the
     * given coordinates. Unlike getEventElement, only Elements flagged as
     * drag-drop DROP elements are considered, and the hit with the LARGEST
     * contact-point z (front-most) wins.
     * Side effects: sets targetElementOffsetX/Y.
     * @param x The current mouse X coord
     * @param y The current mouse Y coord
     * @return the drop Element under (x, y), or null
     */
    private Element getTargetElement(float x, float y) {
        guiRayOrigin.set(x, y, 0f);
        elementZOrderRay.setOrigin(guiRayOrigin);
        CollisionResults results = new CollisionResults();
        subScreenNode.collideWith(elementZOrderRay, results);
        float z = 0;
        Element testEl = null, el = null;
        for (CollisionResult result : results) {
            boolean discard = false;
            if (result.getGeometry().getParent() instanceof Element) {
                testEl = ((Element)(result.getGeometry().getParent()));
                // Only drop-enabled, mouse-visible Elements qualify; clipped
                // Elements must also contain the contact point.
                if (testEl.getIgnoreMouse() || !testEl.getIsDragDropDropElement()) {
                    discard = true;
                } else if (testEl.getIsClipped()) {
                    if (result.getContactPoint().getX() < testEl.getClippingBounds().getX() ||
                        result.getContactPoint().getX() > testEl.getClippingBounds().getZ() ||
                        result.getContactPoint().getY() < testEl.getClippingBounds().getY() ||
                        result.getContactPoint().getY() > testEl.getClippingBounds().getW()) {
                        discard = true;
                    }
                }
            }
            // System.out.println(testEl.getUID() + ": " + discard + ": " + testEl.getLocalTranslation().getZ() + ": " + z + ": " + result.getContactPoint().getZ());
            if (!discard) {
                // Keep the front-most (largest z) qualifying hit.
                if (result.getContactPoint().getZ() > z) {
                    z = result.getContactPoint().getZ();
                    if (result.getGeometry().getParent() instanceof Element) {
                        el = testEl;//((Element)(result.getGeometry().getParent()));
                    }
                }
            }
        }
        if (el != null) {
            // Effect flags may redirect the target to a parent while pressed.
            Element parent = null;
            if (el.getEffectParent() && mousePressed) {
                parent = el.getElementParent();
            } else if (el.getEffectAbsoluteParent() && mousePressed) {
                parent = el.getAbsoluteParent();
            }
            if (parent != null) {
                el = parent;
            }
            targetElementOffsetX = x-el.getX();
            targetElementOffsetY = y-el.getY();
            return el;
        } else {
            return null;
        }
    }
    /**
     * Returns the current Drag enabled Element.
     * Note: this is the generic event Element field; it holds the drag source
     * while a drag-drop interaction is in progress.
     * @return Element
     */
    public Element getDragElement() {
        return this.eventElement;
    }
    /**
     * Returns the current Drop enabled Element (the drag-drop target).
     * @return Element
     */
    @Override
    public Element getDropElement() {
        return this.targetElement;
    }
public void setGlobalUIScale(float widthPercent, float heightPercent) {
for (Element el : elements.values()) {
el.setPosition(el.getPosition().x*widthPercent, el.getPosition().y*heightPercent);
el.setDimensions(el.getDimensions().x*widthPercent, el.getDimensions().y*heightPercent);
el.setFontSize(el.getFontSize()*heightPercent);
el.setGlobalUIScale(widthPercent, heightPercent);
}
}
    /**
     * Creates a SubScreen control for another Spatial, backed by the same
     * parent Screen and quad geometry.
     * NOTE(review): the clone shares the SAME Element instances (shallow
     * putAll) — confirm independent copies are not expected by callers.
     */
    @Override
    public Control cloneForSpatial(Spatial spatial) {
        SubScreen subscreen = new SubScreen(this.screen, this.geom);
        subscreen.elements.putAll(this.elements);
        return subscreen;
    }
    /** Attaches this subscreen's GUI node to the Spatial the control is added to. */
    @Override
    public void setSpatial(Spatial spatial) {
        this.spatial = spatial;
        if (spatial != null) {
            ((Node)spatial).attachChild(subScreenNode);
        }
    }
    /** Serialization is intentionally a no-op for this control. */
    @Override
    public void write(JmeExporter ex) throws IOException { }
    @Override
    public void read(JmeImporter im) throws IOException { }
    // Menu handling
    /**
     * Hides any open Menu that is not associated with the current event
     * Element: if no Element was hit, or the hit is unrelated to menus, every
     * Menu is hidden; otherwise only the menus other than the one owning the
     * event Element (directly or via its ComboBox) are hidden.
     */
    private void handleMenuState() {
        if (eventElement == null) {
            for (Element el : elements.values()) {
                if (el instanceof Menu) {
                    el.hide();
                }
            }
        } else {
            if (!(eventElement.getAbsoluteParent() instanceof Menu) && !(eventElement.getParent() instanceof ComboBox)) {
                for (Element el : elements.values()) {
                    if (el instanceof Menu) {
                        el.hide();
                    }
                }
            } else if (eventElement.getAbsoluteParent() instanceof Menu) {
                for (Element el : elements.values()) {
                    if (el instanceof Menu && el != eventElement.getAbsoluteParent()) {
                        el.hide();
                    }
                }
            } else if (eventElement.getParent() instanceof ComboBox) {
                for (Element el : elements.values()) {
                    if (el instanceof Menu && el != ((ComboBox)eventElement.getParent()).getMenu()) {
                        el.hide();
                    }
                }
            }
        }
    }
@Override
public void handleAndroidMenuState(Element target) {
if (target == null) {
for (Element el : elements.values()) {
if (el instanceof Menu) {
el.hide();
}
}
} else {
if (!(target.getAbsoluteParent() instanceof Menu) && !(target.getParent() instanceof ComboBox)) {
for (Element el : elements.values()) {
if (el instanceof Menu) {
el.hide();
}
}
} else if (target.getAbsoluteParent() instanceof Menu) {
for (Element el : elements.values()) {
if (el instanceof Menu && el != target.getAbsoluteParent()) {
el.hide();
}
}
} else if (target.getParent() instanceof ComboBox) {
for (Element el : elements.values()) {
if (el instanceof Menu && el != ((ComboBox)target.getParent()).getMenu()) {
el.hide();
}
}
}
}
}
    // Forms and tab focus
    /**
     * Method for setting the tab focus element.
     * Resets the previous focus holder first; the new focus only takes effect
     * when the Element allows keyboard-focus resets and belongs to a Form.
     * @param element The Element to set tab focus to
     */
    @Override
    public void setTabFocusElement(Element element) {
        resetFocusElement();
        focusForm = element.getForm();
        if (element.getResetKeyboardFocus()) {
            if (focusForm != null) {
                tabFocusElement = element;
                focusForm.setSelectedTabIndex(element);
                if (tabFocusElement instanceof TabFocusListener) {
                    ((TabFocusListener)element).setTabFocus();
                }
            }
        }
    }
    /**
     * Resets the tab focus element to null after calling the TabFocusListener's
     * resetTabFocus method.
     */
    @Override
    public void resetTabFocusElement() {
        resetFocusElement();
        this.tabFocusElement = null;
        this.focusForm = null;
    }
/**
* Send reset to the current Tab Focus Element
*/
private void resetFocusElement() {
if (tabFocusElement != null) {
if (tabFocusElement.getResetKeyboardFocus()) {
if (tabFocusElement instanceof TabFocusListener) {
((TabFocusListener)tabFocusElement).resetTabFocus();
}
}
}
}
/**
* Sets the current Keyboard focus Element
* @param element The Element to set keyboard focus to
*/
@Override
public void setKeyboardElement(Element element) {
if (element != null) {
if (element.getResetKeyboardFocus())
keyboardElement = element;
} else
keyboardElement = null;
}
    // --- Delegates -----------------------------------------------------------
    // The methods below simply forward ElementManager calls to the parent
    // Screen so that Elements hosted on this SubScreen share the Screen's
    // styles, clipboard, audio, tooltips, cursors, scaling and modal state.
    @Override
    public Style getStyle(String key) {
        return screen.getStyle(key);
    }
    @Override
    public void setClipboardText(String text) {
        screen.setClipboardText(text);
    }
    @Override
    public String getClipboardText() {
        return screen.getClipboardText();
    }
    @Override
    public boolean getUseTextureAtlas() {
        return screen.getUseTextureAtlas();
    }
    @Override
    public float getGlobalAlpha() {
        return screen.getGlobalAlpha();
    }
    @Override
    public EffectManager getEffectManager() {
        return screen.getEffectManager();
    }
    @Override
    public AnimManager getAnimManager() {
        return screen.getAnimManager();
    }
    @Override
    public boolean getUseUIAudio() {
        return screen.getUseUIAudio();
    }
    @Override
    public boolean getUseToolTips() {
        return screen.getUseToolTips();
    }
    @Override
    public void updateToolTipLocation() {
        screen.updateToolTipLocation();
    }
    @Override
    public Element getToolTipFocus() {
        return screen.getToolTipFocus();
    }
    @Override
    public void hideToolTip() {
        screen.hideToolTip();
    }
    @Override
    public void setCursor(CursorType cursorType) {
        screen.setCursor(cursorType);
    }
    @Override
    public void showVirtualKeyboard() {
        screen.showVirtualKeyboard();
    }
    @Override
    public void hideVirtualKeyboard() {
        screen.hideVirtualKeyboard();
    }
    @Override
    public void setGlobalAlpha(float alpha) {
        screen.setGlobalAlpha(alpha);
    }
    @Override
    public BitmapFont getDefaultGUIFont() {
        return screen.getDefaultGUIFont();
    }
    @Override
    public ScaleUtil getScaleManager() {
        return screen.getScaleManager();
    }
    @Override
    public float scaleFloat(float in) {
        return screen.scaleFloat(in);
    };
    @Override
    public Vector2f scaleVector2f(Vector2f in) {
        return screen.scaleVector2f(in);
    };
    @Override
    public Vector3f scaleVector3f(Vector3f in) {
        return screen.scaleVector3f(in);
    };
    @Override
    public Vector4f scaleVector4f(Vector4f in) {
        return screen.scaleVector4f(in);
    };
    @Override
    public float scaleFontSize(float in) {
        return screen.scaleFontSize(in);
    };
    @Override
    public void setUseUIAudio(boolean use) {
        screen.setUseUIAudio(use);
    }
    @Override
    public void setUIAudioVolume(float volume) {
        screen.setUIAudioVolume(volume);
    }
    @Override
    public void setUseToolTips(boolean use) {
        screen.setUseToolTips(use);
    }
    @Override
    public void setUseCustomCursors(boolean use) {
        screen.setUseCustomCursors(use);
    }
    @Override
    public boolean getUseCustomCursors() {
        return screen.getUseCustomCursors();
    }
    @Override
    public void setUseCursorEffects(boolean use) {
        screen.setUseCursorEffects(use);
    }
    @Override
    public CursorEffects getCursorEffects() {
        return screen.getCursorEffects();
    }
    @Override
    public ModalBackground getModalBackground() {
        return screen.getModalBackground();
    }
    @Override
    public void showAsModal(Element el, boolean showWithEffect) {
        screen.showAsModal(el, showWithEffect);
    }
    @Override
    public void hideModalBackground() {
        screen.hideModalBackground();
    }
    //<editor-fold desc="2D Framework">
    /** Creates and registers a new AnimLayer under a generated unique ID. */
    public AnimLayer addAnimLayer() {
        return addAnimLayer(UIDUtil.getUID());
    }
    /**
     * Creates and registers a new AnimLayer under the given ID, stacking it
     * above the existing layers.
     * NOTE(review): on a duplicate ID this logs SEVERE and calls
     * System.exit(0) — exit status 0 signals success, and the logger is
     * registered under Element.class rather than this class; confirm both
     * are intentional before changing.
     * @param UID the unique ID for the new layer
     * @return the created AnimLayer (never returns on a duplicate ID)
     */
    @Override
    public AnimLayer addAnimLayer(String UID) {
        if (getAnimLayerById(UID) != null) {
            try {
                throw new ConflictingIDException();
            } catch (ConflictingIDException ex) {
                Logger.getLogger(Element.class.getName()).log(Level.SEVERE, "The child layer '" + UID + "' (Element) conflicts with a previously added child layer in parent Screen.", ex);
                System.exit(0);
            }
            return null;
        } else {
            AnimLayer layer = new AnimLayer(
                this,
                UID
            );
            // Place the new layer above the current stack, then advance the
            // z-order cursor by one major step for the next layer.
            layer.initZOrder(layerZOrderCurrent);
            layerZOrderCurrent += this.getZOrderStepMajor();
            layers.put(UID, layer);
            if (!layer.getInitialized()) {
                layer.orgPosition = layer.getPosition().clone();
                layer.setInitialized();
            }
            subScreenNode.attachChild(layer);
            subScreenNode.addControl(layer);
            return layer;
        }
    }
    /**
     * Registers an externally created AnimLayer under the given ID.
     * Same stacking behavior and duplicate-ID handling as addAnimLayer(String).
     * @param UID the unique ID for the layer
     * @param layer the layer to register
     */
    @Override
    public void addAnimLayer(String UID, AnimLayer layer) {
        if (getAnimLayerById(UID) != null) {
            try {
                throw new ConflictingIDException();
            } catch (ConflictingIDException ex) {
                Logger.getLogger(Element.class.getName()).log(Level.SEVERE, "The child layer '" + UID + "' (Element) conflicts with a previously added child layer in parent Screen.", ex);
                System.exit(0);
            }
        } else {
            layer.initZOrder(layerZOrderCurrent);
            layerZOrderCurrent += this.getZOrderStepMajor();
            layers.put(UID, layer);
            if (!layer.getInitialized()) {
                layer.orgPosition = layer.getPosition().clone();
                layer.setInitialized();
            }
            subScreenNode.attachChild(layer);
            subScreenNode.addControl(layer);
        }
    }
    /**
     * Removes the AnimLayer registered under the given ID.
     * @param UID the layer's unique ID
     * @return the removed AnimLayer, or null if no layer had that ID
     */
    @Override
    public AnimLayer removeAnimLayer(String UID) {
        AnimLayer animLayer = layers.get(UID);
        if (animLayer != null) {
            removeAnimLayer(animLayer);
            return animLayer;
        } else
            return null;
    }
    /**
     * Detaches and cleans up the given AnimLayer and closes the z-order gap by
     * shifting every layer that sat above it down one major z step.
     * @param animLayer the layer to remove
     */
    @Override
    public void removeAnimLayer(AnimLayer animLayer) {
        if (layers.containsValue(animLayer)) {
            subScreenNode.removeControl(animLayer);
            layers.remove(animLayer.getUID());
            float shiftZ = animLayer.getLocalTranslation().getZ();
            for (AnimLayer el : layers.values()) {
                if (el.getLocalTranslation().getZ() > shiftZ) {
                    el.move(0,0,-zOrderStepMajor);
                }
            }
            layerZOrderCurrent -= zOrderStepMajor;
            animLayer.removeFromParent();
            animLayer.cleanup();
        }
    }
public AnimLayer getAnimLayerById(String UID) {
AnimLayer ret = null;
if (layers.containsKey(UID)) {
ret = layers.get(UID);
} else {
for (AnimLayer el : layers.values()) {
ret = (AnimLayer)el.getChildElementById(UID);
if (ret != null) {
break;
}
}
}
return ret;
}
    /**
     * Applies the current event AnimElement's z-order effect: brings the
     * element itself to the front of its parent layer (Self/Both) and/or its
     * picked quad to the front of the element (Child/Both).
     */
    private void setAnimElementZOrder() {
        if (eventAnimElement != null) {
            if (eventAnimElement.getZOrderEffect() == AnimElement.ZOrderEffect.Self ||
                eventAnimElement.getZOrderEffect() == AnimElement.ZOrderEffect.Both)
                if (eventAnimElement.getParentLayer() != null)
                    eventAnimElement.getParentLayer().bringAnimElementToFront(eventAnimElement);
            if (eventAnimElement.getZOrderEffect() == AnimElement.ZOrderEffect.Child ||
                eventAnimElement.getZOrderEffect() == AnimElement.ZOrderEffect.Both)
                eventAnimElement.bringQuadToFront(eventQuad);
        }
    }
//</editor-fold>
}
| meltzow/tonegodgui | src/tonegod/gui/core/SubScreen.java | Java | bsd-2-clause | 47,888 |
<!-- Dynamic data-source configuration form (AngularJS).
     Renders one input per property of the selected type's
     configuration_schema; file inputs are base64-encoded via
     base-sixty-four-input, and extra per-target actions are rendered as
     buttons after Save. -->
<form name="dataSourceForm">
  <div class="form-group">
    <label for="dataSourceName">Name</label>
    <input type="string" class="form-control" name="dataSourceName" ng-model="target.name" required>
  </div>
  <div class="form-group">
    <label for="type">Type</label>
    <select name="type" class="form-control" ng-options="type.type as type.name for type in types" ng-model="target.type"></select>
  </div>
  <!-- One form group per schema property; checkboxes put the label after the
       input, files use the base64 directive, everything else is a plain input. -->
  <div class="form-group" ng-class='{"has-error": !inner.input.$valid}' ng-form="inner" ng-repeat="(name, input) in type.configuration_schema.properties">
    <label ng-if="input.type !== 'checkbox'">{{input.title || name | capitalize}}</label>
    <input name="input" type="{{input.type}}" class="form-control" ng-model="target.options[name]" ng-required="input.required"
           ng-if="input.type !== 'file' && input.type !== 'checkbox'" accesskey="tab" placeholder="{{input.default}}">
    <label ng-if="input.type=='checkbox'">
      <input name="input" type="{{input.type}}" ng-model="target.options[name]" ng-required="input.required"
             ng-if="input.type !== 'file'" accesskey="tab" placeholder="{{input.default}}">
      {{input.title || name | capitalize}}
    </label>
    <input name="input" type="file" class="form-control" ng-model="files[name]" ng-required="input.required && !target.options[name]"
           base-sixty-four-input
           ng-if="input.type === 'file'">
  </div>
  <button class="btn btn-primary" ng-disabled="!dataSourceForm.$valid" ng-click="saveChanges()">Save</button>
  <span ng-repeat="action in actions">
    <button class="btn"
            ng-class="action.class"
            ng-if="target.id"
            ng-disabled="(action.disableWhenDirty && dataSourceForm.$dirty) || inProgressActions[action.name]"
            ng-click="action.callback()" ng-bind-html="action.name"></button>
  </span>
  <span ng-transclude>
  </span>
</form>
| easytaxibr/redash | rd_ui/app/views/directives/dynamic_form.html | HTML | bsd-2-clause | 2,012 |
package org.jvnet.jaxb2_commons.xjc.outline.concrete;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.lang3.Validate;
import org.jvnet.jaxb2_commons.xjc.outline.MEnumConstantOutline;
import org.jvnet.jaxb2_commons.xjc.outline.MEnumOutline;
import org.jvnet.jaxb2_commons.xjc.outline.MModelOutline;
import org.jvnet.jaxb2_commons.xjc.outline.MPackageOutline;
import org.jvnet.jaxb2_commons.xml.bind.model.MEnumLeafInfo;
import com.sun.codemodel.JDefinedClass;
import com.sun.tools.xjc.model.nav.NClass;
import com.sun.tools.xjc.model.nav.NType;
/**
 * Concrete {@link MEnumOutline}: associates a source-model enum
 * ({@link MEnumLeafInfo}) with its generated code-model class
 * ({@link JDefinedClass}) and collects the outlines of its constants.
 * All collaborators are validated non-null at construction and immutable
 * thereafter; only the constant list grows via
 * {@link #addEnumConstantOutline(MEnumConstantOutline)}.
 */
public class CMEnumOutline implements MEnumOutline {
	private final MModelOutline parent;
	private final MPackageOutline packageOutline;
	private final MEnumLeafInfo<NType, NClass> target;
	private final JDefinedClass code;
	// Mutable backing list plus an unmodifiable view handed out to callers.
	private final List<MEnumConstantOutline> enumConstantOutlines = new ArrayList<MEnumConstantOutline>();
	private final List<MEnumConstantOutline> _enumConstantOutlines = Collections
			.unmodifiableList(enumConstantOutlines);
	public CMEnumOutline(MModelOutline parent, MPackageOutline packageOutline,
			MEnumLeafInfo<NType, NClass> target, JDefinedClass code) {
		Validate.notNull(parent);
		Validate.notNull(packageOutline);
		Validate.notNull(target);
		Validate.notNull(code);
		this.parent = parent;
		this.packageOutline = packageOutline;
		this.target = target;
		this.code = code;
	}
	public MModelOutline getParent() {
		return parent;
	}
	public MPackageOutline getPackageOutline() {
		return packageOutline;
	}
	public MEnumLeafInfo<NType, NClass> getTarget() {
		return target;
	}
	public JDefinedClass getCode() {
		return code;
	}
	/** @return an unmodifiable view of the constant outlines added so far */
	public List<MEnumConstantOutline> getEnumConstantOutlines() {
		return _enumConstantOutlines;
	}
	/**
	 * Registers a constant outline; it must already point back to this enum
	 * outline as its owner.
	 */
	public void addEnumConstantOutline(MEnumConstantOutline enumConstantOutline) {
		Validate.notNull(enumConstantOutline);
		Validate.isTrue(enumConstantOutline.getEnumOutline() == this);
		this.enumConstantOutlines.add(enumConstantOutline);
	}
}
| Stephan202/jaxb2-basics | tools/src/main/java/org/jvnet/jaxb2_commons/xjc/outline/concrete/CMEnumOutline.java | Java | bsd-2-clause | 2,030 |
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_ARC_BLUETOOTH_BLUETOOTH_STRUCT_TRAITS_H_
#define COMPONENTS_ARC_BLUETOOTH_BLUETOOTH_STRUCT_TRAITS_H_
#include "components/arc/common/bluetooth.mojom.h"
#include "device/bluetooth/bluetooth_advertisement.h"
#include "device/bluetooth/bluetooth_common.h"
#include "device/bluetooth/bluetooth_uuid.h"
#include "device/bluetooth/bluez/bluetooth_service_attribute_value_bluez.h"
namespace mojo {
// Mojo type-mapping between the platform transport enum
// (device::BluetoothTransport) and the ARC mojom device type. The two enums
// do not share numeric values, so both directions are explicit switches.
template <>
struct EnumTraits<arc::mojom::BluetoothDeviceType,
                  device::BluetoothTransport> {
  static arc::mojom::BluetoothDeviceType ToMojom(
      device::BluetoothTransport type) {
    switch (type) {
      case device::BLUETOOTH_TRANSPORT_CLASSIC:
        return arc::mojom::BluetoothDeviceType::BREDR;
      case device::BLUETOOTH_TRANSPORT_LE:
        return arc::mojom::BluetoothDeviceType::BLE;
      case device::BLUETOOTH_TRANSPORT_DUAL:
        return arc::mojom::BluetoothDeviceType::DUAL;
      default:
        NOTREACHED() << "Invalid type: " << static_cast<uint8_t>(type);
        // XXX: is there a better value to return here?
        return arc::mojom::BluetoothDeviceType::DUAL;
    }
  }
  // Returns false (deserialization failure) for out-of-range mojom values.
  static bool FromMojom(arc::mojom::BluetoothDeviceType mojom_type,
                        device::BluetoothTransport* type) {
    switch (mojom_type) {
      case arc::mojom::BluetoothDeviceType::BREDR:
        *type = device::BLUETOOTH_TRANSPORT_CLASSIC;
        break;
      case arc::mojom::BluetoothDeviceType::BLE:
        *type = device::BLUETOOTH_TRANSPORT_LE;
        break;
      case arc::mojom::BluetoothDeviceType::DUAL:
        *type = device::BLUETOOTH_TRANSPORT_DUAL;
        break;
      default:
        NOTREACHED() << "Invalid type: " << static_cast<uint32_t>(mojom_type);
        return false;
    }
    return true;
  }
};
// Mojo type-mapping for SDP attribute types. The two enums are kept
// value-compatible, so valid values convert with a static_cast; the switch
// exists only to reject out-of-range input.
template <>
struct EnumTraits<arc::mojom::BluetoothSdpAttributeType,
                  bluez::BluetoothServiceAttributeValueBlueZ::Type> {
  static arc::mojom::BluetoothSdpAttributeType ToMojom(
      bluez::BluetoothServiceAttributeValueBlueZ::Type input) {
    switch (input) {
      case bluez::BluetoothServiceAttributeValueBlueZ::NULLTYPE:
      case bluez::BluetoothServiceAttributeValueBlueZ::UINT:
      case bluez::BluetoothServiceAttributeValueBlueZ::INT:
      case bluez::BluetoothServiceAttributeValueBlueZ::UUID:
      case bluez::BluetoothServiceAttributeValueBlueZ::STRING:
      case bluez::BluetoothServiceAttributeValueBlueZ::BOOL:
      case bluez::BluetoothServiceAttributeValueBlueZ::SEQUENCE:
      case bluez::BluetoothServiceAttributeValueBlueZ::URL:
        return static_cast<arc::mojom::BluetoothSdpAttributeType>(input);
      default:
        NOTREACHED() << "Invalid type: " << static_cast<uint32_t>(input);
        return arc::mojom::BluetoothSdpAttributeType::NULLTYPE;
    }
  }
  static bool FromMojom(
      arc::mojom::BluetoothSdpAttributeType input,
      bluez::BluetoothServiceAttributeValueBlueZ::Type* output) {
    switch (input) {
      case arc::mojom::BluetoothSdpAttributeType::NULLTYPE:
      case arc::mojom::BluetoothSdpAttributeType::UINT:
      case arc::mojom::BluetoothSdpAttributeType::INT:
      case arc::mojom::BluetoothSdpAttributeType::UUID:
      case arc::mojom::BluetoothSdpAttributeType::STRING:
      case arc::mojom::BluetoothSdpAttributeType::BOOL:
      case arc::mojom::BluetoothSdpAttributeType::SEQUENCE:
      case arc::mojom::BluetoothSdpAttributeType::URL:
        *output = static_cast<bluez::BluetoothServiceAttributeValueBlueZ::Type>(
            input);
        return true;
      default:
        NOTREACHED() << "Invalid type: " << static_cast<uint32_t>(input);
        return false;
    }
  }
};
// Serializes a device::BluetoothUUID as its raw byte vector; Read() is
// defined out of line.
template <>
struct StructTraits<arc::mojom::BluetoothUUIDDataView, device::BluetoothUUID> {
  static std::vector<uint8_t> uuid(const device::BluetoothUUID& input);
  static bool Read(arc::mojom::BluetoothUUIDDataView data,
                   device::BluetoothUUID* output);
};
// Deserialization-only mapping: advertisements are only received from ARC,
// never sent to it, so the field getters below are NOTREACHED() stubs that
// exist solely to satisfy the StructTraits interface.
template <>
struct StructTraits<arc::mojom::BluetoothAdvertisementDataView,
                    std::unique_ptr<device::BluetoothAdvertisement::Data>> {
  static bool Read(
      arc::mojom::BluetoothAdvertisementDataView advertisement,
      std::unique_ptr<device::BluetoothAdvertisement::Data>* output);
  // Dummy methods.
  static arc::mojom::BluetoothAdvertisementType type(
      std::unique_ptr<device::BluetoothAdvertisement::Data>& input) {
    NOTREACHED();
    return arc::mojom::BluetoothAdvertisementType::ADV_TYPE_NON_CONNECTABLE;
  }
  static bool include_tx_power(
      std::unique_ptr<device::BluetoothAdvertisement::Data>& input) {
    NOTREACHED();
    return false;
  }
  static mojo::Array<arc::mojom::BluetoothAdvertisingDataPtr> data(
      std::unique_ptr<device::BluetoothAdvertisement::Data>& input) {
    NOTREACHED();
    return mojo::Array<arc::mojom::BluetoothAdvertisingDataPtr>();
  }
};
} // namespace mojo
#endif // COMPONENTS_ARC_BLUETOOTH_BLUETOOTH_STRUCT_TRAITS_H_
| ssaroha/node-webrtc | third_party/webrtc/include/chromium/src/components/arc/bluetooth/bluetooth_struct_traits.h | C | bsd-2-clause | 5,109 |
/*
Copyright (c) 2012, Tuomo Eljas Kaikkonen
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
*/
#ifndef _MY_SERIAL_H
#define _MY_SERIAL_H
#include <Arduino.h>
#include "Pic32JTAGDevice.h"
uint16_t GlobalCheckSum = 0;
// Blocks (busy-waits) until a character is available on the Arduino serial
// port, then returns it.
char RXChar(void)
{
	// Wait for inc character
	while(!Serial.available())
	{
		asm(" nop");
	}
	return Serial.read();
}
// Converts a single ASCII hex digit ('0'-'9', 'a'-'f', 'A'-'F') to its
// numeric value 0-15. Any character that is not a hex digit maps to 0.
unsigned char Ascii2Hex(unsigned char a)
{
	if (a >= '0' && a <= '9')
		return (unsigned char)(a - '0');
	if (a >= 'A' && a <= 'F')
		return (unsigned char)(a - 'A' + 10);
	if (a >= 'a' && a <= 'f')
		return (unsigned char)(a - 'a' + 10);
	return 0;
}
unsigned char RXAsciiByte(void)
{
unsigned char b1,b2;
b1 = RXChar();
b2 = RXChar();
b1 = Ascii2Hex(b1);
b2 = Ascii2Hex(b2);
b1 = ((b1<<4) + b2);
return b1;
}
// Reads four ASCII hex characters from serial (via two RXAsciiByte calls)
// and returns them as one big-endian 16-bit value.
uint16_t RXAsciiWord(void)
{
	uint16_t high = RXAsciiByte();
	uint16_t low = RXAsciiByte();
	return (uint16_t)((high << 8) + low);
}
// Drains and discards all remaining serial input (used to abort a HEX
// transfer after an error), showing a spinner once per second until the
// sender stops transmitting.
void ConsumeRestOfFile( void )
{
	uint16_t phase = 0;  // spinner animation phase
	Serial.println (F("\nPlease wait, throwing away the rest of the file."));
	do
	{
		while ( Serial.available() > 0 )
			Serial.read();
		Serial.print(F("\x1b[1;0H")); // goto row 1, column 0
		switch ((phase++)&0x3)
		{
			case 0: Serial.print(F("-")); break;
			case 1: Serial.print(F("\\")); break;
			case 2: Serial.print(F("|")); break;
			case 3: Serial.print(F("/")); break;
		}
		// Give the sender a second to push more data before re-checking.
		delay(1000);
	} while( Serial.available() > 0 );
	Serial.println();
	Serial.println();
	Serial.println();
}
// Receives an Intel HEX file over the serial port, record by record, and
// programs and/or verifies the data into PIC32 flash through the JTAG device.
// Supported record types: 00 (data), 01 (end of file), 04 (extended linear
// address). Each record's checksum is validated via GlobalCheckSum, which is
// reset per record and must accumulate every received byte (sum of all record
// bytes including the checksum byte == 0 mod 256 — see RXAsciiByte).
//   pic32   - JTAG programming interface
//   program - write each data record's words into flash
//   verify  - read back and compare each data record's words
void HexPgm( Pic32JTAGDevice pic32, bool program, bool verify )
{
	uint32_t flashAddr;
	uint8_t data[128];       // payload of one record; NOTE(review): a record
	                         // longer than 128 bytes would overflow this —
	                         // typical tools emit <=32-byte records, confirm.
	uint16_t line = 0;       // 1-based record counter, used in error messages
	uint16_t startCode = 0x0a;
	uint16_t byteCount;      // record payload length
	uint32_t address;        // 16-bit address field of the record
	uint32_t addressHi = 0;  // upper 16 address bits (from type-04 records)
	uint16_t recordType = 0;
	uint16_t i;
	uint16_t Status;         // NOTE(review): unused
	uint16_t phase = 0;      // spinner animation phase
	Serial.println (F("Send your .hex -file now."));
	while (recordType != 01) // end
	{
		++line;
		// Skip whitespace/control characters until the record start code;
		// reset the running checksum for the new record.
		do
		{
			startCode = RXChar();
			GlobalCheckSum = 0;
			if ( line == 1 )
			{
				// ANSI clear screen
				Serial.print(F("\x1b[2J\x1b[0;0H"));
			}
		} while (startCode <= ' ');
		if (startCode != ':')
		{
			Serial.print(F("Start code fail!"));
			// error
			ConsumeRestOfFile();
			return;
		}
		Serial.print(F("\x1b[1;0H")); // goto row 1, column 0
		switch ((phase++)&0x3)
		{
			case 0: Serial.print(F("-")); break;
			case 1: Serial.print(F("\\")); break;
			case 2: Serial.print(F("|")); break;
			case 3: Serial.print(F("/")); break;
		}
		// Record header: length, 16-bit address, type.
		byteCount = (uint16_t)RXAsciiByte();
		address = (uint32_t)RXAsciiWord();
		recordType = (uint16_t)RXAsciiByte();
		switch (recordType)
		{
			case 0:
				// Data record: read payload, then the checksum byte.
				for (i = 0; i < byteCount; ++i)
				{
					data[i] = RXAsciiByte();
				}
				RXAsciiByte(); /* checksum */
				if ((GlobalCheckSum&0x00FF) == 0)
				{
					flashAddr = (addressHi<<16) + (address);
					if ( program )
					{
						// Write the payload one 32-bit word at a time.
						uint32_t *curData;
						uint32_t byte = 0;
						uint32_t pgmAddr = flashAddr;
						curData = (uint32_t*)(data);
						while ( byte < byteCount )
						{
							//Serial.print (F("Flashing 0x"));Serial.print(pgmAddr, HEX);
							//Serial.print (F(" = 0x"));Serial.println( (*curData), HEX);
							pic32.DownloadData( 0, (*curData) );
							pic32.FlashOperation( NVMOP_WRITE_WORD, pgmAddr, 0 );
							byte += 4;
							pgmAddr += 4;
							++curData;
						}
					}
					if ( verify )
					{
						// Read back each word and compare against the payload.
						uint32_t *curData;
						int16_t byte = 0;
						curData = (uint32_t*)(data);
						while ( byte < byteCount )
						{
							//Serial.print (F("Verifying 0x"));Serial.print(flashAddr, HEX);
							uint32_t fdata = pic32.ReadFlashData( flashAddr );
							if ( (*curData) != fdata )
							{
								Serial.print (F("Verify failed at 0x"));
								Serial.println ( flashAddr, HEX );
								Serial.print ( F(" 0x"));
								Serial.print ( fdata, HEX );
								Serial.print ( F(" <> 0x") );
								Serial.println ( (*curData), HEX );
								ConsumeRestOfFile();
								return;
							}
							byte += 4;
							flashAddr += 4;
							++curData;
						}
					}
				}
				break;
			case 1:
				//end
				RXAsciiByte(); /*checksum */
				break;
			case 4:
				// Extended linear address: new upper 16 address bits.
				addressHi = RXAsciiWord();
				RXAsciiByte(); /* checksum */
				break;
			default:
				Serial.println(F("HEXfile error!"));
				ConsumeRestOfFile();
				return;
		}
		if ((GlobalCheckSum&0x00FF) != 00)
		{
			// checksum error
			Serial.print(F("Chksum fail line "));
			Serial.println(line);
			ConsumeRestOfFile();
			return;
		}
	}
	Serial.println(F(""));
	Serial.println(F("Done!"));
}
#endif
| RickGlimmer/ardupic32 | MySerial.h | C | bsd-2-clause | 7,744 |
#include <gtest/gtest.h>
#include <cpuinfo.h>
#include <cpuinfo-mock.h>
// Topology: the mocked Scaleway (Cavium ThunderX) system reports two
// identical logical processors.
TEST(PROCESSORS, count) {
	ASSERT_EQ(2, cpuinfo_processors_count);
}

TEST(PROCESSORS, non_null) {
	ASSERT_TRUE(cpuinfo_get_processors());
}

TEST(PROCESSORS, vendor) {
	// Every logical processor must report the Cavium vendor.
	for (uint32_t i = 0; i < cpuinfo_processors_count; i++) {
		ASSERT_EQ(cpuinfo_vendor_cavium, cpuinfo_get_processors()[i].vendor);
	}
}

TEST(PROCESSORS, uarch) {
	// Every logical processor must report the ThunderX microarchitecture.
	for (uint32_t i = 0; i < cpuinfo_processors_count; i++) {
		ASSERT_EQ(cpuinfo_uarch_thunderx, cpuinfo_get_processors()[i].uarch);
	}
}
// Instruction-set feature flags expected from the mocked ThunderX
// cpuinfo data.  Each TRUE/FALSE value pins the decoded feature bit.
TEST(ISA, thumb) {
	ASSERT_TRUE(cpuinfo_isa.thumb);
}

TEST(ISA, thumb2) {
	ASSERT_TRUE(cpuinfo_isa.thumb2);
}

TEST(ISA, thumbee) {
	ASSERT_FALSE(cpuinfo_isa.thumbee);
}

TEST(ISA, jazelle) {
	ASSERT_FALSE(cpuinfo_isa.jazelle);
}

TEST(ISA, armv5e) {
	ASSERT_TRUE(cpuinfo_isa.armv5e);
}

TEST(ISA, armv6) {
	ASSERT_TRUE(cpuinfo_isa.armv6);
}

TEST(ISA, armv6k) {
	ASSERT_TRUE(cpuinfo_isa.armv6k);
}

TEST(ISA, armv7) {
	ASSERT_TRUE(cpuinfo_isa.armv7);
}

TEST(ISA, armv7mp) {
	ASSERT_TRUE(cpuinfo_isa.armv7mp);
}

TEST(ISA, idiv) {
	ASSERT_TRUE(cpuinfo_isa.idiv);
}

TEST(ISA, vfpv2) {
	ASSERT_FALSE(cpuinfo_isa.vfpv2);
}

TEST(ISA, vfpv3) {
	ASSERT_TRUE(cpuinfo_isa.vfpv3);
}

TEST(ISA, d32) {
	ASSERT_TRUE(cpuinfo_isa.d32);
}

TEST(ISA, fp16) {
	ASSERT_TRUE(cpuinfo_isa.fp16);
}

TEST(ISA, fma) {
	ASSERT_TRUE(cpuinfo_isa.fma);
}

TEST(ISA, wmmx) {
	ASSERT_FALSE(cpuinfo_isa.wmmx);
}

TEST(ISA, wmmx2) {
	ASSERT_FALSE(cpuinfo_isa.wmmx2);
}

TEST(ISA, neon) {
	ASSERT_TRUE(cpuinfo_isa.neon);
}

TEST(ISA, aes) {
	ASSERT_TRUE(cpuinfo_isa.aes);
}

TEST(ISA, sha1) {
	ASSERT_TRUE(cpuinfo_isa.sha1);
}

TEST(ISA, sha2) {
	ASSERT_TRUE(cpuinfo_isa.sha2);
}

TEST(ISA, pmull) {
	ASSERT_TRUE(cpuinfo_isa.pmull);
}

TEST(ISA, crc32) {
	ASSERT_TRUE(cpuinfo_isa.crc32);
}

// AArch64-only feature bits; compiled out on 32-bit ARM builds.
#if CPUINFO_ARCH_ARM64
TEST(ISA, atomics) {
	ASSERT_TRUE(cpuinfo_isa.atomics);
}

TEST(ISA, rdm) {
	ASSERT_FALSE(cpuinfo_isa.rdm);
}

TEST(ISA, fp16arith) {
	ASSERT_FALSE(cpuinfo_isa.fp16arith);
}

TEST(ISA, jscvt) {
	ASSERT_FALSE(cpuinfo_isa.jscvt);
}

TEST(ISA, fcma) {
	ASSERT_FALSE(cpuinfo_isa.fcma);
}
#endif /* CPUINFO_ARCH_ARM64 */
// L1 instruction caches: one per core; ThunderX has a 78 KB, 4-way L1I
// with 64-byte lines (78*1024 / (4*64) = 312 sets).
TEST(L1I, count) {
	ASSERT_EQ(2, cpuinfo_get_l1i_caches_count());
}

TEST(L1I, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1i_caches());
}

TEST(L1I, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(78 * 1024, cpuinfo_get_l1i_cache(i)->size);
	}
}

TEST(L1I, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(312, cpuinfo_get_l1i_cache(i)->sets);
	}
}

TEST(L1I, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->partitions);
	}
}

TEST(L1I, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1i_cache(i)->line_size);
	}
}

TEST(L1I, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1i_cache(i)->flags);
	}
}

TEST(L1I, processors) {
	// Each L1I cache is private to exactly one processor.
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1i_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->processor_count);
	}
}
// L1 data caches: one per core; 32 KB, 4-way, 64-byte lines (128 sets).
TEST(L1D, count) {
	ASSERT_EQ(2, cpuinfo_get_l1d_caches_count());
}

TEST(L1D, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1d_caches());
}

TEST(L1D, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(32 * 1024, cpuinfo_get_l1d_cache(i)->size);
	}
}

TEST(L1D, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_l1d_cache(i)->associativity);
	}
}

TEST(L1D, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(128, cpuinfo_get_l1d_cache(i)->sets);
	}
}

TEST(L1D, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->partitions);
	}
}

TEST(L1D, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1d_cache(i)->line_size);
	}
}

TEST(L1D, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1d_cache(i)->flags);
	}
}

TEST(L1D, processors) {
	// Each L1D cache is private to exactly one processor.
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1d_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->processor_count);
	}
}
// L2: a single 16 MB, 8-way cache (32768 sets) shared by both cores.
TEST(L2, count) {
	ASSERT_EQ(1, cpuinfo_get_l2_caches_count());
}

TEST(L2, non_null) {
	ASSERT_TRUE(cpuinfo_get_l2_caches());
}

TEST(L2, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(16 * 1024 * 1024, cpuinfo_get_l2_cache(i)->size);
	}
}

TEST(L2, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(8, cpuinfo_get_l2_cache(i)->associativity);
	}
}

TEST(L2, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(32768, cpuinfo_get_l2_cache(i)->sets);
	}
}

TEST(L2, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l2_cache(i)->partitions);
	}
}

TEST(L2, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l2_cache(i)->line_size);
	}
}

TEST(L2, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->flags);
	}
}

TEST(L2, processors) {
	// The L2 cache spans both processors.
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->processor_start);
		ASSERT_EQ(2, cpuinfo_get_l2_cache(i)->processor_count);
	}
}

// No L3 or L4 caches are present on this system.
TEST(L3, none) {
	ASSERT_EQ(0, cpuinfo_get_l3_caches_count());
	ASSERT_FALSE(cpuinfo_get_l3_caches());
}

TEST(L4, none) {
	ASSERT_EQ(0, cpuinfo_get_l4_caches_count());
	ASSERT_FALSE(cpuinfo_get_l4_caches());
}
#include <scaleway.h>
int main(int argc, char* argv[]) {
	// Install the mocked /proc and /sys file contents (from scaleway.h)
	// before cpuinfo reads them, then run all assertions above.
	cpuinfo_mock_filesystem(filesystem);
	cpuinfo_initialize();
	::testing::InitGoogleTest(&argc, argv);
	return RUN_ALL_TESTS();
}
'''
Datastore via remote webdav connection
'''
from __future__ import unicode_literals
from future import standard_library
standard_library.install_aliases()
import os
import tarfile
import logging
from fs.contrib.davfs import DAVFS
from urllib.parse import urlparse
from contextlib import closing
from sumatra.core import component
from .archivingfs import ArchivingFileSystemDataStore, ArchivedDataFile, TIMESTAMP_FORMAT
class DavFsDataItem(ArchivedDataFile):
    """Data item stored inside a tar archive on a remote WebDAV share.

    The archive is opened through the owning store's DAVFS filesystem
    rather than the local disk.
    """

    def __init__(self, path, store):
        # store must be assigned first because ArchivedDataFile.__init__
        # calls _get_info(), which needs self.store.
        self.store = store
        super(DavFsDataItem, self).__init__(path, store)

    def get_content(self, max_length=None):
        """Return the item's content; at most max_length bytes if given.

        Fix: the remote file handle was previously never closed.
        """
        obj = self.store.dav_fs.open(self.tarfile_path, 'rb')
        try:
            with closing(tarfile.open(fileobj=obj)) as data_archive:
                f = data_archive.extractfile(self.path)
                try:
                    if max_length:
                        return f.read(max_length)
                    return f.read()
                finally:
                    f.close()
        finally:
            obj.close()

    # mandatory repeat
    content = property(fget=get_content)

    def _get_info(self):
        """Return the TarInfo member describing this item in its archive.

        Fix: close the remote file handle; the unreachable
        `return tarfile.TarInfo()` after the `with` block was removed.
        """
        obj = self.store.dav_fs.open(self.tarfile_path, 'rb')
        try:
            with closing(tarfile.open(fileobj=obj)) as data_archive:
                return data_archive.getmember(self.path)
        finally:
            obj.close()
@component
class DavFsDataStore(ArchivingFileSystemDataStore):
    """ArchivingFileSystemDataStore that archives to webdav storage"""

    data_item_class = DavFsDataItem

    def __init__(self, root, dav_url, dav_user=None, dav_pw=None):
        """
        root -- local root directory containing newly created data files.
        dav_url -- WebDAV endpoint URL; credentials embedded in the URL
                   are used unless dav_user/dav_pw are given explicitly.
        """
        super(DavFsDataStore, self).__init__(root)
        parsed = urlparse(dav_url)
        self.dav_user = dav_user or parsed.username
        self.dav_pw = dav_pw or parsed.password
        self.dav_url = parsed.geturl()
        self.dav_fs = DAVFS(url=self.dav_url, credentials={'username': self.dav_user, 'password': self.dav_pw})

    def __getstate__(self):
        # Pickle only the plain connection parameters, not the live
        # DAVFS handle.
        return {'root': self.root, 'dav_url': self.dav_url, 'dav_user': self.dav_user, 'dav_pw': self.dav_pw}

    def find_new_data(self, timestamp):
        """Finds newly created/changed data items"""
        new_files = self._find_new_data_files(timestamp)
        label = timestamp.strftime(TIMESTAMP_FORMAT)
        archive_paths = self._archive(label, new_files)
        return [DavFsDataItem(path, self).generate_key()
                for path in archive_paths]

    def _archive(self, label, files, delete_originals=True):
        """
        Archives files and, by default, deletes the originals.

        Returns the paths of the files inside the created archive.
        """
        fs = self.dav_fs
        if not fs.isdir(self.archive_store):
            fs.makedir(self.archive_store, recursive=True)
        tf_obj = fs.open(os.path.join(self.archive_store, label + ".tar.gz"), mode='wb')
        try:
            with tarfile.open(fileobj=tf_obj, mode='w:gz') as tf:
                logging.info("Archiving data to file %s" % tf.name)
                # Add data files
                archive_paths = []
                for file_path in files:
                    archive_path = os.path.join(label, file_path)
                    tf.add(os.path.join(self.root, file_path), archive_path)
                    archive_paths.append(archive_path)
        finally:
            # Fix: close the remote file even when archiving raises (it was
            # previously leaked on error).  The redundant tf.close() after
            # the `with` block has also been removed.
            tf_obj.close()
        # Delete original files.
        if delete_originals:
            for file_path in files:
                os.remove(os.path.join(self.root, file_path))
        self._last_label = label  # useful for testing
        return archive_paths
| open-research/sumatra | sumatra/datastore/davfs.py | Python | bsd-2-clause | 3,593 |
# Homebrew formula for Atlantis, a service that automates Terraform
# plan/apply workflows from pull requests.
class Atlantis < Formula
  desc "Terraform Pull Request Automation tool"
  homepage "https://www.runatlantis.io/"
  url "https://github.com/runatlantis/atlantis/archive/v0.12.0.tar.gz"
  sha256 "df7c17fef4b37347fc22e1c5bf3d2b29ad1b219e70b6bff111e38d8ee01102e4"

  bottle do
    cellar :any_skip_relocation
    sha256 "5c71acb4eaaa764b34c41449ea2ad0d8a4078daee94d96849d26316e01dba671" => :catalina
    sha256 "f0f4f37b72be4d8c0c66ed95cfb402f1c8693a11fcf9e470f88b0b2576b585a7" => :mojave
    sha256 "bee44196471c4ed1064b90daffbad24cbcb588bcada6013625e65fa7912005b1" => :high_sierra
  end

  depends_on "go" => :build
  depends_on "terraform"

  def install
    # -s -w strips symbol/debug tables; -trimpath removes build-machine paths.
    system "go", "build", "-ldflags", "-s -w", "-trimpath", "-o", bin/"atlantis"
  end

  test do
    system bin/"atlantis", "version"

    # Start a server with dummy GitHub credentials and check that the
    # root page answers with HTTP 200 and mentions "atlantis".
    port = 4141
    loglevel = "info"
    gh_args = "--gh-user INVALID --gh-token INVALID --gh-webhook-secret INVALID --repo-whitelist INVALID"
    command = bin/"atlantis server --atlantis-url http://invalid/ --port #{port} #{gh_args} --log-level #{loglevel}"
    pid = Process.spawn(command)
    system "sleep", "5"

    output = `curl -vk# 'http://localhost:#{port}/' 2>&1`
    assert_match %r{HTTP\/1.1 200 OK}m, output
    assert_match "atlantis", output
    Process.kill("TERM", pid)
  end
end
| edporras/homebrew-core | Formula/atlantis.rb | Ruby | bsd-2-clause | 1,289 |
class DSF_SIC_Map(object):
    """Join CRSP daily stock file (DSF) records with SIC industry names.

    Parameters
    ----------
    dsffile : str
        Path to the CRSP daily stock file (CSV) to load.
    sicfile : str
        Path to the tab-delimited SIC code table (one header line skipped).
    """

    def __init__(self, dsffile = 'crsp/dsf.csv', sicfile = 'sic_codes.txt'):
        # Fix: the dsffile argument was previously ignored in favor of a
        # hard-coded "dsf.csv".  Also replaced np.str/np.float (removed in
        # NumPy >= 1.24) with the builtin equivalents.
        self.dsf = pd.read_csv(dsffile, dtype = {'CUSIP': str, 'PRC': float}, na_values = {'PRC': '-'})
        self.sic = pd.read_table(sicfile, header = 1)
        self.sic.columns = ['HSICCD', 'SICNAME']

    def process(self, day = 20100101, columns = ['PERMNO', 'DATE', 'PRC', 'VOL', 'SHROUT', 'RET', 'HSICCD']):
        """Filter to dates >= day, keep `columns`, attach SIC names."""
        self.dsf_startdate(date = day)
        self.dsf_subset(to_keep = columns)
        self.sic_merge()

    def dsf_startdate(self, date = 20100101):
        # Drop all observations before `date` (YYYYMMDD integer).
        self.dsf = self.dsf[self.dsf.DATE >= date]

    def dsf_subset(self, to_keep = ['PERMNO', 'DATE', 'PRC', 'VOL', 'SHROUT', 'RET', 'HSICCD']):
        # Keep only the requested columns.
        self.dsf = self.dsf[to_keep]

    def sic_merge(self):
        # Left-join on the shared HSICCD column so every DSF row keeps
        # its industry name (NaN where the SIC code is unknown).
        self.clean_dsf = self.dsf.merge(self.sic, how = "left")
# Versioned formula for Clojure 1.4: installs the Clojure jar and a
# small `clj` shell wrapper around `java -cp ... clojure.main`.
class Clojure14 < Formula
  homepage "http://clojure.org/"
  url "http://repo1.maven.org/maven2/org/clojure/clojure/1.4.0/clojure-1.4.0.zip"
  sha256 "27a5a151d5cc1bc3e52dff47c66111e637fefeb42d9bedfa1284a1a31d080171"

  bottle :unneeded

  # Shell wrapper written to bin/clj at install time.
  def script; <<-EOS.undent
    #!/bin/sh
    # Clojure wrapper script.
    # With no arguments runs Clojure's REPL.
    # Put the Clojure jar from the cellar and the current folder in the classpath.
    CLOJURE=$CLASSPATH:#{prefix}/#{jar}:${PWD}
    if [ "$#" -eq 0 ]; then
      java -cp "$CLOJURE" clojure.main --repl
    else
      java -cp "$CLOJURE" clojure.main "$@"
    fi
    EOS
  end

  # Name of the installed jar, derived from the formula version.
  def jar
    "clojure-#{version}.jar"
  end

  def install
    prefix.install jar
    (prefix+jar).chmod(0644) # otherwise it's 0600
    (prefix+"classes").mkpath
    (bin+"clj").write script
  end

  def caveats; <<-EOS.undent
    If you `brew install repl` then you may find this wrapper script from
    MacPorts useful:
    http://trac.macports.org/browser/trunk/dports/lang/clojure/files/clj-rlwrap.sh?format=txt
    EOS
  end

  test do
    system "#{bin}/clj", "-e", '(println "Hello World")'
  end
end
| UniqMartin/homebrew-versions | clojure14.rb | Ruby | bsd-2-clause | 1,152 |
/*
Copyright (c) 2014, Alexey Frunze
2-clause BSD license.
*/
#include "istdio.h"
/*
 fscanf() - reads formatted input from stream f according to fmt.

 SmallerC does not use a <stdarg.h> va_list here: the address just past
 the last named parameter (fmt) is computed manually and handed to
 __doscan() as a pointer to the first variadic argument.  This relies
 on SmallerC's calling convention laying parameters out contiguously
 on the stack -- do not "modernize" this to va_arg.
*/
int fscanf(FILE* f, char* fmt, ...)
{
  return __doscan(f, fmt, (char*)&fmt + sizeof(char*));
}
| gdos/SmallerC | v0100/srclib/fscanf.c | C | bsd-2-clause | 183 |
# Cask for the ePub Spotlight metadata importer, which lets Spotlight
# index metadata of .epub files.
cask 'epubmdimporter' do
  version '1.8'
  sha256 '740c288a6ad2c98d5c94cf6eccba3ef535faaeda5ad408a897f84d4b324d16e0'

  url "https://github.com/jaketmp/ePub-quicklook/releases/download/v#{version}/epub.mdimporter.zip"
  appcast 'https://github.com/jaketmp/ePub-quicklook/releases.atom',
          checkpoint: '4ce0d3ba1834210a5eabc0b3fc1424c0f8b7a304f8db73beb6c2ffd11fe78756'
  name 'EPUB Spotlight'
  homepage 'https://github.com/jaketmp/ePub-quicklook'

  # Installed under the user's Spotlight folder; the "AA_" prefix is
  # presumably to make this importer sort first -- unverified.
  artifact 'epub.mdimporter', target: "#{ENV['HOME']}/Library/Spotlight/AA_epub.mdimporter"

  postflight do
    # Ask Spotlight to (re)run the importer so indexing starts immediately.
    system_command '/usr/bin/mdimport', args: ['-r', "#{ENV['HOME']}/Library/Spotlight/AA_epub.mdimporter"]
  end
end
| malford/homebrew-cask | Casks/epubmdimporter.rb | Ruby | bsd-2-clause | 683 |
// Copyright (c) 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef PDFIUM_THIRD_PARTY_BASE_LOGGING_H_
#define PDFIUM_THIRD_PARTY_BASE_LOGGING_H_
#include <stdlib.h>
// CHECK(condition): terminate immediately when |condition| is false.
// abort() normally ends the process; the subsequent write through a
// near-null pointer forces a crash even if abort() were intercepted.
// Fix: wrapped in do/while(0) so `CHECK(x);` behaves as a single
// statement -- the previous bare `if` changed meaning when used as
// `if (c) CHECK(x); else ...` (dangling-else hazard).
#define CHECK(condition)                                       \
  do {                                                         \
    if (!(condition)) {                                        \
      abort();                                                 \
      *(reinterpret_cast<volatile char*>(NULL) + 42) = 0x42;   \
    }                                                          \
  } while (0)

// NOTREACHED(): marks code paths that must never execute.
#define NOTREACHED() abort()
#endif // PDFIUM_THIRD_PARTY_BASE_LOGGING_H_
| tojocky/node-pdfium | third_party/pdfium/third_party/base/logging.h | C | bsd-2-clause | 655 |
# Cask for the Typora Markdown editor.
cask "typora" do
  version "1.0.4"
  sha256 "a59b7a653a55eb750b0da50245f866c60c0faa030ec3cadef93d6ae165a697ea"

  url "https://download.typora.io/mac/Typora-#{version}.dmg"
  name "Typora"
  desc "Configurable document editor that supports Markdown"
  homepage "https://typora.io/"

  # New versions are discovered from the vendor's update feed.
  livecheck do
    url "https://www.typora.io/download/dev_update.xml"
    regex(/Typora-(\d+(?:\.\d+)+)\.dmg/i)
  end

  auto_updates true
  depends_on macos: ">= :high_sierra"

  app "Typora.app"

  # Files removed by `brew uninstall --zap`.
  zap trash: [
    "~/Library/Application Support/abnerworks.Typora",
    "~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/abnerworks.typora.sfl*",
    "~/Library/Application Support/Typora",
    "~/Library/Caches/abnerworks.Typora",
    "~/Library/Cookies/abnerworks.Typora.binarycookies",
    "~/Library/Preferences/abnerworks.Typora.plist",
    "~/Library/Saved Application State/abnerworks.Typora.savedState",
    "~/Library/WebKit/abnerworks.Typora",
  ]
end
| scottsuch/homebrew-cask | Casks/typora.rb | Ruby | bsd-2-clause | 1,007 |
package core
import (
"github.com/MG-RAST/AWE/lib/core/cwl"
"github.com/MG-RAST/AWE/lib/logger"
)
// SetCounter enumerates index combinations for scattering over one or
// more input arrays.
type SetCounter struct {
	Counter []int // current index into each scattered array
	Max []int // last valid index (length-1) of each array; needed for carry-over
	NumberOfSets int // number of arrays being scattered over
	Scatter_type string // "cross" for cross-product; anything else is treated as dot-product
	//position_in_counter int
}
// NewSetCounter builds a SetCounter over the given scatter arrays.
// numberOfSets is the number of arrays, array supplies the arrays whose
// lengths bound each counter position, and scatter_type selects the
// iteration mode ("cross" or dot-product).
func NewSetCounter(numberOfSets int, array []cwl.Array, scatter_type string) (sc *SetCounter) {
	logger.Debug(3, "(NewSetCounter) numberOfSets: %d", numberOfSets)
	logger.Debug(3, "(NewSetCounter) array: %d", len(array))
	logger.Debug(3, "(NewSetCounter) scatter_type: %s", scatter_type)

	positions := make([]int, numberOfSets)
	limits := make([]int, numberOfSets)
	for idx := 0; idx < numberOfSets; idx++ {
		positions[idx] = 0
		// Max holds the last valid index; required to detect
		// carry-over, e.g. 9+1 wraps to 0.
		limits[idx] = array[idx].Len() - 1
	}

	sc = &SetCounter{
		Counter:      positions,
		Max:          limits,
		NumberOfSets: numberOfSets,
		Scatter_type: scatter_type,
	}
	return
}
// Increment advances the counter to the next index combination.
// It returns false once every combination has been produced.
func (sc *SetCounter) Increment() (ok bool) {
	if sc.Scatter_type == "cross" {
		// Cross-product: odometer-style increment starting at the
		// rightmost position, carrying into the next position on wrap.
		for pos := sc.NumberOfSets - 1; pos >= 0; pos-- {
			if sc.Counter[pos] < sc.Max[pos] {
				sc.Counter[pos] += 1
				return true
			}
			// Position exhausted: wrap to zero and carry over.
			sc.Counter[pos] = 0
		}
		// All positions wrapped: enumeration complete.
		return false
	}

	// Dot-product: all arrays advance in lock-step, so inspecting the
	// first position is enough to detect the end.  Not the most
	// efficient scheme, but it keeps a single counter for both modes.
	if sc.Counter[0] >= sc.Max[0] {
		return false
	}
	for pos := sc.NumberOfSets - 1; pos >= 0; pos-- {
		sc.Counter[pos] += 1
	}
	return true
}
| wgerlach/AWE | lib/core/setcounter.go | GO | bsd-2-clause | 2,262 |
# Legacy-style cask (pre-DSL `Cask` superclass) for Flavours.
class Flavours < Cask
  url 'http://flavours-updates.interacto.net/Flavours.dmg'
  homepage 'http://flavours.interacto.net/'
  version 'latest'
  sha256 :no_check

  link 'Flavours.app'

  caveats do
    files_in_usr_local
  end

  caskroom_only true # hack to make uninstall fire

  # The app installs helper binaries and libraries outside the bundle;
  # remove them all on uninstall.
  uninstall :launchctl => 'net.interacto.flavours.helper',
            :quit => 'net.interacto.Flavours',
            :files => [
                       '/usr/local/bin/flavours.agent',
                       '/usr/local/bin/flavours.ignitor',
                       '/usr/local/bin/restoreui',
                       '/usr/local/lib/libflavours.dylib',
                       '/usr/local/lib/libflavoursui.dylib',
                      ]
end
| okonomi/homebrew-cask | Casks/flavours.rb | Ruby | bsd-2-clause | 727 |
//
// Programmer: Craig Stuart Sapp <craig@ccrma.stanford.edu>
// Creation Date: Sat Aug 6 10:53:40 CEST 2016
// Last Modified: Sun Sep 18 14:16:18 PDT 2016
// Filename: MxmlPart.cpp
// URL: https://github.com/craigsapp/musicxml2hum/blob/master/src/MxmlPart.cpp
// Syntax: C++11; humlib
// vim: ts=3 noexpandtab
//
// Description: MusicXML parsing abstraction for part elements which
// contain a list of measures.
//
// part element documentation:
// http://usermanuals.musicxml.com/MusicXML/Content/EL-MusicXML-part.htm
//
#include "MxmlMeasure.h"
#include "MxmlPart.h"
#include "pugiconfig.hpp"
#include "pugixml.hpp"
#include <stdlib.h>
#include <algorithm>
#include <iostream>
#include <vector>
#include <map>
using namespace pugi;
using namespace std;
namespace hum {
// START_MERGE
class MxmlMeasure;
class MxmlPart;
//////////////////////////////
//
// MxmlPart::MxmlPart -- Constructor.
//
MxmlPart::MxmlPart(void) {
	// Start from a fully cleared state.
	clear();
}



//////////////////////////////
//
// MxmlPart::~MxmlPart -- Deconstructor.  Frees all owned measures.
//

MxmlPart::~MxmlPart(void) {
	clear();
}



//////////////////////////////
//
// MxmlPart::clear -- Clear all internal variables of object.  The part
//     owns its MxmlMeasure objects, so they are deleted here.
//

void MxmlPart::clear(void) {
	for (int i=0; i<(int)m_measures.size(); i++) {
		delete m_measures[i];
		m_measures[i] = NULL;
	}
	m_measures.clear();
	m_partnum = 0;
	m_maxstaff = 0;
	m_verseCount.resize(0);
	m_harmonyCount = 0;
	m_editorialAccidental = false;
}



//////////////////////////////
//
// MxmlPart::enableStems -- Request that stem information be preserved
//     when measures are parsed.
//

void MxmlPart::enableStems(void) {
	m_stems = true;
}
//////////////////////////////
//
// MxmlPart::getQTicks -- Return the current divisions element value,
// which are the number of integer ticks representing a quarter-note
// duration.
//
long MxmlPart::getQTicks(void) const {
	// Returns 0 when no divisions value has been recorded yet.
	if (m_qtick.size() > 0) {
		return m_qtick.back();
	} else {
		return 0;
	}
}



//////////////////////////////
//
// MxmlPart::setQTicks -- Set the current attribute/divisions value,
//     which is the number of integer ticks representing a quarter-note
//     duration.  Negative and repeated values are ignored.  Returns the
//     number of distinct divisions values recorded so far.
//

int MxmlPart::setQTicks(long value) {
	if (value < 0) {
		return (int)m_qtick.size();
	}
	if (m_qtick.size() > 0) {
		if (m_qtick.back() == value) {
			return (int)m_qtick.size();
		}
	}
	m_qtick.push_back(value);
	return (int)m_qtick.size();
}



//////////////////////////////
//
// MxmlPart::addMeasure -- Append a new measure to the list of measure
//     elements, linking it to the previous measure.  Returns the status
//     of parsing the measure element.
//

bool MxmlPart::addMeasure(xpath_node mel) {
	return addMeasure(mel.node());
}


bool MxmlPart::addMeasure(xml_node mel) {
	MxmlMeasure* meas = new MxmlMeasure(this);
	if (m_stems) {
		meas->enableStems();
	}
	if (m_measures.size() > 0) {
		// Maintain the doubly-linked chain of measures.
		meas->setPreviousMeasure(m_measures.back());
		m_measures.back()->setNextMeasure(meas);
	}
	m_measures.push_back(meas);
	bool status = meas->parseMeasure(mel);
	return status;
}



//////////////////////////////
//
// MxmlPart::getMeasureCount -- Return the number of stored measures.
//

int MxmlPart::getMeasureCount(void) const {
	return (int)m_measures.size();
}
//////////////////////////////
//
// MxmlPart::getMeasure -- Get the measure number at the given index.
//
// Get the measure at the given index, or NULL when the index is out of
// range.  The original code used an odd "index - size >= 0" subtraction
// to suppress a signed/unsigned warning; an explicit cast is clearer.
MxmlMeasure* MxmlPart::getMeasure(int index) const {
	if (index < 0) {
		return NULL;
	}
	if (index >= (int)m_measures.size()) {
		return NULL;
	}
	return m_measures[index];
}
//////////////////////////////
//
// MxmlPart::getPreviousMeasure -- Given a measure, return the
// previous measure occuring before it.
//
// Given a measure, return the measure occurring before it, or NULL if
// there is none.  Fix: the empty-list check must come before comparing
// against *m_measures.begin() -- dereferencing begin() of an empty
// vector is undefined behavior.
MxmlMeasure* MxmlPart::getPreviousMeasure(MxmlMeasure* measure) const {
	if (!measure) {
		return NULL;
	}
	if (m_measures.empty()) {
		return NULL;
	}
	if (measure == m_measures.front()) {
		// The first measure has no predecessor.
		return NULL;
	}
	return measure->getPreviousMeasure();
}
//////////////////////////////
//
// MxmlPart::getDuration -- Return the duration of the part in units
// of quarter notes. This is a sum of the duration of all measures in
// the part.
//
HumNum MxmlPart::getDuration(void) const {
	// Duration of the part = start time of the last measure plus that
	// measure's own duration (all in quarter-note units).
	if (m_measures.size() == 0) {
		return 0;
	}
	return m_measures.back()->getStartTime() + m_measures.back()->getDuration();
}



//////////////////////////////
//
// MxmlPart::setPartNumber -- Set the part number for the part. Typically
//     starts at "1" for the top part in a system.
//

void MxmlPart::setPartNumber(int number) {
	m_partnum = number;
}



//////////////////////////////
//
// MxmlPart::getPartNumber -- Return the part number for the part.
//     Typically starts at "1" for the top part in a system.
//

int MxmlPart::getPartNumber(void) const {
	return m_partnum;
}



//////////////////////////////
//
// MxmlPart::getPartIndex -- Return the zero-based part index (part
//     number minus one).
//

int MxmlPart::getPartIndex(void) const {
	return m_partnum - 1;
}



//////////////////////////////
//
// MxmlPart::getStaffCount -- Return the number of staves which the part
//     contains, such as 2 for piano parts.  Reports 1 when no staff
//     number was ever recorded.
//

int MxmlPart::getStaffCount(void) const {
	if (!m_maxstaff) {
		return 1;
	} else {
		return m_maxstaff;
	}
}



//////////////////////////////
//
// MxmlPart::getHarmonyCount -- Return the number of harmony entries
//     reported for the part.
//

int MxmlPart::getHarmonyCount(void) const {
	return m_harmonyCount;
}



//////////////////////////////
//
// MxmlPart::hasEditorialAccidental -- Return true if part contains an
//     editorial accidental (represented as parentheses around the
//     accidental in MusicXML).
//

bool MxmlPart::hasEditorialAccidental(void) const {
	return m_editorialAccidental;
}



//////////////////////////////
//
// MxmlPart::hasDynamics -- Return true if a dynamic was reported for
//     this part.
//

bool MxmlPart::hasDynamics(void) const {
	return m_has_dynamics;
}



//////////////////////////////
//
// MxmlPart::hasFiguredBass -- Return true if figured bass was reported
//     for this part.
//

bool MxmlPart::hasFiguredBass(void) const {
	return m_has_figured_bass;
}



//////////////////////////////
//
// MxmlPart::getVerseCount -- Return the number of verses in the part
//     (for the whole part, or for a given staff index).
//

int MxmlPart::getVerseCount(void) const {
	// Index 0 stores the part-wide verse count.
	if (m_verseCount.size() == 0) {
		return 0;
	} else {
		return m_verseCount[0];
	}
}


int MxmlPart::getVerseCount(int staffindex) const {
	// Staff-indexed counts are stored starting at index 1.
	int staffnum = staffindex + 1;
	if (staffnum < (int)m_verseCount.size()) {
		return m_verseCount[staffnum];
	} else {
		return 0;
	}
}
//////////////////////////////
//
// MxmlPart::getCaesura -- Returns the RDF marker for a caesura in **kern
// data (or an empty string if there is no marker defined).
//
string MxmlPart::getCaesura(void) const {
	// Empty string means no caesura RDF marker has been defined.
	return m_caesura;
}



//////////////////////////////
//
// MxmlPart::receiveHarmonyCount -- Record the number of harmony entries
//     found while parsing this part.
//

void MxmlPart::receiveHarmonyCount(int count) {
	m_harmonyCount = count;
}



//////////////////////////////
//
// MxmlPart::receiveDynamic -- Note that a dynamic was seen in this part.
//

void MxmlPart::receiveDynamic(void) {
	m_has_dynamics = true;
}



//////////////////////////////
//
// MxmlPart::receiveFiguredBass -- Note that figured bass was seen in
//     this part.
//

void MxmlPart::receiveFiguredBass(void) {
	m_has_figured_bass = true;
}



//////////////////////////////
//
// MxmlPart::receiveCaesura -- Record the RDF marker letter used for
//     caesuras in **kern data.
//

void MxmlPart::receiveCaesura(const string& letter) {
	m_caesura = letter;
}



//////////////////////////////
//
// MxmlPart::receiveOrnament -- Note that an ornament was seen in this
//     part.
//

void MxmlPart::receiveOrnament(void) {
	m_hasOrnaments = true;
}



//////////////////////////////
//
// MxmlPart::hasOrnaments -- Return true if an ornament was reported.
//

bool MxmlPart::hasOrnaments(void) const {
	return m_hasOrnaments;
}



//////////////////////////////
//
// MxmlPart::receiveEditorialAccidental -- Note that an editorial
//     accidental was seen in this part.
//

void MxmlPart::receiveEditorialAccidental(void) {
	m_editorialAccidental = true;
}
//////////////////////////////
//
// MxmlPart::receiveVerseCount --
//
// Record the maximum verse count for the part (staff index 0) or for a
// specific staff.  Counts are stored at staffindex+1 in m_verseCount;
// the vector grows on demand, zero-filling new slots.
// Fix: the original one-argument overload contained a dead, empty
// "if (count > 0) { }" block, which has been removed.
void MxmlPart::receiveVerseCount(int count) {
	receiveVerseCount(0, count);
}


void MxmlPart::receiveVerseCount(int staffindex, int count) {
	int staffnum = staffindex + 1;
	if (staffnum < 0) {
		return;
	}
	if (staffnum < (int)m_verseCount.size()) {
		// Keep the maximum count seen for this staff.
		if (count > m_verseCount[staffnum]) {
			m_verseCount[staffnum] = count;
		}
	} else {
		// Grow the vector, initializing any intermediate staves to 0.
		int oldsize = (int)m_verseCount.size();
		int newsize = staffnum + 1;
		m_verseCount.resize(newsize);
		for (int i=oldsize; i<newsize; i++) {
			m_verseCount[i] = 0;
		}
		m_verseCount[staffnum] = count;
	}
}
///////////////////////////////////////////////////////////////////////////
//
// private functions --
//

//////////////////////////////
//
// MxmlPart::receiveStaffNumberFromChild -- Receive a staff number
//     placement for a note or rest and pass it along to the part class
//     so that it can keep track of the maximum staff number used in
//     the part.  (Banner previously misattributed this function to
//     MxmlMeasure.)
//

void MxmlPart::receiveStaffNumberFromChild(int staffnum, int voicenum) {
	if (m_maxstaff < staffnum) {
		m_maxstaff = staffnum;
	}
	trackStaffVoices(staffnum, voicenum);
}



//////////////////////////////
//
// MxmlPart::trackStaffVoices -- Keep track of which staff voices
//     occur on. This will be used later to assign voices to
//     spines, and to make notes in the voice which are not on
//     the home staff (cross-staff beaming, etc).
//

void MxmlPart::trackStaffVoices(int staffnum, int voicenum) {
	vector<vector<int> >& sv = m_staffvoicehist;
	if (staffnum < 0) {
		return;
	}
	if (voicenum < 0) {
		return;
	}
	// Grow the histogram on demand, zero-filling new voice slots.
	if (staffnum >= (int)sv.size()) {
		sv.resize(staffnum+1);
	}
	if (voicenum >= (int)sv[staffnum].size()) {
		int oldsize = (int)sv[staffnum].size();
		int newsize = voicenum + 1;
		sv[staffnum].resize(newsize);
		for (int i=oldsize; i<newsize; i++) {
			sv[staffnum][i] = 0;
		}
	}
	sv[staffnum][voicenum]++;
}
//////////////////////////////
//
// MxmlPart::prepareVoiceMapping -- Takes the histogram of staff/voice
// pairings and create a list of new voice indexes for each
// staff. In Finale & Sibelius, four voices are hardwired to each
// staff: staff1 {1, 2, 3, 4}, staff2 {5, 6, 7, 8}. But some
// software will not use this, instead: staff1 {1}, staff2 {2}.
// The m_voicemapping variable will re-index voice numbers independently
// for each staff:
// staff1 {1, 2, 3, 4}, staff2 {5, 6, 7, 8}
// staff1 {0, 1, 2, 3}, staff2 {0, 1, 2, 3}
// and:
// staff1 {1}, staff2 {2}
// staff1 {0}, staff2 {0}
// strange cases such as this should also work:
// staff1 {1, 3, 5, 7}, staff2 {2, 4, 6, 8}
// staff1 {0, 1, 2, 3}, staff2 {0, 1, 2, 3}
// A voice is assigned to the most common staff on which its note/rests
// occur.
//
// voicenum in MusicXML is mapped to a (staffindex, voiceindex) pair
// vector<pair<int, int> > m_voicemapping;
//
// Example mapping process:
// First, start with a histogram of staff/voice numbers in MusicXML file:
// STAFF 0: 55
// STAFF 1: 0 98
// STAFF 2: 39 0 41
// In this case staff1 has a single voice numbered "1" (with 98 counts)
// And staff2 has a single voice, numbered "2". The final mapping
// in m_voicemapping is:
// 0 (voice number 1) => staffindex 0, voiceindex 0
// 1 (voice number 2) => staffindex 1, voiceindex 0
// staff0 and voice0 assignments are ignored, since there are not
// for notes (usually measures which are on staff0/voice0, and
// non-notes such as harmony which will be attached to a staff with
// but voice0, but ignored at least for now.
//
void MxmlPart::prepareVoiceMapping(void) {
	vector<vector<int> >& sv = m_staffvoicehist;
	int staffcount = (int)sv.size() - 1;
	if (staffcount < 1) {
		// No staff data collected; nothing to map.
		return;
	}
	int i, j;
	int maxvoicenum = 0;
	// a staff without any voices will probably cause problems,
	// so maybe check for such a case. 0th position in sv is
	// not used, so maxvoicenum is an index for sv.
	for (i=1; i<(int)sv.size(); i++) {
		if ((int)sv[i].size() - 1 > maxvoicenum) {
			maxvoicenum = (int)sv[i].size() - 1;
		}
	}

	// reindex voice numbers to voice indexes on staves:
	// m_voicemapping[homevoicenum] => {homestaffindex, newvoiceindex}
	pair<int, int> empty;
	empty.first = -1;
	empty.second = -1;
	int homestaffnum;
	int homevoicenum;
	int newvoiceindex;
	int count;
	int maxcount;

	// for each voice number in the MusicXML data, assign
	// a voiceindex for it on each staff.
	for (j=1; j<=maxvoicenum; j++) {
		maxcount = -1;
		homestaffnum = -1;
		homevoicenum = -1;
		// The "home" staff of a voice is the staff on which most of its
		// notes/rests occurred.
		for (i=1; i<(int)sv.size(); i++) {
			if (j >= (int)sv[i].size()) {
				continue;
			}
			count = sv[i][j];
			if ((count > 0) && (maxcount < count)) {
				maxcount = count;
				homestaffnum = i;
				homevoicenum = j;
			}
		}
		if (homestaffnum < 1) {
			continue;
		}
		if (homevoicenum < 1) {
			continue;
		}
		// find highest newvoiceindex for the current staff
		newvoiceindex = -1;
		for (int n=1; n<(int)m_voicemapping.size(); n++) {
			if (m_voicemapping[n].first == homestaffnum - 1) {
				newvoiceindex++;
			}
		}
		// assign to next highest newvoiceindex for staff:
		newvoiceindex++;
		// add the new mapping for homevoicenum to (staffindex, newvoiceindex)
		if (homevoicenum >= (int)m_voicemapping.size()) {
			// Grow the mapping table, filling gaps with (-1, -1).
			int oldsize = (int)m_voicemapping.size();
			int newsize = homevoicenum + 1;
			m_voicemapping.resize(newsize);
			for (int m=oldsize; m<newsize; m++) {
				m_voicemapping[m] = empty;
			}
			m_voicemapping[homevoicenum].first = homestaffnum - 1;
			m_voicemapping[homevoicenum].second = newvoiceindex;
		} else {
			m_voicemapping[homevoicenum].first = homestaffnum - 1;
			m_voicemapping[homevoicenum].second = newvoiceindex;
		}
	}
}
//////////////////////////////
//
// MxmlPart::getVoiceIndex -- Convert a MusicXML voice number to
// a voice index on a particular staff.
//
int MxmlPart::getVoiceIndex(int voicenum) {
	// Returns -1 for unknown/unmapped voice numbers.
	if (voicenum < 1) {
		return -1;
	}
	if (voicenum >= (int)m_voicemapping.size()) {
		return -1;
	}
	return m_voicemapping[voicenum].second;
}



//////////////////////////////
//
// MxmlPart::getStaffIndex -- Convert a MusicXML voice number to the
//     staff index the voice was assigned to.  (Banner previously
//     repeated the getVoiceIndex description.)
//

int MxmlPart::getStaffIndex(int voicenum) {
	// Returns -1 for unknown/unmapped voice numbers.
	if (voicenum < 1) {
		return -1;
	}
	if (voicenum >= (int)m_voicemapping.size()) {
		return -1;
	}
	return m_voicemapping[voicenum].first;
}



//////////////////////////////
//
// MxmlPart::printStaffVoiceInfo -- Print the staff/voice histogram and
//     the resulting voice remapping as !!-prefixed reference lines.
//

void MxmlPart::printStaffVoiceInfo(void) {
	vector<vector<int> >& sv = m_staffvoicehist;
	int i, j;
	cout << "\n!!STAFF-VOICE MAPPING:\n";
	for (i=0; i<(int)sv.size(); i++) {
		cout << "!!\tSTAFF " << i << ":";
		for (j=0; j<(int)sv[i].size(); j++) {
			cout << "\t" << sv[i][j];
		}
		cout << endl;
	}
	cout << "!!REMAPPING:\n";
	for (i=1; i<(int)m_voicemapping.size(); i++) {
		cout << "!!\tvoicenum " << i << ":\t(";
		cout << m_voicemapping[i].first << ", ";
		cout << m_voicemapping[i].second << ")\n";
	}
	cout << endl;
}
//////////////////////////
//
// MxmlPart::parsePartInfo -- find the part name and part abbreviation
// if there are any.
//
// Example:
// <score-part id="P1">
// <part-name>Alto</part-name>
// <part-name-display>
// <display-text>Alto</display-text>
// </part-name-display>
// <part-abbreviation> </part-abbreviation>
// <part-abbreviation-display>
// <display-text> </display-text>
// </part-abbreviation-display>
// <score-instrument id="P1-I1">
// <instrument-name> </instrument-name>
// </score-instrument>
// </score-part>
//
void MxmlPart::parsePartInfo(xml_node partinfo) {
	// Extract the part name from <part-name>, if present.
	xml_node nameNode = partinfo.select_node("./part-name").node();
	if (nameNode) {
		m_partname = cleanSpaces(nameNode.child_value());
	}
	// Extract the part abbreviation from <part-abbreviation>, if present.
	xml_node abbrNode = partinfo.select_node("./part-abbreviation").node();
	if (abbrNode) {
		m_partabbr = cleanSpaces(abbrNode.child_value());
	}
}
//////////////////////////////
//
// MxmlPart::getPartName --
//
string MxmlPart::getPartName(void) const {
	// Accessor for the part name extracted by parsePartInfo().
	return this->m_partname;
}
//////////////////////////////
//
// MxmlPart::getPartAbbr --
//
string MxmlPart::getPartAbbr(void) const {
	// Accessor for the part abbreviation extracted by parsePartInfo().
	return this->m_partabbr;
}
//////////////////////////////
//
// MxmlPart::cleanSpaces -- remove leading/trailing spaces in string.
//
string MxmlPart::cleanSpaces(const string& input) {
	// Removes leading whitespace, collapses internal runs of whitespace
	// into a single space, and removes a trailing space.
	string output;
	bool content = false;
	for (int i=0; i<(int)input.size(); i++) {
		// Note: isspace() is cast-protected because passing a negative
		// char (e.g. from UTF-8 bytes) is undefined behavior.
		if ((!content) && isspace((unsigned char)input[i])) {
			// Skip leading whitespace.
			continue;
		}
		content = true;
		if (isspace((unsigned char)input[i]) && isspace((unsigned char)input[i-1])) {
			// Collapse runs of whitespace into one character.
			continue;
		}
		if (isspace((unsigned char)input[i])) {
			output += ' ';
		} else {
			output += input[i];
		}
	}
	// Guard against empty output (empty or all-whitespace input), which
	// previously caused undefined behavior via output.back().
	if (!output.empty() && isspace((unsigned char)output.back())) {
		output.resize(output.size() - 1);
	}
	return output;
}
// END_MERGE
} // end namespace hum
| craigsapp/humlib | src/MxmlPart.cpp | C++ | bsd-2-clause | 16,652 |
require "formula"

# Homebrew formula for ODE (Open Dynamics Engine).
class Ode < Formula
  homepage "http://www.ode.org/"
  url "https://downloads.sourceforge.net/project/opende/ODE/0.13/ode-0.13.tar.bz2"
  sha1 "0279d58cc390ff5cc048f2baf96cff23887f3838"

  head do
    url "http://opende.svn.sourceforge.net/svnroot/opende/trunk"
    depends_on :autoconf
    depends_on :automake
    depends_on :libtool
  end

  option "enable-double-precision", "Compile ODE with double precision"

  depends_on "pkg-config" => :build

  def install
    # Demos are disabled; double precision is opt-in via the option above.
    args = []
    args << "--prefix=#{prefix}"
    args << "--disable-demos"
    args << "--enable-double-precision" if build.include? "enable-double-precision"

    if build.head?
      # HEAD builds regenerate the autotools files; use glibtoolize on macOS.
      ENV["LIBTOOLIZE"] = "glibtoolize"
      inreplace "autogen.sh", "libtoolize", "$LIBTOOLIZE"
      system "./autogen.sh"
    end

    system "./configure", *args
    system "make"
    system "make install"
  end
end
| justjake/linuxbrew | Library/Formula/ode.rb | Ruby | bsd-2-clause | 876 |
/*
* Copyright 2014 Attila Szegedi, Daniel Dekany, Jonathan Revusky
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package freemarker.test.servlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
public interface Model2Action {

    /**
     * Executes the action and returns the web-application-relative path of the
     * view (JSP or FTL) to forward to. The architecture is meant to be similar
     * to JSP Model 2; thus, the FreeMarker data-model variables are meant to be
     * created as servlet-scope (request, session, etc.) attributes.
     *
     * @return The servlet-relative path to forward to, or {@code null} if the
     *         view is expected to be specified with the
     *         {@value Model2TesterServlet#VIEW_PARAM_NAME} request parameter.
     */
    String execute(final HttpServletRequest req, final HttpServletResponse resp) throws Exception;

}
| ekollof/DarkUniverse | lib/Freemarker/source/src/test/java/freemarker/test/servlet/Model2Action.java | Java | bsd-2-clause | 1,428 |
/*
* Copyright (C) 2005 Allan Sandfeld Jensen (kde@carewolf.com)
* Copyright (C) 2006, 2007 Apple Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#include "third_party/blink/renderer/core/layout/counter_node.h"
#include "base/numerics/checked_math.h"
#include "third_party/blink/renderer/core/layout/layout_counter.h"
#include "third_party/blink/renderer/platform/heap/handle.h"
#if DCHECK_IS_ON()
#include <stdio.h>
#endif
namespace blink {
// Builds a counter node owned by layout object |o|. |type_mask| carries the
// behavior bits (reset/set, see HasResetType()/HasSetType()) and |value| the
// counter directive's operand. Tree links and the LayoutCounter list start
// out null.
CounterNode::CounterNode(LayoutObject& o, unsigned type_mask, int value)
    : type_mask_(type_mask),
      value_(value),
      count_in_parent_(0),
      owner_(&o),
      root_layout_object_(nullptr),
      parent_(nullptr),
      previous_sibling_(nullptr),
      next_sibling_(nullptr),
      first_child_(nullptr),
      last_child_(nullptr) {}
// Detaches this node from a (possibly already-broken) counter tree and
// invalidates all LayoutCounters attached to it. Children are handed over
// to the old parent.
void CounterNode::Destroy() {
  // Ideally this would be an assert and this would never be reached. In reality
  // this happens a lot so we need to handle these cases. The node is still
  // connected to the tree so we need to detach it.
  if (parent_ || previous_sibling_ || next_sibling_ || first_child_ ||
      last_child_) {
    CounterNode* old_parent = nullptr;
    CounterNode* old_previous_sibling = nullptr;
    // Instead of calling removeChild() we do this safely as the tree is likely
    // broken if we get here.
    // Unlink from the parent's child list.
    if (parent_) {
      if (parent_->first_child_ == this)
        parent_->first_child_ = next_sibling_;
      if (parent_->last_child_ == this)
        parent_->last_child_ = previous_sibling_;
      old_parent = parent_;
      parent_ = nullptr;
    }
    // Unlink from the previous sibling.
    if (previous_sibling_) {
      if (previous_sibling_->next_sibling_ == this)
        previous_sibling_->next_sibling_ = next_sibling_;
      old_previous_sibling = previous_sibling_;
      previous_sibling_ = nullptr;
    }
    // Unlink from the next sibling.
    if (next_sibling_) {
      if (next_sibling_->previous_sibling_ == this)
        next_sibling_->previous_sibling_ = old_previous_sibling;
      next_sibling_ = nullptr;
    }
    if (first_child_) {
      // The node's children are reparented to the old parent.
      // NOTE(review): when there is no old previous sibling the children only
      // get their parent_ pointer updated and are not spliced into the old
      // parent's sibling list — presumably acceptable for a broken tree;
      // confirm against callers.
      for (CounterNode* child = first_child_; child;) {
        CounterNode* next_child = child->next_sibling_;
        CounterNode* next_sibling = nullptr;
        child->parent_ = old_parent;
        if (old_previous_sibling) {
          next_sibling = old_previous_sibling->next_sibling_;
          child->previous_sibling_ = old_previous_sibling;
          old_previous_sibling->next_sibling_ = child;
          child->next_sibling_ = next_sibling;
          next_sibling->previous_sibling_ = child;
          old_previous_sibling = child;
        }
        child = next_child;
      }
    }
  }
  ResetLayoutObjects();
}
// Garbage-collection support: reports every traced member reference held by
// this node (owner, attached LayoutCounter list head, and tree links).
void CounterNode::Trace(Visitor* visitor) const {
  visitor->Trace(owner_);
  visitor->Trace(root_layout_object_);
  visitor->Trace(parent_);
  visitor->Trace(previous_sibling_);
  visitor->Trace(next_sibling_);
  visitor->Trace(first_child_);
  visitor->Trace(last_child_);
}
CounterNode* CounterNode::NextInPreOrderAfterChildren(
    const CounterNode* stay_within) const {
  if (this == stay_within)
    return nullptr;
  // Climb the ancestor chain until a node with a next sibling is found;
  // stop (returning null) when the root or |stay_within| is reached.
  const CounterNode* node = this;
  while (!node->next_sibling_) {
    node = node->parent_;
    if (!node || node == stay_within)
      return nullptr;
  }
  return node->next_sibling_;
}
CounterNode* CounterNode::NextInPreOrder(const CounterNode* stay_within) const {
  // Pre-order traversal: descend into the first child when there is one,
  // otherwise advance past this subtree.
  CounterNode* first = first_child_;
  if (first)
    return first;
  return NextInPreOrderAfterChildren(stay_within);
}
CounterNode* CounterNode::LastDescendant() const {
  if (!last_child_)
    return nullptr;
  // Follow the last-child chain down to the deepest node.
  CounterNode* node = last_child_;
  while (node->last_child_)
    node = node->last_child_;
  return node;
}
CounterNode* CounterNode::PreviousInPreOrder() const {
  if (!previous_sibling_)
    return parent_;
  // The pre-order predecessor of a node with a previous sibling is that
  // sibling's deepest last descendant.
  CounterNode* node = previous_sibling_;
  while (node->last_child_)
    node = node->last_child_;
  return node;
}
// Computes this node's cumulative counter value within its parent, taking
// the previous sibling's count (or the parent's value for the first child)
// plus this node's increment.
int CounterNode::ComputeCountInParent() const {
  // According to the spec, if an increment would overflow or underflow the
  // counter, we are allowed to ignore the increment.
  // https://drafts.csswg.org/css-lists-3/#valdef-counter-reset-custom-ident-integer
  // If we have a set type, then we override parent value altogether, so the
  // result is just our value.
  if (HasSetType())
    return value_;
  // If we act as a reset, then we don't add anything on top of the parent count
  // (and we don't override it as we would with a set type).
  int increment = ActsAsReset() ? 0 : value_;
  if (previous_sibling_) {
    // ValueOrDefault keeps the previous count when the addition overflows,
    // i.e. the increment is dropped.
    return base::CheckAdd(previous_sibling_->count_in_parent_, increment)
        .ValueOrDefault(previous_sibling_->count_in_parent_);
  }
  DCHECK_EQ(parent_->first_child_, this);
  return base::CheckAdd(parent_->value_, increment)
      .ValueOrDefault(parent_->value_);
}
// Attaches |value| to this node by pushing it onto the head of the
// singly-linked LayoutCounter list rooted at root_layout_object_. The
// NOTREACHED() branches defensively repair inconsistent prior state.
void CounterNode::AddLayoutObject(LayoutCounter* value) {
  if (!value) {
    NOTREACHED();
    return;
  }
  // |value| should not already belong to a node; detach it if it does.
  if (value->counter_node_) {
    NOTREACHED();
    value->counter_node_->RemoveLayoutObject(value);
  }
  DCHECK(!value->next_for_same_counter_);
  // Defensive check: |value| must not already be in this node's list.
  for (LayoutCounter* iterator = root_layout_object_; iterator;
       iterator = iterator->next_for_same_counter_) {
    if (iterator == value) {
      NOTREACHED();
      return;
    }
  }
  // Push onto the head of the list and point |value| back at this node.
  value->next_for_same_counter_ = root_layout_object_.Get();
  root_layout_object_ = value;
  if (value->counter_node_ != this) {
    if (value->counter_node_) {
      NOTREACHED();
      value->counter_node_->RemoveLayoutObject(value);
    }
    value->counter_node_ = this;
  }
}
// Detaches |value| from this node's singly-linked LayoutCounter list and
// clears its back-pointer. Reaching the end of the list without finding
// |value| indicates corrupted state (NOTREACHED).
void CounterNode::RemoveLayoutObject(LayoutCounter* value) {
  if (!value) {
    NOTREACHED();
    return;
  }
  // If |value| is attached to a different node, remove it there instead.
  if (value->counter_node_ && value->counter_node_ != this) {
    NOTREACHED();
    value->counter_node_->RemoveLayoutObject(value);
  }
  // Standard singly-linked-list unlink: track the previous element.
  LayoutCounter* previous = nullptr;
  for (LayoutCounter* iterator = root_layout_object_; iterator;
       iterator = iterator->next_for_same_counter_) {
    if (iterator == value) {
      if (previous)
        previous->next_for_same_counter_ = value->next_for_same_counter_;
      else
        root_layout_object_ = value->next_for_same_counter_;
      value->next_for_same_counter_ = nullptr;
      value->counter_node_ = nullptr;
      return;
    }
    previous = iterator;
  }
  NOTREACHED();
}
// Invalidates every LayoutCounter attached to this node. Each Invalidate()
// call detaches that layout object from this node (see comment below), which
// advances root_layout_object_ and therefore terminates the loop.
void CounterNode::ResetLayoutObjects() {
  while (root_layout_object_) {
    // This makes m_rootLayoutObject point to the next layoutObject if any since
    // it disconnects the m_rootLayoutObject from this.
    root_layout_object_->Invalidate();
  }
}
// static
CounterNode* CounterNode::AncestorNodeAcrossStyleContainment(
    const LayoutObject& starting_object,
    const AtomicString& identifier) {
  // Only ancestors at or above the nearest style-containment boundary are
  // candidates; everything below that boundary is skipped.
  bool reached_containment = false;
  for (auto* ancestor = starting_object.Parent(); ancestor;
       ancestor = ancestor->Parent()) {
    if (ancestor->ShouldApplyStyleContainment())
      reached_containment = true;
    if (!reached_containment)
      continue;
    CounterMap* node_map = LayoutCounter::GetCounterMap(ancestor);
    if (node_map && node_map->Contains(identifier))
      return node_map->at(identifier);
  }
  return nullptr;
}
CounterNode* CounterNode::ParentCrossingStyleContainment(
    const AtomicString& identifier) const {
  // Prefer the in-tree parent; otherwise look for a counter ancestor on the
  // far side of a style-containment boundary.
  if (CounterNode* parent = parent_)
    return parent;
  return AncestorNodeAcrossStyleContainment(Owner(), identifier);
}
void CounterNode::ResetThisAndDescendantsLayoutObjects() {
  // Pre-order walk over this node and its whole subtree, invalidating the
  // LayoutCounters attached to each node.
  for (CounterNode* node = this; node; node = node->NextInPreOrder(this))
    node->ResetLayoutObjects();
}
// Recomputes count_in_parent_ for this node and the siblings after it. The
// loop stops at the first sibling whose count is unchanged, since each
// sibling's count is derived from the previous sibling's count (see
// ComputeCountInParent), so later siblings cannot change either.
void CounterNode::Recount() {
  for (CounterNode* node = this; node; node = node->next_sibling_) {
    int old_count = node->count_in_parent_;
    int new_count = node->ComputeCountInParent();
    if (old_count == new_count)
      break;
    node->count_in_parent_ = new_count;
    // Invalidate the LayoutCounters in this subtree so they pick up the new
    // value.
    node->ResetThisAndDescendantsLayoutObjects();
  }
}
// Inserts |new_child| as a child of this node immediately after |ref_child|
// (or as the first child when |ref_child| is null) and updates counts. A
// reset-type child destroys the trailing children it supersedes; a child
// that was formerly a root donates its own children to this node (see the
// second half below).
void CounterNode::InsertAfter(CounterNode* new_child,
                              CounterNode* ref_child,
                              const AtomicString& identifier) {
  DCHECK(new_child);
  DCHECK(!new_child->parent_);
  DCHECK(!new_child->previous_sibling_);
  DCHECK(!new_child->next_sibling_);
  // If the refChild is not our child we can not complete the request. This
  // hardens against bugs in LayoutCounter.
  // When layoutObjects are reparented it may request that we insert counter
  // nodes improperly.
  if (ref_child && ref_child->parent_ != this)
    return;
  // A reset-type node supersedes the children that would follow it, so
  // destroy everything after |ref_child|.
  if (new_child->HasResetType()) {
    while (last_child_ != ref_child)
      LayoutCounter::DestroyCounterNode(last_child_->Owner(), identifier);
  }
  // Splice |new_child| into the sibling list after |ref_child|.
  CounterNode* next = nullptr;
  if (ref_child) {
    next = ref_child->next_sibling_;
    ref_child->next_sibling_ = new_child;
  } else {
    next = first_child_;
    first_child_ = new_child;
  }
  new_child->parent_ = this;
  new_child->previous_sibling_ = ref_child;
  if (next) {
    DCHECK_EQ(next->previous_sibling_, ref_child);
    next->previous_sibling_ = new_child;
    new_child->next_sibling_ = next;
  } else {
    DCHECK_EQ(last_child_, ref_child);
    last_child_ = new_child;
  }
  // Simple case: |new_child| keeps no children of its own.
  if (!new_child->first_child_ || new_child->HasResetType()) {
    new_child->count_in_parent_ = new_child->ComputeCountInParent();
    new_child->ResetThisAndDescendantsLayoutObjects();
    if (next)
      next->Recount();
    return;
  }
  // The code below handles the case when a formerly root increment counter is
  // loosing its root position and therefore its children become next siblings.
  CounterNode* last = new_child->last_child_;
  CounterNode* first = new_child->first_child_;
  DCHECK(last);
  new_child->next_sibling_ = first;
  if (last_child_ == new_child)
    last_child_ = last;
  first->previous_sibling_ = new_child;
  // The case when the original next sibling of the inserted node becomes a
  // child of one of the former children of the inserted node is not handled
  // as it is believed to be impossible since:
  // 1. if the increment counter node lost it's root position as a result of
  //    another counter node being created, it will be inserted as the last
  //    child so next is null.
  // 2. if the increment counter node lost it's root position as a result of a
  //    layoutObject being inserted into the document's layout tree, all its
  //    former children counters are attached to children of the inserted
  //    layoutObject and hence cannot be in scope for counter nodes attached
  //    to layoutObjects that were already in the document's layout tree.
  last->next_sibling_ = next;
  if (next) {
    DCHECK_EQ(next->previous_sibling_, new_child);
    next->previous_sibling_ = last;
  } else {
    last_child_ = last;
  }
  // Reparent the donated children to this node.
  for (next = first;; next = next->next_sibling_) {
    next->parent_ = this;
    if (last == next)
      break;
  }
  new_child->first_child_ = nullptr;
  new_child->last_child_ = nullptr;
  new_child->count_in_parent_ = new_child->ComputeCountInParent();
  new_child->ResetLayoutObjects();
  first->Recount();
}
// Detaches |old_child| (which must be a leaf) from this node's child list
// and recounts the siblings that followed it.
void CounterNode::RemoveChild(CounterNode* old_child) {
  DCHECK(old_child);
  DCHECK(!old_child->first_child_);
  DCHECK(!old_child->last_child_);
  CounterNode* next = old_child->next_sibling_;
  CounterNode* previous = old_child->previous_sibling_;
  // Clear the removed node's links, then bridge its neighbors.
  old_child->next_sibling_ = nullptr;
  old_child->previous_sibling_ = nullptr;
  old_child->parent_ = nullptr;
  if (previous) {
    previous->next_sibling_ = next;
  } else {
    DCHECK_EQ(first_child_, old_child);
    first_child_ = next;
  }
  if (next) {
    next->previous_sibling_ = previous;
  } else {
    DCHECK_EQ(last_child_, old_child);
    last_child_ = previous;
  }
  // The removed node no longer contributes to counts downstream.
  if (next)
    next->Recount();
}
// Moves |first_node| and the siblings after it under |new_parent|, appending
// each as the new parent's last child. Reset-acting siblings are left in
// place.
void CounterNode::MoveNonResetSiblingsToChildOf(
    CounterNode* first_node,
    CounterNode& new_parent,
    const AtomicString& identifier) {
  if (!first_node)
    return;
  CounterNode* cur_node = first_node;
  // All siblings share the same parent, so capture it once up front.
  CounterNode* old_parent = first_node->Parent();
  while (cur_node) {
    // Save the sibling link before RemoveChild() clears it.
    CounterNode* next = cur_node->NextSibling();
    if (!cur_node->ActsAsReset()) {
      old_parent->RemoveChild(cur_node);
      new_parent.InsertAfter(cur_node, new_parent.LastChild(), identifier);
    }
    cur_node = next;
  }
}
#if DCHECK_IS_ON()
// Debug-only helper: dumps the entire counter tree containing |node| to
// stderr, marking |node| itself with '*'. Each line shows the node address,
// reset/increment role, value, count-in-parent, and tree/owner pointers,
// indented by depth below the root.
static void ShowTreeAndMark(const CounterNode* node) {
  const CounterNode* root = node;
  while (root->Parent())
    root = root->Parent();
  for (const CounterNode* current = root; current;
       current = current->NextInPreOrder()) {
    fprintf(stderr, "%c", (current == node) ? '*' : ' ');
    for (const CounterNode* parent = current; parent && parent != root;
         parent = parent->Parent())
      fprintf(stderr, " ");
    // Fixed: the address-of expression was corrupted into the HTML entity
    // mojibake "¤t" (for "&current"), which does not compile.
    fprintf(stderr, "%p %s: %d %d P:%p PS:%p NS:%p R:%p\n", current,
            current->ActsAsReset() ? "reset____" : "increment",
            current->Value(), current->CountInParent(), current->Parent(),
            current->PreviousSibling(), current->NextSibling(),
            &current->Owner());
  }
  fflush(stderr);
}
#endif
} // namespace blink
#if DCHECK_IS_ON()
void ShowCounterTree(const blink::CounterNode* counter) {
  // Guard clause for null input, then delegate to the tree dumper.
  if (!counter) {
    fprintf(stderr, "Cannot showCounterTree for (nil).\n");
    return;
  }
  ShowTreeAndMark(counter);
}
#endif
| nwjs/chromium.src | third_party/blink/renderer/core/layout/counter_node.cc | C++ | bsd-3-clause | 14,114 |
<?php
/**
* UMI.Framework (http://umi-framework.ru/)
*
* @link http://github.com/Umisoft/framework for the canonical source repository
* @copyright Copyright (c) 2007-2013 Umisoft ltd. (http://umisoft.ru/)
* @license http://umi-framework.ru/license/bsd-3 BSD-3 License
*/
namespace umi\acl\toolbox;
use umi\acl\IAclAware;
use umi\acl\IAclFactory;
use umi\toolkit\toolbox\IToolbox;
use umi\toolkit\toolbox\TToolbox;
/**
 * Toolbox for creating ACL (access control list) objects.
 */
class AclTools implements IToolbox
{
    /**
     * Toolbox name.
     */
    const NAME = 'acl';

    use TToolbox;

    /**
     * @var string $aclFactoryClass class of the ACL factory
     */
    public $aclFactoryClass = 'umi\acl\toolbox\factory\AclFactory';

    /**
     * Constructor. Registers the ACL factory under the 'acl' key.
     */
    public function __construct()
    {
        $this->registerFactory(
            'acl',
            $this->aclFactoryClass,
            ['umi\acl\IAclFactory']
        );
    }

    /**
     * {@inheritdoc}
     */
    public function injectDependencies($object)
    {
        if ($object instanceof IAclAware) {
            $object->setAclFactory($this->getAclFactory());
        }
    }

    /**
     * Returns the ACL entity factory.
     * @return IAclFactory
     */
    protected function getAclFactory()
    {
        return $this->getFactory('acl');
    }
}
| Umisoft/umi.framework-dev | library/acl/toolbox/AclTools.php | PHP | bsd-3-clause | 1,443 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.