text stringlengths 2 1.04M | meta dict |
|---|---|
from fabric.api import *
from json import JSONEncoder
import secrets
from tinycloud.env import ConnectionManager
class Server(JSONEncoder):
    """A remote host managed by tinycloud.

    Holds SSH connection details, runs shell commands on the host via
    Fabric, and tracks which apps are deployed plus the remaining
    capacity units.
    """

    def __init__(self, name, ip, port, user, pwd, capacity, virt_type='docker'):
        """Create a server record.

        :param name: human-readable server name
        :param ip: host address used for SSH connections
        :param port: SSH port
        :param user: SSH login user
        :param pwd: SSH password  # NOTE(review): stored in plain text
        :param capacity: number of capacity units available for apps
        :param virt_type: virtualization backend (default ``'docker'``)
        """
        super(Server, self).__init__()
        self.name = name
        self.ip = ip
        self.port = port
        self.user = user
        self.pwd = pwd
        self.virt_type = virt_type
        self.apps = []  # names of apps deployed on this server
        self.capacity = capacity

    def cmd(self, cmd, use_sudo=True):
        """Execute a shell command on this server over SSH and return its result.

        :param cmd: shell command string to execute
        :param use_sudo: run via Fabric ``sudo`` when True, plain ``run`` otherwise
        :return: Fabric's command result
        """
        with settings(host_string='{0}@{1}'.format(self.user, self.ip), port=self.port):
            return sudo(cmd) if use_sudo else run(cmd)

    def add_app(self, name, capacity=1):
        """Record that *name* is deployed here, consuming *capacity* units.

        Note: remaining capacity may go negative; callers are expected to
        check ``self.capacity`` before scheduling.
        """
        # Fixed: Python-2-only `print` statement replaced with a print()
        # call that behaves identically on both Python 2 and Python 3.
        # The value shown is the capacity *before* the decrement below.
        print("Capacity: " + str(capacity) + " Remaining: " + str(self.capacity))
        self.apps.append(name)
        self.capacity -= capacity
| {
"content_hash": "2b71da41b3c5aa222e9d83173c82bce6",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 88,
"avg_line_length": 30.724137931034484,
"alnum_prop": 0.5959595959595959,
"repo_name": "ebagdasa/tinycloud",
"id": "cbd703b8300a3fcda1cd4172466e4d7e12456976",
"size": "891",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tinycloud/server.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "4120"
},
{
"name": "Python",
"bytes": "22180"
},
{
"name": "Shell",
"bytes": "3761"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<!-- The above 3 meta tags *must* come first in the head; any other head content must come *after* these tags -->
<meta name="description" content="">
<meta name="author" content="">
<title>Access user</title>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-alpha.6/css/bootstrap.min.css"
integrity="sha384-rwoIResjU2yc3z8GV/NPeZWAv56rSmLldC3R/AZzGRnGxQQKnKkoFVhFQhNUwEyJ" crossorigin="anonymous">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.1/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/vue/1.0.28/vue.js"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>
<script src="https://cdn.jsdelivr.net/vue.resource/1.0.3/vue-resource.min.js"></script>
<script type="text/javascript" src="https://cdn.jsdelivr.net/vue.table/1.5.3/vue-table.min.js"></script>
<style type="text/css">
body {
font-family: Helvetica Neue, Arial, sans-serif;
font-size: 14px;
color: #444;
}
th {
background-color: #2185d0;
color: rgba(255, 255, 255, 0.66);
cursor: pointer;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
font-weight: bold;
}
th, td {
min-width: 120px;
padding: 10px 20px;
}
th.active {
color: #fff;
}
th.active .arrow {
opacity: 1;
}
.arrow {
display: inline-block;
vertical-align: middle;
width: 0;
height: 0;
margin-left: 5px;
opacity: 0.66;
}
.arrow.asc {
border-left: 4px solid transparent;
border-right: 4px solid transparent;
border-bottom: 4px solid #fff;
}
.arrow.dsc {
border-left: 4px solid transparent;
border-right: 4px solid transparent;
border-top: 4px solid #fff;
}
ul.dropdown-menu li {
margin-left: 20px;
}
.vuetable th.sortable:hover {
color: #f0f0f0;
cursor: pointer;
font-weight: bold;
}
.vuetable-actions {
width: 11%;
padding: 12px 0px;
text-align: center;
}
.vuetable-actions > button {
padding: 3px 6px;
margin-right: 4px;
}
.vuetable-pagination {
}
.vuetable-pagination-info {
float: left;
margin-top: auto;
margin-bottom: auto;
}
.vuetable-pagination-component {
float: right;
}
.vuetable-pagination-component .pagination {
margin: 0px;
}
.vuetable-pagination-component .pagination .btn {
cursor: pointer;
margin: 2px;
}
[v-cloak] {
display: none;
}
.highlight {
background-color: yellow;
}
/* Loading Animation: */
.vuetable-wrapper {
opacity: 1;
position: relative;
filter: alpha(opacity=100); /* IE8 and earlier */
}
.vuetable-wrapper.loading {
opacity: 0.4;
transition: opacity .3s ease-in-out;
-moz-transition: opacity .3s ease-in-out;
-webkit-transition: opacity .3s ease-in-out;
}
.vuetable-wrapper.loading:after {
position: absolute;
content: '';
top: 40%;
left: 50%;
margin: -30px 0 0 -30px;
border-radius: 100%;
-webkit-animation-fill-mode: both;
animation-fill-mode: both;
border: 4px solid #000;
height: 60px;
width: 60px;
background: transparent !important;
display: inline-block;
-webkit-animation: pulse 1s 0s ease-in-out infinite;
animation: pulse 1s 0s ease-in-out infinite;
}
@keyframes pulse {
0% {
-webkit-transform: scale(0.6);
transform: scale(0.6);
}
50% {
-webkit-transform: scale(1);
transform: scale(1);
border-width: 12px;
}
100% {
-webkit-transform: scale(0.6);
transform: scale(0.6);
}
}
/* Loading Animation: */
.vuetable-wrapper {
position: relative;
opacity: 1;
}
.loader {
visibility: hidden;
opacity: 0;
transition: opacity 0.3s linear;
width: 200px;
height: 30px;
font-size: 1.3em;
text-align: center;
margin-left: -100px;
letter-spacing: 4px;
color: #3881d6;
position: absolute;
top: 160px;
left: 50%;
}
.loading .loader {
visibility: visible;
opacity: 1;
z-index: 100;
}
.loading .vuetable {
opacity: 0.3;
filter: alpha(opacity=30); /* IE8 and earlier */
}
.active {
font-weight: bold;
font-size: 110%;
color: #3071be;
}
.active {
font-weight: bold;
font-size: 110%;
}
</style>
</head>
<body>
<div id="app" class="row">
<div class="col">
<div class="row">
<!-- Nav tabs -->
<ul id="myTabs" class="col nav nav-pills nav-fill" role="tablist">
<li role="presentation" class="nav-item">
<a href="#Access" v-on:click="chageTable" class="nav-link active" aria-controls="Access" role="tab"
data-toggle="tab">Access</a>
</li>
<li role="presentation" class="nav-item">
<a class="nav-link" href="#AccessAgents" v-on:click="chageTable" aria-controls="AccessAgents"
role="tab" data-toggle="tab">Agents</a>
</li>
<li role="presentation" class="nav-item">
<a class="nav-link" href="#AccessDevices" v-on:click="chageTable" aria-controls="AccessDevices"
role="tab" data-toggle="tab">Devices</a>
</li>
<li role="presentation" class="nav-item">
<a class="nav-link" href="#AccessDomains" v-on:click="chageTable" aria-controls="AccessDomains"
role="tab" data-toggle="tab">Url</a>
</li>
<li role="presentation" class="nav-item">
<a class="nav-link" href="#AccessRoutes" v-on:click="chageTable" aria-controls="AccessRoutes"
role="tab" data-toggle="tab">Route</a>
</li>
<li role="presentation" class="nav-item">
<a class="nav-link" href="#AccessUserLog" v-on:click="chageTable" aria-controls="AccessUserLog"
role="tab" data-toggle="tab">User</a>
</li>
</ul>
</div>
<div class="row">
<!-- Example row of columns -->
<h2 class="col sub-header" id="title"> {!! $h1 !!}</h2>
<hr>
</div>
<div class="row">
<div class="col-lg-6 col-md-6 col-sm-12">
<div class="form-inline form-group">
<label>Search:</label>
<input v-model="searchFor" class="form-control" @keyup.enter="setFilter">
<button class="btn btn-primary" @click="setFilter">Go</button>
<button class="btn btn-default" @click="resetFilter">Reset</button>
</div>
</div>
<div class="col-lg-6 col-md-6 col-sm-12">
<div class="dropdown form-inline pull-right">
<label>Pagination:</label>
<select class="form-control" v-model="perPage">
<option value=10>10</option>
<option value=15>15</option>
<option value=20>20</option>
<option value=25>25</option>
</select>
</div>
</div>
</div>
<div class="row">
<div class="col">
<div id="content" class="">
@include('accessuser::loading')
<!--Your Loading Message -->
<vuetable
:api-url="urlApi"
table-wrapper="#content"
:fields="columns"
:item-actions="itemActions"
pagination-path=""
table-class="table table-bordered table-striped table-hover "
ascending-icon="glyphicon glyphicon-chevron-up"
descending-icon="glyphicon glyphicon-chevron-down"
pagination-class=""
pagination-component-class=""
:pagination-component="paginationComponent"
:item-actions="itemActions"
:per-page="perPage"
:append-params="moreParams"
wrapper-class="vuetable-wrapper"
table-wrapper=".vuetable-wrapper"
loading-class="loading"
pagination-info-template=" {from} - {to} out of {total} records"
pagination-info-no-data-template="The requested query return no result"
></vuetable>
</div>
</div>
</div>
</div>
</div>
<script type="text/javascript">
// fields definition
var tableColumns = {!! $fields !!}
// Root Vue instance (Vue 1.x) driving the vuetable-based listing page.
// Columns are injected by the Blade template via `tableColumns`; row data
// is fetched from urlApi, and tab clicks swap both table and columns.
new Vue({
    el: '#app',
    data: {
        // Endpoints: row data and column definitions for the current table.
        urlApi: '/accessuserlog/show/',
        urlApiColumns: '/accessuserlog/cols/',
        searchFor: '',              // current search box value
        columns: tableColumns,      // injected by the Blade template ({!! $fields !!})
        sortOrder: {
            field: 'name',
            direction: 'asc'
        },
        perPage: 10,
        paginationComponent: 'vuetable-pagination',
        // Thai: "showing {from} to {to} of {total} records"
        paginationInfoTemplate: 'แสดง {from} ถึง {to} จากทั้งหมด {total} รายการ',
        itemActions: [
            {name: 'view-item', label: 'var', icon: 'glyphicon glyphicon-search', class: 'btn btn-primary'}
        ],
        moreParams: []              // extra query-string params appended to api calls
    },
    watch: {
        // Any page-size change forces the table to reload.
        'perPage': function (val, oldVal) {
            this.$broadcast('vuetable:refresh')
        },
        // Re-render pagination and push component-specific options when the
        // pagination component is swapped.
        'paginationComponent': function (val, oldVal) {
            this.$broadcast('vuetable:load-success', this.$refs.vuetable.tablePagination)
            this.paginationConfig(this.paginationComponent)
        }
    },
    methods: {
        /**
         * Apply the current search box value as a `filter=` query param and
         * refresh the table on the next tick (after moreParams propagates).
         */
        setFilter: function () {
            this.moreParams = [
                'filter=' + this.searchFor
            ]
            this.$nextTick(function () {
                this.$broadcast('vuetable:refresh')
            })
        },
        // Clear the search box and re-apply (i.e. remove) the filter.
        resetFilter: function () {
            this.searchFor = ''
            this.setFilter()
        },
        // Tab-click handler: activates the clicked tab, switches the table
        // source, and fetches the matching column definitions.
        // NOTE(review): "chageTable" is likely a typo for "changeTable", but
        // it is referenced by the template's v-on bindings — renaming would
        // require updating every v-on:click="chageTable" in the markup.
        chageTable: function (ev) {
            var tag = $(ev.target);
            if (!tag.hasClass('active')) {
                $("#myTabs .active").removeClass("active");
                // The href fragment (minus '#') doubles as the table name.
                var href = tag.attr('href').replace('#', "");
                tag.toggleClass('active');
                this.moreParams = [
                    'table=' + href
                ];
                this.$http.get(this.urlApiColumns, {params: {table: href}}).then(
                    function (response) {
                        this.columns = response.data;
                        this.$nextTick(function () {
                            this.$broadcast('vuetable:refresh')
                        });
                        $(".sub-header").html(href);
                    }, function (error) {
                        console.dir(error)
                    });
            }
        },
        // Re-broadcast the last pagination state so a newly mounted
        // pagination component picks it up.
        changePaginationComponent: function () {
            this.$broadcast('vuetable:load-success', this.$refs.vuetable.tablePagination)
        },
        // Escape regex metacharacters in a string (PHP preg_quote port).
        preg_quote: function (str) {
            // http://kevin.vanzonneveld.net
            // + original by: booeyOH
            // + improved by: Ates Goral (http://magnetiq.com)
            // + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
            // + bugfixed by: Onno Marsman
            // * example 1: preg_quote("$40");
            // * returns 1: '\$40'
            // * example 2: preg_quote("*RRRING* Hello?");
            // * returns 2: '\*RRRING\* Hello\?'
            // * example 3: preg_quote("\\.+*?[^]$(){}=!<>|:");
            // * returns 3: '\\\.\+\*\?\[\^\]\$\(\)\{\}\=\!\<\>\|\:'
            return (str + '').replace(/([\\\.\+\*\?\[\^\]\$\(\)\{\}\=\!\<\>\|\:])/g, "\\$1");
        },
        // Wrap every (case-insensitive) occurrence of `needle` in `haystack`
        // with a highlight span.
        highlight: function (needle, haystack) {
            return haystack.replace(
                new RegExp('(' + this.preg_quote(needle) + ')', 'ig'),
                '<span class="highlight">$1</span>'
            )
        },
        // Push styling/icon options to whichever pagination component is active.
        paginationConfig: function (componentName) {
            console.log('paginationConfig: ', componentName)
            if (componentName == 'vuetable-pagination') {
                this.$broadcast('vuetable-pagination:set-options', {
                    wrapperClass: 'pagination',
                    icons: {
                        first: '',
                        prev: '',
                        next: '',
                        last: ''
                    },
                    activeClass: 'active',
                    linkClass: 'btn btn-default',
                    pageClass: 'btn btn-default'
                })
            }
            if (componentName == 'vuetable-pagination-dropdown') {
                this.$broadcast('vuetable-pagination:set-options', {
                    wrapperClass: 'form-inline',
                    icons: {
                        prev: 'glyphicon glyphicon-chevron-left',
                        next: 'glyphicon glyphicon-chevron-right'
                    },
                    dropdownClass: 'form-control'
                })
            }
        }
    },
    events: {
        // Per-row action buttons dispatched by vuetable.
        'vuetable:action': function (action, data) {
            console.log('vuetable:action', action, data)
            if (action == 'view-item') {
                console.log(action, data.name)
            } else if (action == 'edit-item') {
                sweetAlert(action, data.name)
            } else if (action == 'delete-item') {
                sweetAlert(action, data.name)
            }
        },
        'vuetable:cell-dblclicked': function (item, field, event) {
            var self = this
            console.log('cell-dblclicked: old value =', item[field.name])
        },
        // After each data load, mark search matches in the name/email cells.
        // NOTE(review): assumes every table has `name` and `email` fields —
        // confirm for the non-user tabs (Agents/Devices/Url/Route).
        'vuetable:load-success': function (response) {
            console.log('total = ', response.data.total)
            var data = response.data.data
            if (this.searchFor !== '') {
                for (n in data) {
                    data[n].name = this.highlight(this.searchFor, data[n].name)
                    data[n].email = this.highlight(this.searchFor, data[n].email)
                }
            }
        },
        'vuetable:load-error': function (response) {
            if (response.status == 400) {
                sweetAlert('Something\'s Wrong!', response.data.message, 'error')
            } else {
                // NOTE(review): E_SERVER_ERROR is not defined in this file —
                // presumably a global set elsewhere; verify it exists.
                sweetAlert('Oops', E_SERVER_ERROR, 'error')
            }
        }
    }
});
$(document).ready(function () {
/*$('#myTabs a').click(function (e) {
e.preventDefault()
})*/
});
</script>
</body>
</html> | {
"content_hash": "4d736b4e09f672b8ffc0c3555e8583b3",
"timestamp": "",
"source": "github",
"line_count": 485,
"max_line_length": 119,
"avg_line_length": 35.02061855670103,
"alnum_prop": 0.445216367382985,
"repo_name": "erickosma/accessuser",
"id": "ca1662dc329c52123ba4590ca0a666ebb4eeebe2",
"size": "17031",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/views/index.blade.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "22116"
},
{
"name": "PHP",
"bytes": "100132"
}
],
"symlink_target": ""
} |
package com.jroossien.boxx.util.item;
import com.jroossien.boxx.options.SingleOption;
import org.bukkit.command.CommandSender;
/**
 * Callback invoked when an item tag is written to or read from an {@link EItem}.
 */
public abstract class ItemTagCallback {

    /**
     * Called when the tag is set on the given item.
     *
     * @param sender the command sender performing the change
     * @param item the item the tag is applied to
     * @param result the parsed option value being assigned
     * @return presumably whether the assignment succeeded/was accepted —
     *         confirm against the callers of this callback
     */
    abstract boolean onSet(CommandSender sender, EItem item, SingleOption result);

    /**
     * Called to read the tag's current value from the given item.
     *
     * @param item the item to read from
     * @return the tag value as a string; null semantics are not visible here —
     *         confirm with implementations
     */
    abstract String onGet(EItem item);
}
| {
"content_hash": "6c44315a0a10b7e6ee3319024c234953",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 82,
"avg_line_length": 26.818181818181817,
"alnum_prop": 0.7966101694915254,
"repo_name": "Rojoss/Boxx",
"id": "0dbc0291a551d6831a7b31a990efdcfdb4d72387",
"size": "1491",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/jroossien/boxx/util/item/ItemTagCallback.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "1200065"
}
],
"symlink_target": ""
} |
package extracells.api.storage.filter;
import net.minecraftforge.fluids.Fluid;
import java.util.EnumSet;
/**
 * Interface for filtering fluids.
 * Used to blacklist fluids from EC grid functions.
 */
public interface IFluidFilter {
    /**
     * Checks whether a fluid passes this filter for the given mode.
     *
     * @param filterType the mode to filter for, see {@link FilterType}
     * @param fluid the fluid to check
     * @return true if allowed, false if not
     */
    boolean isAllowed(FilterType filterType, Fluid fluid);
}
| {
"content_hash": "522d445fe57eac471a1eef85fb3045d2",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 79,
"avg_line_length": 26.27777777777778,
"alnum_prop": 0.7082452431289641,
"repo_name": "ruifung/ExtraCells2",
"id": "f960d8a5f1b1b2f59266f8c96f457b7f3157194d",
"size": "473",
"binary": false,
"copies": "1",
"ref": "refs/heads/rewrite",
"path": "src/main/scala/extracells/api/storage/filter/IFluidFilter.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "36"
},
{
"name": "Java",
"bytes": "21141"
},
{
"name": "Scala",
"bytes": "73869"
},
{
"name": "Shell",
"bytes": "36"
}
],
"symlink_target": ""
} |
/*jshint node:true*/
'use strict';

// Express application entry point: wires up middleware, API routes and
// static-file serving, then starts the HTTP server.
var express = require('express');
var app = express();
var bodyParser = require('body-parser');
var compress = require('compression');
var cors = require('cors');
var errorHandler = require('./routes/utils/errorHandler')();
var favicon = require('serve-favicon');
var logger = require('morgan');
var port = process.env.PORT || 7203;        // default dev port
var routes;
var environment = process.env.NODE_ENV;     // 'build' or anything else (dev)

app.use(favicon(__dirname + '/favicon.ico'));
app.use(bodyParser.urlencoded({
    extended: true
}));
app.use(bodyParser.json());
app.use(compress());        // gzip responses
app.use(logger('dev'));     // per-request console logging
app.use(cors());
app.use(errorHandler.init); // project error-handling middleware

// Mounts the API routes onto the app (side effect of the require call).
routes = require('./routes/index')(app);

console.log('About to crank up node');
console.log('PORT=' + port);
console.log('NODE_ENV=' + environment);

// Simple liveness endpoint.
app.get('/ping', function(req, res, next) {
    console.log(req.body);
    res.send('pong');
});

// Serve the compiled bundle in 'build' mode, raw sources otherwise.
// The '/*' fallback sends index.html for any unknown path (SPA routing).
switch (environment) {
    case 'build':
        console.log('** BUILD **');
        app.use(express.static('./build/'));
        app.use('/*', express.static('./build/index.html'));
        break;
    default:
        console.log('** DEV **');
        app.use(express.static('./src/client/'));
        app.use(express.static('./'));
        app.use(express.static('./tmp'));
        app.use('/*', express.static('./src/client/index.html'));
        break;
}

app.listen(port, function() {
    console.log('Express server listening on port ' + port);
    console.log('\n__dirname = ' + __dirname +
        '\nprocess.cwd = ' + process.cwd());
});
| {
"content_hash": "6660f23c98c983f046159a8e77f42cb7",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 65,
"avg_line_length": 27.157894736842106,
"alnum_prop": 0.6117571059431525,
"repo_name": "LRPalacios/web-project-template-less",
"id": "876b6daa5666ba9f541ca61ca735344189a977d5",
"size": "1548",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/server/app.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "56"
},
{
"name": "HTML",
"bytes": "990"
},
{
"name": "JavaScript",
"bytes": "13625"
}
],
"symlink_target": ""
} |
package org.opensaml.saml1.core.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.opensaml.saml1.core.Assertion;
import org.opensaml.saml1.core.Response;
import org.opensaml.saml1.core.Status;
import org.opensaml.xml.XMLObject;
import org.opensaml.xml.util.XMLObjectChildrenList;
/**
* Implementation of the {@link org.opensaml.saml1.core.Response} Object
*/
public class ResponseImpl extends ResponseAbstractTypeImpl implements Response {
/** Status associated with this element */
private Status status = null;
/** List of all the Assertions */
private final XMLObjectChildrenList<Assertion> assertions;
/**
* Constructor
*
* @param namespaceURI the namespace the element is in
* @param elementLocalName the local name of the XML element this Object represents
* @param namespacePrefix the prefix for the given namespace
*/
protected ResponseImpl(String namespaceURI, String elementLocalName, String namespacePrefix) {
super(namespaceURI, elementLocalName, namespacePrefix);
assertions = new XMLObjectChildrenList<Assertion>(this);
}
/** {@inheritDoc} */
public List<Assertion> getAssertions() {
return assertions;
}
/** {@inheritDoc} */
public Status getStatus() {
return status;
}
/** {@inheritDoc} */
public void setStatus(Status status) throws IllegalArgumentException {
this.status = prepareForAssignment(this.status, status);
}
/** {@inheritDoc} */
public List<XMLObject> getOrderedChildren() {
ArrayList<XMLObject> children = new ArrayList<XMLObject>(1 + assertions.size());
if (super.getOrderedChildren() != null) {
children.addAll(super.getOrderedChildren());
}
if (status != null) {
children.add(status);
}
children.addAll(assertions);
if (children.size() == 0) {
return null;
}
return Collections.unmodifiableList(children);
}
} | {
"content_hash": "c5343b5d19331153bc5b92ff0a8e51ff",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 98,
"avg_line_length": 28.26027397260274,
"alnum_prop": 0.6732913233155599,
"repo_name": "Safewhere/kombit-service-java",
"id": "e3b28af41f7395a0e12a9e916e1691e86ef6a921",
"size": "2907",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "OpenSaml/src/org/opensaml/saml1/core/impl/ResponseImpl.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "73985"
},
{
"name": "Java",
"bytes": "5965174"
}
],
"symlink_target": ""
} |
This application makes use of the following third party libraries:
## AFNetworking
Copyright (c) 2011-2016 Alamofire Software Foundation (http://alamofire.org/)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
## MBProgressHUD
Copyright © 2009-2016 Matej Bukovinski
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
## Masonry
Copyright (c) 2011-2012 Masonry Team - https://github.com/Masonry
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
## SDWebImage
Copyright (c) 2009-2018 Olivier Poitrey rs@dailymotion.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is furnished
to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Generated by CocoaPods - https://cocoapods.org
| {
"content_hash": "c11a7b8c6c3ed4272fa3b0ed67b90a99",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 79,
"avg_line_length": 48.645161290322584,
"alnum_prop": 0.8023872679045093,
"repo_name": "XLsn0w/XLsn0w",
"id": "1fb6ffdc3effc12e393bfd42dcf37fa1149eb902",
"size": "4544",
"binary": false,
"copies": "1",
"ref": "refs/heads/XLsn0w",
"path": "Pods/Target Support Files/Pods-XLsn0w/Pods-XLsn0w-acknowledgements.markdown",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "477083"
},
{
"name": "Ruby",
"bytes": "833"
}
],
"symlink_target": ""
} |
<?php
namespace Skilla\TwigMarkdownBundle\Libs;
interface TwigMarkdownInterface {
    /**
     * Main function. Performs some preprocessing on the input text
     * and passes it through the document gamut.
     *
     * @param string $text Markdown source text to transform
     * @return mixed presumably the transformed (HTML) markup — confirm
     *               against implementations of this interface
     */
    public function transform($text);
}
| {
"content_hash": "00082d01a79d3b399ec1e7a4514691c2",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 67,
"avg_line_length": 21.333333333333332,
"alnum_prop": 0.703125,
"repo_name": "skilla/TwigMarkdownBundle",
"id": "dac0a5999fa391233887309cde0bfac2bfbd4a6d",
"size": "1235",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Libs/TwigMarkdownInterface.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "26057"
}
],
"symlink_target": ""
} |
import Control.Exception (assert)
-- The following code calls a naive algorithm for computing a Fibonacci number.
--
-- What to do:
-- 1. Compile the following code and run it on an input "40" to check that it is slow.
-- You may also want to submit it to the grader to ensure that it gets the "time limit exceeded" message.
-- 2. Implement the fibonacci_fast procedure.
-- 3. Remove the line that prints the result of the naive algorithm, comment the lines reading the input,
-- uncomment the line with a call to test_solution, compile the program, and run it.
-- This will ensure that your efficient algorithm returns the same as the naive one for small values of n.
-- 4. If test_solution reveals a bug in your implementation, debug it, fix it, and repeat step 3.
-- 5. Remove the call to test_solution, uncomment the line with a call to fibonacci_fast (and the lines reading the input),
-- and submit it to the grader.
-- Exponential-time reference implementation (tree recursion); kept so
-- test_solution can compare fibonacci_fast against it for small n.
-- NOTE(review): diverges for negative n (no base case below 0).
fibonacci_naive :: Int -> Int
fibonacci_naive 0 = 0
fibonacci_naive 1 = 1
fibonacci_naive n = fibonacci_naive (n - 1) + fibonacci_naive (n - 2)
-- | Linear-time Fibonacci via an accumulating-pair tail recursion.
-- Agrees with fibonacci_naive for all n >= 0 (see test_solution).
-- Fixed: the previous stub always returned 0.
fibonacci_fast :: Int -> Int
fibonacci_fast n = go n 0 1
  where
    -- Invariant: after counting k down from n, a = fib(n - k), b = fib(n - k + 1).
    go :: Int -> Int -> Int -> Int
    go 0 a _ = a
    go k a b = go (k - 1) b (a + b)
-- Sanity check: compares fibonacci_fast against known values and against
-- fibonacci_naive for n in [0..20]. `and tests` forces every assert to
-- evaluate; `seq` ensures that happens before the IO action returns.
test_solution :: IO ()
test_solution = and tests `seq` return ()
  where
    tests =
      [ assert (fibonacci_fast 3 == 2) True
      , assert (fibonacci_fast 10 == 55) True
      ] ++ map (\i -> assert (fibonacci_fast i == fibonacci_naive i) True) [0..20]
main :: IO ()
main = do
  -- Read n as the sole whitespace-separated token on the first stdin line.
  [w] <- fmap words getLine
  let n = read w
  print $ fibonacci_naive n
  -- Steps 3-5 in the header comment: swap these in once fibonacci_fast
  -- is implemented and verified.
  --test_solution
  --print $ fibonacci_fast n
| {
"content_hash": "49d4f7fc75861b88784455f2a3bc2614",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 123,
"avg_line_length": 40.1025641025641,
"alnum_prop": 0.6937340153452686,
"repo_name": "xunilrj/sandbox",
"id": "ef872c649b2fc1ef76b48bcb449677ea685634a3",
"size": "1585",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "courses/coursera-sandiego-algorithms/algorithmic-toolbox/assignment001/fibonacci/fibonacci.hs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "235"
},
{
"name": "ASP.NET",
"bytes": "110"
},
{
"name": "Assembly",
"bytes": "28409"
},
{
"name": "Asymptote",
"bytes": "22978"
},
{
"name": "C",
"bytes": "1022035"
},
{
"name": "C#",
"bytes": "474510"
},
{
"name": "C++",
"bytes": "33387716"
},
{
"name": "CMake",
"bytes": "1288737"
},
{
"name": "CSS",
"bytes": "49690"
},
{
"name": "Common Lisp",
"bytes": "858"
},
{
"name": "Coq",
"bytes": "6200"
},
{
"name": "Dockerfile",
"bytes": "2912"
},
{
"name": "Elixir",
"bytes": "34"
},
{
"name": "Erlang",
"bytes": "8204"
},
{
"name": "F#",
"bytes": "33187"
},
{
"name": "Fortran",
"bytes": "20472"
},
{
"name": "GDB",
"bytes": "701"
},
{
"name": "GLSL",
"bytes": "7478"
},
{
"name": "Go",
"bytes": "8971"
},
{
"name": "HTML",
"bytes": "6469462"
},
{
"name": "Handlebars",
"bytes": "8236"
},
{
"name": "Haskell",
"bytes": "18581"
},
{
"name": "Java",
"bytes": "120539"
},
{
"name": "JavaScript",
"bytes": "5055335"
},
{
"name": "Jupyter Notebook",
"bytes": "1849172"
},
{
"name": "LLVM",
"bytes": "43431"
},
{
"name": "MATLAB",
"bytes": "462980"
},
{
"name": "Makefile",
"bytes": "1622666"
},
{
"name": "Objective-C",
"bytes": "2001"
},
{
"name": "PostScript",
"bytes": "45490"
},
{
"name": "PowerShell",
"bytes": "192867"
},
{
"name": "Python",
"bytes": "726138"
},
{
"name": "R",
"bytes": "31364"
},
{
"name": "Roff",
"bytes": "5700"
},
{
"name": "Ruby",
"bytes": "5865"
},
{
"name": "Rust",
"bytes": "797104"
},
{
"name": "Sage",
"bytes": "654"
},
{
"name": "Scala",
"bytes": "42383"
},
{
"name": "Shell",
"bytes": "154039"
},
{
"name": "TLA",
"bytes": "16779"
},
{
"name": "TSQL",
"bytes": "3412"
},
{
"name": "TeX",
"bytes": "6989202"
},
{
"name": "TypeScript",
"bytes": "8845"
},
{
"name": "Visual Basic .NET",
"bytes": "1090"
},
{
"name": "WebAssembly",
"bytes": "70321"
},
{
"name": "q",
"bytes": "13889"
}
],
"symlink_target": ""
} |
<!doctype html>
<html lang="en">
<%- include('/components/head.html', {title:'Project starter kit'}) %>
<body>
<!-- We need two elements, .app and .wrapper, to keep the footer sticky in IE -->
<div class="app">
<div class="wrapper">
<%- include('/components/header/header.html') %>
<main class="body">
<div class="inner">
app
</div>
</main>
<%- include('/components/footer/footer.html') %>
</div>
</div>
<%- include('/components/scripts.html') %>
</body>
</html>
| {
"content_hash": "2cf279a6cbfb0b73f18f257554d6e01c",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 75,
"avg_line_length": 26.80952380952381,
"alnum_prop": 0.5186500888099467,
"repo_name": "Nejik/nj-starter-kit",
"id": "c27fab870e687e3768c38f20a05fec2910039155",
"size": "563",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6711"
},
{
"name": "HTML",
"bytes": "1199"
},
{
"name": "JavaScript",
"bytes": "11387"
}
],
"symlink_target": ""
} |
using System.Collections;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Reflection;
using System.Web.Http.Routing;
using AllGreen.WebServer.Owin;
using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
namespace AllGreen.WebServer.Owin.Tests
{
    /// <summary>
    /// Tests verifying that <see cref="SignalRDependencyResolver"/> delegates
    /// service resolution to the underlying TinyIoC container.
    /// </summary>
    [TestClass]
    public class SignalRDependencyResolverTests
    {
        private TinyIoC.TinyIoCContainer _IoCContainer;
        private SignalRDependencyResolver _SignalRDependencyResolver;

        [TestInitialize]
        public void Setup()
        {
            // Fresh container per test so registrations cannot leak between tests.
            _IoCContainer = new TinyIoC.TinyIoCContainer();
            _SignalRDependencyResolver = new SignalRDependencyResolver(_IoCContainer);
        }

        [TestMethod]
        public void GetServiceTest()
        {
            // An unregistered type resolves to null rather than throwing.
            _SignalRDependencyResolver.GetService(typeof(IEnumerable)).Should().BeNull();

            IEnumerable dummy = Mock.Of<IEnumerable>();
            _IoCContainer.Register<IEnumerable>(dummy);
            // Once registered, the same instance is resolved.
            _SignalRDependencyResolver.GetService(typeof(IEnumerable)).Should().Be(dummy);
        }

        [TestMethod]
        public void GetServicesTest()
        {
            // NOTE(review): GetServices returns null (not an empty sequence) when
            // nothing is registered — confirm this matches SignalR's expectations.
            _SignalRDependencyResolver.GetServices(typeof(IEnumerable)).Should().BeNull();

            IEnumerable dummy = Mock.Of<IEnumerable>();
            _IoCContainer.Register<IEnumerable>(dummy);
            _SignalRDependencyResolver.GetServices(typeof(IEnumerable)).ShouldAllBeEquivalentTo(new object[] { dummy });
        }
    }
}
| {
"content_hash": "24905885f6ef85aa2070ef9c55e432c8",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 120,
"avg_line_length": 33.361702127659576,
"alnum_prop": 0.6613520408163265,
"repo_name": "gstamac/AllGreen",
"id": "9b2411bbba942ca65a0699fe38cbd3008dffb1cc",
"size": "1568",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/AllGreen.WebServer.Owin.Tests/SignalRDependencyResolverTests.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "432243"
},
{
"name": "CSS",
"bytes": "12282"
},
{
"name": "HTML",
"bytes": "75891"
},
{
"name": "JavaScript",
"bytes": "228092"
},
{
"name": "Smalltalk",
"bytes": "3"
},
{
"name": "TypeScript",
"bytes": "59433"
}
],
"symlink_target": ""
} |
<!-- Maven build descriptor for the basic-algorithmic-workouts kata project. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>com.github.tgits</groupId>
    <artifactId>basic-algorithmic-workouts</artifactId>
    <packaging>jar</packaging>
    <version>1.0-SNAPSHOT</version>
    <name>basic-algorithmic-workouts</name>
    <url>http://maven.apache.org</url>
    <dependencies>
        <!-- JUnit 4 is the only dependency; test scope keeps it out of the packaged jar. -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.13.1</version>
            <scope>test</scope>
        </dependency>
    </dependencies>
</project>
| {
"content_hash": "3aa110004c8e3424d71c9413e5e36c64",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 104,
"avg_line_length": 36.44444444444444,
"alnum_prop": 0.7240853658536586,
"repo_name": "TGITS/programming-workouts",
"id": "f186f39397ec59b0a6b7ddd0e57f5e88d92c5eb9",
"size": "656",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "java/kata/basic-algorithmic-workouts/pom.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "244"
},
{
"name": "C#",
"bytes": "175"
},
{
"name": "CSS",
"bytes": "57544"
},
{
"name": "Clojure",
"bytes": "145363"
},
{
"name": "D",
"bytes": "5141"
},
{
"name": "Dart",
"bytes": "80832"
},
{
"name": "Dockerfile",
"bytes": "811"
},
{
"name": "Elixir",
"bytes": "86418"
},
{
"name": "Elm",
"bytes": "2738"
},
{
"name": "F#",
"bytes": "4142"
},
{
"name": "Gherkin",
"bytes": "503"
},
{
"name": "Gnuplot",
"bytes": "2363"
},
{
"name": "Go",
"bytes": "65562"
},
{
"name": "Groovy",
"bytes": "2457"
},
{
"name": "HTML",
"bytes": "1536579"
},
{
"name": "Haskell",
"bytes": "157"
},
{
"name": "Java",
"bytes": "744052"
},
{
"name": "JavaScript",
"bytes": "79838"
},
{
"name": "Jinja",
"bytes": "362"
},
{
"name": "Julia",
"bytes": "1365"
},
{
"name": "Kotlin",
"bytes": "53565"
},
{
"name": "Lua",
"bytes": "3980"
},
{
"name": "PHP",
"bytes": "264599"
},
{
"name": "Pascal",
"bytes": "2952"
},
{
"name": "Perl",
"bytes": "927"
},
{
"name": "PowerShell",
"bytes": "397"
},
{
"name": "Prolog",
"bytes": "574"
},
{
"name": "Pug",
"bytes": "550"
},
{
"name": "Python",
"bytes": "550192"
},
{
"name": "R",
"bytes": "19071"
},
{
"name": "Raku",
"bytes": "5189"
},
{
"name": "Ruby",
"bytes": "27911"
},
{
"name": "Rust",
"bytes": "71504"
},
{
"name": "Scala",
"bytes": "136475"
},
{
"name": "Shell",
"bytes": "9158"
},
{
"name": "TypeScript",
"bytes": "64644"
}
],
"symlink_target": ""
} |
<?php
namespace Oro\Bundle\EntityBundle\Tests\Unit\ORM;
use Doctrine\ORM\Mapping\ClassMetadataFactory;
use Doctrine\ORM\Proxy\Proxy;
use Doctrine\ORM\Query;
use Oro\Bundle\EntityBundle\ORM\OrmConfiguration;
use Oro\Bundle\EntityBundle\ORM\OroEntityManager;
use Oro\Bundle\EntityBundle\ORM\Registry;
use Oro\Bundle\EntityBundle\Tests\Unit\ORM\Fixtures\TestEntity;
use Oro\Bundle\EntityBundle\Tests\Unit\ORM\Stub\OroEntityManagerStub;
use Symfony\Component\DependencyInjection\ContainerInterface;
/**
 * Unit tests for {@see Registry}, the Doctrine registry that caches manager
 * services, applies a configurable default query cache lifetime to created
 * queries and resolves entity namespace aliases.
 */
class RegistryTest extends \PHPUnit\Framework\TestCase
{
    const TEST_NAMESPACE_ALIAS = 'Test';
    const TEST_NAMESPACE = 'Oro\Bundle\EntityBundle\Tests\Unit\ORM\Fixtures';
    const TEST_ENTITY_CLASS = TestEntity::class;
    const TEST_ENTITY_PROXY_CLASS = Proxy::class;

    /** @var ContainerInterface|\PHPUnit\Framework\MockObject\MockObject */
    private $container;

    /** @var Registry */
    private $registry;

    protected function setUp()
    {
        // The registry is wired with a single connection/manager named
        // "default" whose manager service id is "service.default".
        $this->container = $this->createMock(ContainerInterface::class);
        $this->registry = new Registry(
            $this->container,
            [''],
            ['default' => 'service.default'],
            '',
            'default'
        );
    }

    /**
     * Builds a manager mock whose configuration knows the Test -> fixtures
     * namespace alias and whose metadata factory treats every class as
     * non-transient (i.e. managed).
     *
     * @return \PHPUnit\Framework\MockObject\MockObject|OroEntityManager
     */
    private function getManagerMock()
    {
        $managerConfiguration = new OrmConfiguration();
        $managerConfiguration->addEntityNamespace(self::TEST_NAMESPACE_ALIAS, self::TEST_NAMESPACE);

        $managerMetadataFactory = $this->createMock(ClassMetadataFactory::class);
        $managerMetadataFactory->expects(self::any())
            ->method('isTransient')
            ->willReturn(false);

        $manager = $this->getMockBuilder(OroEntityManagerStub::class)
            ->disableOriginalConstructor()
            ->setMethods(['getConfiguration', 'getMetadataFactory'])
            ->getMock();
        $manager->expects(self::any())
            ->method('getConfiguration')
            ->willReturn($managerConfiguration);
        $manager->expects(self::any())
            ->method('getMetadataFactory')
            ->willReturn($managerMetadataFactory);

        return $manager;
    }

    /**
     * getManager() must cache the manager service until resetManager()
     * is called; the container service is fetched only 3 times despite
     * 5 getManager() calls (see exactly(3) and the consecutive returns).
     */
    public function testManagerServiceCache()
    {
        $manager1 = $this->getManagerMock();
        $manager2 = $this->getManagerMock();

        $this->container->expects(self::exactly(3))
            ->method('get')
            ->with('service.default')
            ->will(self::onConsecutiveCalls($manager1, $manager1, $manager2));
        $this->container->expects(self::once())
            ->method('initialized')
            ->willReturnMap([['service.default', true]]);

        self::assertSame($manager1, $this->registry->getManager('default'));
        // test that a manager service cached
        self::assertSame($manager1, $this->registry->getManager('default'));

        self::assertSame($manager2, $this->registry->resetManager('default'));
        self::assertSame($manager2, $this->registry->getManager('default'));
        // test that a manager cached
        self::assertSame($manager2, $this->registry->getManager('default'));
    }

    /**
     * Same caching contract as above, exercised through
     * getManagerForClass() instead of getManager().
     */
    public function testManagerCache()
    {
        $manager1 = $this->getManagerMock();
        $manager2 = $this->getManagerMock();

        $this->container->expects(self::exactly(3))
            ->method('get')
            ->with('service.default')
            ->will(self::onConsecutiveCalls($manager1, $manager1, $manager2));
        $this->container->expects(self::once())
            ->method('initialized')
            ->willReturnMap([['service.default', true]]);

        self::assertSame($manager1, $this->registry->getManagerForClass(self::TEST_ENTITY_CLASS));
        // test that a manager cached
        self::assertSame($manager1, $this->registry->getManagerForClass(self::TEST_ENTITY_CLASS));

        self::assertSame($manager2, $this->registry->resetManager());
        self::assertSame($manager2, $this->registry->getManagerForClass(self::TEST_ENTITY_CLASS));
        // test that a manager cached
        self::assertSame($manager2, $this->registry->getManagerForClass(self::TEST_ENTITY_CLASS));
    }

    /**
     * A "no manager found" result (null) must be cached as well: the
     * container is queried only once across repeated lookups and resets.
     */
    public function testManagerCacheWhenEntityManagerDoesNotExist()
    {
        $this->container->expects(self::once())
            ->method('get')
            ->with('service.default')
            ->willReturn(null);
        $this->container->expects(self::once())
            ->method('initialized')
            ->willReturnMap([['service.default', false]]);

        self::assertNull($this->registry->getManagerForClass(self::TEST_ENTITY_PROXY_CLASS));
        // test that a manager cached
        self::assertNull($this->registry->getManagerForClass(self::TEST_ENTITY_PROXY_CLASS));

        self::assertNull($this->registry->resetManager());

        self::assertNull($this->registry->getManagerForClass(self::TEST_ENTITY_PROXY_CLASS));
        // test that a manager cached
        self::assertNull($this->registry->getManagerForClass(self::TEST_ENTITY_PROXY_CLASS));
    }

    /**
     * An explicitly configured lifetime must be propagated to every
     * query created through a registry-managed entity manager.
     */
    public function testDefaultQueryCacheLifetimeWhenItWasSpecifiedExplicitly()
    {
        $defaultQueryCacheLifetime = 3600;

        $manager = $this->getManagerMock();
        $this->container->expects(self::atLeastOnce())
            ->method('get')
            ->with('service.default')
            ->willReturn($manager);

        $this->registry->setDefaultQueryCacheLifetime($defaultQueryCacheLifetime);

        /** @var Query $query */
        $query = $this->registry->getManager('default')
            ->createQuery('SELECT * FROM ' . self::TEST_ENTITY_CLASS);
        self::assertSame($defaultQueryCacheLifetime, $query->getQueryCacheLifetime());
    }

    /**
     * Without configuration, queries keep Doctrine's default (null)
     * query cache lifetime.
     */
    public function testDefaultQueryCacheLifetimeWhenItWasNotSpecified()
    {
        $manager = $this->getManagerMock();
        $this->container->expects(self::atLeastOnce())
            ->method('get')
            ->with('service.default')
            ->willReturn($manager);

        /** @var Query $query */
        $query = $this->registry->getManager('default')
            ->createQuery('SELECT * FROM ' . self::TEST_ENTITY_CLASS);
        self::assertNull($query->getQueryCacheLifetime());
    }

    /**
     * Zero is a valid explicit lifetime and must not be confused with
     * "not configured" (null).
     */
    public function testDefaultQueryCacheLifetimeWhenItWasSetToZero()
    {
        $manager = $this->getManagerMock();
        $this->container->expects(self::atLeastOnce())
            ->method('get')
            ->with('service.default')
            ->willReturn($manager);

        $this->registry->setDefaultQueryCacheLifetime(0);

        /** @var Query $query */
        $query = $this->registry->getManager('default')
            ->createQuery('SELECT * FROM ' . self::TEST_ENTITY_CLASS);
        self::assertSame(0, $query->getQueryCacheLifetime());
    }

    /**
     * An alias registered in the manager configuration (see
     * getManagerMock()) resolves to its full namespace.
     */
    public function testGetAliasNamespaceForKnownAlias()
    {
        $manager1 = $this->getManagerMock();
        $this->container->expects(self::once())
            ->method('get')
            ->with('service.default')
            ->willReturn($manager1);

        self::assertEquals(
            self::TEST_NAMESPACE,
            $this->registry->getAliasNamespace(self::TEST_NAMESPACE_ALIAS)
        );
    }

    /**
     * An unknown alias must raise Doctrine's ORMException.
     *
     * @expectedException \Doctrine\ORM\ORMException
     */
    public function testGetAliasNamespaceForUnknownAlias()
    {
        $manager1 = $this->getManagerMock();
        $this->container->expects(self::once())
            ->method('get')
            ->with('service.default')
            ->willReturn($manager1);

        $this->registry->getAliasNamespace('Another');
    }
}
| {
"content_hash": "1938bb0947296179fabde1b10b83a772",
"timestamp": "",
"source": "github",
"line_count": 217,
"max_line_length": 100,
"avg_line_length": 35.35944700460829,
"alnum_prop": 0.6215300404014076,
"repo_name": "orocrm/platform",
"id": "cf8c7f41853750477e0475d2955aa8a339338bb3",
"size": "7673",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Oro/Bundle/EntityBundle/Tests/Unit/ORM/RegistryTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "618485"
},
{
"name": "Gherkin",
"bytes": "158217"
},
{
"name": "HTML",
"bytes": "1648915"
},
{
"name": "JavaScript",
"bytes": "3326127"
},
{
"name": "PHP",
"bytes": "37828618"
}
],
"symlink_target": ""
} |
package org.zalando.compass.revision.infrastructure.database;
import lombok.AllArgsConstructor;
import org.jooq.DSLContext;
import org.jooq.Record;
import org.jooq.SelectConditionStep;
import org.jooq.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import org.zalando.compass.core.domain.model.Key;
import org.zalando.compass.core.domain.model.Revision;
import org.zalando.compass.core.infrastructure.database.model.enums.RevisionType;
import org.zalando.compass.library.pagination.Pagination;
import org.zalando.compass.revision.domain.model.KeyRevision;
import org.zalando.compass.revision.domain.spi.repository.KeyRevisionRepository;
import java.util.List;
import java.util.Optional;
import static java.time.ZoneOffset.UTC;
import static java.util.Arrays.asList;
import static org.jooq.impl.DSL.exists;
import static org.jooq.impl.DSL.max;
import static org.jooq.impl.DSL.select;
import static org.jooq.impl.DSL.selectOne;
import static org.zalando.compass.core.infrastructure.database.model.Tables.KEY_REVISION;
import static org.zalando.compass.core.infrastructure.database.model.Tables.REVISION;
/**
 * jOOQ-backed implementation of {@link KeyRevisionRepository}: writes key
 * revision rows and reads revision history from the KEY_REVISION and
 * REVISION tables. All returned revision timestamps are normalized to UTC.
 */
@Repository
@AllArgsConstructor(onConstructor = @__(@Autowired))
class JooqKeyRevisionRepository implements KeyRevisionRepository {

    // Self-join alias of KEY_REVISION, used to compute the latest revision
    // per key id in findPage.
    private static final org.zalando.compass.core.infrastructure.database.model.tables.KeyRevision SELF = KEY_REVISION.as("self");

    private final DSLContext db;

    /**
     * Inserts one KEY_REVISION row for the given key revision.
     */
    @Override
    public void create(final KeyRevision key) {
        db.insertInto(KEY_REVISION)
                .columns(KEY_REVISION.ID,
                        KEY_REVISION.REVISION,
                        KEY_REVISION.REVISION_TYPE,
                        KEY_REVISION.SCHEMA,
                        KEY_REVISION.DESCRIPTION)
                .values(key.getId(),
                        key.getRevision().getId(),
                        key.getRevision().getType(),
                        key.getSchema(),
                        key.getDescription())
                .execute();
    }

    /**
     * Returns a page of revisions that touched at least one key
     * (EXISTS subquery), newest revision first. Revision type is not
     * available at this granularity, hence the "WithoutType" mapping.
     */
    @Override
    public List<Revision> findPageRevisions(final Pagination<Long> query) {
        final var where = db.select(REVISION.fields())
                .from(REVISION)
                .where(exists(selectOne()
                        .from(KEY_REVISION)
                        .where(KEY_REVISION.REVISION.eq(REVISION.ID))));

        return query.seek(where, REVISION.ID, SortOrder.DESC)
                .fetch().map(this::mapRevisionWithoutType);
    }

    /**
     * Returns a page of keys as they looked at the given revision: for each
     * key id, the row with the highest revision <= revisionId is taken
    * (correlated MAX subquery on the SELF alias), and keys whose latest
     * such revision is a DELETE are excluded. Ordered by key id ascending.
     */
    @Override
    public List<Key> findPage(final long revisionId, final Pagination<String> query) {
        return query.seek(db.select(asList(
                KEY_REVISION.ID,
                KEY_REVISION.SCHEMA,
                KEY_REVISION.DESCRIPTION))
                .from(KEY_REVISION)
                .where(KEY_REVISION.REVISION_TYPE.ne(RevisionType.DELETE))
                .and(KEY_REVISION.REVISION.eq(select(max(SELF.REVISION))
                        .from(SELF)
                        .where(SELF.ID.eq(KEY_REVISION.ID))
                        .and(SELF.REVISION.le(revisionId)))), KEY_REVISION.ID, SortOrder.ASC)
                .fetchInto(Key.class);
    }

    /**
     * Returns a page of the revision history of a single key id, newest
     * first, including the per-revision change type from KEY_REVISION.
     */
    @Override
    public List<Revision> findRevisions(final String id, final Pagination<Long> query) {
        return query.seek(db.select(REVISION.fields())
                .select(KEY_REVISION.REVISION_TYPE)
                .from(REVISION)
                .join(KEY_REVISION).on(KEY_REVISION.REVISION.eq(REVISION.ID))
                .where(KEY_REVISION.ID.eq(id)), REVISION.ID, SortOrder.DESC)
                .fetch().map(this::mapRevisionWithType);
    }

    /**
     * Looks up a key at one exact revision; empty when no KEY_REVISION row
     * exists for that (id, revision) pair.
     */
    @Override
    public Optional<KeyRevision> find(final String id, final long revision) {
        return db.select(KEY_REVISION.fields())
                .select(REVISION.fields())
                .from(KEY_REVISION)
                .join(REVISION).on(REVISION.ID.eq(KEY_REVISION.REVISION))
                .where(KEY_REVISION.ID.eq(id))
                .and(REVISION.ID.eq(revision))
                .fetchOptional()
                .map(this::mapKey);
    }

    // Maps a joined KEY_REVISION + REVISION record to the domain KeyRevision.
    private KeyRevision mapKey(final Record record) {
        return new KeyRevision(
                record.get(KEY_REVISION.ID),
                mapRevisionWithType(record),
                record.get(KEY_REVISION.SCHEMA),
                record.get(KEY_REVISION.DESCRIPTION)
        );
    }

    // Maps a record that carries the per-key revision type column.
    private Revision mapRevisionWithType(final Record record) {
        return new Revision(
                record.get(REVISION.ID),
                record.get(REVISION.TIMESTAMP).withOffsetSameInstant(UTC),
                record.get(KEY_REVISION.REVISION_TYPE),
                record.get(REVISION.USER),
                record.get(REVISION.COMMENT)
        );
    }

    // Maps a bare REVISION record; the change type is unknown here (null).
    private Revision mapRevisionWithoutType(final Record record) {
        return new Revision(
                record.get(REVISION.ID),
                record.get(REVISION.TIMESTAMP).withOffsetSameInstant(UTC),
                null,
                record.get(REVISION.USER),
                record.get(REVISION.COMMENT)
        );
    }
}
| {
"content_hash": "83c8241da4e61c781c603e4d8e9becad",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 130,
"avg_line_length": 39.35114503816794,
"alnum_prop": 0.6234723569350146,
"repo_name": "whiskeysierra/compass",
"id": "e5ebcd932a08e02baaf9dd171dfd52f89b1b5f85",
"size": "5155",
"binary": false,
"copies": "1",
"ref": "refs/heads/dependabot/maven/org.flywaydb-flyway-maven-plugin-7.12.0",
"path": "src/main/java/org/zalando/compass/revision/infrastructure/database/JooqKeyRevisionRepository.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "239"
},
{
"name": "Gherkin",
"bytes": "145160"
},
{
"name": "Java",
"bytes": "318543"
},
{
"name": "Shell",
"bytes": "16407"
}
],
"symlink_target": ""
} |
<?xml version="1.0" ?><!DOCTYPE TS><TS language="eu_ES" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Onecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>Onecoin</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The Onecoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or <a href="http://www.opensource.org/licenses/mit-license.php">http://www.opensource.org/licenses/mit-license.php</a>.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (<a href="https://www.openssl.org/">https://www.openssl.org/</a>) and cryptographic software written by Eric Young (<a href="mailto:eay@cryptsoft.com">eay@cryptsoft.com</a>) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Klik bikoitza helbidea edo etiketa editatzeko</translation>
</message>
<message>
<location line="+24"/>
<source>Create a new address</source>
<translation>Sortu helbide berria</translation>
</message>
<message>
<location line="+10"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopiatu hautatutako helbidea sistemaren arbelera</translation>
</message>
<message>
<location line="-7"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-43"/>
<source>These are your Onecoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Sign a message to prove you own a Onecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-10"/>
<source>Verify a message to ensure it was signed with a specified Onecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Delete</source>
<translation>&Ezabatu</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+66"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+248"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Komaz bereizitako artxiboa (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+145"/>
<source>Label</source>
<translation>Etiketa</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Helbidea</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(etiketarik ez)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Sartu pasahitza</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Pasahitz berria</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Errepikatu pasahitz berria</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+38"/>
<source>Encrypt wallet</source>
<translation>Enkriptatu zorroa</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Eragiketa honek zorroaren pasahitza behar du zorroa desblokeatzeko.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Desblokeatu zorroa</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Eragiketa honek zure zorroaren pasahitza behar du, zorroa desenkriptatzeko.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Desenkriptatu zorroa</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Aldatu pasahitza</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Sartu zorroaren pasahitz zaharra eta berria.</translation>
</message>
<message>
<location line="+45"/>
<source>Confirm wallet encryption</source>
<translation>Berretsi zorroaren enkriptazioa</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Zorroa enkriptatuta</translation>
</message>
<message>
<location line="-140"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>Onecoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Zorroaren enkriptazioak huts egin du</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Zorroaren enkriptazioak huts egin du barne-errore baten ondorioz. Zure zorroa ez da enkriptatu.</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>Eman dituzun pasahitzak ez datoz bat.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>Zorroaren desblokeoak huts egin du</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Zorroa desenkriptatzeko sartutako pasahitza okerra da.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Zorroaren desenkriptazioak huts egin du</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+297"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Show general overview of wallet</source>
<translation>Ikusi zorroaren begirada orokorra</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Transakzioak</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Ikusi transakzioen historia</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>E&xit</source>
<translation>Irten</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Irten aplikaziotik</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Onecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>&Qt-ari buruz</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Erakutsi Bitcoin-i buruzko informazioa</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Aukerak...</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-55"/>
<source>Send coins to a Onecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source>Modify configuration options for Onecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Aldatu zorroa enkriptatzeko erabilitako pasahitza</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-214"/>
<location line="+555"/>
<source>Onecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-555"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+193"/>
<source>&About Onecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>&File</source>
<translation>&Artxiboa</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Ezarpenak</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>&Laguntza</translation>
</message>
<message>
<location line="+17"/>
<source>Tabs toolbar</source>
<translation>Fitxen tresna-barra</translation>
</message>
<message>
<location line="+46"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+0"/>
<location line="+58"/>
<source>Onecoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+70"/>
<source>%n active connection(s) to Onecoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+488"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-812"/>
<source>&Dashboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Receive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>&Send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+277"/>
<source>Up to date</source>
<translation>Egunean</translation>
</message>
<message>
<location line="+43"/>
<source>Catching up...</source>
<translation>Eguneratzen...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Bidalitako transakzioa</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Sarrerako transakzioa</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid Onecoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Wallet is <b>not encrypted</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Zorroa <b>enkriptatuta</b> eta <b>desblokeatuta</b> dago une honetan</translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Zorroa <b>enkriptatuta</b> eta <b>blokeatuta</b> dago une honetan</translation>
</message>
<message>
<location line="+24"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+91"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="-429"/>
<location line="+433"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-456"/>
<source>Processed %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+27"/>
<location line="+433"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="-429"/>
<location line="+6"/>
<source>%n week(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+0"/>
<source>%1 and %2</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+0"/>
<source>%n year(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Last received block was generated %1 ago.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+324"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+104"/>
<source>A fatal error occurred. Onecoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+110"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
        <translation>Kopurua:</translation>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+537"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>Kopurua</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Helbidea</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-500"/>
<source>Copy address</source>
<translation>Kopiatu helbidea</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Kopiatu etiketa</translation>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(etiketarik ez)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Editatu helbidea</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etiketa</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Helbidea</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Jasotzeko helbide berria</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Bidaltzeko helbide berria</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Editatu jasotzeko helbidea</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Editatu bidaltzeko helbidea</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Sartu berri den helbidea, "%1", helbide-liburuan dago jadanik.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Onecoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Ezin desblokeatu zorroa.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Gako berriaren sorrerak huts egin du.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+426"/>
<location line="+12"/>
<source>Onecoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Aukerak</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start Onecoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start Onecoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Onecoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-57"/>
<source>Connect to the Onecoin network through a SOCKS5 proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS5 proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+90"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Onecoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to select the coin outputs randomly or with minimal coin age.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Minimize weight consumption (experimental)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use black visual theme (requires restart)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+47"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+148"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Onecoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Inprimakia</translation>
</message>
<message>
<location line="+46"/>
<location line="+247"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Onecoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-173"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-113"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+80"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source><b>Recent transactions</b></source>
<translation><b>Azken transakzioak</b></translation>
</message>
<message>
<location line="-118"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-32"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start onecoin: click-to-pay handler</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-194"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+197"/>
<source>&Network Traffic</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Clear</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Totals</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>In:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+80"/>
<source>Out:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-383"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the Onecoin-Qt help message to get a list with possible Onecoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-237"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>Onecoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Onecoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+256"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the Onecoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="+325"/>
<source>Welcome to the Onecoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+127"/>
<source>%1 B</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 KB</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 MB</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 GB</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>%1 m</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>%1 h</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 h %2 m</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Bidali txanponak</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
        <translation>Kopurua:</translation>
</message>
<message>
<location line="+35"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Bidali hainbat jasotzaileri batera</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Balance:</source>
<translation>Saldoa:</translation>
</message>
<message>
<location line="+47"/>
<source>Confirm the send action</source>
<translation>Berretsi bidaltzeko ekintza</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-174"/>
<source>Enter a Onecoin address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+87"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Berretsi txanponak bidaltzea</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Ordaintzeko kopurua 0 baino handiagoa izan behar du.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+247"/>
<source>WARNING: Invalid Onecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(etiketarik ez)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>K&opurua:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Ordaindu &honi:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Sartu etiketa bat helbide honetarako, eta gehitu zure helbide-liburuan</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Etiketa:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Itsatsi helbidea arbeletik</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Onecoin address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation>Itsatsi helbidea arbeletik</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Onecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Onecoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Onecoin address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter Onecoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+85"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<location filename="../trafficgraphwidget.cpp" line="+75"/>
<source>KB/s</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+25"/>
<source>Open until %1</source>
<translation>Zabalik %1 arte</translation>
</message>
<message>
<location line="+6"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/konfirmatu gabe</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
        <translation>%1 konfirmazio</translation>
</message>
<message>
<location line="+17"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+13"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+19"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 250 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Amount</source>
<translation>Kopurua</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-202"/>
<source>, has not been successfully broadcast yet</source>
<translation>, ez da arrakastaz emititu oraindik</translation>
</message>
<message numerus="yes">
<location line="-36"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+67"/>
<source>unknown</source>
<translation>ezezaguna</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Transakzioaren xehetasunak</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Panel honek transakzioaren deskribapen xehea erakusten du</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+231"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Mota</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Helbidea</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Kopurua</translation>
</message>
<message>
<location line="+52"/>
<source>Open until %1</source>
<translation>Zabalik %1 arte</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Konfirmatuta (%1 konfirmazio)</translation>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Bloke hau ez du beste inongo nodorik jaso, eta seguruenik ez da onartuko!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Sortua, baina ez onartua</translation>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation>Jasoa honekin: </translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Honi bidalia: </translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Ordainketa zeure buruari</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Bildua</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+194"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transakzioaren egoera. Pasatu sagua gainetik konfirmazio kopurua ikusteko.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Transakzioa jasotako data eta ordua.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Transakzio mota.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Transakzioaren xede-helbidea.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Saldoan kendu edo gehitutako kopurua.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+54"/>
<location line="+17"/>
<source>All</source>
<translation>Denak</translation>
</message>
<message>
<location line="-16"/>
<source>Today</source>
<translation>Gaur</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Aste honetan</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Hil honetan</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Azken hilean</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Aurten</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Muga...</translation>
</message>
<message>
<location line="+12"/>
<source>Received with</source>
<translation>Jasota honekin: </translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Hona bidalia: </translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Zeure buruari</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Bildua</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Beste</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Sartu bilatzeko helbide edo etiketa</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Kopuru minimoa</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Kopiatu helbidea</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Kopiatu etiketa</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+138"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Komaz bereizitako artxiboa (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Mota</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiketa</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Helbidea</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Kopurua</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+212"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+171"/>
<source>Onecoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or onecoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation>Komandoen lista</translation>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation>Laguntza komando batean</translation>
</message>
<message>
<location line="-145"/>
<source>Options:</source>
        <translation>Aukerak:</translation>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: onecoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: onecoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=onecoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Onecoin Alert" admin@foo.com
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Always query for peer addresses via DNS lookup (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-35"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+62"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-28"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+93"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-103"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Onecoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+130"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-67"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-89"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+30"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-41"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+54"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Error: Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-52"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-59"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-47"/>
<source>Connect through SOCKS5 proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Output debugging information (default: 0, supplying <category> is optional)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>If <category> is not supplied, output all debugging information.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source><category> can be:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly. This is intended for regression testing tools and app development.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wait for RPC server to start</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Keep at most <n> MiB of unconnectable blocks in memory (default: %u)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Unsupported argument -socks found. Setting SOCKS version isn't possible anymore, only SOCKS5 proxies are supported.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Initialization sanity check failed. Onecoin is shutting down.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Error loading block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-168"/>
<source>This help message</source>
<translation>Laguntza mezu hau</translation>
</message>
<message>
<location line="+104"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-129"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+125"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-10"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of Onecoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart Onecoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-22"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+58"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-40"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-109"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+124"/>
<source>Unable to bind to %s on this computer. Onecoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-101"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<source>Minimize weight consumption (experimental) (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>How many blocks to check at startup (default: 500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Acceptable ciphers (default: TLSv1.2+HIGH:TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Deprecated argument -debugnet ignored, use -debug=net</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Onecoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation>Birbilatzen...</translation>
</message>
<message>
<location line="+2"/>
<source>Done loading</source>
<translation>Zamaketa amaitua</translation>
</message>
<message>
<location line="-159"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+186"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS> | {
"content_hash": "ff8c3561a5f68b0921813b97abb4a055",
"timestamp": "",
"source": "github",
"line_count": 3309,
"max_line_length": 395,
"avg_line_length": 33.34723481414325,
"alnum_prop": 0.5898537328040889,
"repo_name": "theone-and-not-only-coin/onecoin",
"id": "4faa9cb4db8f9fcfd565155c4ece70c3e9d74ec8",
"size": "110349",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/qt/locale/bitcoin_eu_ES.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "51312"
},
{
"name": "C",
"bytes": "33564"
},
{
"name": "C++",
"bytes": "2493218"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50620"
},
{
"name": "Makefile",
"bytes": "155501"
},
{
"name": "NSIS",
"bytes": "6077"
},
{
"name": "Objective-C",
"bytes": "858"
},
{
"name": "Objective-C++",
"bytes": "3517"
},
{
"name": "Python",
"bytes": "54354"
},
{
"name": "QMake",
"bytes": "13665"
},
{
"name": "Roff",
"bytes": "12638"
},
{
"name": "Shell",
"bytes": "9076"
}
],
"symlink_target": ""
} |
Proyecto para dar soporte de autenticación usando el [autenticador dirdoc](https://sepa.utem.cl/autenticador-dirdoc-portal) en Laravel 5.
## Instalación
La libreria esta en [packagist](https://packagist.org/packages/utem/dirdoc-auth), usamos composer para instalarla:
~~~bash
composer require utem/dirdoc-auth
~~~
## Registrarlo en los providers
Se necesita registrar el paquete en laravel, para esto agregamos lo siguiente al fichero `config/app.php`:
~~~php
'providers' => [
...
'UTEM\Dirdoc\Auth\DirdocAuthServiceProvider',
...
~~~
## Crear la tabla de usuarios
El paquete utiliza una tabla de usuarios para mantener registro de los usuarios loggeados con el servicio REST.
Se incluye una migración para esto (La tabla debe tener `rut` como llave primaria).
~~~bash
php artisan vendor:publish --provider="UTEM\Dirdoc\Auth\DirdocAuthServiceProvider"
php artisan migrate
~~~
## Crear el modelo
Se usa un modelo Eloquent, este es mapeado a la tabla creada en el paso anterior, por supuesto que se incluye un modelo.
~~~bash
php artisan make:model --no-migration Models/Usuario
~~~
Ahora modificamos el modelo creado, para heredar de `\UTEM\Dirdoc\Auth\Models\DirdocWSUser`:
~~~php
<?php namespace App\Models;
class Usuario extends \UTEM\Dirdoc\Auth\Models\DirdocWSUser {
}
~~~
## Cambiar el driver de autenticación
Con el paquete registrado, procedemos a cambiar el driver de autenticacion y el modelo a usar, cambiamos `config/auth.php`:
~~~php
...
'driver' => 'dirdoc',
...
    'model' => 'App\Models\Usuario',
...
~~~
## Configurar las credenciales del servicio
Debemos ingresar las credenciales del servicio, para esto agregamos lo siguiente a nuestro `.env`:
~~~bash
DIRDOC_REST_USERNAME=USUARIOENAUTENTICADOR
DIRDOC_REST_PASSWORD=PASSWORDENAUTENTICADOR
~~~
## Probando
Para hacer una prueba, usamos tinker:
~~~php
>>> Auth::attempt(['rut' => '12345678-5', 'password' => 'passworddirdoc'])
~~~
Voila!.
| {
"content_hash": "9dfd6a2e1d5bbda2f194ba2604186a95",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 137,
"avg_line_length": 24.3125,
"alnum_prop": 0.7408740359897172,
"repo_name": "fcanalesS/ProyectoWeb",
"id": "97f3efb07fd9bb677c327be341fff8d4ba947c49",
"size": "1968",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/utem/dirdoc-auth/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "356"
},
{
"name": "CSS",
"bytes": "611752"
},
{
"name": "HTML",
"bytes": "1698308"
},
{
"name": "JavaScript",
"bytes": "2014564"
},
{
"name": "PHP",
"bytes": "607612"
}
],
"symlink_target": ""
} |
using System;
using System.Linq;
using LinqToDB.Mapping;
using NUnit.Framework;
namespace Tests.UserTests
{
using LinqToDB;
using LinqToDB.Data;
[TestFixture]
public class GroupBySubqueryTests : TestBase
{
    // Mapping classes for the regression scenario: Table1 is the root and
    // fans out through associations to Table2/Table3/Table5 (and onwards to
    // Table4/Table6). The string keys in the [Association] attributes name
    // the key properties declared on the classes themselves.
    class Table1
    {
        public long Field1 { get; set; }
        public int Field2 { get; set; }

        [Nullable]
        public int? Field3 { get; set; }

        [Association(ThisKey = "Field1", OtherKey = "Field1", CanBeNull = false)]
        public Table3 Ref1 { get; set; } = null!;

        [Association(ThisKey = "Field3", OtherKey = "Field3", CanBeNull = true)]
        public Table5? Ref2 { get; set; }

        [Association(ThisKey = "Field2", OtherKey = "Field2", CanBeNull = true)]
        public Table2? Ref3 { get; set; }
    }

    class Table2
    {
        public int Field2 { get; set; }
        public string? Field4 { get; set; }
    }

    class Table3
    {
        public int Field5 { get; set; }
        public long Field1 { get; set; }

        [Association(ThisKey = "Field5", OtherKey = "Field5", CanBeNull = false)]
        public Table4 Ref4 { get; set; } = null!;
    }

    class Table4
    {
        public int Field5 { get; set; }
        public int Field6 { get; set; }
    }

    public class Table5
    {
        [Nullable]
        public int? Field3 { get; set; }
        public int Field7 { get; set; }

        [Association(ThisKey = "Field7", OtherKey = "Field7", CanBeNull = true)]
        public Table6? Ref5 { get; set; }
    }

    public class Table6
    {
        public int Field7 { get; set; }
        public string? Field8 { get; set; }
    }

    /// <summary>
    /// Regression test: grouping over a Distinct subquery must keep the
    /// DISTINCT in the generated SQL and preserve all three projected
    /// columns of the inner select. The test inspects the query AST only;
    /// nothing is executed against a database.
    /// </summary>
    [Test]
    public void Test()
    {
        using (var db = new DataConnection())
        {
            // Distinct projection reached through three association chains.
            var q1 = (
                from t1 in db.GetTable<Table1>()
                where t1.Field3 != null
                select new
                {
                    t1.Ref1.Ref4.Field6,
                    t1.Ref3!.Field4,
                    Field1 = t1.Ref2!.Ref5!.Field8 ?? string.Empty
                }
            ).Distinct();

            var sql1 = q1.GetSelectQuery();
            // The outer Distinct must be visible in the translated query.
            Assert.That(sql1.Select.IsDistinct, "Distinct not present");

            // Group the distinct rows and keep only duplicated key pairs.
            var q2 =
                from t3 in q1
                group t3 by new { t3.Field6, t3.Field4 }
                into g
                where g.Count() > 1
                select new { g.Key.Field6, EngineeringCircuitNumber = g.Key.Field4, Count = g.Count() };

            // The inner query must survive as a distinct sub-select with all
            // three of its columns intact (the original bug dropped columns).
            var distinct = q2.EnumQueries().FirstOrDefault(q => q.Select.IsDistinct)!;

            Assert.That(distinct, Is.Not.Null);
            Assert.That(distinct.Select.Columns.Count, Is.EqualTo(3));
        }
    }
}
}
| {
"content_hash": "0c76a15a793dea24d92b343f7c4ae14f",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 93,
"avg_line_length": 23.25242718446602,
"alnum_prop": 0.5891440501043841,
"repo_name": "LinqToDB4iSeries/Linq2DB4iSeries",
"id": "b27f9f890aea6d0a85bb9cbe309593c21dbd4e22",
"size": "2397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tests/Linq/UserTests/GroupBySubqueryTests.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "5721100"
},
{
"name": "F#",
"bytes": "16033"
},
{
"name": "TSQL",
"bytes": "25671"
},
{
"name": "Visual Basic .NET",
"bytes": "3871"
}
],
"symlink_target": ""
} |
package org.trianacode.http;
import org.thinginitself.http.HttpPeer;
import org.trianacode.taskgraph.tool.ToolResolver;
import java.io.IOException;
/**
* Starts off the HTTP and discovery services...
* <p/>
* User: scmijt Date: Jul 30, 2010 Time: 12:06:44 PM
*/
public class HTTPServices {

    // The workflow server owns the peer; both are created together in the
    // constructor and torn down together in stopServices().
    private TrianaHttpServer workflowServer;
    private HttpPeer httpEngine;

    /**
     * Creates the workflow HTTP server and caches the HTTP peer it runs on.
     */
    public HTTPServices() {
        workflowServer = new TrianaHttpServer();
        httpEngine = workflowServer.getHTTPPeerInstance();
    }

    /**
     * Starts the workflow HTTP server, publishing the tools known to the
     * given resolver.
     *
     * @param resolver source of the tools to publish
     * @throws IOException if the server cannot be started
     */
    public void startServices(ToolResolver resolver) throws IOException {
        workflowServer.start(resolver);
    }

    /**
     * Stops the workflow server and closes the HTTP peer.
     * <p/>
     * The peer is closed in a {@code finally} block so it is released even
     * when stopping the server throws; the previous version skipped the
     * close in that case, leaking the peer. Failures are still only logged,
     * as before.
     */
    public void stopServices() {
        try {
            try {
                workflowServer.stop();
            } finally {
                // Always attempt to close the peer, even if stop() failed.
                httpEngine.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * @return the workflow HTTP server owned by this service bundle
     */
    public TrianaHttpServer getWorkflowServer() {
        return workflowServer;
    }

    /**
     * @return the HTTP peer the workflow server runs on
     */
    public HttpPeer getHttpEngine() {
        return httpEngine;
    }
}
| {
"content_hash": "ed255a17d59a73739a493cd02cdb6a79",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 73,
"avg_line_length": 22.266666666666666,
"alnum_prop": 0.654690618762475,
"repo_name": "CSCSI/Triana",
"id": "d49e401864bf2dc7375bcbffbbea1b9c11a9a5ae",
"size": "1002",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "triana-core/src/main/java/org/trianacode/http/HTTPServices.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "35699"
},
{
"name": "CSS",
"bytes": "13201"
},
{
"name": "Java",
"bytes": "10215563"
},
{
"name": "JavaScript",
"bytes": "10926"
},
{
"name": "Shell",
"bytes": "1610"
}
],
"symlink_target": ""
} |
Contains function definitions for the Windows API library mi. See winapi for types and constants.
```toml
[dependencies]
mi-sys = "0.0.1"
```
```rust
extern crate mi;
```
[Documentation](https://retep998.github.io/doc/mi/)
| {
"content_hash": "5f6475d921bd7204fac4517aa06981a1",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 97,
"avg_line_length": 18.833333333333332,
"alnum_prop": 0.7123893805309734,
"repo_name": "Boddlnagg/winapi-rs",
"id": "214513906c83a44bfc1988df64213f652f2182cd",
"size": "233",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/mi/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Rust",
"bytes": "4424273"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!--
  ~ Copyright (c) 2016. Jahir Fiquitiva
  ~
  ~ Licensed under the CreativeCommons Attribution-ShareAlike
  ~ 4.0 International License. You may not use this file except in compliance
  ~ with the License. You may obtain a copy of the License at
  ~
  ~    http://creativecommons.org/licenses/by-sa/4.0/legalcode
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
  ~
  ~ Big thanks to the project contributors. Check them in the repository.
  ~
  -->
<RelativeLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    app:layout_behavior="@string/appbar_scrolling_view_behavior">

    <!-- Scrolling list of Zooper widget entries. The xmlns declarations that
         were repeated on this element are inherited from the root and have
         been removed; likewise the duplicated app:layout_behavior, which is
         only honored on direct children of a CoordinatorLayout (the root
         already carries it for that case). -->
    <android.support.v7.widget.RecyclerView
        android:id="@+id/zooper_rv"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:overScrollMode="ifContentScrolls"
        android:scrollbars="none"/>

    <!-- Fast-scroll thumb pinned to the end edge of the list. -->
    <com.pluscubed.recyclerfastscroll.RecyclerFastScroller
        android:id="@+id/rvFastScroller"
        android:layout_width="wrap_content"
        android:layout_height="match_parent"
        android:layout_alignParentEnd="true"
        android:layout_alignParentRight="true"
        android:layout_gravity="end"/>
</RelativeLayout> | {
"content_hash": "2b11448a019be92659bf6fe1e70a0119",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 77,
"avg_line_length": 40.21739130434783,
"alnum_prop": 0.7086486486486486,
"repo_name": "PitchedApps/Material-Glass",
"id": "aac7db2a2253ee3a81ae5ab6f96d4eec560a9290",
"size": "1850",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "themeshowcase/src/main/res/layout/sections/layout/zooper_section.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "597650"
}
],
"symlink_target": ""
} |
/** @defgroup usb_midi_defines USB MIDI Type Definitions

@brief <b>Defined Constants and Types for the USB MIDI Type Definitions</b>

@ingroup USB_defines

@version 1.0.0

@author @htmlonly &copy; @endhtmlonly 2014
Daniel Thompson <daniel@redfelineninja.org.uk>

@date 19 April 2014

LGPL License Terms @ref lgpl_license
*/
/* NOTE: the doxygen group above was previously named "usb_audio_defines",
 * copy-pasted from the audio header; renamed so it does not collide with
 * the audio group. */
/*
 * This file is part of the libopencm3 project.
 *
 * Copyright (C) 2014 Daniel Thompson <daniel@redfelineninja.org.uk>
 *
 * This library is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this library.  If not, see <http://www.gnu.org/licenses/>.
 */

/**@{*/

#ifndef LIBOPENCM3_USB_MIDI_H
#define LIBOPENCM3_USB_MIDI_H

#include <stdint.h> /* uint8_t/uint16_t; previously relied on an earlier include */

/*
 * Definitions from the USB_MIDI_ or usb_midi_ namespace come from:
 * "Universal Serial Bus Class Definitions for MIDI Devices, Revision 1.0"
 */

/* Appendix A.1: MS Class-Specific Interface Descriptor Subtypes */
#define USB_MIDI_SUBTYPE_MS_DESCRIPTOR_UNDEFINED 0x00
#define USB_MIDI_SUBTYPE_MS_HEADER 0x01
#define USB_MIDI_SUBTYPE_MIDI_IN_JACK 0x02
#define USB_MIDI_SUBTYPE_MIDI_OUT_JACK 0x03
#define USB_MIDI_SUBTYPE_MIDI_ELEMENT 0x04

/* Appendix A.2: MS Class-Specific Endpoint Descriptor Subtypes */
#define USB_MIDI_SUBTYPE_DESCRIPTOR_UNDEFINED 0x00
#define USB_MIDI_SUBTYPE_MS_GENERAL 0x01

/* Appendix A.3: MS MIDI IN and OUT Jack types */
#define USB_MIDI_JACK_TYPE_UNDEFINED 0x00
#define USB_MIDI_JACK_TYPE_EMBEDDED 0x01
#define USB_MIDI_JACK_TYPE_EXTERNAL 0x02

/* Appendix A.5.1 Endpoint Control Selectors */
#define USB_MIDI_EP_CONTROL_UNDEFINED 0x00
#define USB_MIDI_ASSOCIATION_CONTROL 0x01

/* Table 6-2: Class-Specific MS Interface Header Descriptor */
struct usb_midi_header_descriptor {
	uint8_t bLength;
	uint8_t bDescriptorType;
	uint8_t bDescriptorSubtype;
	uint16_t bcdMSC;	/* MIDIStreaming spec release, BCD (0x0100 = v1.0) */
	uint16_t wTotalLength;	/* total length of class-specific descriptors */
} __attribute__((packed));

/* Table 6-3: MIDI IN Jack Descriptor */
struct usb_midi_in_jack_descriptor {
	uint8_t bLength;
	uint8_t bDescriptorType;
	uint8_t bDescriptorSubtype;
	uint8_t bJackType;	/* USB_MIDI_JACK_TYPE_* */
	uint8_t bJackID;
	uint8_t iJack;		/* string descriptor index, 0 = none */
} __attribute__((packed));

/* Table 6-4: MIDI OUT Jack Descriptor (head)
 *
 * The descriptor is variable length (bNrInputPins source entries between
 * the head and the tail), hence the head/body/tail split below. */
struct usb_midi_out_jack_descriptor_head {
	uint8_t bLength;
	uint8_t bDescriptorType;
	uint8_t bDescriptorSubtype;
	uint8_t bJackType;
	uint8_t bJackID;
	uint8_t bNrInputPins;
	/* ... */
} __attribute__((packed));

/* Table 6.4: MIDI OUT Jack Descriptor (body) */
struct usb_midi_out_jack_descriptor_body {
	/* ... */
	uint8_t baSourceID;
	uint8_t baSourcePin;
	/* ... */
} __attribute__((packed));

/* Table 6.4: MIDI OUT Jack Descriptor (tail) */
struct usb_midi_out_jack_descriptor_tail {
	/* ... */
	uint8_t iJack;
} __attribute__((packed));

/* Table 6.4: MIDI OUT Jack Descriptor (single)
 *
 * This structure is a convenience covering the (normal) case where
 * there is only one input pin.
 */
struct usb_midi_out_jack_descriptor {
	struct usb_midi_out_jack_descriptor_head head;
	struct usb_midi_out_jack_descriptor_body source[1];
	struct usb_midi_out_jack_descriptor_tail tail;
} __attribute__((packed));

/* Table 6-5: MIDI Element Descriptor (head) */
struct usb_midi_element_descriptor_head {
	uint8_t bLength;
	uint8_t bDescriptorType;
	uint8_t bDescriptorSubtype;
	uint8_t bElementID;
	uint8_t bNrInputPins;
	/* ... */
} __attribute__((packed));

/* Table 6-5: MIDI Element Descriptor (body) */
struct usb_midi_element_descriptor_body {
	/* ... */
	uint8_t baSourceID;
	uint8_t baSourcePin;
	/* ... */
} __attribute__((packed));

/* Table 6-5: MIDI Element Descriptor (tail) */
struct usb_midi_element_descriptor_tail {
	/* ... */
	uint8_t bNrOutputPins;
	uint8_t bInTerminalLink;
	uint8_t bOutTerminalLink;
	uint8_t bElCapsSize;
	uint16_t bmElementCaps; /* host cannot assume this is 16-bit but
				   device can (since highest defined bitmap
				   value in v1.0 is bit 11) */
	uint8_t iElement;
} __attribute__((packed));

/* Table 6-5: MIDI Element Descriptor (single)
 *
 * This structure is a convenience covering the (common) case where
 * there is only one input pin.
 */
struct usb_midi_element_descriptor {
	struct usb_midi_element_descriptor_head head;
	struct usb_midi_element_descriptor_body source[1];
	struct usb_midi_element_descriptor_tail tail;
} __attribute__((packed));

/* Table 6-7: Class-specific MS Bulk Data Endpoint Descriptor (head) */
struct usb_midi_endpoint_descriptor_head {
	uint8_t bLength;
	uint8_t bDescriptorType;
	uint8_t bDescriptorSubType;
	uint8_t bNumEmbMIDIJack;
} __attribute__((packed));

/* Table 6-7: Class-specific MS Bulk Data Endpoint Descriptor (body) */
struct usb_midi_endpoint_descriptor_body {
	uint8_t baAssocJackID;
} __attribute__((packed));

/* Table 6.7: Class-specific MS Bulk Data Endpoint Descriptor (single)
 *
 * This structure is a convenience covering the (normal) case where
 * there is only one input pin.
 */
struct usb_midi_endpoint_descriptor {
	struct usb_midi_endpoint_descriptor_head head;
	struct usb_midi_endpoint_descriptor_body jack[1];
} __attribute__((packed));

#endif

/**@}*/
| {
"content_hash": "d002437fede137d5b92a58ae3e0af1d0",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 78,
"avg_line_length": 29.16315789473684,
"alnum_prop": 0.7220718281898574,
"repo_name": "yangosoft/stm32f103c8-board",
"id": "c8a5aa8af8c0dc56f0fa01240f89405ed6247645",
"size": "5541",
"binary": false,
"copies": "19",
"ref": "refs/heads/master",
"path": "code/freertos_stm32f103rb6/libopencm3/usb/midi.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "9178"
},
{
"name": "C",
"bytes": "5685581"
},
{
"name": "C++",
"bytes": "253275"
},
{
"name": "Eagle",
"bytes": "5248995"
},
{
"name": "Makefile",
"bytes": "3833"
},
{
"name": "Objective-C",
"bytes": "9922"
},
{
"name": "Prolog",
"bytes": "2918"
}
],
"symlink_target": ""
} |
# Attributes consumed by the selenium node recipe to register this host as a
# Selenium Grid node.
default['selenium']['node']['service_name'] = 'selenium_node' # used by node recipe only
# NOTE(review): 'ip' looks like a sentinel resolved to the node's real IP
# address elsewhere — confirm against the node recipe.
default['selenium']['node']['host'] = 'ip'
default['selenium']['node']['port'] = 5555
# Extra JVM arguments for the node process (nil = none).
default['selenium']['node']['jvm_args'] = nil
default['selenium']['node']['proxy'] = 'org.openqa.grid.selenium.proxy.DefaultRemoteProxy'
# Maximum number of concurrent sessions this node advertises to the grid.
default['selenium']['node']['maxSession'] = 5
# Register with the hub at hubHost:hubPort, retrying every registerCycle ms.
default['selenium']['node']['register'] = true
default['selenium']['node']['registerCycle'] = 5000
default['selenium']['node']['hubPort'] = 4444
default['selenium']['node']['hubHost'] = 'ip'
# Browser capabilities advertised by this node ([] = grid defaults).
default['selenium']['node']['capabilities'] = []
# Extra command-line arguments appended to the node invocation.
default['selenium']['node']['additional_args'] = []
# Presumably the X display browsers run on (Linux) — confirm in recipe.
default['selenium']['node']['display'] = ':0'
# NOTE(review): likely service-account credentials (Windows service);
# nil = platform defaults — confirm in recipe.
default['selenium']['node']['username'] = nil
default['selenium']['node']['password'] = nil
default['selenium']['node']['domain'] = nil
| {
"content_hash": "49e70caa7087195be2dcb3dd6e210caa",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 90,
"avg_line_length": 52,
"alnum_prop": 0.6550480769230769,
"repo_name": "decareano/chef-selenium",
"id": "444bd641ec45250d3211749f4f660940cd49fb02",
"size": "832",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "attributes/node.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1842"
},
{
"name": "Ruby",
"bytes": "54644"
},
{
"name": "Shell",
"bytes": "2394"
}
],
"symlink_target": ""
} |
package io.agrest.it.fixture.cayenne.auto;
import org.apache.cayenne.CayenneDataObject;
/**
* Class _E25 was generated by Cayenne.
* It is probably a good idea to avoid changing this class manually,
* since it may be overwritten next time code is regenerated.
* If you need to make any customizations, please use subclass.
*/
public abstract class _E25 extends CayenneDataObject {

    private static final long serialVersionUID = 1L;

    // Name of the database column backing this entity's primary key.
    // Generated by Cayenne (see class javadoc) — do not edit by hand.
    public static final String ID_PK_COLUMN = "id";

}
| {
"content_hash": "81456b65ccdc4f0df6aff223440d6b1b",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 68,
"avg_line_length": 27.77777777777778,
"alnum_prop": 0.748,
"repo_name": "AbleOne/link-rest",
"id": "ae25080e18034355c6f5b120c4fb6244eccb75aa",
"size": "500",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "agrest/src/test/java/io/agrest/it/fixture/cayenne/auto/_E25.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "15181"
},
{
"name": "Java",
"bytes": "1332576"
}
],
"symlink_target": ""
} |
using MathSite.BasicAdmin.ViewModels.SharedModels.Posts;
namespace MathSite.BasicAdmin.ViewModels.News
{
/// <summary>
/// View model for the news list in the basic admin area. All behavior is
/// inherited unchanged from <see cref="ListPostViewModel"/>; the subclass
/// exists only to give news its own concrete view-model type.
/// </summary>
public class ListNewsViewModel : ListPostViewModel
{
}
} | {
"content_hash": "c6cb21a0536efda12b823eb7f27fe8c4",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 57,
"avg_line_length": 21.875,
"alnum_prop": 0.7771428571428571,
"repo_name": "YarGU-Demidov/math-site",
"id": "3bda5857169b74d915b2a8b997942837f9200099",
"size": "177",
"binary": false,
"copies": "1",
"ref": "refs/heads/rc-1.1.0",
"path": "src/MathSite.BasicAdmin.ViewModels/News/ListNewsViewModel.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "118"
},
{
"name": "C#",
"bytes": "706608"
},
{
"name": "CSS",
"bytes": "769495"
},
{
"name": "HTML",
"bytes": "396124"
},
{
"name": "JavaScript",
"bytes": "708405"
},
{
"name": "PHP",
"bytes": "2200"
},
{
"name": "Shell",
"bytes": "186"
},
{
"name": "TSQL",
"bytes": "35575"
}
],
"symlink_target": ""
} |
# Decompiled Gson @Since annotation (com.google.gson.annotations.Since):
# marks a field or type as having existed since a given library/model version.
.class public interface abstract annotation Lcom/google/gson/annotations/Since;
.super Ljava/lang/Object;
.source "SourceFile"


# interfaces
.implements Ljava/lang/annotation/Annotation;


# annotations
# Retained at runtime so Gson can read it reflectively.
.annotation runtime Ljava/lang/annotation/Retention;
value = .enum Ljava/lang/annotation/RetentionPolicy;->RUNTIME:Ljava/lang/annotation/RetentionPolicy;
.end annotation

# Applicable to fields and types only.
.annotation runtime Ljava/lang/annotation/Target;
value = {
.enum Ljava/lang/annotation/ElementType;->FIELD:Ljava/lang/annotation/ElementType;,
.enum Ljava/lang/annotation/ElementType;->TYPE:Ljava/lang/annotation/ElementType;
}
.end annotation


# virtual methods
# The version number since which the annotated element exists ("D" = double).
.method public abstract value()D
.end method
| {
"content_hash": "ee0e8fc1a097fbc1ebbe5353151289ca",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 104,
"avg_line_length": 29.625,
"alnum_prop": 0.7763713080168776,
"repo_name": "mclarkelauer/AndroidAnalyzer",
"id": "ad973a93731866f634f2ed6747c44963e7a85545",
"size": "711",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Tests/Test_Cases/ComplexCase/out/com/google/gson/annotations/Since.smali",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "OpenEdge ABL",
"bytes": "2660501"
},
{
"name": "Python",
"bytes": "118996"
},
{
"name": "Shell",
"bytes": "2320"
},
{
"name": "Smali",
"bytes": "3701697"
}
],
"symlink_target": ""
} |
#import <Foundation/Foundation.h>

// Forward declaration: the full KingDownLoader interface is only needed by
// callers and the implementation. The original header referenced
// KingDownLoader (and NSMutableDictionary) without any import or
// declaration, so it did not compile standalone.
@class KingDownLoader;

/**
 * Mutable dictionary tracking KingDownLoader instances by URL.
 *
 * NOTE(review): subclassing NSMutableDictionary (a class cluster) is
 * fragile; consider wrapping a dictionary instead — confirm with the
 * implementation before changing.
 */
@interface KingDownloadManagerInfo : NSMutableDictionary

/// Returns the downloader registered for the given URL.
-(KingDownLoader *)getDownLoaderWithURL:(NSURL *)url;

/// NOTE(review): the name suggests an MD5-of-URL keyed lookup, yet the
/// return type is KingDownLoader — confirm the intended semantics in the
/// implementation.
-(KingDownLoader *)MD5URL:(NSURL *)url;

@end
| {
"content_hash": "eb40d31347579ea56884c43d0c3639e7",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 56,
"avg_line_length": 39,
"alnum_prop": 0.7948717948717948,
"repo_name": "hengyangKing/KingDownloadManager",
"id": "e4e84b858519d4f192c36e8c5eea6aaa4ada069d",
"size": "390",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "KingDownloadManager/KingDownloadManagerInfo.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "25191"
},
{
"name": "Ruby",
"bytes": "6466"
}
],
"symlink_target": ""
} |
package com.smartstudio.deviceinfo.analytics.dashboard.system;
import com.crashlytics.android.answers.CustomEvent;
import com.smartstudio.deviceinfo.analytics.FabricAnalyticsManager;
import com.smartstudio.deviceinfo.analytics.FabricAnalyticsManagerTest;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;
public class SystemFabricAnalyticsTest extends FabricAnalyticsManagerTest {

    // Object under test; assigned by createAnalyticsManager() below, which
    // the base class invokes during setup.
    private SystemFabricAnalytics mAnalytics;

    @Test
    public void testGetScreenName() throws Exception {
        String screenName = mAnalytics.getScreenName();

        // NOTE(review): compares the Fabric analytics screen name against the
        // *Firebase* implementation's constant — presumably the two are meant
        // to share one value; confirm this cross-reference is intentional.
        assertThat(screenName).isEqualTo(SystemFirebaseAnalyticsImpl.SCREEN_NAME);
    }

    @Test
    public void testSetupContentViewEvent() throws Exception {
        mAnalytics.setupContentViewEvent(mContentViewEvent);

        String screenName = SystemFabricAnalytics.SCREEN_NAME;
        // Content id is derived from the screen name's hash code.
        verify(mContentViewEvent).putContentName(screenName);
        verify(mContentViewEvent).putContentId(String.valueOf(screenName.hashCode()));
        verify(mContentViewEvent).putContentType(SystemFabricAnalytics.CONTENT_TYPE);
    }

    @Test
    public void testShare() throws Exception {
        CustomEvent event = mockCustomEvent();

        mAnalytics.reportShare();

        // Verifies a SHARE custom event was logged (helper from the base class).
        verifyEvent(event, SystemFabricAnalytics.SHARE);
    }

    // Base-class hook: builds the manager under test and keeps a typed
    // reference for the assertions above.
    @Override
    protected FabricAnalyticsManager createAnalyticsManager() {
        mAnalytics = new SystemFabricAnalytics(mAnswers, mContentViewEvent);
        return mAnalytics;
    }
} | {
"content_hash": "730984f0c3777fb106f3e45e6f4a270e",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 86,
"avg_line_length": 36.80952380952381,
"alnum_prop": 0.7664941785252264,
"repo_name": "Smart-Studio/device-info",
"id": "d3c66e06faa9251d53f88cbfc7c7f908507a0d1e",
"size": "1546",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/test/java/com/smartstudio/deviceinfo/analytics/dashboard/system/SystemFabricAnalyticsTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "419605"
}
],
"symlink_target": ""
} |
// Disable the built-in polyfill globally; individual bindings below opt back in.
$.pressureConfig({
    polyfill: false
});

// Shared set of pressure callbacks; inside each handler `this` is the bound element.
var handlers = {
    start: function (event) {
        console.log('start', event);
    },
    change: function (force, event) {
        // event.preventDefault();
        this.style.width = Pressure.map(force, 0, 1, 200, 300) + 'px';
        this.innerHTML = force;
        console.log('change', force);
    },
    startDeepPress: function (event) {
        console.log('start deep press', event);
        this.style.backgroundColor = '#FF0040';
    },
    endDeepPress: function () {
        console.log('end deep press');
        this.style.backgroundColor = '#0080FF';
    },
    end: function () {
        console.log('end');
        this.style.width = '200px';
        this.innerHTML = 0;
    },
    unsupported: function () {
        console.log(this);
        this.innerHTML = 'Your device / browser does not support this :(';
    }
};

// Vanilla API: element list, jQuery object, and selector string are all accepted.
Pressure.set(document.querySelectorAll('#el1'), handlers);
Pressure.set($('#el2'), handlers, {only: 'mouse', polyfill: true, polyfillSpeedUp: 5000, polyfillSpeedDown: 2000});
Pressure.set('#el3', handlers, {only: 'touch'});

// jQuery plugin API with per-element input-type restrictions.
$('#el1-jquery').pressure(handlers);
$('#el2-jquery').pressure(handlers, {only: 'mouse'});
$('#el3-jquery').pressure(handlers, {only: 'touch'});
$('#el4-jquery').pressure(handlers, {only: 'pointer'});

// Images get a minimal binding that only logs the force value.
$('img').pressure({
    change: function (force, event) {
        console.log(force);
    }
});
| {
"content_hash": "ac98b30635d482a76fce5c127d172cc2",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 112,
"avg_line_length": 24.76923076923077,
"alnum_prop": 0.6281055900621118,
"repo_name": "davidshimjs/pressure",
"id": "c1c894f0febf0a87839deefc1cf430a0bbcc55a7",
"size": "1522",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/example.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "16564"
}
],
"symlink_target": ""
} |
#pragma once

/* Brings up the device's default Thread network (implementation lives elsewhere
 * in ThreadUtil; presumably uses a compiled-in default dataset — TODO confirm). */
void StartDefaultThreadNetwork(void);
| {
"content_hash": "7580d39d5134310c03685a5bbb6c320d",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 37,
"avg_line_length": 10.8,
"alnum_prop": 0.7962962962962963,
"repo_name": "project-chip/connectedhomeip",
"id": "eb6ed1e7f4b42d5a16c29998c52b531c2886b533",
"size": "718",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/platform/telink/util/include/ThreadUtil.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1759301"
},
{
"name": "C++",
"bytes": "19104548"
},
{
"name": "CMake",
"bytes": "140510"
},
{
"name": "Dockerfile",
"bytes": "50353"
},
{
"name": "Emacs Lisp",
"bytes": "1042"
},
{
"name": "Java",
"bytes": "167719"
},
{
"name": "JavaScript",
"bytes": "2106"
},
{
"name": "Jinja",
"bytes": "22322"
},
{
"name": "Objective-C",
"bytes": "930838"
},
{
"name": "Objective-C++",
"bytes": "435348"
},
{
"name": "Python",
"bytes": "1931007"
},
{
"name": "Shell",
"bytes": "195843"
},
{
"name": "Tcl",
"bytes": "311"
},
{
"name": "ZAP",
"bytes": "584219"
}
],
"symlink_target": ""
} |
import * as React from 'react'
import { Location } from 'history'
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { IconProp } from '@fortawesome/fontawesome-svg-core'
import { ajaxGet } from '@framework/Services';
import { EntitySettings } from '@framework/Navigator'
import * as AppContext from '@framework/AppContext'
import * as Navigator from '@framework/Navigator'
import * as Finder from '@framework/Finder'
import { Lite, Entity } from '@framework/Signum.Entities'
import { Type } from '@framework/Reflection'
import { ToolbarEntity, ToolbarMenuEntity, ToolbarElementEmbedded, ToolbarElementType, ToolbarLocation } from './Signum.Entities.Toolbar'
import * as Constructor from '@framework/Constructor'
import * as UserAssetClient from '../UserAssets/UserAssetClient'
import { ValueSearchControl } from '@framework/Search';
import { parseIcon } from '../Basics/Templates/IconTypeahead';
/**
 * Entry point for the Toolbar module. Registers entity settings (with
 * lazy-loaded templates), default query ordering, a default constructor for
 * toolbar elements, the supplied per-content-type configs, and user-asset
 * import/export support. Call once during application start-up.
 */
export function start(options: { routes: JSX.Element[] }, ...configs: ToolbarConfig<any>[]) {
  Navigator.addSettings(new EntitySettings(ToolbarEntity, t => import('./Templates/Toolbar')));
  Navigator.addSettings(new EntitySettings(ToolbarMenuEntity, t => import('./Templates/ToolbarMenu')));
  Navigator.addSettings(new EntitySettings(ToolbarElementEmbedded, t => import('./Templates/ToolbarElement')));
  // Highest-priority toolbars are listed first in search results.
  Finder.addSettings({ queryName: ToolbarEntity, defaultOrders: [{ token: ToolbarEntity.token(a => a.priority), orderType: "Descending" }] });
  // Newly constructed toolbar elements default to plain items.
  Constructor.registerConstructor(ToolbarElementEmbedded, tn => ToolbarElementEmbedded.New({ type: "Item" }));
  configs.forEach(c => registerConfig(c));
  UserAssetClient.start({ routes: options.routes });
  UserAssetClient.registerExportAssertLink(ToolbarEntity);
}
/**
 * Base class describing how toolbar elements pointing at entities of type T
 * behave: how their icon is rendered and what happens when they are clicked.
 * Subclasses are registered per entity type via registerConfig.
 */
export abstract class ToolbarConfig<T extends Entity> {
  type: Type<T>;
  constructor(type: Type<T>) {
    this.type = type;
  }
  /** Icon for a toolbar element, tinted with the element's configured color. */
  getIcon(element: ToolbarResponse<T>) {
    return ToolbarConfig.coloredIcon(parseIcon(element.iconName), element.iconColor);
  }
  /** Renders the given FontAwesome icon with an optional color; null when no icon is set. */
  static coloredIcon(icon: IconProp | undefined, color: string | undefined): React.ReactChild | null {
    if (!icon)
      return null;
    return <FontAwesomeIcon icon={icon} className={"icon"} color={color} />;
  }
  /** Resolves the URL this element navigates to. */
  abstract navigateTo(element: ToolbarResponse<T>): Promise<string>;
  /** Whether this element should be shown as active for the current browser location. */
  abstract isCompatibleWithUrl(element: ToolbarResponse<T>, location: Location, query: any): boolean;
  handleNavigateClick(e: React.MouseEvent<any>, res: ToolbarResponse<any>) {
    e.preventDefault();
    e.persist(); // keep the synthetic event alive across the async navigation
    this.navigateTo(res).then(url => {
      AppContext.pushOrOpenInTab(url, e);
    }).done();
  }
}
// Registry of ToolbarConfig instances, keyed by entity type name.
export const configs: { [type: string]: ToolbarConfig<any> } = {};

/** Registers (or replaces) the config used for toolbar elements of the given entity type. */
export function registerConfig<T extends Entity>(config: ToolbarConfig<T>) {
  configs[config.type.typeName] = config;
}
export namespace API {
  /** Fetches the toolbar tree for the current user at the given location; null when none is configured. */
  export function getCurrentToolbar(location: ToolbarLocation): Promise<ToolbarResponse<any> | null> {
    return ajaxGet({ url: `~/api/toolbar/current/${location}` });
  }
}
/** Server DTO describing one toolbar node and, recursively, its children. */
export interface ToolbarResponse<T extends Entity> {
  type: ToolbarElementType;
  iconName?: string; // icon identifier understood by parseIcon
  iconColor?: string;
  label?: string;
  content?: Lite<T>; // entity the element points at, if any
  url?: string; // explicit URL target, if any
  elements?: Array<ToolbarResponse<any>>; // child nodes (for menus)
  openInPopup?: boolean;
  autoRefreshPeriod?: number; // refresh interval — unit not visible here; presumably seconds, TODO confirm against server
}
| {
"content_hash": "a2fba4b22cc01196aeb4452ace41bca0",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 142,
"avg_line_length": 39.64705882352941,
"alnum_prop": 0.716913946587537,
"repo_name": "AlejandroCano/extensions",
"id": "d1d5771517bdac6335ce756d685ca4274d23392b",
"size": "3370",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Signum.React.Extensions/Toolbar/ToolbarClient.tsx",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "2985100"
},
{
"name": "CSS",
"bytes": "29513"
},
{
"name": "TypeScript",
"bytes": "2097256"
}
],
"symlink_target": ""
} |
// Sample BitBot configuration. Copy to config.js and fill in your exchange
// credentials before enabling live trading; never commit real API keys.
var config = {};
//------------------------------UserParams
//------------------------------EnableRealTrading
config.tradingEnabled = false;
// If false trading is simulated, candles are still aggregated
config.tradeAtStart = false;
// If true a trade is made immediately if the advice is either buy or sell
//------------------------------EnableRealTrading
//------------------------------exchangeSettings
config.exchangeSettings = {
exchange: '',
// Options: (bitstamp, kraken, btce)
currencyPair: {pair: '', asset: '', currency: ''},
// For Bitstamp just use {pair: 'XBTUSD', asset: 'XBT', currency: 'USD'}
// For Kraken look up the currency pairs in their API: https://api.kraken.com/0/public/AssetPairs
// Kraken Example: {pair: 'XXBTZEUR', asset: 'XXBT', currency: 'ZEUR'}
// For BTC-E look up the currency pairs in their API: https://btc-e.com/api/3/info
// BTC-E Example: {pair: 'BTC_USD', asset: 'BTC', currency: 'USD'}
tradingReserveAsset: 0,
// Enter an amount of "asset" you would like to freeze (not trade).
tradingReserveCurrency: 0,
// Enter an amount of "currency" you would like to freeze (not trade).
slippagePercentage: 0.1
// Percentage to sell below and buy above the market.
};
//------------------------------exchangeSettings
//------------------------------APISettings
// Credentials for the exchange selected above; only the matching entry is used.
config.apiSettings = {
bitstamp: {clientId: 0, apiKey: '', secret: ''},
kraken: {apiKey: '', secret: ''},
btce: {apiKey: '', secret: ''}
};
//------------------------------APISettings
//------------------------------dbSettings
config.mongoConnectionString = 'localhost/bitbot';
// The connection string for your MongoDB Installation.
// Example: config.mongoConnectionString = 'username:password@example.com/mydb';
//------------------------------dbSettings
//------------------------------downloaderSettings
config.downloaderRefreshSeconds = 10;
// Best to keep this default setting unless you know what you are doing.
config.downloaderMaxFails = 30;
// After failing to retrieve trade data n times the application will quit. A value of 0 prevents quitting.
//------------------------------downloaderSettings
//------------------------------IndicatorSettings
config.indicatorSettings = {
indicator: 'MACD',
// Choices: Any indicator from the indicators folder.
options: {neededPeriods: 26, shortPeriods: 12, longPeriods: 26, emaPeriods: 9, buyThreshold: 0, sellThreshold: 0},
// Options needed for your indicator (Look them up in the indicator's file).
candleStickSizeMinutes: 5
};
//------------------------------IndicatorSettings
//------------------------------orderSettings
// Open orders are cancelled/re-placed after a tenth of a candle period.
config.orderKeepAliveMinutes = config.indicatorSettings.candleStickSizeMinutes / 10;
//------------------------------orderSettings
//------------------------------PushOver
config.pushOver = {
enabled: false,
pushUserId: '',
pushAppToken: ''
};
// Push notifications via pushover (https://pushover.net/).
//------------------------------PushOver
//------------------------------BackTesting
// Starting balances used when replaying historical data.
config.backTesterSettings = {
initialAssetBalance: 0,
initialCurrencyBalance: 10000
};
//------------------------------BackTesting
//------------------------------Debug
config.debug = true;
//------------------------------Debug
//------------------------------UserParams
module.exports = config;
| {
"content_hash": "7e3e6f2fa2032d178a8ac3135217a188",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 116,
"avg_line_length": 37.77272727272727,
"alnum_prop": 0.5824308062575211,
"repo_name": "MarcDiethelm/BitBot",
"id": "e8faa8a9dd2dedb900199d808855f907fbe6bfe2",
"size": "3324",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config.sample.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "112029"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!-- PHPUnit configuration (Laravel-style project): runs every test under
     ./tests, whitelists app/ for code coverage, and overrides environment
     variables so tests use in-memory/array drivers instead of real services. -->
<phpunit backupGlobals="false"
backupStaticAttributes="false"
bootstrap="bootstrap/autoload.php"
colors="true"
convertErrorsToExceptions="true"
convertNoticesToExceptions="true"
convertWarningsToExceptions="true"
processIsolation="false"
stopOnFailure="false"
syntaxCheck="false">
<testsuites>
<testsuite name="Application Test Suite">
<directory>./tests/</directory>
</testsuite>
</testsuites>
<filter>
<whitelist>
<directory suffix=".php">app/</directory>
</whitelist>
</filter>
<php>
<env name="APP_ENV" value="testing"/>
<env name="DB_CONNECTION" value="sqlite"/>
<env name="CACHE_DRIVER" value="array"/>
<env name="SESSION_DRIVER" value="array"/>
<env name="QUEUE_DRIVER" value="sync"/>
</php>
</phpunit>
| {
"content_hash": "dc262b8e5a44f4f611d916397046abc4",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 53,
"avg_line_length": 32.758620689655174,
"alnum_prop": 0.5873684210526315,
"repo_name": "group-hub/PixelCreativityBoard",
"id": "9a1bba45e7ab88355cdfe442a62c98546f798fbc",
"size": "950",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "phpunit.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "412"
},
{
"name": "CSS",
"bytes": "8896"
},
{
"name": "HTML",
"bytes": "9004"
},
{
"name": "JavaScript",
"bytes": "146587"
},
{
"name": "PHP",
"bytes": "256632"
},
{
"name": "Shell",
"bytes": "55"
}
],
"symlink_target": ""
} |
var Search = {
searchTimeoutHandle: null,
currentQuery: '',
alternatives: undefined,
originalTrack: null,
lastVideosSearchQuery: undefined,
lastPlaylistsSearchQuery: undefined,
lastSoundCloudTracksQuery: undefined,
itemsPerPage: 30,
lastQueries: {},
tabs: null,
$rightView: null,
// Wires up the search UI: tab switching, debounced keyboard search,
// infinite scroll, and the explicit search button.
init: function() {
    Search.$rightView = $('#right .search');
    // Re-run the current query whenever a result tab is selected.
    Search.tabs = new Tabs(Search.$rightView, {
        'youtube-videos': Search.searchCurrentQuery,
        'soundcloud-tracks': Search.searchCurrentQuery,
        'officialfm-tracks': Search.searchCurrentQuery
    });
    /* Search on key up */
    $('#top .search input').keyup(function(event) {
        $('#left, #right').removeClass('focused');
        $('#top .search').addClass('focused');
        // Ignore modifier/navigation keys: tab, shift, ctrl, alt, arrow keys.
        var i,
            deadKeys = [9, 16, 17, 18, 37, 38, 39, 40];
        for (i = 0; i < deadKeys.length; i += 1) {
            if (event.keyCode === deadKeys[i]) {
                return;
            }
        }
        // Debounce: Enter (13) searches immediately, anything else after 700 ms of quiet.
        var timeout = 700,
            q = $.trim($('#top .search input').val());
        if (Search.searchTimeoutHandle) {
            clearTimeout(Search.searchTimeoutHandle);
        }
        if (event.keyCode === 13) {
            Search.search(q);
        } else {
            Search.searchTimeoutHandle = setTimeout(function() {
                Search.search(q);
            }, timeout);
        }
    });
    // Infinite scroll: when the selected pane is scrolled to its bottom and
    // still has more results, fetch the next page (debounced at 100 ms).
    (function() {
        var $search = $('#right .search');
        var timeout;
        $('#right .search').scroll(function(event) {
            if (timeout) {
                clearTimeout(timeout);
            }
            timeout = setTimeout(function() {
                var $pane = $('#right .search .pane.selected');
                if ($search.scrollTop() >= ($pane.height() - $search.height()) && $pane.hasClass('has-more')) {
                    Search.search(Search.currentQuery, true);
                }
            }, 100);
        });
    }());
    $('#top .search button').click(function() {
        Search.search($.trim($('#top .search input').val()));
    });
},
// Re-issues the last query (used when the user switches result tabs).
searchCurrentQuery: function() {
    Search.search(Search.currentQuery);
},
// Reveals the search pane and reflects the current query in the URL history.
show: function() {
    Menu.deSelect();
    Search.$rightView.show();
    history.pushState(null, null, encodeURI('/search?q=' + Search.currentQuery));
},
// Runs a search against the currently selected provider tab.
// q        - the query string (empty queries are ignored)
// loadMore - when true, appends the next page to the existing results
//            instead of starting a fresh search.
search: function(q, loadMore) {
    if (q.length === 0) {
        return;
    }
    Utils.scrollRight();
    var url = null,
        c = null,      // number of results already shown in the pane
        start = null,  // 1-based index of the first result to fetch
        type = null,   // provider key: youtube-videos | soundcloud-tracks | officialfm-tracks
        params = null;
    Search.currentQuery = q;
    Search.show();
    if (Search.tabs.$selectedTab) {
        type = Search.tabs.$selectedTab.attr('rel');
    } else {
        type = 'youtube-videos';
    }
    // Skip duplicate searches for the same query on the same tab (unless paging).
    if (Search.lastQueries.hasOwnProperty(type) && Search.lastQueries[type] === q && !loadMore) {
        return;
    }
    Search.lastQueries[type] = q;
    if (!Search.tabs.$selectedTab) {
        Search.tabs.select(type);
    }
    // A fresh search clears the pane; paging keeps existing results.
    if (!loadMore) {
        Search.tabs.$selectedPane.html('');
    }
    start = (loadMore) ? Search.tabs.$selectedPane.data('results-count') + 1 : 1;
    c = Search.tabs.$selectedPane.data('results-count') || 0;
    EventSystem.callEventListeners('new_search_executed', q);
    // Each provider branch: fetch a page, render it, update the result count,
    // and toggle the pane's 'has-more' flag for the infinite-scroll handler.
    switch (type) {
        case 'youtube-videos':
            url = 'http://gdata.youtube.com/feeds/api/videos?callback=?';
            params = {
                'alt': 'json-in-script', 'max-results': Search.itemsPerPage,
                'start-index': start,
                'format': 5, // only embeddable videos
                'q': q
            };
            LoadingBar.show();
            $.getJSON(url, params, function(data) {
                var results = Search.getVideosFromYouTubeSearchData(data);
                $.each(results, function(i, video) {
                    if (video) {
                        video.createListView().appendTo(Search.tabs.$selectedPane);
                    }
                });
                Search.tabs.$selectedPane.data('results-count', c + results.length);
                if (results.length >= Search.itemsPerPage) {
                    Search.tabs.$selectedPane.addClass('has-more');
                } else {
                    Search.tabs.$selectedPane.removeClass('has-more');
                }
                LoadingBar.hide();
            });
            break;
        case 'soundcloud-tracks':
            url = 'https://api.soundcloud.com/tracks.json';
            params = {
                'q': q,
                'limit': Search.itemsPerPage,
                'filter': 'streamable',
                'offset': start,
                'client_id': SOUNDCLOUD_API_KEY
            };
            LoadingBar.show();
            $.getJSON(url, params, function(data) {
                var results = Search.getVideosFromSoundCloudSearchData(data);
                $.each(results, function(i, video) {
                    if (video) {
                        video.createListView().appendTo(Search.tabs.$selectedPane);
                    }
                });
                Search.tabs.$selectedPane.data('results-count', c + results.length);
                if (results.length >= Search.itemsPerPage) {
                    Search.tabs.$selectedPane.addClass('has-more');
                } else {
                    Search.tabs.$selectedPane.removeClass('has-more');
                }
                LoadingBar.hide();
            });
            break;
        case 'officialfm-tracks':
            url = 'http://api.official.fm/tracks/search?q=' + escape(q) + '&api_version=2&fields=cover';
            params = {
                'format': 'json',
                'per_page': 30,
                'page': Math.ceil(start / 30), // official.fm pages instead of offsets
                'key': OFFICIALFM_API_KEY
            };
            LoadingBar.show();
            $.getJSON(url, params, function(data) {
                var results = Search.getVideosFromOfficialfmSearchData(data.tracks);
                $.each(results, function(i, video) {
                    if (video) {
                        video.createListView().appendTo(Search.tabs.$selectedPane);
                    }
                });
                Search.tabs.$selectedPane.data('results-count', c + results.length);
                if (data.current >= data.per_page) {
                    Search.tabs.$selectedPane.addClass('has-more');
                } else {
                    Search.tabs.$selectedPane.removeClass('has-more');
                }
                LoadingBar.hide();
            });
            break;
    }
},
// When a search result starts playing, the left menu no longer reflects
// the playing item, so clear its playing indicator.
onPlayCallback: function() {
    Menu.setAsNotPlaying();
},
getVideosFromSoundCloudSearchData: function(data) {
ret = [];
$.each(data, function(i, track) {
var buyLinks = track.purchase_url ? [track.purchase_url] : null;
ret.push(new Video({
parent: 'search',
onPlayCallback: Search.onPlayCallback,
videoId: track.id,
title: track.title,
duration: track.duration,
buyLinks: buyLinks,
uploaderUsername: track.user.permalink,
type: 'soundcloud',
artworkURL: track.artwork_url
}));
});
return ret;
},
getVideosFromOfficialfmSearchData: function(data) {
ret = [];
$.each(data, function(i, track) {
track = track.track;
var title = track.title.indexOf(track.artist) > -1 ?
track.title :
track.artist + ' - ' + track.title,
buyLinks = track.purchase_url ? [track.buy_url] : null,
id = track.page.split('/');
id = id[id.length-1];
ret.push(new Video({
parent: 'search',
onPlayCallback: Search.onPlayCallback,
videoId: id,
title: title,
duration: track.duration * 1000,
buyLinks: buyLinks,
uploaderUsername: track.project.name,
type: 'officialfm',
artworkURL: track.cover.urls.large
}));
});
return ret;
},
/**
 * Maps a YouTube GData search feed to Video instances. Blocked entries
 * (no media content) become null placeholders so callers still see one
 * slot per feed entry.
 * @returns {Array} Video|null per entry; empty when the feed has no entries.
 */
getVideosFromYouTubeSearchData: function(data) {
    var results = [];
    if (data.feed.entry === undefined) {
        return results;
    }
    $.each(data.feed.entry, function(i, item) {
        if (item.media$group.media$content === undefined || item.media$group.media$content === null) {
            /* Content is blocked. Move on... */
            results.push(null);
            return;
        }
        var url = item.id.$t,
            title = item.title.$t,
            videoId,
            username;
        // The video id is either embedded in the entry URL or given explicitly.
        if (url.match('videos/(.*)$')) {
            videoId = url.match('videos/(.*)$')[1];
        } else {
            videoId = item.media$group.yt$videoid.$t;
        }
        // Prefer the canonical username from the author URI; fall back to the display name.
        if (item.author[0].uri) {
            username = item.author[0].uri.$t.split('/').pop();
        } else {
            username = item.author[0].name.$t;
        }
        var video = new Video({
            parent: 'search',
            onPlayCallback: Search.onPlayCallback,
            videoId: videoId,
            title: title,
            uploaderUsername: username,
            type: 'youtube',
            artworkURL: item.media$group.media$thumbnail.length > 1 ? item.media$group.media$thumbnail[1].url : null
        });
        results.push(video);
    });
    return results;
},
/**
 * Maps a YouTube GData playlist search feed to YouTubePlaylist instances.
 * @returns {Array} one YouTubePlaylist per entry; empty when the feed has none.
 */
getPlaylistsFromYouTubeSearchData: function(data) {
    var results = [];
    if (data.feed.entry === undefined) {
        return results;
    }
    $.each(data.feed.entry, function(i, item) {
        var playlistId = item.yt$playlistId.$t,
            title = item.title.$t,
            videoCountHint = item.yt$countHint.$t;
        var playlist = new YouTubePlaylist(playlistId, title, videoCountHint);
        results.push(playlist);
    });
    return results;
},
findAlternative: function(video, callback) {
console.log('finding alternative for ' + video.title);
if (Search.alternatives === undefined || video !== Search.originalTrack) {
Search.originalTrack = video;
Search.findAlternativesToVideo(video, function(videos) {
// Make sure they don't forget their master
for (i = 0; i < videos.length; i += 1) {
videos[i].alternativeFor = video;
}
Search.alternatives = videos;
if (videos.length) {
callback(Search.alternatives.shift());
} else {
callback(false);
}
});
} else if (Search.alternatives.length) {
callback(Search.alternatives.shift());
} else {
Search.originalTrack = null;
callback(false);
}
},
findAlternativesToVideo: function(video, callback) {
var i = 0,
results = [],
url = 'http://gdata.youtube.com/feeds/api/videos?callback=?',
params = {
'alt': 'json-in-script',
'max-results': 10,
'start-index': 1,
'format': 5,
'q': video.title
};
$.getJSON(url, params, function(data) {
if (data.feed.entry === undefined) {
callback(results);
return;
}
results = Search.getVideosFromYouTubeSearchData(data, true);
callback(results);
});
}
};
| {
"content_hash": "889f01bc5ac7df004602617239f0d4f9",
"timestamp": "",
"source": "github",
"line_count": 358,
"max_line_length": 120,
"avg_line_length": 34.90782122905028,
"alnum_prop": 0.46115067616227895,
"repo_name": "youtify/youtify",
"id": "192ee60f7c5cafa87d2ed062858a5e4d9d89d9c1",
"size": "12499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/Search.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "62707"
},
{
"name": "JavaScript",
"bytes": "281943"
},
{
"name": "Python",
"bytes": "286133"
},
{
"name": "Shell",
"bytes": "323"
}
],
"symlink_target": ""
} |
(function () {
'use strict';
RAML.Directives.resources = function(ramlParserWrapper) {
return {
restrict: 'E',
templateUrl: 'resources/resources.tpl.html',
replace: true,
scope: {
src: '@'
},
controller: ['$scope', '$window', '$attrs', function($scope, $window, $attrs) {
$scope.proxy = $window.RAML.Settings.proxy;
$scope.disableTitle = false;
$scope.resourcesCollapsed = false;
$scope.documentationCollapsed = false;
$scope.credentials = {};
$scope.allowUnsafeMarkdown = false;
$scope.disableTryIt = false;
if ($attrs.hasOwnProperty('disableTryIt')) {
$scope.disableTryIt = true;
}
if ($attrs.hasOwnProperty('allowUnsafeMarkdown')) {
$scope.allowUnsafeMarkdown = true;
}
if ($attrs.hasOwnProperty('singleView')) {
$scope.singleView = true;
}
if ($attrs.hasOwnProperty('disableThemeSwitcher')) {
$scope.disableThemeSwitcher = true;
}
if ($attrs.hasOwnProperty('disableRamlClientGenerator')) {
$scope.disableRamlClientGenerator = true;
}
if ($attrs.hasOwnProperty('disableTitle')) {
$scope.disableTitle = true;
}
if ($attrs.hasOwnProperty('resourcesCollapsed')) {
$scope.resourcesCollapsed = true;
}
if ($attrs.hasOwnProperty('documentationCollapsed')) {
$scope.documentationCollapsed = true;
}
if ($scope.src) {
ramlParserWrapper.load($scope.src);
}
$scope.readResourceTraits = function readResourceTraits(traits) {
var list = [];
if (traits) {
traits.map(function (trait) {
if (trait) {
if (typeof trait === 'object') {
list.push(Object.keys(trait).join(', '));
} else {
list.push(trait);
}
}
});
}
return list.join(', ');
};
$scope.updateProxyConfig = function (status) {
$window.RAML.Settings.disableProxy = status;
};
$scope.toggle = function ($event, index, collection, flagKey) {
var $this = jQuery($event.currentTarget);
var $section = $this
.closest('.raml-console-resource-list-item')
.find('.raml-console-resource-list');
collection[index] = !collection[index];
$scope[flagKey] = checkItemStatus(false, collection) ? false : $scope[flagKey];
$scope[flagKey] = checkItemStatus(true, collection) ? true : $scope[flagKey];
$section.toggleClass('raml-console-is-collapsed');
};
$scope.collapseAll = function ($event, collection, flagKey) {
var $this = jQuery($event.currentTarget);
if ($this.hasClass('raml-console-resources-expanded')) {
$scope[flagKey] = true;
} else {
if (flagKey === 'resourcesCollapsed') {
jQuery('.raml-console-resource-description').removeClass('ng-hide');
}
$scope[flagKey] = false;
}
jQuery('.raml-console-resources-' + flagKey).find('ol.raml-console-resource-list').toggleClass('raml-console-is-collapsed');
toggleCollapsed($scope[flagKey], collection);
};
function toggleCollapsed (status, collection) {
for (var i = 0; i < collection.length; i++) {
collection[i] = collection[i] !== null ? status : collection[i];
}
}
function checkItemStatus(status, collection) {
return collection.filter(function (el) { return el === status || el === null; }).length === collection.length;
}
$scope.hasResourcesWithChilds = function () {
return $scope.raml.resourceGroups.filter(function (el) {
return el.length > 1;
}).length > 0;
};
ramlParserWrapper.onParseError(function(error) {
$scope.error = error;
$scope.loaded = true;
/*jshint camelcase: false */
var context = error.context_mark || error.problem_mark;
/*jshint camelcase: true */
$scope.errorMessage = error.message;
if (context) {
$scope.raml = context.buffer;
$window.ramlErrors.line = context.line;
$window.ramlErrors.message = error.message;
// Hack to update codemirror
setTimeout(function () {
var editor = jQuery('.raml-console-initializer-input-container .CodeMirror')[0].CodeMirror;
editor.addLineClass(context.line, 'background', 'line-error');
editor.doc.setCursor(context.line);
}, 10);
}
});
}],
link: function($scope) {
ramlParserWrapper.onParseSuccess(function(raml) {
$scope.raml = RAML.Inspector.create(raml);
$scope.rawRaml = raml;
$scope.loaded = true;
$scope.resourceList = [];
$scope.documentList = [];
for (var i = 0; i < $scope.raml.resourceGroups.length; i++) {
var resources = $scope.raml.resourceGroups[i];
var status = resources.length > 1 ? false : null;
$scope.resourceList.push($scope.resourcesCollapsed ? true : status);
}
if ($scope.raml.documentation) {
for (var j = 0; j < $scope.raml.documentation.length; j++) {
$scope.documentList.push($scope.documentationCollapsed ? true : false);
}
}
});
}
};
};
angular.module('RAML.Directives')
.directive('ramlConsole', ['ramlParserWrapper', RAML.Directives.resources]);
})();
| {
"content_hash": "ce2c7f7d34a7988545825f4654ee4491",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 134,
"avg_line_length": 33.220338983050844,
"alnum_prop": 0.5437074829931973,
"repo_name": "M3lkior/gravitee-management-webui",
"id": "2e70dc8fbb88a5d3ecec81682503aa630b214866",
"size": "5880",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bower_components/api-console/src/app/resources/resources.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "18660"
},
{
"name": "HTML",
"bytes": "155903"
},
{
"name": "JavaScript",
"bytes": "183996"
}
],
"symlink_target": ""
} |
#ifndef RHO_RAW_SOCKET_F5FFD21AD3EE463E850C5E2C789397BD
#define RHO_RAW_SOCKET_F5FFD21AD3EE463E850C5E2C789397BD
#include "common/RhoStd.h"
#include "logging/RhoLog.h"
#if !defined(WINDOWS_PLATFORM)
typedef int SOCKET;
# define INVALID_SOCKET -1
# define SOCKET_ERROR -1
# define RHO_NET_ERROR_CODE errno
# define closesocket close
#else
# if defined(OS_WINCE) || defined(OS_WP8) || defined(OS_UWP)
# include <winsock2.h>
# include <ws2tcpip.h>
# endif
# define RHO_NET_ERROR_CODE ::WSAGetLastError()
#endif
namespace rho
{
namespace net
{
#if !defined(OS_WINCE) && !defined(OS_WP8) && !defined(OS_UWP)
// No-op stand-in for builds without the Windows CE/WP8/UWP socket backend:
// init() and send() always report failure, isInit() is always false.
// Constructor parameters are intentionally unused.
class RawSocket
{
public:
    RawSocket(const String& host, const String& port) {}
    virtual ~RawSocket() {}
    bool init() { return false; }
    bool send(const String& sendData) { return false; }
    bool isInit() const { return false; }
};
#else
// Client-side raw TCP socket wrapper for WinCE/WP8/UWP builds.
// init()/send()/create()/cleanup() are declared here and implemented in the
// corresponding .cpp; the destructor releases the socket via cleanup().
class RawSocket
{
private:
    SOCKET m_clientSocket;   // underlying OS socket handle
    std::string m_hostName;  // peer host (name or address — resolution happens in the .cpp; confirm)
    std::string m_hostPort;  // peer port, kept as a string
    bool m_isInit;           // true once the connection was set up successfully — presumably set by init(); confirm
public:
    RawSocket(const String& host, const String& port)
        : m_hostName(host) , m_hostPort(port), m_isInit(false) {}
    virtual ~RawSocket() { cleanup(); }
    bool init();
    bool send(const String& sendData);
    bool isInit() const { return m_isInit; }
private:
    bool create();
    void cleanup();
};
#endif
} // end of net
} // end if rho
#endif
| {
"content_hash": "a5c7e3a50ac72c7b72eb28fdcaa1f177",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 66,
"avg_line_length": 18.473684210526315,
"alnum_prop": 0.6588319088319088,
"repo_name": "rhomobile/rhodes",
"id": "19a61d2305fb50584a519cc2617b0a1ab1a12f66",
"size": "2705",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "platform/shared/net/RawSocket.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "6291"
},
{
"name": "Batchfile",
"bytes": "115330"
},
{
"name": "C",
"bytes": "61309502"
},
{
"name": "C#",
"bytes": "702078"
},
{
"name": "C++",
"bytes": "16790255"
},
{
"name": "COBOL",
"bytes": "187"
},
{
"name": "CSS",
"bytes": "641054"
},
{
"name": "GAP",
"bytes": "76344"
},
{
"name": "HTML",
"bytes": "1827679"
},
{
"name": "Java",
"bytes": "6590034"
},
{
"name": "JavaScript",
"bytes": "1828506"
},
{
"name": "MATLAB",
"bytes": "123"
},
{
"name": "Makefile",
"bytes": "360667"
},
{
"name": "Mustache",
"bytes": "20693"
},
{
"name": "NASL",
"bytes": "285"
},
{
"name": "NSIS",
"bytes": "75538"
},
{
"name": "Objective-C",
"bytes": "5257884"
},
{
"name": "Objective-C++",
"bytes": "479778"
},
{
"name": "Perl",
"bytes": "1710"
},
{
"name": "QML",
"bytes": "29477"
},
{
"name": "QMake",
"bytes": "117073"
},
{
"name": "Rebol",
"bytes": "130"
},
{
"name": "Roff",
"bytes": "328967"
},
{
"name": "Ruby",
"bytes": "17391657"
},
{
"name": "SWIG",
"bytes": "65013"
},
{
"name": "Shell",
"bytes": "39045"
},
{
"name": "SourcePawn",
"bytes": "4786"
},
{
"name": "XSLT",
"bytes": "4315"
}
],
"symlink_target": ""
} |
//=============================================================================================================
//*************************************************************************************************************
//=============================================================================================================
// INCLUDES
//=============================================================================================================
#include "realtimemultisamplearraydelegate.h"
#include "realtimemultisamplearraymodel.h"
//*************************************************************************************************************
//=============================================================================================================
// QT INCLUDES
//=============================================================================================================
#include <QPainter>
#include <QPainterPath>
#include <QDebug>
#include <QThread>
//*************************************************************************************************************
//=============================================================================================================
// USED NAMESPACES
//=============================================================================================================
using namespace SCDISPLIB;
//*************************************************************************************************************
//=============================================================================================================
// DEFINE MEMBER METHODS
//=============================================================================================================
// Constructs the delegate with neutral defaults; the scaling members are
// presumably recomputed per paint/channel elsewhere (see createPaths) — confirm.
RealTimeMultiSampleArrayDelegate::RealTimeMultiSampleArrayDelegate(QObject *parent)
: QAbstractItemDelegate(parent)
, m_fMaxValue(0.0)   // maximum plotted amplitude
, m_fScaleY(0.0)     // vertical scale (pixels per unit)
, m_iActiveRow(0)    // currently active/selected row
{
}
//*************************************************************************************************************
// Allocates one (initially empty) painter path per model row/channel and
// creates the pens used when painting traces, grid lines and markers.
void RealTimeMultiSampleArrayDelegate::initPainterPaths(const QAbstractTableModel *model)
{
    for(int i = 0; i<model->rowCount(); i++)
        m_painterPaths.append(QPainterPath());
    // Init pens
    QColor colorMarker(233,0,43);
    colorMarker.setAlpha(160); // semi-transparent marker
    m_penMarker = QPen(colorMarker, 2, Qt::DashLine);
    m_penGrid = QPen(Qt::black, 0.7, Qt::DotLine);
    m_penTimeSpacers = QPen(Qt::black, 0.2, Qt::DotLine);
    // Frozen-display pens; the thin "Bad" variants presumably mark bad channels — confirm.
    m_penFreeze = QPen(Qt::darkGray, 1, Qt::SolidLine);
    m_penFreezeSelected = QPen(Qt::darkRed, 1, Qt::SolidLine);
    m_penFreezeBad = QPen(Qt::darkGray, 0.1, Qt::SolidLine);
    m_penFreezeSelectedBad = QPen(Qt::darkRed, 1, Qt::SolidLine);
    // Live-display pens (normal / selected, plus "Bad" variants).
    m_penNormal = QPen(Qt::darkBlue, 1, Qt::SolidLine);
    m_penNormalSelected = QPen(Qt::red, 1, Qt::SolidLine);
    m_penNormalBad = QPen(Qt::darkBlue, 0.1, Qt::SolidLine);
    m_penNormalSelectedBad = QPen(Qt::red, 1, Qt::SolidLine);
}
//*************************************************************************************************************
void createPaths(const QModelIndex &index, const QStyleOptionViewItem &option, QPainterPath &path, QPainterPath &lastPath, QPointF &ellipsePos, QPointF &markerPosition, QString &litude, const QVector<float> &data, const QVector<float> &lastData)
{
const RealTimeMultiSampleArrayModel* t_pModel = static_cast<const RealTimeMultiSampleArrayModel*>(index.model());
//get maximum range of respective channel type (range value in FiffChInfo does not seem to contain a reasonable value)
qint32 kind = t_pModel->getKind(index.row());
float fMaxValue = 1e-9f;
switch(kind) {
case FIFFV_MEG_CH: {
qint32 unit =t_pModel->getUnit(index.row());
if(unit == FIFF_UNIT_T_M) { //gradiometers
fMaxValue = 1e-10f;
if(t_pModel->getScaling().contains(FIFF_UNIT_T_M))
fMaxValue = t_pModel->getScaling()[FIFF_UNIT_T_M];
}
else if(unit == FIFF_UNIT_T) //magnitometers
{
// if(t_pModel->getCoil(index.row()) == FIFFV_COIL_BABY_MAG)
// fMaxValue = 1e-11f;
// else
fMaxValue = 1e-11f;
if(t_pModel->getScaling().contains(FIFF_UNIT_T))
fMaxValue = t_pModel->getScaling()[FIFF_UNIT_T];
}
break;
}
case FIFFV_REF_MEG_CH: { /*11/04/14 Added by Limin: MEG reference channel */
fMaxValue = 1e-11f;
if(t_pModel->getScaling().contains(FIFF_UNIT_T))
fMaxValue = t_pModel->getScaling()[FIFF_UNIT_T];
break;
}
case FIFFV_EEG_CH: {
fMaxValue = 1e-4f;
if(t_pModel->getScaling().contains(FIFFV_EEG_CH))
fMaxValue = t_pModel->getScaling()[FIFFV_EEG_CH];
break;
}
case FIFFV_EOG_CH: {
fMaxValue = 1e-3f;
if(t_pModel->getScaling().contains(FIFFV_EOG_CH))
fMaxValue = t_pModel->getScaling()[FIFFV_EOG_CH];
break;
}
case FIFFV_STIM_CH: {
fMaxValue = 5;
if(t_pModel->getScaling().contains(FIFFV_STIM_CH))
fMaxValue = t_pModel->getScaling()[FIFFV_STIM_CH];
break;
}
case FIFFV_MISC_CH: {
fMaxValue = 1e-3f;
if(t_pModel->getScaling().contains(FIFFV_MISC_CH))
fMaxValue = t_pModel->getScaling()[FIFFV_MISC_CH];
break;
}
}
float fValue;
float fScaleY = option.rect.height()/(2*fMaxValue);
float y_base = path.currentPosition().y();
QPointF qSamplePosition;
float fDx = ((float)option.rect.width()) / t_pModel->getMaxSamples();
//Move to initial starting point
if(data.size() > 0)
{
// float val = data[0];
fValue = 0;//(val-data[0])*fScaleY;
float newY = y_base-fValue;//Reverse direction -> plot the right way
qSamplePosition.setY(newY);
qSamplePosition.setX(path.currentPosition().x());
path.moveTo(qSamplePosition);
}
//create lines from one to the next sample
qint32 i;
for(i = 1; i < data.size(); ++i) {
float val = data[i] - data[0]; //remove first sample data[0] as offset
fValue = val*fScaleY;
//qDebug()<<"val"<<val<<"fScaleY"<<fScaleY<<"fValue"<<fValue;
float newY = y_base-fValue;//Reverse direction -> plot the right way
qSamplePosition.setY(newY);
qSamplePosition.setX(path.currentPosition().x()+fDx);
path.lineTo(qSamplePosition);
//Create ellipse position
if(i == (qint32)(markerPosition.x()/fDx)) {
ellipsePos.setX(path.currentPosition().x()+fDx);
ellipsePos.setY(newY+(option.rect.height()/2));
amplitude = QString::number(data[i]);
}
}
//create lines from one to the next sample for last path
qint32 sample_offset = t_pModel->numVLines() + 1;
qSamplePosition.setX(qSamplePosition.x() + fDx*sample_offset);
//start painting from first sample value
float val = lastData[i] - lastData[0]; //remove first sample lastData[0] as offset
fValue = val*fScaleY;
float newY = y_base-fValue;
qSamplePosition.setY(newY);
lastPath.moveTo(qSamplePosition);
for(i += sample_offset; i < lastData.size(); ++i) {
val = lastData[i] - lastData[0]; //remove first sample lastData[0] as offset
fValue = val*fScaleY;
newY = y_base-fValue;
qSamplePosition.setY(newY);
qSamplePosition.setX(lastPath.currentPosition().x()+fDx);
lastPath.lineTo(qSamplePosition);
//Create ellipse position
if(i == (qint32)(markerPosition.x()/fDx)) {
ellipsePos.setX(lastPath.currentPosition().x()+fDx);
ellipsePos.setY(newY+(option.rect.height()/2));
amplitude = QString::number(lastData[i]);
}
}
}
//*************************************************************************************************************
/**
 * Paints a single cell of the channel table.
 *
 * Column 0 draws the channel name rotated by -90 degrees; column 1 draws the
 * channel's signal trace together with grid lines, time spacers, detected
 * triggers, an optional trigger-threshold line and the current-position
 * marker. Pen selection depends on bad-channel state, freeze state and
 * selection state.
 *
 * @param[in] painter   Painter used for all drawing.
 * @param[in] option    Style option carrying the cell rectangle and state flags.
 * @param[in] index     Model index of the cell being painted.
 */
void RealTimeMultiSampleArrayDelegate::paint(QPainter *painter, const QStyleOptionViewItem &option, const QModelIndex &index) const
{
    float t_fPlotHeight = option.rect.height();
    switch(index.column()) {
        case 0: { //chnames
            painter->save();
            //Rotate so the channel name is drawn vertically along the row.
            painter->rotate(-90);
            painter->drawText(QRectF(-option.rect.y()-t_fPlotHeight,0,t_fPlotHeight,20),Qt::AlignCenter,index.model()->data(index,Qt::DisplayRole).toString());
            painter->restore();
            break;
        }
        case 1: { //data plot
            //draw special background when channel is marked as bad
            QVariant v = index.model()->data(index,Qt::BackgroundRole);
            bool bIsBadChannel = false;
            //NOTE(review): both sides of this condition differ only in the
            //selection flag, so effectively any convertible brush marks the
            //channel as bad — confirm whether the distinction was intended.
            if((v.canConvert<QBrush>() && !(option.state & QStyle::State_Selected)) ||
                    (v.canConvert<QBrush>() && (option.state & QStyle::State_Selected))) {
                QPointF oldBO = painter->brushOrigin();
                painter->setBrushOrigin(option.rect.topLeft());
                painter->fillRect(option.rect, qvariant_cast<QBrush>(v));
                painter->setBrushOrigin(oldBO);
                bIsBadChannel = true;
            }
//            //Highlight selected channels
//            if(option.state & QStyle::State_Selected) {
//                QPointF oldBO = painter->brushOrigin();
//                painter->setBrushOrigin(option.rect.topLeft());
//                painter->fillRect(option.rect, option.palette.highlight());
//                painter->setBrushOrigin(oldBO);
//            }
            //Get data
            QVariant variant = index.model()->data(index,Qt::DisplayRole);
            RowVectorPair data = variant.value<RowVectorPair>();
            const RealTimeMultiSampleArrayModel* t_pModel = static_cast<const RealTimeMultiSampleArrayModel*>(index.model());
            if(data.second > 0)
            {
                QPainterPath path(QPointF(option.rect.x(),option.rect.y()));//QPointF(option.rect.x()+t_rtmsaModel->relFiffCursor()-1,option.rect.y()));
                painter->setRenderHint(QPainter::Antialiasing, true);
                //Plot marker
//                createMarkerPath(option, path);
//                painter->save();
//                painter->setPen(m_penMarker);
//                painter->drawPath(path);
//                painter->restore();
                //Plot grid
                createGridPath(index, option, path, data);
                painter->save();
                painter->setPen(m_penGrid);
                painter->drawPath(path);
                painter->restore();
                //Plot time spacers
                createTimeSpacersPath(index, option, path, data);
                painter->save();
                painter->setPen(m_penTimeSpacers);
                painter->drawPath(path);
                painter->restore();
                //Plot detected triggers (createTriggerPath paints via the painter itself)
                path = QPainterPath(QPointF(option.rect.x(),option.rect.y()));//QPointF(option.rect.x()+t_rtmsaModel->relFiffCursor(),option.rect.y()));
                painter->save();
                createTriggerPath(painter, index, option, path, data);
                painter->restore();
                //Plot trigger threshold — only on the channel currently used for trigger detection
                if(index.row() == t_pModel->getCurrentTriggerIndex() &&
                        t_pModel->triggerDetectionActive()) {
                    path = QPainterPath(QPointF(option.rect.x(),option.rect.y()));//QPointF(option.rect.x()+t_rtmsaModel->relFiffCursor(),option.rect.y()));
                    QPointF textPosition;
                    createTriggerThresholdPath(index, option, path, data, textPosition);
                    painter->save();
                    painter->setPen(QPen(Qt::red, 1, Qt::DashLine));
                    painter->drawPath(path);
                    painter->drawText(textPosition, QString("%1 Threshold").arg(t_pModel->getTriggerName()));
                    painter->restore();
                }
                //Plot data path
                QPointF ellipsePos;
                QString amplitude;
                path = QPainterPath(QPointF(option.rect.x(),option.rect.y()));//QPointF(option.rect.x()+t_rtmsaModel->relFiffCursor(),option.rect.y()));
                //QTime timer;
                //timer.start();
                createPlotPath(index, option, path, ellipsePos, amplitude, data);
                //int timeMS = timer.elapsed();
                //std::cout<<"Time createPlotPath"<<timeMS<<std::endl;
                painter->setRenderHint(QPainter::Antialiasing, true);
                painter->save();
                //Shift the origin to the vertical centre of the row: the plot
                //path is built around a zero base line.
                painter->translate(0, t_fPlotHeight/2);
                //Pen choice: bad/good channel x freeze state x selection state.
                if(bIsBadChannel) {
                    if(t_pModel->isFreezed()) {
                        if(option.state & QStyle::State_Selected)
                            painter->setPen(m_penFreezeSelectedBad);
                        else
                            painter->setPen(m_penFreezeBad);
                    } else {
                        if(option.state & QStyle::State_Selected)
                            painter->setPen(m_penNormalSelectedBad);
                        else
                            painter->setPen(m_penNormalBad);
                    }
                } else {
                    if(t_pModel->isFreezed()) {
                        if(option.state & QStyle::State_Selected)
                            painter->setPen(m_penFreezeSelected);
                        else
                            painter->setPen(m_penFreeze);
                    } else {
                        if(option.state & QStyle::State_Selected)
                            painter->setPen(m_penNormalSelected);
                        else
                            painter->setPen(m_penNormal);
                    }
                }
                //timer.start();
                painter->drawPath(path);
                //timeMS = timer.elapsed();
                //std::cout<<"Time drawPath Current data"<<timeMS<<std::endl;
                //Plot ellipse and amplitude next to marker mouse position
//                if(m_iActiveRow == index.row()) {
//                    painter->save();
//                    painter->drawEllipse(ellipsePos,2,2);
//                    painter->restore();
//                    painter->save();
//                    painter->drawText(m_markerPosition, amplitude);
//                    painter->drawEllipse(ellipsePos,2,2);
//                    painter->restore();
//                }
                painter->restore();
                //Plot current position marker
                path = QPainterPath(QPointF(option.rect.x(),option.rect.y()));//QPointF(option.rect.x()+t_rtmsaModel->relFiffCursor(),option.rect.y()));
                createCurrentPositionMarkerPath(index, option, path);
                painter->save();
                painter->setPen(m_penMarker);
                painter->drawPath(path);
                painter->restore();
            }
            break;
        }
    }
}
//*************************************************************************************************************
/**
 * Provides the size hint for a table cell.
 *
 * Column 0 (channel names) hints a fixed width of 20px at the cell's height.
 * For column 1 no explicit hint is computed.
 */
QSize RealTimeMultiSampleArrayDelegate::sizeHint(const QStyleOptionViewItem &option, const QModelIndex &index) const
{
    QSize size;
    switch(index.column()) {
    case 0:
        size = QSize(20,option.rect.height());
        break;
    case 1:
        //NOTE(review): the fetched data is never used and 'size' keeps its
        //default-constructed value for this column — presumably the view's own
        //sizing is meant to apply; confirm before relying on this hint.
        QList< QVector<float> > data = index.model()->data(index).value< QList<QVector<float> > >();
//        qint32 nsamples = (static_cast<const RealTimeMultiSampleArrayModel*>(index.model()))->lastSample()-(static_cast<const RealTimeMultiSampleArrayModel*>(index.model()))->firstSample();
//        size = QSize(nsamples*m_dDx,m_dPlotHeight);
        Q_UNUSED(option);
        break;
    }
    return size;
}
//*************************************************************************************************************
/**
 * Remembers the latest mouse marker position and the row under the cursor.
 * Both values are consulted while painting the marker/amplitude overlay.
 *
 * @param[in] position    Mouse marker position to store.
 * @param[in] activeRow   Index of the row currently under the cursor.
 */
void RealTimeMultiSampleArrayDelegate::markerMoved(QPoint position, int activeRow)
{
    m_iActiveRow = activeRow;
    m_markerPosition = position;
}
//*************************************************************************************************************
/**
 * Applies a new colour to the pens used for regular (non-frozen, unselected)
 * signal traces — both the good-channel and the bad-channel variant.
 *
 * @param[in] signalColor    Colour to use for the normal signal pens.
 */
void RealTimeMultiSampleArrayDelegate::setSignalColor(const QColor& signalColor)
{
    m_penNormalBad.setColor(signalColor);
    m_penNormal.setColor(signalColor);
}
//*************************************************************************************************************
/**
 * Creates the painter path for the data plot of one channel row.
 *
 * Scales the raw samples by the channel-type-specific range (taken from the
 * model's scaling map when available), removes the first sample of the
 * respective data block as a DC offset and appends one line segment per
 * sample to @p path.
 *
 * Bug fix: the amplitude output parameter was declared as "litude" (a
 * corrupted "&amplitude") while the body references "amplitude" — the
 * parameter name is restored so the method compiles.
 *
 * @param[in]  index      Model index identifying the channel row.
 * @param[in]  option     Style option carrying the cell rectangle used for scaling.
 * @param[out] path       Path that receives the waveform; its current position
 *                        defines the vertical base line.
 * @param[out] ellipsePos Position of the marker ellipse under the mouse cursor.
 * @param[out] amplitude  Textual amplitude of the sample under the mouse cursor.
 * @param[in]  data       Pointer/length pair holding the channel's samples.
 */
void RealTimeMultiSampleArrayDelegate::createPlotPath(const QModelIndex &index, const QStyleOptionViewItem &option, QPainterPath& path, QPointF &ellipsePos, QString &amplitude, RowVectorPair &data) const
{
    const RealTimeMultiSampleArrayModel* t_pModel = static_cast<const RealTimeMultiSampleArrayModel*>(index.model());

    //get maximum range of respective channel type (range value in FiffChInfo does not seem to contain a reasonable value)
    qint32 kind = t_pModel->getKind(index.row());
    float fMaxValue = 1e-9f;

    switch(kind) {
        case FIFFV_MEG_CH: {
            qint32 unit = t_pModel->getUnit(index.row());
            if(unit == FIFF_UNIT_T_M) { //gradiometers
                fMaxValue = 1e-10f;
                if(t_pModel->getScaling().contains(FIFF_UNIT_T_M))
                    fMaxValue = t_pModel->getScaling()[FIFF_UNIT_T_M];
            }
            else if(unit == FIFF_UNIT_T) //magnetometers
            {
                fMaxValue = 1e-11f;
                if(t_pModel->getScaling().contains(FIFF_UNIT_T))
                    fMaxValue = t_pModel->getScaling()[FIFF_UNIT_T];
            }
            break;
        }
        case FIFFV_REF_MEG_CH: { //MEG reference channel
            fMaxValue = 1e-11f;
            if(t_pModel->getScaling().contains(FIFF_UNIT_T))
                fMaxValue = t_pModel->getScaling()[FIFF_UNIT_T];
            break;
        }
        case FIFFV_EEG_CH: {
            fMaxValue = 1e-4f;
            if(t_pModel->getScaling().contains(FIFFV_EEG_CH))
                fMaxValue = t_pModel->getScaling()[FIFFV_EEG_CH];
            break;
        }
        case FIFFV_EOG_CH: {
            fMaxValue = 1e-3f;
            if(t_pModel->getScaling().contains(FIFFV_EOG_CH))
                fMaxValue = t_pModel->getScaling()[FIFFV_EOG_CH];
            break;
        }
        case FIFFV_STIM_CH: {
            fMaxValue = 5;
            if(t_pModel->getScaling().contains(FIFFV_STIM_CH))
                fMaxValue = t_pModel->getScaling()[FIFFV_STIM_CH];
            break;
        }
        case FIFFV_MISC_CH: {
            fMaxValue = 1e-3f;
            if(t_pModel->getScaling().contains(FIFFV_MISC_CH))
                fMaxValue = t_pModel->getScaling()[FIFFV_MISC_CH];
            break;
        }
    }

    float fValue;
    float fScaleY = option.rect.height()/(2*fMaxValue);
    float y_base = path.currentPosition().y();
    QPointF qSamplePosition;

    float fDx = ((float)option.rect.width()) / t_pModel->getMaxSamples();
    int currentSampleIndex = t_pModel->getCurrentSampleIndex();
    float lastFirstValue = t_pModel->getLastBlockFirstValue(index.row());

    //Move to initial starting point
    if(data.second > 0)
    {
        fValue = 0; //the first sample is its own offset, i.e. plotted on the base line
        float newY = y_base-fValue; //Reverse direction -> plot the right way
        qSamplePosition.setY(newY);
        qSamplePosition.setX(path.currentPosition().x());
        path.moveTo(qSamplePosition);
    }

    //create lines from one to the next sample
    float val;
    for(qint32 j = 0; j < data.second; ++j)
    {
        if(j < currentSampleIndex)
            val = *(data.first+j) - *(data.first); //current block: offset is its own first sample
        else
            val = *(data.first+j) - lastFirstValue; //last block part: offset is the stored first value of the last block

        fValue = val*fScaleY;

        float newY = y_base-fValue; //Reverse direction -> plot the right way

        qSamplePosition.setY(newY);
        qSamplePosition.setX(path.currentPosition().x()+fDx);

        path.lineTo(qSamplePosition);

        //Create ellipse position for the sample under the mouse marker
        if(j == (qint32)(m_markerPosition.x()/fDx)) {
            ellipsePos.setX(path.currentPosition().x()+fDx);
            ellipsePos.setY(newY);
            amplitude = QString::number(*(data.first+j));
        }
    }
}
//*************************************************************************************************************
/**
 * Creates a vertical marker line at the model's current sample (write) position,
 * spanning the full height of the row's cell.
 */
void RealTimeMultiSampleArrayDelegate::createCurrentPositionMarkerPath(const QModelIndex &index, const QStyleOptionViewItem &option, QPainterPath& path) const
{
    const RealTimeMultiSampleArrayModel* t_pModel = static_cast<const RealTimeMultiSampleArrayModel*>(index.model());
    //NOTE(review): option.rect.x() is added to the sample index *before* both
    //are scaled by fDx; this is only correct while the cell starts at x == 0 —
    //TODO confirm, otherwise the offset should be added after scaling.
    float currentSampleIndex = option.rect.x()+t_pModel->getCurrentSampleIndex();
    float fDx = ((float)option.rect.width()) / t_pModel->getMaxSamples();
    currentSampleIndex = currentSampleIndex*fDx;
    float yStart = option.rect.topLeft().y();
    float yEnd = option.rect.bottomRight().y();
    path.moveTo(currentSampleIndex,yStart);
    path.lineTo(currentSampleIndex,yEnd);
}
//*************************************************************************************************************
void RealTimeMultiSampleArrayDelegate::createGridPath(const QModelIndex &index, const QStyleOptionViewItem &option, QPainterPath& path, RowVectorPair &data) const
{
Q_UNUSED(data)
const RealTimeMultiSampleArrayModel* t_pModel = static_cast<const RealTimeMultiSampleArrayModel*>(index.model());
if(t_pModel->numVLines() > 0)
{
//vertical lines
float distance = float (option.rect.width())/(t_pModel->numVLines()+1);
float yStart = option.rect.topLeft().y();
float yEnd = option.rect.bottomRight().y();
for(qint8 i = 0; i < t_pModel->numVLines(); ++i) {
float x = distance*(i+1);
path.moveTo(x,yStart);
path.lineTo(x,yEnd);
}
}
}
//*************************************************************************************************************
void RealTimeMultiSampleArrayDelegate::createTimeSpacersPath(const QModelIndex &index, const QStyleOptionViewItem &option, QPainterPath& path, RowVectorPair &data) const
{
Q_UNUSED(data)
const RealTimeMultiSampleArrayModel* t_pModel = static_cast<const RealTimeMultiSampleArrayModel*>(index.model());
if(t_pModel->getNumberOfTimeSpacers() > 0)
{
//vertical lines
float distanceSec = float (option.rect.width())/(t_pModel->numVLines()+1);
float distanceSpacers = distanceSec/(t_pModel->getNumberOfTimeSpacers()+1);
float yStart = option.rect.topLeft().y();
float yEnd = option.rect.bottomRight().y();
for(qint8 t = 0; t < t_pModel->numVLines()+1; ++t) {
for(qint8 i = 0; i < t_pModel->getNumberOfTimeSpacers(); ++i) {
float x = (distanceSec*t)+(distanceSpacers*(i+1));
path.moveTo(x,yStart);
path.lineTo(x,yEnd);
}
}
}
}
//*************************************************************************************************************
/**
 * Draws vertical lines for all detected triggers of this row.
 *
 * Newly detected triggers are only drawn once the plot cursor (plus the
 * current overlap-add delay) has passed their position; triggers from the
 * previous sweep stay visible ahead of the cursor. Each trigger type is
 * drawn in its configured colour when one is present in the colour map.
 *
 * Note: this method paints directly via @p painter; the @p path parameter is
 * shadowed by function-local paths and left untouched.
 */
void RealTimeMultiSampleArrayDelegate::createTriggerPath(QPainter *painter, const QModelIndex &index, const QStyleOptionViewItem &option, QPainterPath& path, RowVectorPair &data) const
{
    Q_UNUSED(data)
    const RealTimeMultiSampleArrayModel* t_pModel = static_cast<const RealTimeMultiSampleArrayModel*>(index.model());
    QList<QPair<int,double> > detectedTriggers = t_pModel->getDetectedTriggers();
    QList<QPair<int,double> > detectedTriggersOld = t_pModel->getDetectedTriggersOld();
    QMap<double, QColor> mapTriggerTypeColors = t_pModel->getTriggerColor();
    float yStart = option.rect.topLeft().y();
    float yEnd = option.rect.bottomRight().y();
    float fDx = ((float)option.rect.width()) / t_pModel->getMaxSamples();
    int currentSampleIndex = t_pModel->getCurrentSampleIndex();
    //Newly detected triggers
    for(int u = 0; u < detectedTriggers.size(); ++u) {
        QPainterPath path; //local path deliberately shadows the parameter
        int triggerPos = detectedTriggers[u].first;
        painter->save();
        if(mapTriggerTypeColors.contains(detectedTriggers[u].second)) {
            painter->setPen(QPen(mapTriggerTypeColors[detectedTriggers[u].second], 1.5, Qt::SolidLine));
        }
        //only draw triggers the cursor (incl. filter delay) has already reached
        if(triggerPos <= currentSampleIndex + t_pModel->getCurrentOverlapAddDelay()) {
            path.moveTo(triggerPos*fDx,yStart);
            path.lineTo(triggerPos*fDx,yEnd);
        }
        painter->drawPath(path);
        painter->restore();
    }
    //Old detected triggers
    for(int u = 0; u < detectedTriggersOld.size(); ++u) {
        QPainterPath path; //local path deliberately shadows the parameter
        int triggerPos = detectedTriggersOld[u].first;
        //only keep old triggers that lie ahead of the cursor (not yet overwritten)
        if(triggerPos >= currentSampleIndex + t_pModel->getCurrentOverlapAddDelay()) {
            painter->save();
            if(mapTriggerTypeColors.contains(detectedTriggersOld[u].second)) {
                painter->setPen(QPen(mapTriggerTypeColors[detectedTriggersOld[u].second], 1.5, Qt::SolidLine));
            }
            path.moveTo(triggerPos*fDx,yStart);
            path.lineTo(triggerPos*fDx,yEnd);
            painter->drawPath(path);
            painter->restore();
        }
    }
}
//*************************************************************************************************************
void RealTimeMultiSampleArrayDelegate::createTriggerThresholdPath(const QModelIndex &index, const QStyleOptionViewItem &option, QPainterPath& path, RowVectorPair &data, QPointF &textPosition) const
{
Q_UNUSED(data)
const RealTimeMultiSampleArrayModel* t_pModel = static_cast<const RealTimeMultiSampleArrayModel*>(index.model());
//get maximum range of respective channel type (range value in FiffChInfo does not seem to contain a reasonable value)
qint32 kind = t_pModel->getKind(index.row());
double fMaxValue = 1e-9f;
switch(kind) {
case FIFFV_STIM_CH: {
fMaxValue = 5.0;
if(t_pModel->getScaling().contains(FIFFV_STIM_CH))
fMaxValue = t_pModel->getScaling()[FIFFV_STIM_CH];
break;
}
}
double fScaleY = option.rect.height()/(2*fMaxValue);
double triggerThreshold = -1*(t_pModel->getTriggerThreshold());
path.moveTo(option.rect.topLeft().x(), option.rect.topLeft().y()+option.rect.height()/2+fScaleY*triggerThreshold);
path.lineTo(option.rect.topRight().x(), option.rect.topLeft().y()+option.rect.height()/2+fScaleY*triggerThreshold);
textPosition = QPointF(option.rect.topLeft().x()+5, option.rect.topLeft().y()+option.rect.height()/2+fScaleY*triggerThreshold-5);
}
//*************************************************************************************************************
/**
 * Creates a single vertical marker line at the stored mouse x position,
 * spanning the full height of the given cell rectangle.
 *
 * @param[in]  option  Style option carrying the cell rectangle.
 * @param[out] path    Path the marker line is appended to.
 */
void RealTimeMultiSampleArrayDelegate::createMarkerPath(const QStyleOptionViewItem &option, QPainterPath& path) const
{
    const float markerX = m_markerPosition.x();
    path.moveTo(markerX, option.rect.topLeft().y());
    path.lineTo(markerX, option.rect.bottomRight().y());
}
| {
"content_hash": "75e7eb0e2bc8d693be197503530be43c",
"timestamp": "",
"source": "github",
"line_count": 732,
"max_line_length": 248,
"avg_line_length": 37.79371584699454,
"alnum_prop": 0.525356949213808,
"repo_name": "louiseichhorst/mne-cpp",
"id": "b8a6af2a1a191c88ba6e78f353e458689af3fa11",
"size": "29535",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "applications/mne_scan/libs/scDisp/helpers/realtimemultisamplearraydelegate.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "267153"
},
{
"name": "C++",
"bytes": "15957455"
},
{
"name": "GLSL",
"bytes": "10152"
},
{
"name": "Prolog",
"bytes": "23027"
},
{
"name": "QML",
"bytes": "37062"
},
{
"name": "QMake",
"bytes": "274919"
}
],
"symlink_target": ""
} |
"""Re-import per-episode score CSVs into the fantasyx database.

Truncates the ``score`` table (CASCADE — dependent rows are removed too),
then replays every CSV in ``data/episode_scores/`` through
``game.generate_score`` and prints the resulting score summary.

NOTE: this script targets Python 2 (legacy codebase); the prints below use
the single-argument parenthesised form, which behaves identically on
Python 2 and Python 3.
"""
import hashlib
from alembic import op
from datetime import date
from sqlalchemy.sql import table, column
from sqlalchemy import String, Integer, Date, MetaData, create_engine, or_
from sqlalchemy.orm import Session
import json
import csv

from game import generate_score, scores
from models import Character, Episode, DraftHistory, User

# Ad-hoc engine/session against the local development database.
engine = create_engine('postgresql://admin:admin@localhost:5432/fantasyx')
db_session = Session(bind=engine)

# Destructive: wipe all existing scores (and dependent rows via CASCADE)
# before re-importing them from the CSV files below.
db_session.execute('TRUNCATE TABLE score restart identity CASCADE')

episodes = [
    "S07E01",
    "S07E02",
]

for episode_number in episodes:
    with open('data/episode_scores/%s.csv' % (episode_number)) as data_file:
        records = [dict(record) for record in csv.DictReader(data_file)]
        for record in records:
            # Tag each CSV row with its episode before scoring it.
            record['episode_number'] = episode_number
            generate_score(record, db_session)
db_session.commit()

print(scores({}, engine))
| {
"content_hash": "b53e1b53faf00893bd44b8614b25f6f9",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 76,
"avg_line_length": 30.90625,
"alnum_prop": 0.7269969666329625,
"repo_name": "emlprime/fantasyx",
"id": "2b9ca3e7cc08614f76c6db5e8f67faa5575ea677",
"size": "989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fantasyx/import_scores.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1528"
},
{
"name": "HTML",
"bytes": "1589"
},
{
"name": "JavaScript",
"bytes": "38134"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "55036"
}
],
"symlink_target": ""
} |
package org.apache.geode.cache.query.internal;
/**
 * Marker interface for {@code CompiledValue} implementations that know how to
 * logically negate themselves in place.
 */
public interface Negatable {

  /**
   * Negates this value's condition in place.
   */
  void negate();
}
| {
"content_hash": "731d91860bf9d5995deea12cb63eeffc",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 85,
"avg_line_length": 19.9,
"alnum_prop": 0.7487437185929648,
"repo_name": "prasi-in/geode",
"id": "6887776b6e21838dd66c1f31db4a65f48e2d3f22",
"size": "988",
"binary": false,
"copies": "5",
"ref": "refs/heads/develop",
"path": "geode-core/src/main/java/org/apache/geode/cache/query/internal/Negatable.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "106707"
},
{
"name": "Groovy",
"bytes": "2928"
},
{
"name": "HTML",
"bytes": "3984628"
},
{
"name": "Java",
"bytes": "26703981"
},
{
"name": "JavaScript",
"bytes": "1781013"
},
{
"name": "Ruby",
"bytes": "6751"
},
{
"name": "Scala",
"bytes": "236402"
},
{
"name": "Shell",
"bytes": "43900"
}
],
"symlink_target": ""
} |
{extend name="default/template/base_index" /}
{block name="area_header"}
<link type="text/css" rel="stylesheet" href="__CDN__/comp/wxuploader.css?v=__APP_VERSION__" />
<link type="text/css" rel="stylesheet" href="__CDN__/jquery-uploadify/3.2.1/uploadify.css" />
<script type="text/javascript" src="__CDN__/jquery-uploadify/3.2.1/jquery.uploadify.min.js"></script>
{/block}
{block name="area_body"}
{include file="default/Widget/topbar" /}
<div class="admin-main container-fluid">
{include file="default/Widget/left" /}
<div class="admin-main-content">
{include file="default/Widget/breadcrumb" /}
<!-- 带验证 form -->
<form class="form-horizontal well wxaccountForm">
<input type="hidden" name="id" value="{$wxaccount.id}" />
<fieldset>
<legend>
公众号资料-编辑</legend>
<div class="form-group">
<label for="btns" class="col-md-2 col-lg-2 control-label"> </label>
<div class="col-lg-10 col-md-10">
<a target-form="wxaccountForm" class="ajax-post btn btn-primary" onclick="return getData();" href="{:url(''.CONTROLLER_NAME.'/store')}" autofocus="autofocus"><i class="fa fa-save"></i> {:L('BTN_SAVE')}</a>
<a href="{:url('Wxaccount/help',array('id'=>$wxaccount['id']))}" class="btn btn-default">公众号绑定信息</a>
<!--<a class="btn btn-default" href="{:url(''.CONTROLLER_NAME.'/index')}"><i class="fa fa-times-circle"></i> {:L('BTN_CANCEL')}</a>-->
</div>
</div>
<div class="form-group">
<label for="inputtitle" class="col-md-2 col-lg-2 control-label">公众号昵称</label>
<div class="col-md-10 col-lg-10">
<input type="text" value="{$wxaccount.wxname}" class="required form-control input-short" name="wxname" placeholder="请输入公众号昵称">
<div class="help-block">(公众号昵称)</div>
</div>
</div>
<div class="form-group">
<label for="inputtitle" class="col-md-2 col-lg-2 control-label">微信号</label>
<div class="col-md-10 col-lg-10">
<input type="text" value="{$wxaccount.weixin}" class="required form-control input-short" name="weixin" placeholder="请输入微信号">
<div class="help-block">(微信号)</div>
</div>
</div>
<div class="form-group">
<label for="inputtitle" class="col-md-2 col-lg-2 control-label">原始ID</label>
<div class="col-md-10 col-lg-10">
<input type="text" value="{$wxaccount.wxuid}" class="required form-control input-short" name="wxuid" placeholder="请输入原始ID">
<div class="help-block">(原始ID)</div>
</div>
</div>
<div class="form-group">
<label for="inputtitle" class="col-md-2 col-lg-2 control-label">二维码地址</label>
<div class="col-md-10 col-lg-10">
<input type="hidden" value="" id="qrcode_img" class="required form-control" name="qrcode" placeholder="请输入二维码地址">
<!-- 图片选择DOM结构 -->
<div class="wxuploaderimg clearfix <notempty name="wxaccount.qrcode">checked{/notempty}" data-maxitems="1">
<div class="img-preview clearfix" >
<notempty name="wxaccount.qrcode">
<div class="pull-left clearfix img-item">
<img src="{$wxaccount.qrcode}" />
<div class="edit_pic_wrp"><a href="javascript:;" class="fa fa-lg fa-trash js_delete"></a></div>
</div>
{/notempty}
</div>
<div class="add">
<i class="fa fa-plus"></i>
</div>
</div>
<!-- 图片选择DOM结构 -->
<div class="help-block">(二维码地址)</div>
</div>
</div>
<div class="form-group">
<label for="inputtitle" class="col-md-2 col-lg-2 control-label">头像地址</label>
<div class="col-md-10 col-lg-10">
<input type="text" value="{$wxaccount.headerpic}" class="required form-control" name="headerpic" placeholder="请输入头像地址">
<div class="help-block">(头像地址)</div>
</div>
</div>
<div class="form-group">
<label for="inputtitle" class="col-md-2 col-lg-2 control-label">公众号APPID</label>
<div class="col-md-10 col-lg-10">
<input type="text" value="{$wxaccount.appid}" class="required form-control input-short" name="appid" placeholder="请输入公众号APPID">
<div class="help-block">(公众号APPID)</div>
</div>
</div>
<div class="form-group">
<label for="inputtitle" class="col-md-2 col-lg-2 control-label">公众号APPSECRET</label>
<div class="col-md-10 col-lg-10">
<input type="text" value="{$wxaccount.appsecret}" class="required form-control" name="appsecret" placeholder="请输入公众号APPSECRET">
<div class="help-block">(公众号APPSECRET)</div>
</div>
</div>
<div class="form-group">
<label for="inputtitle" class="col-md-2 col-lg-2 control-label">EncodingAESKey</label>
<div class="col-md-10 col-lg-10">
<input type="text" readonly="readonly" value="{$wxaccount.encodingaeskey}" class="required form-control" name="encodingAESKey" id="encodingAESKey" placeholder="请输入EncodingAESKey">
<a href="javascript:createEncodingAESKey();" class="btn btn-primary">重新生成</a>
<div class="help-block">(请慎重更改,更改后必须重新配置微信开发者中心中的服务器配置!)</div>
</div>
</div>
<div class="form-group">
<label for="btns" class="col-md-2 col-lg-2 control-label"> </label>
<div class="col-lg-10 col-md-10">
<a target-form="wxaccountForm" class="ajax-post btn btn-primary" onclick="return getData();" href="{:url(''.CONTROLLER_NAME.'/store')}" autofocus="autofocus"><i class="fa fa-save"></i> {:L('BTN_SAVE')}</a>
<!--<a class="btn btn-default" href="{:url(''.CONTROLLER_NAME.'/index')}"><i class="fa fa-times-circle"></i> {:L('BTN_CANCEL')}</a>-->
</div>
</div>
</fieldset>
</form>
<!-- form -->
{include file="default/template/wxpicture" /}
</div>
<!-- END admin-main-content -->
</div>
<!-- END admin-main-->
{/block}
{block name="area_footer"}
<script type="text/javascript">
// Copy the QR-code image URL from the uploader preview into the hidden
// form field so it is submitted together with the rest of the form.
function getData(){
    var qrcodeUrl = $(".wxuploaderimg img").attr("src");
    $("#qrcode_img").val(qrcodeUrl);
}
// Initialise the image-uploader widget on the QR-code container once the DOM is ready.
$(function(){
    wxuploadimg.init({cont:".wxuploaderimg"});
})
// Generate a fresh 43-character EncodingAESKey and place it into the
// (read-only) input field so it can be saved with the form.
function createEncodingAESKey(){
    $("#encodingAESKey").val(randomString());
}
/**
 * Build a random string of lowercase letters and digits.
 *
 * Bug fix: the loop index is now declared with `var` — previously `i`
 * leaked as an implicit global variable.
 *
 * @param {number} [len=43] Desired length; defaults to the 43 characters a
 *     WeChat EncodingAESKey requires. A falsy len (e.g. 0) also falls back
 *     to 43, preserving the original behaviour.
 * @returns {string} Random string matching /^[a-z0-9]*$/ of the given length.
 */
function randomString(len) {
    len = len || 43;
    var chars = 'abcdefghijklmnopqrstuvwxyz0123456789';
    var maxPos = chars.length;
    var pwd = '';
    for (var i = 0; i < len; i++) {
        pwd += chars.charAt(Math.floor(Math.random() * maxPos));
    }
    return pwd;
}
</script>
{/block} | {
"content_hash": "1ee12f1aed7e89fd0b1a3e4ae06831fd",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 214,
"avg_line_length": 44.02040816326531,
"alnum_prop": 0.6097975583371967,
"repo_name": "h136799711/api_resource",
"id": "f904f1a20b90a5797f0866d98e15678b7fbbc9b2",
"size": "6831",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "application/admin/view/default/Wxaccount/edit.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1672243"
},
{
"name": "HTML",
"bytes": "1849684"
},
{
"name": "JavaScript",
"bytes": "2198991"
},
{
"name": "PHP",
"bytes": "1047257"
},
{
"name": "Shell",
"bytes": "2487"
},
{
"name": "Smarty",
"bytes": "3373"
}
],
"symlink_target": ""
} |
<?php
/**
* This file is part of the Affinity Development
* open source toolset.
*
* @author Brendan Bates <brendanbates89@gmail.com>
* @package AuthNotes
* @license http://opensource.org/licenses/bsd-license.php BSD
*/
namespace Affinity\AuthNoteBundle\Service;
use Symfony\Component\Config\Loader\Loader;
use Symfony\Component\Routing\RouteCollection;
/**
 * Route loader that injects this bundle's routes into the application.
 *
 * Symfony selects the loader whose supports() accepts the resource type
 * "auth_note_route_loader" and then calls load() to import the bundle's
 * YAML route definitions.
 *
 * @package AuthNotes
 */
class RouteLoader extends Loader
{
    /**
     * Imports the bundle routing file; the passed resource/type are ignored
     * because both are fixed for this loader.
     *
     * @param mixed       $resource Ignored.
     * @param string|null $type     Ignored.
     *
     * @return RouteCollection Collection holding the bundle's routes.
     */
    public function load($resource, $type = null)
    {
        $importedRoutes = $this->import(
            '@AuthNoteBundle/Resources/config/bundle_routing.yml',
            'yaml'
        );

        $collection = new RouteCollection();
        $collection->addCollection($importedRoutes);

        return $collection;
    }

    /**
     * Tells Symfony which routing resource type this loader handles.
     *
     * @param mixed       $resource Resource being loaded (unused).
     * @param string|null $type     Resource type to check.
     *
     * @return bool True when the type is "auth_note_route_loader".
     */
    public function supports($resource, $type = null)
    {
        return 'auth_note_route_loader' === $type;
    }
}
| {
"content_hash": "fe0dec8734c7379d9b62de7f35075578",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 74,
"avg_line_length": 22.09090909090909,
"alnum_prop": 0.6358024691358025,
"repo_name": "affinitydev/auth_notes",
"id": "1a2aa6b426f1a7e235d513013806b85e89cde5ad",
"size": "972",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Affinity/AuthNoteBundle/Service/RouteLoader.php",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "PHP",
"bytes": "52270"
}
],
"symlink_target": ""
} |
# Derive the name of the command under test from this test file's own name
# (dbatools convention: "<Command>.Tests.ps1").
$CommandName = $MyInvocation.MyCommand.Name.Replace(".Tests.ps1", "")
Write-Host -Object "Running $PSCommandpath" -ForegroundColor Cyan
# Dot-source the shared test constants used across the test suite.
. "$PSScriptRoot\constants.ps1"
Describe "$CommandName Unit Tests" -Tag 'UnitTests' {
    Context "Validate parameters" {
        # Actual parameters exposed by the command, minus the WhatIf/Confirm switches.
        [object[]]$params = (Get-Command $CommandName).Parameters.Keys | Where-Object { $_ -notin ('whatif', 'confirm') }
        # The parameter contract this command is expected to expose.
        [object[]]$knownParameters = 'Name', 'Path', 'FilePath', 'InputObject', 'Architecture', 'Language', 'EnableException'
        # PowerShell's common parameters (Verbose, ErrorAction, ...) are always present.
        $knownParameters += [System.Management.Automation.PSCmdlet]::CommonParameters
        It "Should only contain our specific parameters" {
            # Zero differences between the two sets means no parameter was added or removed unnoticed.
            (@(Compare-Object -ReferenceObject ($knownParameters | Where-Object { $_ }) -DifferenceObject $params).Count ) | Should Be 0
        }
    }
}
# NOTE: these tests hit the network (Microsoft Update catalog) and write to C:\temp.
Describe "$commandname Integration Tests" -Tag "IntegrationTests" {
    It "downloads a small update" {
        $results = Save-DbaKbUpdate -Name KB2992080 -Architecture All -Path C:\temp
        $results.Name -match 'aspnet'
        # Clean up the downloaded file so repeated runs start fresh.
        $results | Remove-Item -Confirm:$false
    }
    It "supports piping" {
        # Same KB as above, but resolved via Get-DbaKbUpdate and piped in.
        $results = Get-DbaKbUpdate -Name KB2992080 | select -First 1 | Save-DbaKbUpdate -Architecture All -Path C:\temp
        $results.Name -match 'aspnet'
        $results | Remove-Item -Confirm:$false
    }
    It "Download multiple updates" {
        $results = Save-DbaKbUpdate -Name KB2992080, KB4513696 -Architecture All -Path C:\temp
        # basic retry logic in case the first download didn't get all of the files
        if ($null -eq $results -or $results.Count -ne 2) {
            Write-Message -Level Warning -Message "Retrying..."
            # Remove any partial downloads before retrying.
            if ($results.Count -gt 0) {
                $results | Remove-Item -Confirm:$false
            }
            Start-Sleep -s 30
            $results = Save-DbaKbUpdate -Name KB2992080, KB4513696 -Architecture All -Path C:\temp
        }
        $results.Count | Should -Be 2
        $results | Remove-Item -Confirm:$false
        # download multiple updates via piping
        $results = Get-DbaKbUpdate -Name KB2992080, KB4513696 | Save-DbaKbUpdate -Architecture All -Path C:\temp
        # basic retry logic in case the first download didn't get all of the files
        if ($null -eq $results -or $results.Count -ne 2) {
            Write-Message -Level Warning -Message "Retrying..."
            if ($results.Count -gt 0) {
                $results | Remove-Item -Confirm:$false
            }
            Start-Sleep -s 30
            $results = Get-DbaKbUpdate -Name KB2992080, KB4513696 | Save-DbaKbUpdate -Architecture All -Path C:\temp
        }
        $results.Count | Should -Be 2
        $results | Remove-Item -Confirm:$false
    }
    # see https://github.com/dataplat/dbatools/issues/6745
    It "Ensuring that variable scope doesn't impact the command negatively" {
        # $filter is deliberately set (and otherwise unused) here: the linked
        # issue was a bug where a caller-scope $filter variable leaked into the
        # command's internals. Do not remove it.
        $filter = "SQLServer*-KB-*x64*.exe"
        $results = Save-DbaKbUpdate -Name KB4513696 -Architecture All -Path C:\temp
        $results.Count | Should -Be 1
        $results | Remove-Item -Confirm:$false
    }
}
"content_hash": "170d2d09224c1f47fb091a362fb24704",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 136,
"avg_line_length": 45.338235294117645,
"alnum_prop": 0.6337982484592929,
"repo_name": "alevyinroc/dbatools",
"id": "8a7a4ea353d61547f59ae8891f8427978d594785",
"size": "3083",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/Save-DbaKbUpdate.Tests.ps1",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "481258"
},
{
"name": "PowerShell",
"bytes": "10381925"
},
{
"name": "Rich Text Format",
"bytes": "61846"
},
{
"name": "TSQL",
"bytes": "1026352"
}
],
"symlink_target": ""
} |
<!---
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
# Apache Arrow 0.15.0 (30 September 2019)
## Bug
* ARROW-1184 - [Java] Dictionary.equals is not working correctly
* ARROW-2317 - [Python] fix C linkage warning
* ARROW-2490 - [C++] input stream locking inconsistent
* ARROW-3176 - [Python] Overflow in Date32 column conversion to pandas
* ARROW-3203 - [C++] Build error on Debian Buster
* ARROW-3651 - [Python] Datetimes from non-DateTimeIndex cannot be deserialized
* ARROW-3652 - [Python] CategoricalIndex is lost after reading back
* ARROW-3762 - [C++] Parquet arrow::Table reads error when overflowing capacity of BinaryArray
* ARROW-3933 - [Python] Segfault reading Parquet files from GNOMAD
* ARROW-4187 - [C++] file-benchmark uses <poll.h>
* ARROW-4746 - [C++/Python] PyDataTime\_Date wrongly casted to PyDataTime\_DateTime
* ARROW-4836 - [Python] "Cannot tell() a compressed stream" when using RecordBatchStreamWriter
* ARROW-4848 - [C++] Static libparquet not compiled with -DARROW\_STATIC on Windows
* ARROW-4880 - [Python] python/asv-build.sh is probably broken after CMake refactor
* ARROW-4883 - [Python] read\_csv() returns garbage if given file object in text mode
* ARROW-5028 - [Python][C++] Creating list<string> with pyarrow.array can overflow child builder
* ARROW-5085 - [Python/C++] Conversion of dict encoded null column fails in parquet writing when using RowGroups
* ARROW-5086 - [Python] Space leak in ParquetFile.read\_row\_group()
* ARROW-5089 - [C++/Python] Writing dictionary encoded columns to parquet is extremely slow when using chunk size
* ARROW-5125 - [Python] Cannot roundtrip extreme dates through pyarrow
* ARROW-5220 - [Python] index / unknown columns in specified schema in Table.from\_pandas
* ARROW-5292 - [C++] Static libraries are built on AppVeyor
* ARROW-5300 - [C++] 0.13 FAILED to build with option -DARROW\_NO\_DEFAULT\_MEMORY\_POOL
* ARROW-5374 - [Python] Misleading error message when calling pyarrow.read\_record\_batch on a complete IPC stream
* ARROW-5414 - [C++] Using "Ninja" build system generator overrides default Release build type on Windows
* ARROW-5450 - [Python] TimestampArray.to\_pylist() fails with OverflowError: Python int too large to convert to C long
* ARROW-5471 - [C++][Gandiva]Array offset is ignored in Gandiva projector
* ARROW-5522 - [Packaging][Documentation] Comments out of date in python/manylinux1/build\_arrow.sh
* ARROW-5560 - [C++][Plasma] Cannot create Plasma object after OutOfMemory error
* ARROW-5562 - [C++][Parquet] parquet writer does not handle negative zero correctly
* ARROW-5630 - [Python][Parquet] Table of nested arrays doesn't round trip
* ARROW-5638 - [C++] cmake fails to generate Xcode project when Gandiva JNI bindings are enabled
* ARROW-5651 - [Python] Incorrect conversion from strided Numpy array when other type is specified
* ARROW-5682 - [Python] from\_pandas conversion casts values to string inconsistently
* ARROW-5731 - [CI] Turbodbc integration tests are failing
* ARROW-5753 - [Rust] Fix test failure in CI code coverage
* ARROW-5772 - [GLib][Plasma][CUDA] Plasma::Client#refer\_object test is failed
* ARROW-5775 - [C++] StructArray : cached boxed fields not thread-safe
* ARROW-5776 - [Gandiva][Crossbow] Revert template to have commit ids.
* ARROW-5790 - [Python] Passing zero-dim numpy array to pa.array causes segfault
* ARROW-5817 - [Python] Use pytest marks for Flight test to avoid silently skipping unit tests due to import failures
* ARROW-5823 - [Rust] CI scripts miss --all-targets cargo argument
* ARROW-5824 - [Gandiva] [C++] Fix decimal null
* ARROW-5836 - [Java][OSX] Flight tests are failing: address already in use
* ARROW-5838 - [C++][Flight][OSX] Building 3rdparty grpc cannot find OpenSSL
* ARROW-5848 - [C++] SO versioning schema after release 1.0.0
* ARROW-5849 - [C++] Compiler warnings on mingw-w64
* ARROW-5851 - [C++] Compilation of reference benchmarks fails
* ARROW-5856 - [Python] linking 3rd party cython modules against pyarrow fails since 0.14.0
* ARROW-5860 - [Java] [Vector] Fix decimal byte setter
* ARROW-5863 - [Python] Segmentation Fault via pytest-runner
* ARROW-5868 - [Python] manylinux2010 wheels have shared library dependency on liblz4
* ARROW-5870 - [C++] Development compile instructions need to include "make"
* ARROW-5873 - [Python] Segmentation fault when comparing schema with None
* ARROW-5874 - [Python] pyarrow 0.14.0 macOS wheels depend on shared libs under /usr/local/opt
* ARROW-5878 - [Python][C++] Parquet reader not forward compatible for timestamps without timezone
* ARROW-5884 - [Java] Fix the get method of StructVector
* ARROW-5886 - [Python][Packaging] Manylinux1/2010 compliance issue with libz
* ARROW-5887 - [C#] ArrowStreamWriter writes FieldNodes in wrong order
* ARROW-5889 - [Python][C++] Parquet backwards compat for timestamps without timezone broken
* ARROW-5894 - [C++] libgandiva.so.14 is exporting libstdc++ symbols
* ARROW-5899 - [Python][Packaging] Bundle uriparser.dll in windows wheels
* ARROW-5910 - [Python] read\_tensor() fails on non-seekable streams
* ARROW-5921 - [C++][Fuzzing] Missing nullptr checks in IPC
* ARROW-5923 - [C++] Fix int96 comment
* ARROW-5925 - [Gandiva][C++] cast decimal to int should round up
* ARROW-5930 - [FlightRPC] [Python] Flight CI tests are failing
* ARROW-5935 - [C++] ArrayBuilders with mutable type are not robustly supported
* ARROW-5946 - [Rust] [DataFusion] Projection push down with aggregate producing incorrect results
* ARROW-5952 - [Python] Segfault when reading empty table with category as pandas dataframe
* ARROW-5959 - [C++][CI] Fuzzit does not know about branch + commit hash
* ARROW-5960 - [C++] Boost dependencies are specified in wrong order
* ARROW-5963 - [R] R Appveyor job does not test changes in the C++ library
* ARROW-5964 - [C++][Gandiva] Cast double to decimal with rounding returns 0
* ARROW-5966 - [Python] Capacity error when converting large UTF32 numpy array to arrow array
* ARROW-5968 - [Java] Remove duplicate Preconditions check in JDBC adapter
* ARROW-5969 - [CI] [R] Lint failures
* ARROW-5973 - [Java] Variable width vectors' get methods should return null when the underlying data is null
* ARROW-5989 - [C++][Python] pyarrow.lib.ArrowIOError: Unable to load libjvm when using openjdk-8
* ARROW-5990 - [Python] RowGroupMetaData.column misses bounds check
* ARROW-5992 - [C++] Array::View fails for string/utf8 as binary
* ARROW-5996 - [Java] Avoid resource leak in flight service
* ARROW-5999 - [C++] Required header files missing when built with -DARROW\_DATASET=OFF
* ARROW-6002 - [C++][Gandiva] TestCastFunctions does not test int64 casting
* ARROW-6004 - [C++] CSV reader ignore\_empty\_lines option doesn't handle empty lines
* ARROW-6005 - [C++] parquet::arrow::FileReader::GetRecordBatchReader() does not behave as documented since ARROW-1012
* ARROW-6006 - [C++] Empty IPC streams containing a dictionary are corrupt
* ARROW-6012 - [C++] Fall back on known Apache mirror for Thrift downloads
* ARROW-6016 - [Python] pyarrow get\_library\_dirs assertion error
* ARROW-6029 - [R] Improve R docs on how to fix library version mismatch
* ARROW-6032 - [C++] CountSetBits doesn't ensure 64-bit aligned accesses
* ARROW-6038 - [Python] pyarrow.Table.from\_batches produces corrupted table if any of the batches were empty
* ARROW-6040 - [Java] Dictionary entries are required in IPC streams even when empty
* ARROW-6046 - [C++] Slice RecordBatch of String array with offset 0 returns whole batch
* ARROW-6047 - [Rust] Rust nightly 1.38.0 builds failing
* ARROW-6050 - [Java] Update out-of-date java/flight/README.md
* ARROW-6054 - pyarrow.serialize should respect the value of structured dtype of numpy
* ARROW-6058 - [Python][Parquet] Failure when reading Parquet file from S3 with s3fs
* ARROW-6060 - [Python] too large memory cost using pyarrow.parquet.read\_table with use\_threads=True
* ARROW-6061 - [C++] Cannot build libarrow without rapidjson
* ARROW-6066 - [Website] Fix blog post author header
* ARROW-6067 - [Python] Large memory test failures
* ARROW-6068 - [Python] Hypothesis test failure, Add StructType::Make that accepts vector of fields
* ARROW-6073 - [C++] Decimal128Builder is not reset in Finish()
* ARROW-6082 - [Python] create pa.dictionary() type with non-integer indices type crashes
* ARROW-6092 - [C++] Python 2.7: arrow\_python\_test failure
* ARROW-6095 - [C++] Python subproject ignores ARROW\_TEST\_LINKAGE
* ARROW-6108 - [C++] Appveyor Build\_Debug configuration is hanging in C++ unit tests
* ARROW-6116 - [C++][Gandiva] Fix bug in TimedTestFilterAdd2
* ARROW-6117 - [Java] Fix the set method of FixedSizeBinaryVector
* ARROW-6120 - [C++][Gandiva] including some headers causes decimal\_test to fail
* ARROW-6126 - [C++] IPC stream reader handling of empty streams potentially not robust
* ARROW-6132 - [Python] ListArray.from\_arrays does not check validity of input arrays
* ARROW-6135 - [C++] KeyValueMetadata::Equals should not be order-sensitive
* ARROW-6136 - [FlightRPC][Java] Don't double-close response stream
* ARROW-6145 - [Java] UnionVector created by MinorType#getNewVector could not keep field type info properly
* ARROW-6148 - [C++][Packaging] Improve aarch64 support
* ARROW-6152 - [C++][Parquet] Write arrow::Array directly into parquet::TypedColumnWriter<T>
* ARROW-6153 - [R] Address parquet deprecation warning
* ARROW-6158 - [Python] possible to create StructArray with type that conflicts with child array's types
* ARROW-6159 - [C++] PrettyPrint of arrow::Schema missing identation for first line
* ARROW-6160 - [Java] AbstractStructVector#getPrimitiveVectors fails to work with complex child vectors
* ARROW-6166 - [Go] Slice of slice causes index out of range panic
* ARROW-6167 - [R] macOS binary R packages on CRAN don't have arrow\_available
* ARROW-6170 - [R] "docker-compose build r" is slow
* ARROW-6171 - [R] "docker-compose run r" fails
* ARROW-6174 - [C++] Validate chunks in ChunkedArray::Validate
* ARROW-6175 - [Java] Fix MapVector#getMinorType and extend AbstractContainerVector addOrGet complex vector API
* ARROW-6178 - [Developer] Don't fail in merge script on bad primary author input in multi-author PRs
* ARROW-6182 - [R] Add note to README about r-arrow conda installation
* ARROW-6186 - [Packaging][C++] Plasma headers not included for ubuntu-xenial libplasma-dev debian package
* ARROW-6190 - [C++] Define and declare functions regardless of NDEBUG
* ARROW-6200 - [Java] Method getBufferSizeFor in BaseRepeatedValueVector/ListVector not correct
* ARROW-6202 - [Java] Exception in thread "main" org.apache.arrow.memory.OutOfMemoryException: Unable to allocate buffer of size 4 due to memory limit. Current allocation: 2147483646
* ARROW-6205 - [C++] ARROW\_DEPRECATED warning when including io/interfaces.h from CUDA (.cu) source
* ARROW-6208 - [Java] Correct byte order before comparing in ByteFunctionHelpers
* ARROW-6210 - [Java] remove equals API from ValueVector
* ARROW-6211 - [Java] Remove dependency on RangeEqualsVisitor from ValueVector interface
* ARROW-6214 - [R] Sanitizer errors triggered via R bindings
* ARROW-6215 - [Java] RangeEqualVisitor does not properly compare ZeroVector
* ARROW-6223 - [C++] Configuration error with Anaconda Python 3.7.4
* ARROW-6224 - [Python] remaining usages of the 'data' attribute (from previous Column) cause warnings
* ARROW-6227 - [Python] pyarrow.array() shouldn't coerce np.nan to string
* ARROW-6234 - [Java] ListVector hashCode() is not correct
* ARROW-6241 - [Java] Failures on master
* ARROW-6259 - [C++][CI] Flatbuffers-related failures in CI on macOS
* ARROW-6263 - [Python] RecordBatch.from\_arrays does not check array types against a passed schema
* ARROW-6266 - [Java] Resolve the ambiguous method overload in RangeEqualsVisitor
* ARROW-6268 - Empty buffer should have a valid address
* ARROW-6269 - [C++][Fuzzing] IPC reads do not check decimal precision
* ARROW-6270 - [C++][Fuzzing] IPC reads do not check buffer indices
* ARROW-6290 - [Rust] [DataFusion] sql\_csv example errors when running
* ARROW-6291 - [C++] CMake ignores ARROW\_PARQUET
* ARROW-6301 - [Python] atexit: pyarrow.lib.ArrowKeyError: 'No type extension with name arrow.py\_extension\_type found'
* ARROW-6302 - [Python][Parquet] Reading dictionary type with serialized Arrow schema does not restore "ordered" type property
* ARROW-6309 - [C++] Parquet tests and executables are linked statically
* ARROW-6323 - [R] Expand file paths when passing to readers
* ARROW-6325 - [Python] wrong conversion of DataFrame with boolean values
* ARROW-6330 - [C++] Include missing headers in api.h
* ARROW-6332 - [Java][C++][Gandiva] Handle size of varchar vectors correctly
* ARROW-6339 - [Python][C++] Rowgroup statistics for pd.NaT array ill defined
* ARROW-6343 - [Java] [Vector] Fix allocation helper
* ARROW-6344 - [C++][Gandiva] substring does not handle multibyte characters
* ARROW-6345 - [C++][Python] "ordered" flag seemingly not taken into account when comparing DictionaryType values for equality
* ARROW-6348 - [R] arrow::read\_csv\_arrow namespace error when package not loaded
* ARROW-6354 - [C++] Building without Parquet fails
* ARROW-6363 - [R] segfault in Table\_\_from\_dots with unexpected schema
* ARROW-6364 - [R] Handling unexpected input to time64() et al
* ARROW-6369 - [Python] Support list-of-boolean in Array.to\_pandas conversion
* ARROW-6371 - [Doc] Row to columnar conversion example mentions arrow::Column in comments
* ARROW-6372 - [Rust][Datafusion] Casting from Un-signed to Signed Integers not supported
* ARROW-6376 - [Developer] PR merge script has "master" target ref hard-coded
* ARROW-6387 - [Archery] Errors with make
* ARROW-6392 - [Python][Flight] list\_actions Server RPC is not tested in test\_flight.py, nor is return value validated
* ARROW-6406 - [C++] jemalloc\_ep fails for offline build
* ARROW-6411 - [C++][Parquet] DictEncoderImpl<T>::PutIndicesTyped has bad performance on some systems
* ARROW-6412 - [C++] arrow-flight-test can crash because of port allocation
* ARROW-6418 - [C++] Plasma cmake targets are not exported
* ARROW-6423 - [Python] pyarrow.CompressedOutputStream() never completes with compression='snappy'
* ARROW-6424 - [C++][Fuzzing] Fuzzit nightly is broken
* ARROW-6428 - [CI][Crossbow] Nightly turbodbc job fails
* ARROW-6431 - [Python] Test suite fails without pandas installed
* ARROW-6432 - [CI][Crossbow] Remove alpine crossbow jobs
* ARROW-6433 - [CI][Crossbow] Nightly java docker job fails
* ARROW-6434 - [CI][Crossbow] Nightly HDFS integration job fails
* ARROW-6435 - [CI][Crossbow] Nightly dask integration job fails
* ARROW-6440 - [CI][Crossbow] Nightly ubuntu, debian, and centos package builds fail
* ARROW-6441 - [CI][Crossbow] Nightly Centos 6 job fails
* ARROW-6443 - [CI][Crossbow] Nightly conda osx builds fail
* ARROW-6445 - [CI][Crossbow] Nightly Gandiva jar trusty job fails
* ARROW-6446 - [OSX][Python][Wheel] Turn off ORC feature in the wheel building scripts
* ARROW-6449 - [R] io "tell()" methods are inconsistently named and untested
* ARROW-6457 - [C++] CMake build locally fails with MSVC 2015 build generator
* ARROW-6461 - [Java] EchoServer can close socket before client has finished reading
* ARROW-6472 - [Java] ValueVector#accept may has potential cast exception
* ARROW-6476 - [Java][CI] Travis java all-jdks job is broken
* ARROW-6478 - [C++] Roll back to jemalloc stable-4 branch until performance issues in 5.2.x addressed
* ARROW-6481 - [Python][C++] Bad performance of read\_csv() with column\_types
* ARROW-6488 - [Python] pyarrow.NULL equals to itself
* ARROW-6492 - [Python] file written with latest fastparquet cannot be read with latest pyarrow
* ARROW-6502 - [GLib][CI] MinGW failure in CI
* ARROW-6506 - [C++] Validation of ExtensionType with nested type fails
* ARROW-6509 - [C++][Gandiva] Re-enable Gandiva JNI tests and fix Travis CI failure
* ARROW-6520 - [Python] Segmentation fault on writing tables with fixed size binary fields
* ARROW-6522 - [Python] Test suite fails with pandas 0.23.4, pytest 3.8.1
* ARROW-6530 - [CI][Crossbow][R] Nightly R job doesn't install all dependencies
* ARROW-6550 - [C++] Filter expressions PR failing manylinux package builds
* ARROW-6552 - [C++] boost::optional in STL test fails compiling in gcc 4.8.2
* ARROW-6560 - [Python] Failures in \*-nopandas integration tests
* ARROW-6561 - [Python] pandas-master integration test failure
* ARROW-6562 - [GLib] Fix wrong sliced data of GArrowBuffer
* ARROW-6564 - [Python] Do not require pandas for invoking Array.\_\_array\_\_
* ARROW-6565 - [Rust] [DataFusion] Intermittent test failure due to temp dir already existing
* ARROW-6568 - [C++][Python][Parquet] pyarrow.parquet crash writing zero-chunk dictionary-type column
* ARROW-6572 - [C++] Reading some Parquet data can return uninitialized memory
* ARROW-6573 - [Python] Segfault when writing to parquet
* ARROW-6576 - [R] Fix sparklyr integration tests
* ARROW-6597 - [Python] Segfault in test\_pandas with Python 2.7
* ARROW-6618 - [Python] Reading a zero-size buffer can segfault
* ARROW-6622 - [C++][R] SubTreeFileSystem path error on Windows
* ARROW-6623 - [CI][Python] Dask docker integration test broken perhaps by statistics-related change
* ARROW-6639 - [Packaging][RPM] Add support for CentOS 7 on aarch64
* ARROW-6640 - [C++] Error when BufferedInputStream Peek more than bytes buffered
* ARROW-6642 - [Python] chained access of ParquetDataset's metadata segfaults
* ARROW-6651 - [R] Fix R conda job
* ARROW-6652 - [Python] to\_pandas conversion removes timezone from type
* ARROW-6660 - [Rust] [DataFusion] Minor docs update for 0.15.0 release
* ARROW-6670 - [CI][R] Fix fix for R nightly jobs
* ARROW-6674 - [Python] Fix or ignore the test warnings
* ARROW-6677 - [FlightRPC][C++] Document using Flight in C++
* ARROW-6678 - [C++] Regression in Parquet file compatibility introduced by ARROW-3246
* ARROW-6679 - [RELEASE] autobrew license in LICENSE.txt is not acceptable
* ARROW-6682 - [C#] Arrow R/C++ hangs reading binary file generated by C#
* ARROW-6687 - [Rust] [DataFusion] Query returns incorrect row count
* ARROW-6701 - [C++][R] Lint failing on R cpp code
* ARROW-6703 - [Packaging][Linux] Restore ARROW\_VERSION environment variable
* ARROW-6705 - [Rust] [DataFusion] README has invalid github URL
* ARROW-6709 - [JAVA] Jdbc adapter currentIndex should increment when value is null
* ARROW-6714 - [R] Fix untested RecordBatchWriter case
* ARROW-6716 - [CI] [Rust] New 1.40.0 nightly causing builds to fail
## Improvement
* ARROW-1324 - [C++] Support ARROW\_BOOST\_VENDORED on Windows / MSVC
* ARROW-1789 - [Format] Consolidate specification documents and improve clarity for new implementation authors
* ARROW-2769 - [C++][Python] Deprecate and rename add\_metadata methods
* ARROW-3032 - [Python] Clean up NumPy-related C++ headers
* ARROW-3243 - [C++] Upgrade jemalloc to version 5
* ARROW-3246 - [Python][Parquet] direct reading/writing of pandas categoricals in parquet
* ARROW-3325 - [Python] Support reading Parquet binary/string columns directly as DictionaryArray
* ARROW-3531 - [Python] Deprecate Schema.field\_by\_name in favor of \_\_getitem\_\_
* ARROW-3579 - [Crossbow] Unintuitive error message when remote branch has not been pushed
* ARROW-3643 - [Rust] Optimize \`push\_slice\` of \`BufferBuilder<bool>\`
* ARROW-3710 - [Crossbow][Python] Run nightly tests against pandas master
* ARROW-3772 - [C++] Read Parquet dictionary encoded ColumnChunks directly into an Arrow DictionaryArray
* ARROW-3829 - [Python] Support protocols to extract Arrow objects from third-party classes
* ARROW-3943 - [R] Write vignette for R package
* ARROW-4036 - [C++] Make status codes pluggable
* ARROW-4095 - [C++] Implement optimizations for dictionary unification where dictionaries are prefixes of the unified dictionary
* ARROW-4111 - [Python] Create time types from Python sequences of integers
* ARROW-4220 - [Python] Add buffered input and output stream ASV benchmarks with simulated high latency IO
* ARROW-4398 - [Python] Add benchmarks for Arrow<>Parquet BYTE\_ARRAY serialization (read and write)
* ARROW-4473 - [Website] Add instructions to do a test-deploy of Arrow website and fix bugs
* ARROW-4648 - [C++/Question] Naming/organizational inconsistencies in cpp codebase
* ARROW-4649 - [C++/CI/R] Add (nightly) job that builds \`brew install apache-arrow --HEAD\`
* ARROW-4752 - [Rust] Add explicit SIMD vectorization for the divide kernel
* ARROW-4810 - [Format][C++] Add "LargeList" type with 64-bit offsets
* ARROW-4841 - [C++] Persist CMake options in generated CMake config
* ARROW-5134 - [R][CI] Run nightly tests against multiple R versions
* ARROW-5211 - [Format] Missing documentation under \`Dictionary encoding\` section on MetaData page
* ARROW-5216 - [CI] Add Appveyor badge to README
* ARROW-5307 - [CI][GLib] Enable GTK-Doc
* ARROW-5343 - [C++] Consider using Buffer for transpose maps in DictionaryType::Unify instead of std::vector
* ARROW-5344 - [C++] Use ArrayDataVisitor in implementation of dictionary unpacking in compute/kernels/cast.cc
* ARROW-5358 - [Rust] Implement equality check for ArrayData and Array
* ARROW-5380 - [C++] Fix and enable UBSan for unaligned accesses.
* ARROW-5439 - [Java] Utilize stream EOS in File format
* ARROW-5444 - [Release][Website] After 0.14 release, update what is an "official" release
* ARROW-5458 - [C++] ARMv8 parallel CRC32c computation optimization
* ARROW-5480 - [Python] Pandas categorical type doesn't survive a round-trip through parquet
* ARROW-5494 - [Python] Create FileSystem bindings
* ARROW-5505 - [R] Stop masking base R functions/rethink namespacing
* ARROW-5527 - [C++] HashTable/MemoTable should use Buffer(s)/Builder(s) for heap data
* ARROW-5558 - [C++] Support Array::View on arrays with non-zero offsets
* ARROW-5559 - [C++] Introduce IpcOptions struct object for better API-stability when adding new options
* ARROW-5564 - [C++] Add uriparser to conda-forge
* ARROW-5610 - [Python] Define extension type API in Python to "receive" or "send" a foreign extension type
* ARROW-5646 - [Crossbow][Documentation] Move the user guide to the Sphinx documentation
* ARROW-5681 - [FlightRPC] Wrap gRPC exceptions/statuses
* ARROW-5686 - [R] Review R Windows CI build
* ARROW-5716 - [Developer] Improve merge PR script to acknowledge co-authors
* ARROW-5717 - [Python] Support dictionary unification when converting variable dictionaries to pandas
* ARROW-5722 - [Rust] Implement std::fmt::Debug for ListArray, BinaryArray and StructArray
* ARROW-5734 - [Python] Dispatch to Table.from\_arrays from pyarrow.table factory function
* ARROW-5736 - [Format][C++] Support small bit-width indices in sparse tensor
* ARROW-5741 - [JS] Make numeric vector from functions consistent with TypedArray.from
* ARROW-5743 - [C++] Add CMake option to enable "large memory" unit tests
* ARROW-5746 - [Website] Move website source out of apache/arrow
* ARROW-5747 - [C++] Better column name and header support in CSV reader
* ARROW-5762 - [Integration][JS] Integration Tests for Map Type
* ARROW-5777 - [C++] BasicDecimal128 is a small object it doesn't always make sense to pass by const ref
* ARROW-5778 - [Java] Extract the logic for vector data copying to the super classes
* ARROW-5784 - [Release][GLib] Replace c\_glib/ after running c\_glib/autogen.sh in dev/release/02-source.sh
* ARROW-5786 - [Release] Use arrow-jni profile in dev/release/01-prepare.sh
* ARROW-5788 - [Rust] Use { version = "...", path = "../..." } for arrow and parquet dependencies
* ARROW-5789 - [C++] Small Warning/Linkage cleanups
* ARROW-5798 - [Packaging][deb] Update doc architecture
* ARROW-5800 - [R] Dockerize R Travis CI tests so they can be run anywhere via docker-compose
* ARROW-5803 - [C++] Dockerize C++ with clang 7 Travis CI unit test logic
* ARROW-5812 - [Java] Refactor method name and param type in BaseIntVector
* ARROW-5813 - [C++] Support checking the equality of the different contiguous tensors
* ARROW-5814 - [Java] Implement a <Object, int> HashMap for DictionaryEncoder
* ARROW-5827 - [C++] Require c-ares CMake config
* ARROW-5828 - [C++] Add Protocol Buffers version check
* ARROW-5830 - [C++] Stop using memcmp in TensorEquals
* ARROW-5833 - [C++] Factor out status copying code from cast.cc
* ARROW-5842 - [Java] Revise the semantic of lastSet in ListVector
* ARROW-5843 - [Java] Improve the readability and performance of BitVectorHelper#getNullCount
* ARROW-5853 - [Python] Expose boolean filter kernel on Array
* ARROW-5864 - [Python] simplify cython wrapping of Result
* ARROW-5865 - [Release] Helper script for rebasing open pull requests on master
* ARROW-5866 - [C++] Remove duplicate library in cpp/Brewfile
* ARROW-5876 - [FlightRPC] Implement basic auth across all languages
* ARROW-5877 - [FlightRPC] Fix auth incompatibilities between Python/Java
* ARROW-5880 - [C++] Update arrow parquet writer to use TypedBufferBuilder
* ARROW-5883 - [Java] Support dictionary encoding for List and Struct type
* ARROW-5888 - [Python][C++] Add metadata to store Arrow time zones in Parquet file metadata
* ARROW-5897 - [Java] Remove duplicated logic in MapVector
* ARROW-5900 - [Gandiva] [Java] Decimal precision,scale bounds check
* ARROW-5904 - [Java] [Plasma] Fix compilation of Plasma Java client
* ARROW-5906 - [CI] Set -DARROW\_VERBOSE\_THIRDPARTY\_BUILD=OFF in builds running in Travis CI, maybe all docker-compose builds by default
* ARROW-5908 - [C#] ArrowStreamWriter doesn't align buffers to 8 bytes
* ARROW-5909 - [Java] Optimize ByteFunctionHelpers equals & compare logic
* ARROW-5911 - [Java] Make ListVector and MapVector create reader lazily
* ARROW-5918 - [Java] Add get to BaseIntVector interface
* ARROW-5919 - [R] Add nightly tests for building r-arrow with dependencies from conda-forge
* ARROW-5924 - [C++][Plasma] It is not convenient to release a GPU object
* ARROW-5937 - [Release] Stop parallel binary upload
* ARROW-5938 - [Release] Create branch for adding release note automatically
* ARROW-5939 - [Release] Add support for generating vote email template separately
* ARROW-5940 - [Release] Add support for re-uploading sign/checksum for binary artifacts
* ARROW-5941 - [Release] Avoid re-uploading already uploaded binary artifacts
* ARROW-5943 - [GLib][Gandiva] Add support for function aliases
* ARROW-5947 - [Rust] [DataFusion] Remove serde\_json dependency
* ARROW-5948 - [Rust] [DataFusion] create\_logical\_plan should not call optimizer
* ARROW-5955 - [Plasma] Support setting memory quotas per plasma client for better isolation
* ARROW-5961 - [R] Be able to run R-only tests even without C++ library
* ARROW-5962 - [CI][Python] Do not test manylinux1 wheels in Travis CI
* ARROW-5967 - [Java] DateUtility#timeZoneList is not correct
* ARROW-5976 - [C++] RETURN\_IF\_ERROR(ctx) should be namespaced
* ARROW-5977 - [C++] [Python] Method for read\_csv to limit which columns are read?
* ARROW-5985 - [Developer] Do not suggest setting Fix Version for point releases in dev/merge\_arrow\_pr.py
* ARROW-5986 - [Java] Code cleanup for dictionary encoding
* ARROW-5998 - [Java] Open a document to track the API changes
* ARROW-6000 - [Python] Expose LargeBinaryType and LargeStringType
* ARROW-6017 - [FlightRPC] Allow creating Locations with unknown schemes
* ARROW-6020 - [Java] Refactor ByteFunctionHelper#hash with new added ArrowBufHasher
* ARROW-6021 - [Java] Extract copyFrom and copyFromSafe methods to ValueVector interface
* ARROW-6036 - [GLib] Add support for skip rows and column\_names CSV read option
* ARROW-6037 - [GLib] Add a missing version macro
* ARROW-6041 - [Website] Blog post announcing R package release
* ARROW-6042 - [C++] Implement alternative DictionaryBuilder that always yields int32 indices
* ARROW-6045 - [C++] Benchmark for Parquet float and NaN encoding/decoding
* ARROW-6048 - [C++] Add ChunkedArray::View which calls to Array::View
* ARROW-6049 - [C++] Support using Array::View from compatible dictionary type to another
* ARROW-6063 - [FlightRPC] Implement "half-closed" semantics for DoPut
* ARROW-6065 - [C++] Reorganize parquet/arrow/reader.cc, remove code duplication, improve readability
* ARROW-6070 - [Java] Avoid creating new schema before IPC sending
* ARROW-6077 - [C++][Parquet] Build logical schema tree mapping Arrow fields to Parquet schema levels
* ARROW-6083 - [Java] Refactor Jdbc adapter consume logic
* ARROW-6084 - [Python] Support LargeList
* ARROW-6093 - [Java] reduce branches in algo for first match in VectorRangeSearcher
* ARROW-6096 - [C++] Conditionally depend on boost regex library
* ARROW-6100 - [Rust] Pin to specific Rust nightly release
* ARROW-6104 - [Rust] [DataFusion] Don't allow bare\_trait\_objects
* ARROW-6105 - [C++][Parquet][Python] Add test case showing dictionary-encoded subfields in nested type
* ARROW-6115 - [Python] support LargeList, LargeString, LargeBinary in conversion to pandas
* ARROW-6118 - [Java] Replace google Preconditions with Arrow Preconditions
* ARROW-6121 - [Tools] Improve merge tool cli ergonomics
* ARROW-6125 - [Python] Remove any APIs deprecated prior to 0.14.x
* ARROW-6127 - [Website] Add favicons and meta tags
* ARROW-6128 - [C++] Can't build with g++ 8.3.0 by class-memaccess warning
* ARROW-6130 - [Release] Use 0.15.0 as the next release
* ARROW-6139 - [Documentation][R] Build R docs (pkgdown) site and add to arrow-site
* ARROW-6141 - [C++] Enable memory-mapping a file region that is offset from the beginning of the file
* ARROW-6143 - [Java] Unify the copyFrom and copyFromSafe methods for all vectors
* ARROW-6172 - [Java] Provide benchmarks to set IntVector with different methods
* ARROW-6180 - [C++] Create InputStream that is an isolated reader of a segment of a RandomAccessFile
* ARROW-6181 - [R] Only allow R package to install without libarrow on linux
* ARROW-6187 - [C++] fallback to storage type when writing ExtensionType to Parquet
* ARROW-6192 - [GLib] Use the same SO version as C++
* ARROW-6194 - [Java] Add non-static approach in DictionaryEncoder making it easy to extend and reuse
* ARROW-6206 - [Java][Docs] Document environment variables/java properties
* ARROW-6209 - [Java] Extract set null method to the base class for fixed width vectors
* ARROW-6216 - [C++] Allow user to select the compression level
* ARROW-6219 - [Java] Add API for JDBC adapter that can convert less than the full result set at a time.
* ARROW-6225 - [Website] Update arrow-site/README and any other places to point website contributors in right direction
* ARROW-6230 - [R] Reading in Parquet files are 20x slower than reading fst files in R
* ARROW-6231 - [C++][Python] Consider assigning default column names when reading CSV file and header\_rows=0
* ARROW-6232 - [C++] Rename Argsort kernel to SortToIndices
* ARROW-6237 - [R] Add option to set CXXFLAGS when compiling R package with $ARROW\_R\_CXXFLAGS
* ARROW-6240 - [Ruby] Arrow::Decimal128Array returns BigDecimal
* ARROW-6246 - [Website] Add link to R documentation site
* ARROW-6249 - [Java] Remove useless class ByteArrayWrapper
* ARROW-6252 - [Python] Add pyarrow.Array.diff method that exposes arrow::Diff
* ARROW-6253 - [Python] Expose "enable\_buffered\_stream" option from parquet::ReaderProperties in pyarrow.parquet.read\_table
* ARROW-6258 - [R] Add macOS build scripts
* ARROW-6260 - [Website] Use deploy key on Travis to build and push to asf-site
* ARROW-6262 - [Developer] Show JIRA issue before merging
* ARROW-6264 - [Java] There is no need to consider byte order in ArrowBufHasher
* ARROW-6267 - [Ruby] Add Arrow::Time for Arrow::Time{32,64}DataType value
* ARROW-6271 - [Rust] [DataFusion] Add example for running SQL against Parquet
* ARROW-6272 - [Rust] [DataFusion] Add register\_parquet convenience method to ExecutionContext
* ARROW-6279 - [Python] Add Table.slice method or allow slices in \_\_getitem\_\_
* ARROW-6284 - [C++] Allow references in std::tuple when converting tuple to arrow array
* ARROW-6289 - [Java] Add empty() in UnionVector to create instance
* ARROW-6294 - [C++] Use hyphen for plasma-store-server executable
* ARROW-6296 - [Java] Cleanup JDBC interfaces and eliminate one memcopy for binary/varchar fields
* ARROW-6297 - [Java] Compare ArrowBufPointers by unsigned integers
* ARROW-6303 - [Rust] Add a feature to disable SIMD
* ARROW-6304 - [Java] Add description to each maven artifact
* ARROW-6311 - [Java] Make ApproxEqualsVisitor accept DiffFunction to make it more flexible
* ARROW-6313 - [Format] Tracking for ensuring flatbuffer serialized values are aligned in stream/files.
* ARROW-6319 - [C++] Extract the core of NumericTensor<T>::Value as Tensor::Value<T>
* ARROW-6328 - Click.option-s should have help text
* ARROW-6329 - [Format] Add 4-byte "stream continuation" to IPC message format to align Flatbuffers
* ARROW-6331 - [Java] Incorporate ErrorProne into the java build
* ARROW-6334 - [Java] Improve the dictionary builder API to return the position of the value in the dictionary
* ARROW-6335 - [Java] Improve the performance of DictionaryHashTable
* ARROW-6336 - [Python] Clarify pyarrow.serialize/deserialize docstrings viz-a-viz relationship with Arrow IPC protocol
* ARROW-6337 - [R] as\_tibble in R API is a misnomer
* ARROW-6338 - [R] Type function names don't match type names
* ARROW-6342 - [Python] Add pyarrow.record\_batch factory function with same basic API / semantics as pyarrow.table
* ARROW-6350 - [Ruby] Remove Arrow::Struct and use Hash instead
* ARROW-6351 - [Ruby] Improve Arrow#values performance
* ARROW-6353 - [Python] Allow user to select compression level in pyarrow.parquet.write\_table
* ARROW-6355 - [Java] Make range equal visitor reusable
* ARROW-6357 - [C++] S3: allow for background writes
* ARROW-6358 - [C++] FileSystem::DeleteDir should make it optional to delete the directory itself
* ARROW-6360 - [R] Update support for compression
* ARROW-6362 - [C++] S3: more flexible credential options
* ARROW-6365 - [R] Should be able to coerce numeric to integer with schema
* ARROW-6366 - [Java] Make field vectors final explicitly
* ARROW-6368 - [C++] Add RecordBatch projection functionality
* ARROW-6373 - [C++] Make FixedWidthBinaryBuilder consistent with other primitive fixed width builders
* ARROW-6375 - [C++] Extend ConversionTraits to allow efficiently appending list values in STL API
* ARROW-6379 - [C++] Do not append any buffers when serializing NullType for IPC
* ARROW-6381 - [C++] BufferOutputStream::Write is slow for many small writes
* ARROW-6384 - [C++] Bump dependencies
* ARROW-6391 - [Python][Flight] Add built-in methods on FlightServerBase to start server and wait for it to be available
* ARROW-6402 - [C++] Suppress sign-compare warning with g++ 9.2.1
* ARROW-6403 - [Python] Expose FileReader::ReadRowGroups() to Python
* ARROW-6408 - [Rust] Use "if cfg!" pattern in SIMD kernel implementations
* ARROW-6413 - [R] Support autogenerating column names
* ARROW-6415 - [R] Remove usage of R CMD config CXXCPP
* ARROW-6416 - [Python] Confusing API & documentation regarding chunksizes
* ARROW-6426 - [FlightRPC] Expose gRPC configuration knobs in Flight
* ARROW-6447 - [C++] Builds with ARROW\_JEMALLOC=ON wait until jemalloc\_ep is complete before building any libarrow .cc files
* ARROW-6450 - [C++] Use 2x reallocation strategy in arrow::BufferBuilder instead of 1.5x
* ARROW-6451 - [Format] Add clarifications to Columnar.rst about the contents of "null" slots in Varbinary or List arrays
* ARROW-6453 - [C++] More informative error messages from S3
* ARROW-6454 - [Developer] Add LLVM license to LICENSE.txt due to binary redistribution in packages
* ARROW-6458 - [Java] Remove value boxing/unboxing for ApproxEqualsVisitor
* ARROW-6462 - [C++] Can't build with bundled double-conversion on CentOS 6 x86\_64
* ARROW-6465 - [Python] Improve Windows build instructions
* ARROW-6475 - [C++] Don't try to dictionary encode dictionary arrays
* ARROW-6477 - [Packaging][Crossbow] Use Azure Pipelines to build linux packages
* ARROW-6484 - [Java] Enable create indexType for DictionaryEncoding according to dictionary value count
* ARROW-6487 - [Rust] [DataFusion] Create test utils module
* ARROW-6489 - [Developer][Documentation] Fix merge script and readme
* ARROW-6504 - [Python][Packaging] Add mimalloc to conda packages for better performance
* ARROW-6505 - [Website] Add new committers
* ARROW-6518 - [Packaging][Python] Flight failing in OSX Python wheel builds
* ARROW-6524 - [Developer][Packaging] Nightly build report's subject should contain Arrow
* ARROW-6526 - [C++] Poison data in PoolBuffer destructor
* ARROW-6527 - [C++] Add OutputStream::Write() variant taking an owned buffer
* ARROW-6531 - [Python] Add detach() method to buffered streams
* ARROW-6532 - [R] Write parquet files with compression
* ARROW-6533 - [R] Compression codec should take a "level"
* ARROW-6534 - [Java] Fix typos and spelling
* ARROW-6540 - [R] Add Validate() methods
* ARROW-6541 - [Format][C++] Use two-part EOS and amend Format documentation
* ARROW-6542 - [R] Add View() method to array types
* ARROW-6544 - [R] Documentation/polishing for 0.15 release
* ARROW-6545 - [Go] Update Go IPC writer to use two-part EOS per mailing list discussion
* ARROW-6546 - [C++] Add missing FlatBuffers source dependency
* ARROW-6556 - [Python] Prepare for pandas release without SparseDataFrame
* ARROW-6557 - [Python] Always return pandas.Series from Array/ChunkedArray.to\_pandas, propagate field names to Series from RecordBatch, Table
* ARROW-6558 - [C++] Refactor Iterator to a type erased handle
* ARROW-6559 - [Developer][C++] Add "archery" option to specify system toolchain for C++ builds
* ARROW-6569 - [Website] Add support for auto deployment by GitHub Actions
* ARROW-6570 - [Python] Use MemoryPool to allocate memory for NumPy arrays in to\_pandas calls
* ARROW-6584 - [Python][Wheel] Bundle zlib again with the windows wheels
* ARROW-6588 - [C++] Suppress class-memaccess warning with g++ 9.2.1
* ARROW-6589 - [C++] Support BinaryType in MakeArrayOfNull
* ARROW-6590 - [C++] Do not require ARROW\_JSON=ON when ARROW\_IPC=ON
* ARROW-6591 - [R] Ignore .Rhistory files in source control
* ARROW-6605 - [C++] Add recursion depth control to fs::Selector
* ARROW-6606 - [C++] Construct tree structure from std::vector<fs::FileStats>
* ARROW-6609 - [C++] Add minimal build Dockerfile example
* ARROW-6610 - [C++] Add ARROW\_FILESYSTEM=ON/OFF CMake configuration flag
* ARROW-6621 - [Rust][DataFusion] Examples for DataFusion are not executed in CI
* ARROW-6629 - [Doc][C++] Document the FileSystem API
* ARROW-6630 - [Doc][C++] Document the file readers (CSV, JSON, Parquet, etc.)
* ARROW-6644 - [JS] Amend NullType IPC protocol to append no buffers
* ARROW-6647 - [C++] Can't build with g++ 4.8.5 on CentOS 7 by member initializer for shared\_ptr
* ARROW-6649 - [R] print() methods for Table, RecordBatch, etc.
* ARROW-6653 - [Developer] Add support for auto JIRA link on pull request
* ARROW-6664 - [C++] Add option to build without SSE4.2
* ARROW-6667 - [Python] Avoid Reference Cycles in pyarrow.parquet
* ARROW-6683 - [Python] Add unit tests that validate cross-compatibility with pyarrow.parquet when fastparquet is installed
* ARROW-6735 - [C++] Suppress sign-compare warning with g++ 9.2.1
## New Feature
* ARROW-1561 - [C++] Kernel implementations for "isin" (set containment)
* ARROW-1566 - [C++] Implement non-materializing sort kernels
* ARROW-1741 - [C++] Comparison function for DictionaryArray to determine if indices are "compatible"
* ARROW-3204 - [R] Enable package to be made available on CRAN
* ARROW-3777 - [C++] Implement a mock "high latency" filesystem
* ARROW-3817 - [R] $ method for RecordBatch
* ARROW-453 - [C++] Add filesystem implementation for Amazon S3
* ARROW-517 - [C++] Verbose Array::Equals
* ARROW-5351 - [Rust] Add support for take kernel functions
* ARROW-5588 - [C++] Better support for building UnionArrays
* ARROW-5594 - [C++] add support for UnionArrays to Take and Filter
* ARROW-5719 - [Java] Support in-place vector sorting
* ARROW-5792 - [Rust] [Parquet] A visitor trait for parquet types.
* ARROW-5832 - [Java] Support search operations for vector data
* ARROW-5834 - [Java] Apply new hash map in DictionaryEncoder
* ARROW-5835 - [Java] Support Dictionary Encoding for binary type
* ARROW-5844 - [Java] Support comparison & sort for more numeric types
* ARROW-5862 - [Java] Provide dictionary builder
* ARROW-5881 - [Java] Provide functionalities to efficiently determine if a validity buffer has completely 1 bits/0 bits
* ARROW-5892 - [C++][Gandiva] Support function aliases
* ARROW-5893 - [C++] Remove arrow::Column class from C++ library
* ARROW-5898 - [Java] Provide functionality to efficiently compute hash code for arbitrary memory segment
* ARROW-5901 - [Rust] Implement PartialEq to compare array and json values
* ARROW-5902 - [Java] Implement hash table and equals & hashCode API for dictionary encoding
* ARROW-5917 - [Java] Redesign the dictionary encoder
* ARROW-5920 - [Java] Support sort & compare for all variable width vectors
* ARROW-5945 - [Rust] [DataFusion] Table trait should support building complete queries
* ARROW-5970 - [Java] Provide pointer to Arrow buffer
* ARROW-5974 - [Python][C++] Enable CSV reader to read from concatenated gzip stream
* ARROW-5979 - [FlightRPC] Expose (de)serialization of protocol types
* ARROW-5997 - [Java] Support dictionary encoding for Union type
* ARROW-6013 - [Java] Support range searcher
* ARROW-6022 - [Java] Support equals API in ValueVector to compare two vectors equal
* ARROW-6024 - [Java] Provide more hash algorithms
* ARROW-6030 - [Java] Efficiently compute hash code for ArrowBufPointer
* ARROW-6031 - [Java] Support iterating a vector by ArrowBufPointer
* ARROW-6039 - [GLib] Add garrow\_array\_filter()
* ARROW-6053 - [Python] RecordBatchStreamReader::Open2 cdef type signature doesn't match C++
* ARROW-6079 - [Java] Implement/test UnionFixedSizeListWriter for FixedSizeListVector
* ARROW-6080 - [Java] Support compare and search operation for BaseRepeatedValueVector
* ARROW-6113 - [Java] Support vector deduplicate function
* ARROW-6138 - [C++] Add a basic (single RecordBatch) implementation of Dataset
* ARROW-6155 - [Java] Extract a super interface for vectors whose elements reside in continuous memory segments
* ARROW-6156 - [Java] Support compare semantics for ArrowBufPointer
* ARROW-6161 - [C++] Implements dataset::ParquetFile and associated Scan structures
* ARROW-6185 - [Java] Provide hash table based dictionary builder
* ARROW-6188 - [GLib] Add garrow\_array\_is\_in()
* ARROW-6196 - [Ruby] Add support for building Arrow::TimeNNArray by .new
* ARROW-6197 - [GLib] Add garrow\_decimal128\_rescale()
* ARROW-6203 - [GLib] Add garrow\_array\_sort\_to\_indices()
* ARROW-6204 - [GLib] Add garrow\_array\_is\_in\_chunked\_array()
* ARROW-6212 - [Java] Support vector rank operation
* ARROW-6229 - [C++] Add a DataSource implementation which scans a directory
* ARROW-6238 - [C++] Implement SimpleDataSource/SimpleDataFragment
* ARROW-6242 - [C++] Implements basic Dataset/Scanner/ScannerBuilder
* ARROW-6243 - [C++] Implement basic Filter expression classes
* ARROW-6244 - [C++] Implement Partition DataSource
* ARROW-6247 - [Java] Provide a common interface for float4 and float8 vectors
* ARROW-6250 - [Java] Implement ApproxEqualsVisitor comparing approx for floating point
* ARROW-6278 - [R] Read parquet files from raw vector
* ARROW-6288 - [Java] Implement TypeEqualsVisitor comparing vector type equals considering names and metadata
* ARROW-6306 - [Java] Support stable sort by stable comparators
* ARROW-6326 - [C++] Nullable fields when converting std::tuple to Table
* ARROW-6346 - [GLib] Add garrow\_array\_view()
* ARROW-6347 - [GLib] Add garrow\_array\_diff\_unified()
* ARROW-6397 - [C++][CI] Fix S3 minio failure
* ARROW-6419 - [Website] Blog post about Parquet dictionary performance work coming in 0.15.x release
* ARROW-6427 - [GLib] Add support for column names autogeneration CSV read option
* ARROW-6438 - [R] Add bindings for filesystem API
* ARROW-6480 - [Developer] Add command to generate and send e-mail report for a Crossbow run
* ARROW-6675 - [JS] Add scanReverse function to dataFrame and filteredDataframe
* ARROW-750 - [Format] Add LargeBinary and LargeString types
## Sub-task
* ARROW-4218 - [Rust] [Parquet] Implement ColumnReader
* ARROW-4365 - [Rust] [Parquet] Implement RecordReader
* ARROW-4507 - [Format] Create outline and introduction for new document.
* ARROW-4508 - [Format] Copy content from Layout.rst to new document.
* ARROW-4509 - [Format] Copy content from Metadata.rst to new document.
* ARROW-4510 - [Format] copy content from IPC.rst to new document.
* ARROW-4511 - [Format] remove individual documents in favor of new document once all content is moved
* ARROW-5846 - [Java] Create Avro adapter module and add dependencies
* ARROW-5861 - [Java] Initial implement to convert Avro record with primitive types
* ARROW-5988 - [Java] Avro adapter implement simple Record type
* ARROW-6035 - [Java] Avro adapter support convert nullable value
* ARROW-6069 - [Rust] [Parquet] Implement Converter to convert record reader to arrow primitive array.
* ARROW-6078 - [Java] Implement dictionary-encoded subfields for List type
* ARROW-6085 - [Rust] [DataFusion] Create traits for physical query plan
* ARROW-6086 - [Rust] [DataFusion] Implement parallel execution for parquet scan
* ARROW-6087 - [Rust] [DataFusion] Implement parallel execution for CSV scan
* ARROW-6088 - [Rust] [DataFusion] Implement parallel execution for projection
* ARROW-6089 - [Rust] [DataFusion] Implement parallel execution for selection
* ARROW-6090 - [Rust] [DataFusion] Implement parallel execution for hash aggregate
* ARROW-6097 - [Java] Avro adapter implement unions type
* ARROW-6101 - [Rust] [DataFusion] Create physical plan from logical plan
* ARROW-6199 - [Java] Avro adapter avoid potential resource leak.
* ARROW-6220 - [Java] Add API to avro adapter to limit number of rows returned at a time.
* ARROW-6265 - [Java] Avro adapter implement Array/Map/Fixed type
* ARROW-6287 - [Rust] [DataFusion] Refactor TableProvider to return thread-safe BatchIterator
* ARROW-6310 - [C++] Write 64-bit integers as strings in JSON integration test files
* ARROW-6314 - [C++] Implement changes to ensure flatbuffer alignment.
* ARROW-6315 - [Java] Make change to ensure flatbuffer reads are aligned
* ARROW-6316 - [Go] Make change to ensure flatbuffer reads are aligned
* ARROW-6317 - [JS] Implement changes to ensure flatbuffer alignment
* ARROW-6318 - [Integration] Update integration test to use generated binaries to ensure backwards compatibility
* ARROW-6356 - [Java] Avro adapter implement Enum type and nested Record type
* ARROW-6401 - [Java] Implement dictionary-encoded subfields for Struct type
* ARROW-6460 - [Java] Add benchmark and large fake data UT for avro adapter
* ARROW-6474 - [Python] Provide mechanism for python to write out old format
* ARROW-6519 - [Java] Use IPC continuation token to mark EOS
* ARROW-6539 - [R] Provide mechanism to write out old format
* ARROW-6563 - [Rust] [DataFusion] Create "merge" execution plan
* ARROW-6599 - [Rust] [DataFusion] Implement SUM aggregate expression
* ARROW-6665 - [Rust] [DataFusion] Implement numeric literal expressions
* ARROW-6668 - [Rust] [DataFusion] Implement CAST expression
* ARROW-6669 - [Rust] [DataFusion] Implement physical expression for binary expressions
## Task
* ARROW-1875 - [Java] Write 64-bit ints as strings in integration test JSON files
* ARROW-2931 - [Crossbow] Windows builds are attempting to run linux and osx packaging tasks
* ARROW-5483 - [Java] add ValueVector constructors that take a Field object
* ARROW-5579 - [Java] shade flatbuffer dependency
* ARROW-5580 - [C++][Gandiva] Correct definitions of timestamp functions in Gandiva
* ARROW-5758 - [C++][Gandiva] Support casting decimals to varchar and vice versa
* ARROW-5841 - [Website] Add 0.14.0 release note
* ARROW-5867 - [C++][Gandiva] Add support for cast int to decimal
* ARROW-5872 - Support mod(double, double) method in Gandiva
* ARROW-5891 - [C++][Gandiva] Remove duplicates in function registries
* ARROW-5903 - [Java] Set methods in DecimalVector are slow
* ARROW-5934 - [Python] Bundle arrow's LICENSE with the wheels
* ARROW-5944 - [C++][Gandiva] Remove 'div' alias for 'divide'
* ARROW-5957 - [C++][Gandiva] Implement div function in Gandiva
* ARROW-5958 - [Python] Link zlib statically in the wheels
* ARROW-5975 - [C++][Gandiva] Add method to cast Date(in Milliseconds) to timestamp
* ARROW-6008 - [Release] Don't parallelize the bintray upload script
* ARROW-6009 - [Release][JS] Ignore NPM errors in the javascript release script
* ARROW-6023 - [C++][Gandiva] Add functions in Gandiva
* ARROW-6026 - [Doc] Add CONTRIBUTING.md
* ARROW-6034 - [C++][Gandiva] Add string functions in Gandiva
* ARROW-6094 - [Format][Flight] Add GetFlightSchema to Flight RPC
* ARROW-6134 - [C++][Gandiva] Add concat function in Gandiva
* ARROW-6137 - [C++][Gandiva] Change output format of castVARCHAR(timestamp) in Gandiva
* ARROW-6144 - [C++][Gandiva] Implement random function in Gandiva
* ARROW-6162 - [C++][Gandiva] Do not truncate string in castVARCHAR\_varchar when out\_len parameter is zero
* ARROW-6177 - [C++] Add Array::Validate()
* ARROW-6217 - [Website] Remove needless \_site/ directory
* ARROW-6383 - [Java] report outstanding child allocators on parent allocator close
* ARROW-6385 - [C++] Investigate xxh3
* ARROW-6422 - [Gandiva] Fix double-conversion linker issue
* ARROW-6490 - [Java] log error for leak in allocator close
* ARROW-6491 - [Java] fix master build failure caused by ErrorProne
* ARROW-6601 - [Java] Improve JDBC adapter performance & add benchmark
* ARROW-6725 - [CI] Disable 3rdparty fuzzit nightly builds
## Test
* ARROW-5525 - [C++][CI] Enable continuous fuzzing
* ARROW-5978 - [FlightRPC] [Java] Integration test client doesn't close buffers
* ARROW-6193 - [GLib] Add missing require in test
* ARROW-6218 - [Java] Add UINT type test in integration to avoid potential overflow
## Wish
* ARROW-3538 - [Python] ability to override the automated assignment of uuid for filenames when writing datasets
* ARROW-6142 - [R] Install instructions on linux could be clearer
* ARROW-6183 - [R] Document that you don't have to use tidyselect if you don't want
* ARROW-6292 - [C++] Add an option to build with mimalloc
* ARROW-6300 - [C++] Add io::OutputStream::Abort()
* ARROW-6525 - [C++] CloseFromDestructor() should perhaps not crash
* ARROW-6549 - [C++] Switch back to latest jemalloc 5.x
# Apache Arrow 0.14.0 (29 June 2019)
## Bug
* ARROW-1837 - [Java] Unable to read unsigned integers outside signed range for bit width in integration tests
* ARROW-2119 - [C++][Java] Handle Arrow stream with zero record batch
* ARROW-2136 - [Python] Non-nullable schema fields not checked in conversions from pandas
* ARROW-2256 - [C++] Fuzzer builds fail out of the box on Ubuntu 16.04 using LLVM apt repos
* ARROW-2461 - [Python] Build wheels for manylinux2010 tag
* ARROW-3344 - [Python] test\_plasma.py fails (in test\_plasma\_list)
* ARROW-3399 - [Python] Cannot serialize numpy matrix object
* ARROW-3650 - [Python] Mixed column indexes are read back as strings
* ARROW-3762 - [C++] Parquet arrow::Table reads error when overflowing capacity of BinaryArray
* ARROW-4021 - [Ruby] Error building red-arrow on msys2
* ARROW-4076 - [Python] schema validation and filters
* ARROW-4139 - [Python] Cast Parquet column statistics to unicode if UTF8 ConvertedType is set
* ARROW-4301 - [Java][Gandiva] Maven snapshot version update does not seem to update Gandiva submodule
* ARROW-4324 - [Python] Array dtype inference incorrect when created from list of mixed numpy scalars
* ARROW-4350 - [Python] dtype=object arrays cannot be converted to a list-of-list ListArray
* ARROW-4447 - [C++] Investigate dynamic linking for libthrift
* ARROW-4516 - [Python] Error while creating a ParquetDataset on a path without \`\_common\_dataset\` but with an empty \`\_tempfile\`
* ARROW-4651 - [Format] Flight Location should be more flexible than a (host, port) pair
* ARROW-4675 - [Python] Error serializing bool ndarray in py2 and deserializing in py3
* ARROW-4694 - [CI] detect-changes.py is inconsistent
* ARROW-4723 - [Python] Skip \_files when reading a directory containing parquet files
* ARROW-4823 - [Python] read\_csv shouldn't close file handles it doesn't own
* ARROW-4845 - [R] Compiler warnings on Windows MingW64
* ARROW-4851 - [Java] BoundsChecking.java defaulting behavior for old drill parameter seems off
* ARROW-4885 - [Python] read\_csv() can't handle decimal128 columns
* ARROW-4886 - [Rust] Inconsistent behaviour with casting sliced primitive array to list array
* ARROW-4923 - Expose setters for Decimal vector that take long and double inputs
* ARROW-4934 - [Python] Address deprecation notice that will be a bug in Python 3.8
* ARROW-5019 - [C#] ArrowStreamWriter doesn't work on a non-seekable stream
* ARROW-5049 - [Python] org/apache/hadoop/fs/FileSystem class not found when pyarrow FileSystem used in spark
* ARROW-5051 - [GLib][Gandiva] Test failure in release verification script
* ARROW-5058 - [Release] 02-source.sh generates e-mail template with wrong links
* ARROW-5068 - [Gandiva][Packaging] Fix gandiva nightly builds after the CMake refactor
* ARROW-5090 - Parquet linking fails on MacOS due to @rpath in dylib
* ARROW-5092 - [C#] Source Link doesn't work with the C# release script
* ARROW-5095 - [Flight][C++] Flight DoGet doesn't expose server error message
* ARROW-5096 - [Packaging][deb] plasma-store-server packages are missing
* ARROW-5097 - [Packaging][CentOS6] arrow-lib has unresolvable dependencies
* ARROW-5098 - [Website] Update APT install document for 0.13.0
* ARROW-5100 - [JS] Writer swaps byte order if buffers share the same underlying ArrayBuffer
* ARROW-5117 - [Go] Panic when appending zero slices after initializing a builder
* ARROW-5119 - [Go] invalid Stringer implementation for array.Boolean
* ARROW-5129 - [Rust][Parquet] Column writer bug: check dictionary encoder when adding a new data page
* ARROW-5130 - [Python] Segfault when importing TensorFlow after Pyarrow
* ARROW-5132 - [Java] Errors on building gandiva\_jni.dll on Windows with Visual Studio 2017
* ARROW-5138 - [Python/C++] Row group retrieval doesn't restore index properly
* ARROW-5142 - [CI] Fix conda calls in AppVeyor scripts
* ARROW-5144 - [Python] ParquetDataset and ParquetPiece not serializable
* ARROW-5146 - [Dev] Merge script imposes directory name
* ARROW-5147 - [C++] get an error in building: Could NOT find DoubleConversion
* ARROW-5148 - [CI] [C++] LLVM-related compile errors
* ARROW-5149 - [Packaging][Wheel] Pin LLVM to version 7 in windows builds
* ARROW-5152 - [Python] CMake warnings when building
* ARROW-5159 - Unable to build benches in arrow crate.
* ARROW-5160 - [C++] ABORT\_NOT\_OK evaluates expression twice
* ARROW-5166 - [Python][Parquet] Statistics for uint64 columns may overflow
* ARROW-5167 - [C++] Upgrade string-view-light to latest
* ARROW-5169 - [Python] non-nullable fields are converted to nullable in {{Table.from\_pandas}}
* ARROW-5173 - [Go] handle multiple concatenated streams back-to-back
* ARROW-5174 - [Go] implement Stringer for DataTypes
* ARROW-5177 - [Python] ParquetReader.read\_column() doesn't check bounds
* ARROW-5183 - [CI] MinGW build failures on AppVeyor
* ARROW-5184 - [Rust] Broken links and other documentation warnings
* ARROW-5195 - [Python] read\_csv ignores null\_values on string types
* ARROW-5201 - [Python] Import ABCs from collections is deprecated in Python 3.7
* ARROW-5208 - [Python] Inconsistent resulting type during casting in pa.array() when mask is present
* ARROW-5214 - [C++] Offline dependency downloader misses some libraries
* ARROW-5217 - [Rust] [CI] DataFusion test failure
* ARROW-5232 - [Java] value vector size increases rapidly in case of clear/setSafe loop
* ARROW-5233 - [Go] migrate to new flatbuffers-v1.11.0
* ARROW-5237 - [Python] pandas\_version key in pandas metadata no longer populated
* ARROW-5240 - [C++][CI] cmake\_format 0.5.0 appears to fail the build
* ARROW-5242 - [C++] Arrow doesn't compile cleanly with Visual Studio 2017 Update 9 or later due to narrowing
* ARROW-5243 - [Java][Gandiva] Add test for decimal compare functions
* ARROW-5245 - [C++][CI] Unpin cmake\_format
* ARROW-5246 - [Go] use Go-1.12 in CI
* ARROW-5249 - [Java] Flight client doesn't handle auth correctly in some cases
* ARROW-5253 - [C++] external Snappy fails on Alpine
* ARROW-5254 - [Flight][Java] DoAction does not support result streams
* ARROW-5255 - [Java] Implement user-defined data types API
* ARROW-5260 - [Python][C++] Crash when deserializing from components in a fresh new process
* ARROW-5274 - [JavaScript] Wrong array type for countBy
* ARROW-5285 - [C++][Plasma] GpuProcessHandle is not released when GPU object deleted
* ARROW-5293 - [C++] Take kernel on DictionaryArray does not preserve ordered flag
* ARROW-5294 - [CI] setuptools\_scm failures
* ARROW-5296 - [Java] Sporadic Flight test failures
* ARROW-5301 - [Python] parquet documentation outdated on nthreads argument
* ARROW-5306 - [CI] [GLib] Disable GTK-Doc
* ARROW-5308 - [Go] remove deprecated Feather format
* ARROW-5314 - [Go] Incorrect Printing for String Arrays with Offsets
* ARROW-5325 - [Archery][Benchmark] Output properly formatted jsonlines from benchmark diff cli command
* ARROW-5330 - [Python] [CI] Run Python Flight tests on Travis-CI
* ARROW-5332 - [R] R package fails to build/install: error in dyn.load()
* ARROW-5348 - [CI] [Java] Gandiva checkstyle failure
* ARROW-5360 - [Rust] Builds are broken by rustyline on nightly 2019-05-16+
* ARROW-5362 - [C++] Compression round trip test can cause some sanitizers to fail
* ARROW-5373 - [Java] Add missing details for Gandiva Java Build
* ARROW-5376 - [C++] Compile failure on gcc 5.4.0
* ARROW-5383 - [Go] update IPC flatbuf (new Duration type)
* ARROW-5387 - [Go] properly handle sub-slice of List
* ARROW-5388 - [Go] use arrow.TypeEqual in array.NewChunked
* ARROW-5390 - [CI] Job time limit exceeded on Travis
* ARROW-5398 - [Python] Flight tests broken by URI changes
* ARROW-5403 - [C++] Test failures not propagated in Windows shared builds
* ARROW-5411 - [C++][Python] Build error building on Mac OS Mojave
* ARROW-5412 - [Java] Integration test fails with UnsupportedOperationException
* ARROW-5419 - [C++] CSV strings\_can\_be\_null option doesn't respect all null\_values
* ARROW-5421 - [Packaging][Crossbow] Duplicated key in nightly test configuration
* ARROW-5430 - [Python] Can read but not write parquet partitioned on large ints
* ARROW-5435 - [Java] add test for IntervalYearVector#getAsStringBuilder
* ARROW-5437 - [Python] Missing pandas pytest marker from parquet tests
* ARROW-5446 - [C++] Use cmake header install directory instead of include
* ARROW-5448 - [CI] MinGW build failures on AppVeyor
* ARROW-5453 - [C++] Just-released cmake-format 0.5.2 breaks the build
* ARROW-5455 - [Rust] Build broken by 2019-05-30 Rust nightly
* ARROW-5456 - [GLib][Plasma] Installed plasma-glib may be used on building document
* ARROW-5457 - [GLib][Plasma] Environment variable name for test is wrong
* ARROW-5459 - [Go] implement Stringer for Float16 DataType
* ARROW-5462 - [Go] support writing zero-length List
* ARROW-5487 - [CI] [Python] Failure in docs build
* ARROW-5507 - [Plasma] [CUDA] Compile error
* ARROW-5514 - [C++] Printer for uint64 shows wrong values
* ARROW-5517 - [C++] Header collection CMake logic should only consider filename without directory included
* ARROW-5520 - [C++][Packaging] No NVidia CUDA toolkit on AArch64C
* ARROW-5521 - [Packaging] License check fails with Apache RAT 0.13
* ARROW-5528 - Concatenate() crashes when concatenating empty binary arrays.
* ARROW-5532 - [JS] Field Metadata Not Read
* ARROW-5551 - [Go] invalid FixedSizeArray representation
* ARROW-5553 - [Ruby] red-arrow gem does not compile on ruby:2.5 docker image
* ARROW-5576 - [C++] Flaky thrift\_ep tarball downloads
* ARROW-5577 - [C++] Link failure due to googletest shared library on Alpine Linux
* ARROW-5583 - [Java] When the isSet of a NullableValueHolder is 0, the buffer field should not be used
* ARROW-5584 - [Java] Add import for link reference in FieldReader javadoc
* ARROW-5589 - [C++][Fuzzing] arrow-ipc-fuzzing-test crash 2354085db0125113f04f7bd23f54b85cca104713
* ARROW-5592 - [Go] implement Duration array
* ARROW-5596 - [Python] Flight tests failing on Python 2.7
* ARROW-5601 - [gandiva] Error when projector with a string field
* ARROW-5603 - [Python] register pytest markers to avoid warnings
* ARROW-5605 - [C++][Fuzzing] arrow-ipc-fuzzing-test crash 74aec871d14bb6b07c72ea8f0e8c9f72cbe6b73c
* ARROW-5606 - [Python] pandas.RangeIndex.\_start/\_stop/\_step are deprecated
* ARROW-5608 - [C++][parquet] Invalid memory access when using parquet::arrow::ColumnReader
* ARROW-5615 - [C++] Compilation error due to C++11 string literals on gcc 5.4.0 Ubuntu 16.04
* ARROW-5616 - [Python] C++ build failure against Python 2.7 headers
* ARROW-5617 - [C++] thrift\_ep 0.12.0 fails to build when using ARROW\_BOOST\_VENDORED=ON
* ARROW-5619 - [C++] get\_apache\_mirror.py doesn't work with Python 3.5
* ARROW-5624 - [C++] -Duriparser\_SOURCE=BUNDLED is broken
* ARROW-5626 - [C++][Gandiva] Expression cache should consider precision and scale too
* ARROW-5629 - [C++] Fix Coverity issues
* ARROW-5631 - [C++] CMake 3.2 build is broken
* ARROW-5648 - [C++] Build fails on mingw without codecvt
* ARROW-5654 - [C++] ChunkedArray should validate the types of the arrays
* ARROW-5674 - [Python] Missing pandas pytest markers from test\_parquet.py
* ARROW-5675 - [Doc] Fix typo in documentation describing compile/debug workflow on macOS with Xcode IDE
* ARROW-5678 - [R][Lint] Fix hadolint docker linting error
* ARROW-5693 - [Go] skip IPC integration test for Decimal128
* ARROW-5697 - [GLib] c\_glib/Dockerfile is broken
* ARROW-5698 - [R] r/Dockerfile docker-compose build is broken
* ARROW-5709 - [C++] gandiva-date\_time\_test failure on Windows
* ARROW-5714 - [JS] Inconsistent behavior in Int64Builder with/without BigNum
* ARROW-5723 - [Gandiva][Crossbow] Builds failing
* ARROW-5728 - [Python] [CI] Travis-CI failures in test\_jvm.py
* ARROW-5730 - [Python][CI] Selectively skip test cases in the dask integration test
* ARROW-5732 - [C++] macOS builds failing idiosyncratically on master with warnings from pmmintrin.h
* ARROW-5735 - [C++] Appveyor builds failing persistently in thrift\_ep build
* ARROW-5737 - [C++][Gandiva] Gandiva not building in manylinux
* ARROW-5738 - [Crossbow][Conda] OSX package builds are failing with missing intrinsics
* ARROW-5739 - [CI] Fix docker python build
* ARROW-5750 - [Java] Java compilation failures on master
* ARROW-5754 - [C++] Missing override for ~GrpcStreamWriter?
* ARROW-5765 - [C++] TestDictionary.Validate test is crashed with release build
* ARROW-5770 - [C++] Fix -Wpessimizing-move in result.h
* ARROW-5771 - [Python] Docker python-nopandas job fails
* ARROW-5781 - [Archery] Ensure benchmark clone accepts remotes in revision
* ARROW-61 - [Java] Method can return the value bigger than long MAX\_VALUE
## Improvement
* ARROW-1496 - [JS] Upload coverage data to codecov.io
* ARROW-1957 - [Python] Write nanosecond timestamps using new NANO LogicalType Parquet unit
* ARROW-1983 - [Python] Add ability to write parquet \`\_metadata\` file
* ARROW-2057 - [Python] Configure size of data pages in pyarrow.parquet.write\_table
* ARROW-2217 - [C++] Add option to use dynamic linking for compression library dependencies
* ARROW-2298 - [Python] Add option to not consider NaN to be null when converting to an integer Arrow type
* ARROW-2707 - [C++] Implement Table::Slice methods using Column::Slice
* ARROW-2796 - [C++] Simplify symbols.map file, use when building libarrow\_python
* ARROW-2818 - [Python] Better error message when passing SparseDataFrame into Table.from\_pandas
* ARROW-2981 - [C++] Support scripts / documentation for running clang-tidy on codebase
* ARROW-3040 - [Go] add support for comparing Arrays
* ARROW-3041 - [Go] add support for TimeArray
* ARROW-3052 - [C++] Detect ORC system packages
* ARROW-3144 - [C++] Move "dictionary" member from DictionaryType to ArrayData to allow for changing dictionaries between Array chunks
* ARROW-3150 - [Python] Ship Flight-enabled Python wheels on Linux and Windows
* ARROW-3166 - [C++] Consolidate IO interfaces used in arrow/io and parquet-cpp
* ARROW-3200 - [C++] Add support for reading Flight streams with dictionaries
* ARROW-3290 - [C++] Toolchain support for secure gRPC
* ARROW-3294 - [C++] Test Flight RPC on Windows / Appveyor
* ARROW-3314 - [R] Set -rpath using pkg-config when building
* ARROW-3475 - [C++] Int64Builder.Finish(NumericArray<Int64Type>)
* ARROW-3572 - [Packaging] Correctly handle ssh origin urls for crossbow
* ARROW-3671 - [Go] implement Interval array
* ARROW-3676 - [Go] implement Decimal128 array
* ARROW-3679 - [Go] implement IPC protocol
* ARROW-3680 - [Go] implement Float16 array
* ARROW-3686 - [Python] Support for masked arrays in to/from numpy
* ARROW-3729 - [C++] Support for writing TIMESTAMP\_NANOS Parquet metadata
* ARROW-3758 - [R] Build R library on Windows, document build instructions for Windows developers
* ARROW-3759 - [R][CI] Build and test on Windows in Appveyor
* ARROW-3767 - [C++] Add cast for Null to any type
* ARROW-3780 - [R] Failed to fetch data: invalid data when collecting int16
* ARROW-3794 - [R] Consider mapping INT8 to integer() not raw()
* ARROW-3804 - [R] Consider lowering required R runtime
* ARROW-3904 - [C++/Python] Validate scale and precision of decimal128 type
* ARROW-4013 - [Documentation][C++] Document how to build Apache Arrow on MSYS2
* ARROW-4020 - [Release] Remove source artifacts from dev dist system after release vote passes
* ARROW-4047 - [Python] Document use of int96 timestamps and options in Parquet docs
* ARROW-4159 - [C++] Check for -Wdocumentation issues
* ARROW-4194 - [Format] Metadata.rst does not specify timezone for Timestamp type
* ARROW-4337 - [C#] Array / RecordBatch Builder Fluent API
* ARROW-4343 - [C++] Add as complete as possible Ubuntu Trusty / 14.04 build to docker-compose setup
* ARROW-4356 - [CI] Add integration (docker) test for turbodbc
* ARROW-4452 - [Python] Serializing sparse torch tensors
* ARROW-4467 - [Rust] [DataFusion] Create a REPL & Dockerfile for DataFusion
* ARROW-4503 - [C#] ArrowStreamReader allocates and copies data excessively
* ARROW-4504 - [C++] Reduce the number of unit test executables
* ARROW-4505 - [C++] Nicer PrettyPrint for date32
* ARROW-4566 - [C++][Flight] Add option to run arrow-flight-benchmark against a perf server running on a different host
* ARROW-4596 - [Rust] [DataFusion] Implement COUNT aggregate function
* ARROW-4622 - [C++] [Python] MakeDense and MakeSparse in UnionArray should accept a vector of Field
* ARROW-4625 - [Flight] Wrap server busy-wait methods
* ARROW-4626 - [Flight] Add application metadata field to DoGet
* ARROW-4627 - [Flight] Add application metadata field to DoPut
* ARROW-4714 - [C++][Java] Providing JNI interface to Read ORC file via Arrow C++
* ARROW-4717 - [C#] Consider exposing ValueTask instead of Task
* ARROW-4787 - [C++] Include "null" values (perhaps with an option to toggle on/off) in hash kernel actions
* ARROW-4788 - [C++] Develop less verbose API for constructing StructArray
* ARROW-4800 - [C++] Create/port a StatusOr implementation to be able to return a status or a type
* ARROW-4824 - [Python] read\_csv should accept io.StringIO objects
* ARROW-4847 - [Python] Add pyarrow.table factory function that dispatches to various ctors based on type of input
* ARROW-4911 - [R] Support for building package for Windows
* ARROW-4912 - [C++, Python] Allow specifying column names to CSV reader
* ARROW-4945 - [Flight] Enable Flight integration tests in Travis
* ARROW-4968 - [Rust] StructArray builder and From<> methods should check that field types match schema
* ARROW-4990 - [C++] Kernel to compare array with array
* ARROW-4993 - [C++] Display summary at the end of CMake configuration
* ARROW-5000 - [Python] Fix deprecation warning from setup.py
* ARROW-5007 - [C++] Move DCHECK out of sse-utils
* ARROW-5020 - [C++][Gandiva] Split Gandiva-related conda packages for builds into separate .yml conda env file
* ARROW-5027 - [Python] Add JSON Reader
* ARROW-5038 - [Rust] [DataFusion] Implement AVG aggregate function
* ARROW-5039 - [Rust] [DataFusion] Fix bugs in CAST support
* ARROW-5045 - [Rust] Code coverage silently failing in CI
* ARROW-5053 - [Rust] [DataFusion] Use env var for location of arrow test data
* ARROW-5054 - [C++][Release] Test Flight in verify-release-candidate.sh
* ARROW-5061 - [Release] Improve 03-binary performance
* ARROW-5062 - [Java] Shade Java Guava dependency for Flight
* ARROW-5063 - [Java] FlightClient should not create a child allocator
* ARROW-5064 - [Release] Pass PKG\_CONFIG\_PATH to glib in the verification script
* ARROW-5066 - [Integration] Add flags to enable/disable implementations in integration/integration\_test.py
* ARROW-5076 - [Packaging] Improve post binary upload performance
* ARROW-5077 - [Rust] Release process should change Cargo.toml to use release versions
* ARROW-5078 - [Documentation] Sphinx is failed by RemovedInSphinx30Warning
* ARROW-5079 - [Release] Add a script to release C# package
* ARROW-5080 - [Release] Add a script to release Rust packages
* ARROW-5081 - [C++] Consistently use PATH\_SUFFIXES in CMake config
* ARROW-5082 - [Python][Packaging] Reduce size of macOS and manylinux1 wheels
* ARROW-5083 - [Developer] In merge\_arrow\_pr.py script, allow user to set a released Fix Version
* ARROW-5088 - [C++] Do not set -Werror when using BUILD\_WARNING\_LEVEL=CHECKIN in release mode
* ARROW-5091 - [Flight] Rename FlightGetInfo message to FlightInfo
* ARROW-5093 - [Packaging] Add support for selective binary upload
* ARROW-5094 - [Packaging] Add APT/Yum verification scripts
* ARROW-5113 - [C++][Flight] Unit tests in C++ for DoPut
* ARROW-5116 - [Rust] move kernel related files under compute/kernels
* ARROW-5124 - [C++] Add support for Parquet in MinGW build
* ARROW-5136 - [Flight] Implement call options (timeouts)
* ARROW-5137 - [Flight] Implement authentication APIs
* ARROW-5157 - [Website] Add MATLAB to powered by Apache Arrow page
* ARROW-5162 - [Rust] [Parquet] Rename mod reader to arrow.
* ARROW-5163 - [Gandiva] Cast timestamp/date are incorrectly evaluating year 0097 to 1997
* ARROW-5165 - [Python][Documentation] Build docs don't suggest assigning $ARROW\_BUILD\_TYPE
* ARROW-5178 - [Python] Allow creating Table from Python dict
* ARROW-5179 - [Python] Return plain dicts, not OrderedDict, on Python 3.7+
* ARROW-5185 - [C++] Add support for Boost with CMake configuration file
* ARROW-5191 - [Rust] Expose CSV and JSON reader schemas
* ARROW-5204 - [C++] Improve BufferBuilder performance
* ARROW-5212 - [Go] Array BinaryBuilder in Go library has no access to resize the values buffer
* ARROW-5218 - [C++] Improve build when third-party library locations are specified
* ARROW-5219 - [C++] Build protobuf\_ep in parallel when using Ninja
* ARROW-5222 - [Python] Issues with installing pyarrow for development on MacOS
* ARROW-5225 - [Java] Improve performance of BaseValueVector#getValidityBufferSizeFromCount
* ARROW-5238 - [Python] Improve usability of pyarrow.dictionary function
* ARROW-5241 - [Python] Add option to disable writing statistics to parquet file
* ARROW-5252 - [C++] Change variant implementation
* ARROW-5256 - [Packaging][deb] Failed to build with LLVM 7.1.0
* ARROW-5257 - [Website] Update site to use "official" Apache Arrow logo, add clearly marked links to logo
* ARROW-5258 - [C++/Python] Expose file metadata of dataset pieces to caller
* ARROW-5261 - [C++] Finish implementation of scalar types for Duration and Interval
* ARROW-5262 - [Python] Fix typo
* ARROW-5264 - [Java] Allow enabling/disabling boundary checking by environmental variable
* ARROW-5269 - [C++] Whitelist benchmarks candidates for regression checks
* ARROW-5281 - [Rust] [Parquet] Move DataPageBuilder to test\_common
* ARROW-5284 - [Rust] Replace libc with std::alloc for memory allocation
* ARROW-5286 - [Python] support Structs in Table.from\_pandas given a known schema
* ARROW-5288 - [Documentation] Enrich the contribution guidelines
* ARROW-5289 - [C++] Move arrow/util/concatenate.h to arrow/array/
* ARROW-5291 - [Python] Add wrapper for "take" kernel on Array
* ARROW-5298 - [Rust] Add debug implementation for Buffer
* ARROW-5309 - [Python] Add clarifications to Python "append" methods that return new objects
* ARROW-5311 - [C++] Return more specific invalid Status in Take kernel
* ARROW-5317 - [Rust] [Parquet] impl IntoIterator for SerializedFileReader
* ARROW-5319 - [CI] Enable ccache with MinGW builds
* ARROW-5323 - [CI] Use compression with clcache
* ARROW-5328 - [R] Add shell scripts to do a full package rebuild and test locally
* ARROW-5334 - [C++] Add "Type" to names of arrow::Integer, arrow::FloatingPoint classes for consistency
* ARROW-5335 - [Python] Raise on variable dictionaries when converting to pandas
* ARROW-5339 - [C++] Add jemalloc to thirdparty dependency download script
* ARROW-5341 - [C++] Add instructions about fixing and testing for -Wdocumentation clang warnings locally
* ARROW-5349 - [Python/C++] Provide a way to specify the file path in parquet ColumnChunkMetaData
* ARROW-5361 - [R] Follow DictionaryType/DictionaryArray changes from ARROW-3144
* ARROW-5363 - [GLib] Fix coding styles
* ARROW-5364 - [C++] Use ASCII rather than UTF-8 in BuildUtils.cmake comment
* ARROW-5365 - [C++][CI] Add UBSan and ASAN into CI
* ARROW-5368 - [C++] Disable jemalloc by default with MinGW
* ARROW-5369 - [C++] Add support for glog on Windows
* ARROW-5370 - [C++] Detect system uriparser by default
* ARROW-5378 - [C++] Add local FileSystem implementation
* ARROW-5389 - [C++] Add an internal temporary directory API
* ARROW-5393 - [R] Add tests and example for read\_parquet()
* ARROW-5395 - [C++] Utilize stream EOS in File format
* ARROW-5407 - [C++] Integration test Travis CI entry builds many unnecessary targets
* ARROW-5413 - [C++] CSV reader doesn't remove BOM
* ARROW-5415 - [Release] Release script should update R version everywhere
* ARROW-5416 - [Website] Add Homebrew to project installation page
* ARROW-5418 - [CI][R] Run code coverage and report to codecov.io
* ARROW-5420 - [Java] Implement or remove getCurrentSizeInBytes in VariableWidthVector
* ARROW-5427 - [Python] RangeIndex serialization change implications
* ARROW-5428 - [C++] Add option to set "read extent" in arrow::io::BufferedInputStream
* ARROW-5429 - [Java] Provide alternative buffer allocation policy
* ARROW-5433 - [C++][Parquet] improve parquet-reader columns information
* ARROW-5436 - [Python] expose filters argument in parquet.read\_table
* ARROW-5438 - [JS] Utilize stream EOS in File format
* ARROW-5441 - [C++] Implement FindArrowFlight.cmake
* ARROW-5442 - [Website] Clarify what makes a release artifact "official"
* ARROW-5447 - [CI] [Ruby] CI is failed on AppVeyor
* ARROW-5452 - [R] Add documentation website (pkgdown)
* ARROW-5461 - [Java] Add micro-benchmarks for Float8Vector and allocators
* ARROW-5464 - [Archery] Bad --benchmark-filter default
* ARROW-5465 - [Crossbow] Support writing submitted job definition yaml to a file
* ARROW-5470 - [CI] C++ local filesystem patch breaks Travis R job
* ARROW-5472 - [Development] Add warning to PR merge tool if no JIRA component is set
* ARROW-5474 - [C++] Document required Boost version
* ARROW-5477 - [C++] Check required RapidJSON version
* ARROW-5478 - [Packaging] Drop Ubuntu 14.04 support
* ARROW-5481 - [GLib] garrow\_seekable\_input\_stream\_peek() misses "error" parameter document
* ARROW-5488 - [R] Workaround when C++ lib not available
* ARROW-5492 - [R] Add "col\_select" argument to read\_\* functions to read subset of columns
* ARROW-5495 - [C++] Use HTTPS consistently for downloading dependencies
* ARROW-5496 - [R][CI] Fix relative paths in R codecov.io reporting
* ARROW-5498 - [C++] Build failure with Flatbuffers 1.11.0 and MinGW
* ARROW-5500 - [R] read\_csv\_arrow() signature should match readr::read\_csv()
* ARROW-5503 - [R] add read\_json()
* ARROW-5504 - [R] move use\_threads argument to global option
* ARROW-5509 - [R] write\_parquet()
* ARROW-5511 - [Packaging] Enable Flight in Conda packages
* ARROW-5513 - [Java] Refactor method name for getstartOffset to use camel case
* ARROW-5516 - [Python] Development page for pyarrow has a missing dependency in using pip
* ARROW-5518 - [Java] Set VectorSchemaRoot rowCount to 0 on allocateNew and clear
* ARROW-5524 - [C++] Turn off PARQUET\_BUILD\_ENCRYPTION in CMake if OpenSSL not found
* ARROW-5526 - [Developer] Add more prominent notice to GitHub issue template to direct bug reports to JIRA
* ARROW-5529 - [Flight] Allow serving with multiple TLS certificates
* ARROW-5531 - [Python] Support binary, utf8, and nested types in Array.from\_buffers
* ARROW-5533 - [Plasma] Plasma client should be thread-safe
* ARROW-5538 - [C++] Restrict minimum OpenSSL version to 1.0.2
* ARROW-5541 - [R] cast from negative int32 to uint32 and uint64 are now safe
* ARROW-5544 - [Archery] should not return non-zero in \`benchmark diff\` sub command on regression
* ARROW-5545 - [C++][Docs] Clarify expectation of UTC values for timestamps with time zones in C++ API docs
* ARROW-5547 - [C++][FlightRPC] arrow-flight.pc isn't provided
* ARROW-5552 - [Go] make Schema and Field implement Stringer
* ARROW-5554 - Add a python wrapper for arrow::Concatenate
* ARROW-5555 - [R] Add install\_arrow() function to assist the user in obtaining C++ runtime libraries
* ARROW-5556 - [Doc] Document JSON reader
* ARROW-5565 - [Python] Document how to use gdb when working on pyarrow
* ARROW-5567 - [C++] Fix build error of memory-benchmark
* ARROW-5574 - [R] documentation error for read\_arrow()
* ARROW-5582 - [Go] add support for comparing Records
* ARROW-5586 - [R] convert Array of LIST type to R lists
* ARROW-5587 - [Java] Add more maven style check for Java code
* ARROW-5590 - [R] Run "no libarrow" R build in the same CI entry if possible
* ARROW-5600 - [R] R package namespace cleanup
* ARROW-5604 - [Go] improve test coverage of type-traits
* ARROW-5612 - [Python][Documentation] Clarify date\_as\_object option behavior
* ARROW-5622 - [C++][Dataset] arrow-dataset.pc isn't provided
* ARROW-5625 - [R] convert Array of struct type to data frame columns
* ARROW-5632 - [Doc] Add some documentation describing compile/debug workflow on macOS with Xcode IDE
* ARROW-5633 - [Python] Enable bz2 in Linux wheels
* ARROW-5635 - [C++] Support "compacting" a table
* ARROW-5639 - [Java] Remove floating point computation from getOffsetBufferValueCapacity
* ARROW-5641 - [GLib] Remove enums files generated by GNU Autotools from Git targets
* ARROW-5643 - [Flight] Add ability to override hostname checking
* ARROW-5652 - [CI] Fix iwyu docker image
* ARROW-5656 - [Python] Enable Flight wheels on macOS
* ARROW-5659 - [C++] Add support for finding OpenSSL installed by Homebrew
* ARROW-5660 - [GLib][CI] Use the latest macOS image and all Homebrew based libraries
* ARROW-5662 - [C++] Add support for BOOST\_SOURCE=AUTO|BUNDLED|SYSTEM
* ARROW-5663 - [Packaging][RPM] Update CentOS packages for 0.14.0
* ARROW-5664 - [Crossbow] Execute nightly crossbow tests on CircleCI instead of Travis
* ARROW-5668 - [Python] Display "not null" in Schema.\_\_repr\_\_ for non-nullable fields
* ARROW-5669 - [Crossbow] manylinux1 wheel building failing
* ARROW-5670 - [Crossbow] get\_apache\_mirror.py fails with TLS error on macOS with Python 3.5
* ARROW-5671 - [crossbow] mac os python wheels failing
* ARROW-5683 - [R] Add snappy to Rtools Windows builds
* ARROW-5684 - [Packaging][deb] Add support for Ubuntu 19.04
* ARROW-5685 - [Packaging][deb] Add support for Apache Arrow Datasets
* ARROW-5687 - [C++] Remove remaining uses of ARROW\_BOOST\_VENDORED
* ARROW-5690 - [Packaging][Python] macOS wheels broken: libprotobuf.18.dylib missing
* ARROW-5694 - [Python] List of decimals are not supported when converting to pandas
* ARROW-5695 - [C#][Release] Run sourcelink test in verify-release-candidate.sh
* ARROW-5699 - [C++] Optimize parsing of Decimal128 in CSV
* ARROW-5702 - [C++] parquet::arrow::FileReader::GetSchema()
* ARROW-5705 - [Java] Optimize BaseValueVector#computeCombinedBufferSize logic
* ARROW-5706 - [Java] Remove type conversion in getValidityBufferValueCapacity
* ARROW-5707 - [Java] Improve the performance and code structure for ArrowRecordBatch
* ARROW-5710 - [C++] Allow compiling Gandiva with Ninja on Windows
* ARROW-5718 - [R] auto splice data frames in record\_batch() and table()
* ARROW-5721 - [Rust] Move array related code into a separate module
* ARROW-5724 - [R] [CI] AppVeyor build should use ccache
* ARROW-5725 - [Crossbow] Port conda recipes to azure pipelines
* ARROW-5727 - [Python] [CI] Install pytest-faulthandler before running tests
* ARROW-5748 - [Packaging][deb] Add support for Debian GNU/Linux buster
* ARROW-5749 - [Python] Add Python binding for Table::CombineChunks()
* ARROW-5751 - [Packaging][Python] Python macOS wheels have dynamic dependency on libcares
* ARROW-5752 - [Java] Improve the performance of ArrowBuf#setZero
* ARROW-5768 - [Release] There are needless newlines at the end of CHANGELOG.md
* ARROW-5773 - [R] Clean up documentation before release
* ARROW-5782 - [Release] Setup test data for Flight in dev/release/01-perform.sh
* ARROW-5783 - [Release][C#] Exclude dummy.git from RAT check
* ARROW-767 - [C++] Adopt FileSystem abstraction
* ARROW-835 - [Format] Add Timedelta type to describe time intervals
## New Feature
* ARROW-1012 - [C++] Create a configurable implementation of RecordBatchReader that reads from Apache Parquet files
* ARROW-1207 - [C++] Implement Map logical type
* ARROW-1261 - [Java] Add container type for Map logical type
* ARROW-1278 - Integration tests for Fixed Size List type
* ARROW-1279 - [Integration][Java] Integration tests for Map type
* ARROW-1280 - [C++] Implement Fixed Size List type
* ARROW-1558 - [C++] Implement boolean selection kernels
* ARROW-1774 - [C++] Add "view" function to create zero-copy views for compatible types, if supported
* ARROW-2467 - [Rust] Generate code using Flatbuffers
* ARROW-2517 - [Java] Add list<decimal> writer
* ARROW-2835 - [C++] ReadAt/WriteAt are inconsistent with moving the files position
* ARROW-2969 - [R] Convert between StructArray and "nested" data.frame column containing data frame in each cell
* ARROW-3087 - [C++] Add kernels for comparison operations to scalars
* ARROW-3191 - [Java] Add support for ArrowBuf to point to arbitrary memory.
* ARROW-3419 - [C++] Run include-what-you-use checks as nightly build
* ARROW-3732 - [R] Add functions to write RecordBatch or Schema to Message value, then read back
* ARROW-3791 - [C++] Add type inference for boolean values in CSV files
* ARROW-3810 - [R] type= argument for Array and ChunkedArray
* ARROW-3811 - [R] struct arrays inference
* ARROW-3814 - [R] RecordBatch$from\_arrays()
* ARROW-3815 - [R] refine record batch factory
* ARROW-3848 - [R] allow nbytes to be missing in RandomAccessFile$Read()
* ARROW-3897 - [MATLAB] Add MATLAB support for writing numeric datatypes to a Feather file
* ARROW-4302 - [C++] Add OpenSSL to C++ build toolchain
* ARROW-4701 - [C++] Add JSON chunker benchmarks
* ARROW-4708 - [C++] Add multithreaded JSON reader
* ARROW-4741 - [Java] Add documentation to all classes and enable checkstyle for class javadocs
* ARROW-4805 - [Rust] Write temporal arrays to CSV
* ARROW-4806 - [Rust] Support casting temporal arrays in cast kernels
* ARROW-4827 - [C++] Implement benchmark comparison between two git revisions
* ARROW-5071 - [Benchmarking] Performs a benchmark run with archery
* ARROW-5115 - [JS] Implement the Vector Builders
* ARROW-5126 - [Rust] [Parquet] Convert parquet column desc to arrow data type
* ARROW-5150 - [Ruby] Add Arrow::Table#raw\_records
* ARROW-5155 - [GLib][Ruby] Add support for building union arrays from data type
* ARROW-5168 - [GLib] Add garrow\_array\_take()
* ARROW-5171 - [C++] Use LESS instead of LOWER in compare enum option.
* ARROW-5187 - [Rust] Ability to flatten StructArray into a RecordBatch
* ARROW-5188 - [Rust] Add temporal builders for StructArray
* ARROW-5189 - [Rust] [Parquet] Format individual fields within a parquet row
* ARROW-5203 - [GLib] Add support for Compare filter
* ARROW-5268 - [GLib] Add GArrowJSONReader
* ARROW-5290 - [Java] Provide a flag to enable/disable null-checking in vectors' get methods
* ARROW-5299 - [C++] ListArray comparison is incorrect
* ARROW-5329 - Add support for building MATLAB interface to Feather directly within MATLAB
* ARROW-5342 - [Format] Formalize extension type metadata in IPC protocol
* ARROW-5372 - [GLib] Add support for null/boolean values CSV read option
* ARROW-5384 - [Go] add FixedSizeList array
* ARROW-5396 - [JS] Ensure reader and writer support files and streams with no RecordBatches
* ARROW-5404 - [C++] nonstd::string\_view conflicts with std::string\_view in c++17
* ARROW-5432 - [Python] Add 'read\_at' method to pyarrow.NativeFile
* ARROW-5463 - [Rust] Implement AsRef for Buffer
* ARROW-5486 - [GLib] Add binding of gandiva::FunctionRegistry and related things
* ARROW-5512 - [C++] Draft initial public APIs for Datasets project
* ARROW-5534 - [GLib] Add garrow\_table\_concatenate()
* ARROW-5535 - [GLib] Add garrow\_table\_slice()
* ARROW-5537 - [JS] Support delta dictionaries in RecordBatchWriter and DictionaryBuilder
* ARROW-5581 - [Java] Provide interfaces and initial implementations for vector sorting
* ARROW-5597 - [Packaging][deb] Add Flight packages
* ARROW-5755 - [Rust] [Parquet] Add derived clone for Type
* ARROW-653 - [Python / C++] Add debugging function to print an array's buffer contents in hexadecimal
* ARROW-840 - [Python] Provide Python API for creating user-defined data types that can survive Arrow IPC
* ARROW-973 - [Website] Add FAQ page about project
## Sub-task
* ARROW-2102 - [C++] Implement take kernel functions - primitive value type
* ARROW-2103 - [C++] Implement take kernel functions - string/binary value type
* ARROW-2104 - [C++] Implement take kernel functions - nested array value type
* ARROW-2105 - [C++] Implement take kernel functions - properly handle special indices
* ARROW-4121 - [C++] Refactor memory allocation from InvertKernel
* ARROW-4971 - [Go] DataType equality
* ARROW-4972 - [Go] Array equality
* ARROW-4973 - [Go] Slice Array equality
* ARROW-4974 - [Go] Array approx equality
* ARROW-5108 - [Go] implement reading primitive arrays from Arrow file
* ARROW-5109 - [Go] implement reading binary/string arrays from Arrow file
* ARROW-5110 - [Go] implement reading struct arrays from Arrow file
* ARROW-5111 - [Go] implement reading list arrays from Arrow file
* ARROW-5112 - [Go] implement writing arrays to Arrow file
* ARROW-5127 - [Rust] [Parquet] Add page iterator
* ARROW-5172 - [Go] implement reading fixed-size binary arrays from Arrow file
* ARROW-5250 - [Java] remove javadoc suppression on methods.
* ARROW-5266 - [Go] implement read/write IPC for Float16
* ARROW-5392 - [C++][CI][MinGW] Disable static library build on AppVeyor
* ARROW-5467 - [Go] implement read/write IPC for Time32/Time64 arrays
* ARROW-5468 - [Go] implement read/write IPC for Timestamp arrays
* ARROW-5469 - [Go] implement read/write IPC for Date32/Date64 arrays
* ARROW-5591 - [Go] implement read/write IPC for Duration & Intervals
* ARROW-5621 - [Go] implement read/write IPC for Decimal128 arrays
* ARROW-5672 - [Java] Refactor redundant method modifier
* ARROW-5780 - [C++] Add benchmark for Decimal128 operations
## Task
* ARROW-2412 - [Integration] Add nested dictionary integration test
* ARROW-4086 - [Java] Add apis to debug alloc failures
* ARROW-4702 - [C++] Upgrade dependency versions
* ARROW-4719 - [C#] Implement ChunkedArray, Column and Table in C#
* ARROW-4904 - [C++] Move implementations in arrow/ipc/test-common.h into libarrow\_testing
* ARROW-4913 - [Java][Memory] Limit number of ledgers and arrowbufs
* ARROW-4956 - [C#] Allow ArrowBuffers to wrap external Memory in C#
* ARROW-4959 - [Gandiva][Crossbow] Builds broken
* ARROW-5056 - [Packaging] Adjust conda recipes to use ORC conda-forge package on unix systems
* ARROW-5164 - [Gandiva] [C++] Introduce 32bit hash functions
* ARROW-5226 - [Gandiva] support compare operators for decimal
* ARROW-5275 - [C++] Write generic filesystem tests
* ARROW-5313 - [Format] Comments on Field table are a bit confusing
* ARROW-5321 - [Gandiva][C++] add isnull and isnotnull for utf8 and binary types
* ARROW-5346 - [C++] Revert changes to qualify duration in vendored date code
* ARROW-5434 - [Java] Introduce wrappers for backward compatibility for ArrowBuf changes in ARROW-3191
* ARROW-5443 - [Gandiva][Crossbow] Turn parquet encryption off
* ARROW-5449 - [C++] Local filesystem implementation: investigate Windows UNC paths
* ARROW-5451 - [C++][Gandiva] Add round functions for decimals
* ARROW-5476 - [Java][Memory] Fix Netty ArrowBuf Slice
* ARROW-5485 - [Gandiva][Crossbow] OSx builds failing
* ARROW-5490 - [C++] Remove ARROW\_BOOST\_HEADER\_ONLY
* ARROW-5491 - [C++] Remove unnecessary semicolons following MACRO definitions
* ARROW-5557 - [C++] Investigate performance of VisitBitsUnrolled on different platforms
* ARROW-5580 - Correct definitions of timestamp functions in Gandiva
* ARROW-5602 - [Java][Gandiva] Add test for decimal round functions
* ARROW-5637 - [Gandiva] [Java] Complete IN Expression
* ARROW-5650 - [Python] Update manylinux dependency versions
* ARROW-5661 - Support hash functions for decimal in Gandiva
* ARROW-5696 - [Gandiva] [C++] Introduce castVarcharVarchar
* ARROW-5701 - [C++][Gandiva] Build expressions only for the required selection vector types
* ARROW-5704 - [C++] Stop using ARROW\_TEMPLATE\_EXPORT for SparseTensorImpl class
## Test
* ARROW-4523 - [JS] Add row proxy generation benchmark
* ARROW-4725 - [C++] Dictionary tests disabled under MinGW builds
* ARROW-5194 - [C++][Plasma] TEST(PlasmaSerialization, GetReply) is failing
* ARROW-5371 - [Release] Add tests for dev/release/00-prepare.sh
* ARROW-5397 - Test Flight TLS support
* ARROW-5479 - [Rust] [DataFusion] Use ARROW\_TEST\_DATA instead of relative path for testing
* ARROW-5493 - [Integration/Go] add Go support for IPC integration tests
* ARROW-5623 - [CI][GLib] Failed on macOS
* ARROW-5769 - [Java] org.apache.arrow.flight.TestTls is failed via dev/release/00-prepare.sh
## Wish
* ARROW-5102 - [C++] Reduce header dependencies
* ARROW-5145 - [C++] Release mode lacks convenience input validation
* ARROW-5190 - [R] Discussion: tibble dependency in R package
* ARROW-5401 - [CI] [C++] Print ccache statistics on Travis-CI
# Apache Arrow 0.13.0 (28 March 2019)
## Bug
* ARROW-2392 - [Python] pyarrow RecordBatchStreamWriter allows writing batches with different schemas
* ARROW-295 - Create DOAP File
* ARROW-3086 - [Glib] GISCAN fails due to conda-shipped openblas
* ARROW-3096 - [Python] Update Python source build instructions given Anaconda/conda-forge toolchain migration
* ARROW-3133 - [C++] Logical boolean kernels in kernels/boolean.cc cannot write into preallocated memory
* ARROW-3208 - [C++] Segmentation fault when casting dictionary to numeric with nullptr valid\_bitmap
* ARROW-3564 - [Python] writing version 2.0 parquet format with dictionary encoding enabled
* ARROW-3578 - [Release] Address spurious Apache RAT failures in source release script
* ARROW-3593 - [R] CI builds failing due to GitHub API rate limits
* ARROW-3606 - [Python] flake8 fails on Crossbow
* ARROW-3669 - [Python] Convert big-endian numbers or raise error in pyarrow.array
* ARROW-3843 - [Python] Writing Parquet file from empty table created with Table.from\_pandas(..., preserve\_index=False) fails
* ARROW-3923 - [Java] JDBC-to-Arrow Conversion: Unnecessary Calendar Requirement
* ARROW-4081 - [Go] Sum methods on Mac OS X panic when the array is empty
* ARROW-4104 - [Java] race in AllocationManager during release
* ARROW-4117 - [Python] "asv dev" command fails with latest revision
* ARROW-4181 - [Python] TestConvertStructTypes.test\_from\_numpy\_large failing
* ARROW-4192 - "./dev/run\_docker\_compose.sh" is out of date
* ARROW-4213 - [Flight] C++ and Java implementations are incompatible
* ARROW-4244 - Clarify language around padding/alignment
* ARROW-4250 - [C++][Gandiva] Use approximate comparisons for floating point numbers in gandiva-projector-test
* ARROW-4252 - [C++] Status error context strings missing lines of code
* ARROW-4253 - [GLib] Cannot use non-system Boost specified with $BOOST\_ROOT
* ARROW-4254 - [C++] Gandiva tests fail to compile with Boost in Ubuntu 14.04 apt
* ARROW-4255 - [C++] Schema::GetFieldIndex is not thread-safe
* ARROW-4261 - [C++] CMake paths for IPC, Flight, Thrift, and Plasma don't support using Arrow as a subproject
* ARROW-4264 - [C++] Document why DCHECKs are used in kernels
* ARROW-4267 - [Python/C++][Parquet] Segfault when reading rowgroups with duplicated columns
* ARROW-4274 - [Gandiva] static jni library broken after decimal changes
* ARROW-4275 - [C++] gandiva-decimal\_single\_test extremely slow
* ARROW-4280 - [C++][Documentation] It looks like flex and bison are required for parquet
* ARROW-4282 - [Rust] builder benchmark is broken
* ARROW-4284 - [C#] File / Stream serialization fails due to type mismatch / missing footer
* ARROW-4295 - [Plasma] Incorrect log message when evicting objects
* ARROW-4296 - [Plasma] Starting Plasma store with use\_one\_memory\_mapped\_file enabled crashes due to improper memory alignment
* ARROW-4312 - [C++] Lint doesn't work anymore ("[Errno 24] Too many open files")
* ARROW-4319 - plasma/store.h pulls ins flatbuffer dependency
* ARROW-4322 - [CI] docker nightlies fails after conda-forge compiler migration
* ARROW-4323 - [Packaging] Fix failing OSX clang conda forge builds
* ARROW-4326 - [C++] Development instructions in python/development.rst will not work for many Linux distros with new conda-forge toolchain
* ARROW-4327 - [Python] Add requirements-build.txt file to simplify setting up Python build environment
* ARROW-4328 - Make R build compatible with DARROW\_TENSORFLOW=ON
* ARROW-4329 - Python should include the parquet headers
* ARROW-4342 - [Gandiva][Java] spurious failures in projector cache test
* ARROW-4347 - [Python] Run Python Travis CI unit tests on Linux when Java codebase changed
* ARROW-4349 - [C++] Build all benchmarks on Windows without failing
* ARROW-4351 - [C++] Fail to build with static parquet
* ARROW-4355 - [C++] test-util functions are no longer part of libarrow
* ARROW-4360 - [C++] Query homebrew for Thrift
* ARROW-4364 - [C++] Fix -weverything -wextra compilation errors
* ARROW-4366 - [Docs] Change extension from format/README.md to format/README.rst
* ARROW-4367 - [C++] StringDictionaryBuilder segfaults on Finish with only null entries
* ARROW-4368 - Bintray repository signature verification fails
* ARROW-4370 - [Python] Table to pandas conversion fails for list of bool
* ARROW-4374 - [C++] DictionaryBuilder does not correctly report length and null\_count
* ARROW-4381 - [Docker] docker-compose build lint fails
* ARROW-4385 - [Python] default\_version of a release should not include SNAPSHOT
* ARROW-4389 - [R] Installing clang-tools in CI is failing on trusty
* ARROW-4395 - ts-node throws type error running \`bin/arrow2csv.js\`
* ARROW-4400 - [CI] install of clang tools failing
* ARROW-4403 - [Rust] CI fails due to formatting errors
* ARROW-4404 - [CI] AppVeyor toolchain build does not build anything
* ARROW-4407 - [C++] ExternalProject\_Add does not capture CC/CXX correctly
* ARROW-4410 - [C++] Fix InvertKernel edge cases
* ARROW-4413 - [Python] pyarrow.hdfs.connect() failing
* ARROW-4414 - [C++] Stop using cmake COMMAND\_EXPAND\_LISTS because it breaks package builds for older distros
* ARROW-4417 - [C++] Doc build broken
* ARROW-4420 - [INTEGRATION] Make spark integration test pass and test against spark's master branch
* ARROW-4421 - [Flight][C++] Handle large Flight data messages
* ARROW-4434 - [Python] Cannot create empty StructArray via pa.StructArray.from\_arrays
* ARROW-4440 - [C++] Fix flatbuffers build using msvc
* ARROW-4457 - [Python] Cannot create Decimal128 array using integers
* ARROW-4469 - [Python][C++] CI Failing for Python 2.7 and 3.6 with valgrind
* ARROW-4471 - [C++] Pass AR and RANLIB to all external projects
* ARROW-4474 - [Flight] FlightInfo should use signed integer types for payload size
* ARROW-4496 - [CI] CI failing for python Xcode 7.3
* ARROW-4498 - [Plasma] Plasma fails building with CUDA enabled
* ARROW-4500 - [C++] librt and pthread hacks can cause linking problems
* ARROW-4501 - [C++] Unique returns non-unique strings
* ARROW-4525 - [Rust] [Parquet] Convert ArrowError to ParquetError
* ARROW-4527 - [Packaging] Update linux packaging tasks to align with the LLVM 7 migration
* ARROW-4532 - [Java] varchar value buffer much larger than expected
* ARROW-4533 - [Python] Document how to run hypothesis tests
* ARROW-4535 - [C++] Fix MakeBuilder to preserve ListType's field name
* ARROW-4536 - Add data\_type argument in garrow\_list\_array\_new
* ARROW-4538 - [PYTHON] Remove index column from subschema in write\_to\_dataframe
* ARROW-4549 - [C++] Can't build benchmark code on CUDA enabled build
* ARROW-4550 - [JS] Fix AMD pattern
* ARROW-4559 - [Python] pyarrow can't read/write filenames with special characters
* ARROW-4563 - [Python] pa.decimal128 should validate inputs
* ARROW-4571 - [Format] Tensor.fbs file has multiple root\_type declarations
* ARROW-4576 - [Python] Benchmark failures
* ARROW-4577 - [C++] Interface link libraries declared on arrow\_shared target that are actually non-interface
* ARROW-4581 - [C++] gbenchmark\_ep is a dependency of unit tests when ARROW\_BUILD\_BENCHMARKS=ON
* ARROW-4582 - [C++/Python] Memory corruption on Pandas->Arrow conversion
* ARROW-4584 - [Python] Add built wheel to manylinux1 dockerignore.
* ARROW-4585 - [C++] Dependency of Flight C++ sources on generated protobuf is not respected
* ARROW-4587 - Flight C++ DoPut segfaults
* ARROW-4597 - [C++] Targets for system Google Mock shared library are missing
* ARROW-4601 - [Python] Master build is broken due to missing licence for .dockerignore
* ARROW-4608 - [C++] cmake script assumes that double-conversion installs static libs
* ARROW-4617 - [C++] Support double-conversion<3.1
* ARROW-4624 - [C++] Linker errors when building benchmarks
* ARROW-4629 - [Python] Pandas to arrow conversion slowed down by local imports
* ARROW-4639 - [CI] Crossbow build failing for Gandiva jars
* ARROW-4641 - [C++] Flight builds complain of -Wstrict-aliasing
* ARROW-4642 - [R] Change \`f\` to \`file\` in \`read\_parquet\_file()\`
* ARROW-4654 - [C++] Implicit Flight target dependencies cause compilation failure
* ARROW-4657 - [Release] gbenchmark should not be needed for verification
* ARROW-4658 - [C++] Shared gflags is also a run-time conda requirement
* ARROW-4659 - [CI] ubuntu/debian nightlies fail because of missing gandiva files
* ARROW-4660 - [C++] gflags fails to build due to CMake error
* ARROW-4664 - [C++] DCHECK macro conditions are evaluated in release builds
* ARROW-4669 - [Java] No Bounds checking on ArrowBuf.slice
* ARROW-4672 - [C++] clang-7 matrix entry is build using gcc
* ARROW-4680 - [CI] [Rust] Travis CI builds fail with latest Rust 1.34.0-nightly (2019-02-25)
* ARROW-4684 - [Python] CI failures in test\_cython.py
* ARROW-4687 - [Python] FlightServerBase.run should exit on Ctrl-C
* ARROW-4688 - [C++][Parquet] 16MB limit on (nested) column chunk prevents tuning row\_group\_size
* ARROW-4696 - Verify release script is over-optimistic with CUDA detection
* ARROW-4699 - [C++] json parser should not rely on null terminated buffers
* ARROW-4710 - [C++][R] New linting script skip files with "cpp" extension
* ARROW-4712 - [C++][CI] Clang7 Valgrind complains when not move shared\_ptr
* ARROW-4721 - [Rust] [DataFusion] Propagate schema in filter
* ARROW-4728 - [JS] Failing test Table#assign with a zero-length Null column round-trips through serialization
* ARROW-4737 - [C#] tests are not running in CI
* ARROW-4744 - [CI][C++] Mingw32 builds failing
* ARROW-4750 - [C++] RapidJSON triggers Wclass-memaccess on GCC 8+
* ARROW-4760 - [C++] protobuf 3.7 defines EXPECT\_OK that clashes with Arrow's macro
* ARROW-4766 - [C++] Casting empty boolean array causes segfault
* ARROW-4767 - [C#] ArrowStreamReader crashes while reading the end of a stream
* ARROW-4774 - [C++][Parquet] Call Table::Validate when writing a table
* ARROW-4775 - [Website] Site navbar cannot be expanded
* ARROW-4783 - [C++][CI] Mingw32 builds sometimes timeout
* ARROW-4796 - [Flight][Python] segfault in simple server implementation
* ARROW-4802 - [Python] Hadoop classpath discovery broken when HADOOP\_HOME is a symlink
* ARROW-4807 - [Rust] Fix csv\_writer benchmark
* ARROW-4811 - [C++] An incorrect dependency leads "ninja" to re-evaluate steps unnecessarily on subsequent calls
* ARROW-4820 - [Python] hadoop class path derived not correct
* ARROW-4822 - [C++/Python] pyarrow.Table.equals segmentation fault on None
* ARROW-4828 - [Python] manylinux1 docker-compose context should be python/manylinux1
* ARROW-4850 - [CI] Integration test failures do not fail the Travis CI build
* ARROW-4853 - [Rust] Array slice doesn't work on ListArray and StructArray
* ARROW-4857 - [C++/Python/CI] docker-compose in manylinux1 crossbow jobs too old
* ARROW-4866 - [C++] zstd ExternalProject failing on Windows
* ARROW-4867 - [Python] Table.from\_pandas() column order not respected
* ARROW-4869 - [C++] Use of gmock fails in compute/kernels/util-internal-test.cc
* ARROW-4870 - [Ruby] gemspec has wrong msys2 dependency listed
* ARROW-4871 - [Flight][Java] Handle large Flight messages
* ARROW-4872 - [Python] Keep backward compatibility for ParquetDatasetPiece
* ARROW-4881 - [Python] bundle\_zlib CMake function still uses ARROW\_BUILD\_TOOLCHAIN
* ARROW-4900 - mingw-w64 < 5 does not have \_\_cpuidex
* ARROW-4903 - [C++] Building tests using only static libs not possible
* ARROW-4906 - [Format] Fix document to describe that SparseMatrixIndexCSR assumes indptr is sorted for each row
* ARROW-4918 - [C++] Add cmake-format to pre-commit
* ARROW-4928 - [Python] Hypothesis test failures
* ARROW-4931 - [C++] CMake fails on gRPC ExternalProject
* ARROW-4948 - [JS] Nightly test failing with "Cannot assign to read only property"
* ARROW-4950 - [C++] Thirdparty CMake error get\_target\_property() called with non-existent target LZ4::lz4
* ARROW-4952 - [C++] Equals / ApproxEquals behaviour undefined on FP NaNs
* ARROW-4954 - [Python] test failure with Flight enabled
* ARROW-4958 - [C++] Purely static linking broken
* ARROW-4961 - [C++][Python] Add GTest\_SOURCE=BUNDLED to relevant build docs that use conda-forge toolchain
* ARROW-4962 - [C++] Warning level to CHECKIN can't compile on modern GCC
* ARROW-4976 - [JS] RecordBatchReader should reset its Node/DOM streams
* ARROW-4984 - [Flight][C++] Flight server segfaults when port is in use
* ARROW-4986 - [CI] Travis fails to install llvm@7
* ARROW-4989 - [C++] Builds fails to find Ubuntu-packaged re2 library
* ARROW-4991 - [CI] Bump travis node version to 11.12
* ARROW-4997 - [C#] ArrowStreamReader doesn't consume whole stream and doesn't implement sync read
* ARROW-5009 - [C++] Cleanup using to std::\* in files
* ARROW-5010 - [Release] Fix release script with llvm-7
* ARROW-5012 - [C++] "testing" headers not installed
* ARROW-5023 - [Release] Default value syntax in shell is wrong
* ARROW-5024 - [Release] crossbow.py --arrow-version causes missing variable error
* ARROW-5025 - [Python][Packaging] wheel for Windows are broken
* ARROW-5026 - [Python][Packaging] conda package on non Windows is broken
* ARROW-5029 - [C++] Compilation warnings in release mode
* ARROW-5031 - [Dev] Release verification script does not run CUDA tests in Python
* ARROW-5042 - [Release] Wrong ARROW\_DEPENDENCY\_SOURCE in verification script
* ARROW-5043 - [Release][Ruby] red-arrow dependency can't be resolve in verification script
* ARROW-5044 - [Release][Rust] Format error in verification script
* ARROW-5046 - [Release][C++] Plasma test is fragile in verification script
* ARROW-5047 - [Release] Always set up parquet-testing in verification script
* ARROW-5048 - [Release][Rust] arrow-testing is missing in verification script
## Improvement
* ARROW-1425 - [Python] Document semantic differences between Spark timestamps and Arrow timestamps
* ARROW-1639 - [Python] More efficient serialization for RangeIndex in serialize\_pandas
* ARROW-1807 - [JAVA] Reduce Heap Usage (Phase 3): consolidate buffers
* ARROW-1896 - [C++] Do not allocate memory for primitive outputs in CastKernel::Call implementation
* ARROW-2015 - [Java] Use Java Time and Date APIs instead of JodaTime
* ARROW-2022 - [Format] Add custom metadata field specific to a RecordBatch message
* ARROW-2112 - [C++] Enable cpplint to be run on Windows
* ARROW-2627 - [Python] Add option (or some equivalent) to toggle memory mapping functionality when using parquet.ParquetFile or other read entry points
* ARROW-3149 - [C++] Use gRPC (when it exists) from conda-forge for CI builds
* ARROW-3239 - [C++] Improve random data generation functions
* ARROW-3292 - [C++] Test Flight RPC in Travis CI
* ARROW-3297 - [Python] Python bindings for Flight C++ client
* ARROW-331 - [Python] Timeline for dropping Python 2.7 support
* ARROW-3361 - [R] Run cpp/build-support/cpplint.py on C++ source files
* ARROW-3364 - [Doc] Document docker compose setup
* ARROW-3367 - [INTEGRATION] Port Spark integration test to the docker-compose setup
* ARROW-3422 - [C++] Add "toolchain" target to ensure that all required toolchain libraries are built
* ARROW-3532 - [Python] Schema, StructType, StructArray field retrieval by name should raise warning or exception for multiple matches
* ARROW-3550 - [C++] Use kUnknownNullCount in NumericArray constructor
* ARROW-3554 - [C++] Reverse traits for C++
* ARROW-3619 - [R] Expose global thread pool options
* ARROW-3653 - [Python/C++] Support data copying between different GPU devices
* ARROW-3735 - [Python] Proper error handling in \_ensure\_type
* ARROW-3769 - [C++] Support reading non-dictionary encoded binary Parquet columns directly as DictionaryArray
* ARROW-3770 - [C++] Validate or add option to validate arrow::Table schema in parquet::arrow::FileWriter::WriteTable
* ARROW-3824 - [R] Document developer workflow for building project, running unit tests in r/README.md
* ARROW-3838 - [Rust] Implement CSV Writer
* ARROW-3846 - [Gandiva] Build on Windows
* ARROW-3882 - [Rust] PrimitiveArray<T> should support cast operations
* ARROW-3903 - [Python] Random array generator for Arrow conversion and Parquet testing
* ARROW-3926 - [Python] Add Gandiva bindings to Python wheels
* ARROW-3951 - [Go] implement a CSV writer
* ARROW-3954 - [Rust] Add Slice to Array and ArrayData
* ARROW-3965 - [Java] JDBC-to-Arrow Conversion: Configuration Object
* ARROW-3966 - [Java] JDBC-to-Arrow Conversion: JDBC Metadata in Schema Fields
* ARROW-3972 - [C++] Update to LLVM and Clang bits to 7.0
* ARROW-3985 - [C++] Pass -C option when compiling with ccache to avoid some warnings
* ARROW-4012 - [Documentation][C++] Document how to install Apache Arrow on MSYS2
* ARROW-4014 - [C++] Fix "LIBCMT" warnings on MSVC
* ARROW-4024 - [Python] Cython compilation error on cython==0.27.3
* ARROW-4031 - [C++] Refactor ArrayBuilder bitmap logic into TypedBufferBuilder<bool>
* ARROW-4056 - [C++] Upgrade to boost-cpp 1.69.0 again
* ARROW-4094 - [Python] Store RangeIndex in Parquet files as metadata rather than a physical data column
* ARROW-4110 - [C++] Do not generate distinct cast kernels when input and output type are the same
* ARROW-4123 - [C++] Improve linting workflow and documentation for Windows-based developers
* ARROW-4124 - [C++] Abstract aggregation kernel API
* ARROW-4142 - [Java] JDBC-to-Arrow: JDBC Arrays
* ARROW-4165 - [C++] Port cpp/apidoc/Windows.md and other files to Sphinx / rst
* ARROW-4180 - [Java] Reduce verbose logging of ArrowBuf creation events?
* ARROW-4196 - [Rust] Add explicit SIMD vectorization for arithmetic ops in "array\_ops"
* ARROW-4198 - [Gandiva] Add support to cast timestamp
* ARROW-4212 - [Python] [CUDA] Creating a CUDA buffer from Numba device array should be easier
* ARROW-4230 - [C++] Enable building flight against system gRPC
* ARROW-4234 - [C++] Add memory bandwidth benchmarks to arrow/util/machine-benchmark.cc
* ARROW-4235 - [GLib] Use "column\_builder" in GArrowRecordBatchBuilder
* ARROW-4236 - [JAVA] Distinct plasma client create exceptions
* ARROW-4245 - [Rust] Add Rustdoc header to each source file
* ARROW-4247 - [Packaging] Update verify script for 0.12.0
* ARROW-4251 - [C++] Add option to use vendored Boost in verify-release-candidate.sh
* ARROW-4263 - [Rust] Donate DataFusion
* ARROW-4268 - [C++] Add C primitive to Arrow:Type compile time in TypeTraits
* ARROW-4277 - [C++] Add gmock to toolchain
* ARROW-4285 - [Python] Use proper builder interface for serialization
* ARROW-4297 - [C++] Fix build for 32-bit MSYS2
* ARROW-4299 - [Ruby] Depend on the same version as Red Arrow
* ARROW-4305 - [Rust] Fix parquet version number in README
* ARROW-4307 - [C++] FIx doxygen warnings, include doxygen warning checks in CI linting
* ARROW-4310 - [Website] Update install document for 0.12.0
* ARROW-4315 - [Website] Home page of https://arrow.apache.org/ does not mention Go or Rust
* ARROW-4330 - [C++] Use FindThreads.cmake to handle -pthread compiler/link options
* ARROW-4332 - [Website] Instructions and scripts for publishing web site appear to be incorrect
* ARROW-4335 - [C++] Better document sparse tensor support
* ARROW-4336 - [C++] Default BUILD\_WARNING\_LEVEL to CHECKIN
* ARROW-4339 - [C++] rewrite cpp/README shorter, with a separate contribution guide
* ARROW-4340 - [C++] Update IWYU version in the \`lint\` dockerfile
* ARROW-4341 - [C++] Use TypedBufferBuilder<bool> in BooleanBuilder
* ARROW-4344 - [Java] Further cleanup maven output
* ARROW-4345 - [C++] Add Apache 2.0 license file to the Parquet-testing repository
* ARROW-4346 - [C++] Fix compiler warnings with gcc 8.2.0
* ARROW-4353 - [CI] Add jobs for 32-bit and 64-bit MinGW
* ARROW-4361 - [Website] Update committers list
* ARROW-4362 - [Java] Test OpenJDK 11 in CI
* ARROW-4363 - [C++] Add CMake format checks
* ARROW-4372 - [C++] Embed precompiled bitcode in the gandiva library
* ARROW-4373 - [Packaging] Travis fails to deploy conda packages on OSX
* ARROW-4375 - [CI] Sphinx dependencies were removed from docs conda environment
* ARROW-4376 - [Rust] Implement from\_buf\_reader for csv::Reader
* ARROW-4377 - [Rust] Implement std::fmt::Debug for all PrimitiveArrays
* ARROW-4379 - Register pyarrow serializers for collections.Counter and collections.deque.
* ARROW-4383 - [C++] Use the CMake's standard find features
* ARROW-4388 - [Go] add DimNames() method to tensor Interface?
* ARROW-4393 - [Rust] coding style: apply 90 characters per line limit
* ARROW-4396 - Update Typedoc to support TypeScript 3.2
* ARROW-4399 - [C++] Remove usage of "extern template class" from NumericArray<T>
* ARROW-4401 - [Python] Alpine dockerfile fails to build because pandas requires numpy as build dependency
* ARROW-4406 - Ignore "\*\_$folder$" files on S3
* ARROW-4422 - [Plasma] Enforce memory limit in plasma, rather than relying on dlmalloc\_set\_footprint\_limit
* ARROW-4423 - [C++] Update version of vendored gtest to 1.8.1
* ARROW-4424 - [Python] Manylinux CI builds failing
* ARROW-4430 - [C++] add unit test for currently unused append method
* ARROW-4431 - [C++] Build gRPC as ExternalProject without allowing it to build its vendored dependencies
* ARROW-4436 - [Documentation] Clarify instructions for building documentation
* ARROW-4442 - [JS] Overly broad type annotation for Chunked typeId leading to type mismatches in generated typing
* ARROW-4444 - [Testing] Add DataFusion test files to arrow-testing repo
* ARROW-4445 - [C++][Gandiva] Run Gandiva-LLVM tests in Appveyor
* ARROW-4446 - [Python] Run Gandiva tests on Windows and Appveyor
* ARROW-4448 - [JAVA][Flight] Flaky Flight java test
* ARROW-4454 - [C++] fix unused parameter warnings
* ARROW-4455 - [Plasma] g++ 8 reports class-memaccess warnings
* ARROW-4459 - [Testing] Add git submodule for arrow-testing data files
* ARROW-4460 - [Website] Write blog post to announce DataFusion donation
* ARROW-4462 - [C++] Upgrade LZ4 v1.7.5 to v1.8.3 to compile with VS2017
* ARROW-4464 - [Rust] [DataFusion] Add support for LIMIT
* ARROW-4466 - [Rust] [DataFusion] Add support for Parquet data sources
* ARROW-4468 - [Rust] Implement BitAnd/BitOr for &Buffer (with SIMD)
* ARROW-4475 - [Python] Serializing objects that contain themselves
* ARROW-4476 - [Rust] [DataFusion] Post donation clean up tasks
* ARROW-4481 - [Website] Instructions for publishing web site are missing a step
* ARROW-4483 - [Website] Fix broken link (author) in DataFusion blog post
* ARROW-4485 - [CI] Determine maintenance approach to pinned conda-forge binutils package
* ARROW-4486 - [Python][CUDA] pyarrow.cuda.Context.foreign\_buffer should have a \`base=None\` argument
* ARROW-4488 - [Rust] From AsRef<[u8]> for Buffer does not ensure correct padding
* ARROW-4489 - [Rust] PrimitiveArray.value\_slice performs bounds checking when it should not
* ARROW-4490 - [Rust] Add explicit SIMD vectorization for boolean ops in "array\_ops"
* ARROW-4491 - [Python] Remove usage of std::to\_string and std::stoi
* ARROW-4499 - [Python][CI] Upgrade to latest flake8 3.7.5 in travis\_lint.sh
* ARROW-4502 - [C#] Add support for zero-copy reads
* ARROW-4513 - [Rust] Implement BitAnd/BitOr for &Bitmap
* ARROW-4528 - [C++] Update lint docker container to LLVM-7
* ARROW-4529 - [C++] Add test coverage for BitUtils::RoundDown
* ARROW-4531 - [C++] Handling of non-aligned slices in Sum kernel
* ARROW-4537 - [CI] Suppress shell warning on travis-ci
* ARROW-4547 - [Python][Documentation] Update python/development.rst with instructions for CUDA-enabled builds
* ARROW-4558 - [C++][Flight] Avoid undefined behavior with gRPC memory optimizations
* ARROW-4560 - [R] array() needs to take single input, not ...
* ARROW-4562 - [C++][Flight] Create outgoing composite grpc::ByteBuffer instead of allocating contiguous slice and copying IpcPayload into it
* ARROW-4565 - [R] Reading records with all non-null decimals SEGFAULTs
* ARROW-4568 - [C++] Add version macros to headers
* ARROW-4572 - [C++] Remove memory zeroing from PrimitiveAllocatingUnaryKernel
* ARROW-4583 - [Plasma] There are bugs reported by code scan tool
* ARROW-4586 - [Rust] Remove arrow/mod.rs as it is not needed
* ARROW-4590 - [Rust] Add explicit SIMD vectorization for comparison ops in "array\_ops"
* ARROW-4592 - [GLib] Stop configure immediately when GLib isn't available
* ARROW-4593 - [Ruby] Arrow::Array#[out\_of\_range] returns nil
* ARROW-4594 - [Ruby] Arrow::StructArray#[] returns Arrow::Struct instead of Arrow::Array
* ARROW-4595 - [Rust] [DataFusion] Implement DataFrame style API
* ARROW-4598 - [CI] Remove needless LLVM\_DIR for macOS
* ARROW-4602 - [Rust] [DataFusion] Integrate query optimizer with ExecutionContext
* ARROW-4605 - [Rust] Move filter and limit code from DataFusion into compute module
* ARROW-4609 - [C++] Use google benchmark from toolchain
* ARROW-4610 - [Plasma] Avoid JNI from crashing
* ARROW-4611 - [C++] Rework CMake third-party logic
* ARROW-4612 - [Python] Use cython from PyPI for windows wheels build
* ARROW-4613 - [C++] Alpine build failing as libgtestd.so is not found
* ARROW-4614 - [C++/CI] Activate flight build in ci/docker\_build\_cpp.sh
* ARROW-4615 - [C++] Add checked\_pointer\_cast
* ARROW-4616 - [C++] Log message in BuildUtils as STATUS
* ARROW-4618 - [Docker] Makefile to build dependent docker images
* ARROW-4623 - [R] update Rcpp dependency
* ARROW-4628 - [Rust] [DataFusion] Implement type coercion query optimizer rule
* ARROW-4634 - [Rust] [Parquet] Reorganize test\_common mod to allow more test util codes.
* ARROW-4637 - [Python] Avoid importing Pandas unless necessary
* ARROW-4638 - [R] install instructions using brew
* ARROW-4640 - [Python] Add docker-compose configuration to build and test the project without pandas installed
* ARROW-4643 - [C++] Add compiler diagnostic color when using Ninja
* ARROW-4644 - [C++/Docker] Build Gandiva in the docker containers
* ARROW-4645 - [C++/Packaging] Ship Gandiva with OSX and Windows wheels
* ARROW-4646 - [C++/Packaging] Ship gandiva with the conda-forge packages
* ARROW-4655 - [Packaging] Parallelize binary upload
* ARROW-4667 - [C++] Suppress unused function warnings with MinGW
* ARROW-4670 - [Rust] compute::sum performance issue
* ARROW-4673 - [C++] Implement AssertDatumEquals
* ARROW-4676 - [C++] Add support for debug build with MinGW
* ARROW-4678 - [Rust] Minimize unstable feature usage
* ARROW-4679 - [Rust] [DataFusion] Implement in-memory DataSource
* ARROW-4681 - [Rust] [DataFusion] Implement parallel query execution using threads
* ARROW-4686 - Only accept 'y' or 'n' in merge\_arrow\_pr.py prompts
* ARROW-4689 - [Go] add support for WASM
* ARROW-4690 - [Python] Building TensorFlow compatible wheels for Arrow
* ARROW-4697 - [C++] Add URI parsing facility
* ARROW-4705 - [Rust] CSV reader should show line number and error message when failing to parse a line
* ARROW-4718 - Add ArrowStreamWriter/Reader ctors that leave open the underlying Stream
* ARROW-4727 - [Rust] Implement ability to check if two schemas are the same
* ARROW-4730 - [C++] Add docker-compose entry for testing Fedora build with system packages
* ARROW-4731 - [C++] Add docker-compose entry for testing Ubuntu Xenial build with system packages
* ARROW-4732 - [C++] Add docker-compose entry for testing Debian Testing build with system packages
* ARROW-4733 - [C++] Add CI entry that builds without the conda-forge toolchain but with system packages
* ARROW-4734 - [Go] Add option to write a header for CSV writer
* ARROW-4735 - [Go] Benchmark strconv.Format vs. fmt.Sprintf for CSV writer
* ARROW-4739 - [Rust] [DataFusion] It should be possible to share a logical plan between threads
* ARROW-4745 - [C++][Documentation] Document process for replicating static\_crt builds on windows
* ARROW-4749 - [Rust] RecordBatch::new() should return result instead of panicking
* ARROW-4754 - [CI][Java] Flaky TestAuth Flight test
* ARROW-4769 - [Rust] Improve array limit function where max records > len
* ARROW-4776 - [C++] DictionaryBuilder should support bootstrapping from an existing dict type
* ARROW-4777 - [C++/Python] manylinux1: Update lz4 to 1.8.3
* ARROW-4789 - [C++] Deprecate and later remove arrow::io::ReadableFileInterface
* ARROW-4791 - Unused dependencies in arrow and datafusion
* ARROW-4794 - [Python] Make pandas an optional test dependency
* ARROW-4797 - [Plasma] Avoid store crash if not enough memory is available
* ARROW-4801 - [GLib] Suppress pkgconfig.generate() warnings
* ARROW-4817 - [Rust] [DataFusion] Small re-org of modules
* ARROW-4826 - [Go] export Flush method for CSV writer
* ARROW-4831 - [C++] CMAKE\_AR is not passed to ZSTD thirdparty dependency
* ARROW-4833 - [Release] Document how to update the brew formula in the release management guide
* ARROW-4834 - [R] Feature flag to disable parquet
* ARROW-4837 - [C++] Support c++filt on a custom path in the run-test.sh script
* ARROW-4839 - [C#] Add NuGet support
* ARROW-4846 - [Java] Update Jackson to 2.9.8
* ARROW-4849 - [C++] Add docker-compose entry for testing Ubuntu Bionic build with system packages
* ARROW-4854 - [Rust] Use Array Slice for limit kernel
* ARROW-4855 - [Packaging] Generate default package version based on cpp tags in crossbow.py
* ARROW-4858 - [Flight][Python] Enable custom FlightDataStream in Python
* ARROW-4865 - [Rust] Support casting lists and primitives to lists
* ARROW-4873 - [C++] Clarify documentation about how to use external ARROW\_PACKAGE\_PREFIX while also using CONDA dependency resolution
* ARROW-4878 - [C++] ARROW\_DEPENDENCY\_SOURCE=CONDA does not work properly with MSVC
* ARROW-4889 - [C++] Add STATUS messages for Protobuf in CMake
* ARROW-4891 - [C++] ZLIB include directories not added
* ARROW-4893 - [C++] conda packages should use $PREFIX inside of conda-build
* ARROW-4894 - [Rust] [DataFusion] Remove all uses of panic! from aggregate.rs
* ARROW-4896 - [Rust] [DataFusion] Remove all uses of panic! from tests
* ARROW-4897 - [Rust] [DataFusion] Improve Rustdoc
* ARROW-4898 - [C++] Old versions of FindProtobuf.cmake use ALL-CAPS for variables
* ARROW-4899 - [Rust] [DataFusion] Remove all uses of panic! from expression.rs
* ARROW-4905 - [C++][Plasma] Remove dlmalloc from client library
* ARROW-4908 - [Rust] [DataFusion] Add support for parquet date/time in int32/64 encoding
* ARROW-4910 - [Rust] [DataFusion] Remove all uses of unimplemented!
* ARROW-4922 - [Packaging] Use system libraries for .deb and .rpm
* ARROW-4926 - [Rust] [DataFusion] Update README for 0.13.0 release
* ARROW-4933 - [R] Autodetect Parquet support using pkg-config
* ARROW-4937 - [R] Clean pkg-config related logic
* ARROW-4939 - [Python] Add wrapper for "sum" kernel
* ARROW-4940 - [Rust] Enhance documentation for datafusion
* ARROW-4944 - [C++] Raise minimal required thrift-cpp to 0.11 in conda environment
* ARROW-4946 - [C++] Support detection of flatbuffers without FlatbuffersConfig.cmake
* ARROW-4947 - [Flight][C++/Python] Remove redundant schema parameter in DoGet
* ARROW-4964 - [Ruby] Add closed check if available on auto close
* ARROW-4969 - [C++] Set RPATH in correct order for test executables on OSX
* ARROW-4977 - [Ruby] Add support for building on Windows
* ARROW-4978 - [Ruby] Fix wrong internal variable name for table data
* ARROW-4979 - [GLib] Add missing lock to garrow::GIOInputStream
* ARROW-4980 - [GLib] Use GInputStream as the parent of GArrowInputStream
* ARROW-4983 - [Plasma] Unmap memory when the client is destroyed
* ARROW-4995 - [R] Make sure winbuilder tests pass for package
* ARROW-4996 - [Plasma] There are many log files in /tmp
* ARROW-5003 - [R] remove dependency on withr
* ARROW-5006 - [R] parquet.cpp does not include enough Rcpp
* ARROW-5011 - [Release] Add support in the source release script for custom hash
* ARROW-5013 - [Rust] [DataFusion] Refactor runtime expression support
* ARROW-5014 - [Java] Fix typos in Flight module
* ARROW-5018 - [Release] Include JavaScript implementation
* ARROW-5032 - [C++] Headers in vendored/datetime directory aren't installed
* ARROW-572 - [C++] Apply visitor pattern in IPC metadata
## New Feature
* ARROW-1572 - [C++] Implement "value counts" kernels for tabulating value frequencies
* ARROW-3107 - [C++] arrow::PrettyPrint for Column instances
* ARROW-3121 - [C++] Mean kernel aggregate
* ARROW-3123 - [C++] Incremental Count, Count Not Null aggregator
* ARROW-3135 - [C++] Add helper functions for validity bitmap propagation in kernel context
* ARROW-3162 - [Python] Enable Flight servers to be implemented in pure Python
* ARROW-3289 - [C++] Implement DoPut command for Flight on client and server side
* ARROW-3311 - [R] Functions for deserializing IPC components from arrow::Buffer or from IO interface
* ARROW-3631 - [C#] Add Appveyor build for C#
* ARROW-3761 - [R] Bindings for CompressedInputStream, CompressedOutputStream
* ARROW-3816 - [R] nrow.RecordBatch method
* ARROW-4262 - [Website] Blog post to give preview into using R and Arrow with Apache Spark
* ARROW-4265 - [C++] Automatic conversion between Table and std::vector<std::tuple<..>>
* ARROW-4287 - [C++] Ensure minimal bison version on OSX for Thrift
* ARROW-4289 - [C++] Forward AR and RANLIB to thirdparty builds
* ARROW-4290 - [C++/Gandiva] Support detecting correct LLVM version in Homebrew
* ARROW-4291 - [Dev] Support selecting features in release scripts
* ARROW-4294 - [Plasma] Add support for evicting objects to external store
* ARROW-4298 - [Java] Building Flight fails with OpenJDK 11
* ARROW-4300 - [C++] Restore apache-arrow Homebrew recipe and define process for maintaining and updating for releases
* ARROW-4313 - Define general benchmark database schema
* ARROW-4318 - [C++] Add Tensor::CountNonZero
* ARROW-4352 - [C++] Add support for system Google Test
* ARROW-4386 - [Rust] Implement Date and Time Arrays
* ARROW-4397 - [C++] dim\_names in Tensor and SparseTensor
* ARROW-4449 - [Rust] Convert File to T: Read + Seek for schema inference
* ARROW-4472 - [Website][Python] Blog post about Python string memory use improvements in 0.12
* ARROW-4506 - [Ruby] Add Arrow::RecordBatch#raw\_records
* ARROW-4632 - [Ruby] Add BigDecimal#to\_arrow
* ARROW-4662 - [Python] Add type\_codes property in UnionType
* ARROW-4671 - [C++] MakeBuilder doesn't support Type::DICTIONARY
* ARROW-4692 - [Format][Documentation] Add more details about "sidecar" to flight proto
* ARROW-47 - [C++] Consider adding a scalar type object model
* ARROW-4707 - [C++] move BitsetStack to bit-util.h
* ARROW-4740 - [Java] Upgrade to JUnit 5
* ARROW-4782 - [C++] Prototype scalar and array expression types for developing deferred operator algebra
* ARROW-4835 - [GLib] Add boolean operations
* ARROW-4859 - [GLib] Add garrow\_numeric\_array\_mean()
* ARROW-4862 - [GLib] Add GArrowCastOptions::allow-invalid-utf8 property
* ARROW-4882 - [GLib] Add "Sum" functions
* ARROW-4887 - [GLib] Add garrow\_array\_count()
* ARROW-4901 - [Go] Run tests in Appveyor
* ARROW-4915 - [GLib] Add support for arrow::NullBuilder
* ARROW-4924 - [Ruby] Add Decimal128#to\_s(scale=nil)
* ARROW-4929 - [GLib] Add garrow\_array\_count\_values()
* ARROW-4955 - [GLib] Add garrow\_file\_is\_closed()
* ARROW-4981 - [Ruby] Add support for CSV data encoding conversion
* ARROW-5041 - [Release][C++] use bundled gtest and gmock in verify-release-candidate.bat
* ARROW-549 - [C++] Add function to concatenate like-typed arrays
* ARROW-585 - [C++] Define public API for user-defined data types
* ARROW-694 - [C++] Build JSON "scanner" for reading record batches from line-delimited JSON files
## Sub-task
* ARROW-3596 - [Packaging] Build gRPC in conda-forge
* ARROW-4061 - [Rust] [Parquet] Implement "spaced" version for non-dictionary encoding/decoding
* ARROW-4461 - [C++] Expose bit-util methods for binary boolean operations that don't allocate
* ARROW-4540 - [Rust] Add basic JSON reader
* ARROW-4543 - [C#] Update Flat Buffers code to latest version
* ARROW-4556 - [Rust] Preserve order of JSON inferred schema
* ARROW-4599 - [C++] Add support for system GFlags
* ARROW-4743 - [Java] Fix documentation in arrow memory module
* ARROW-4772 - Provide new ORC adapter interface that allow user to specify row number
* ARROW-4892 - [Rust] [DataFusion] Move SQL parser and planner into sql package
* ARROW-4895 - [Rust] [DataFusion] Move error.rs to top level package
## Task
* ARROW-2409 - [Rust] Test for build warnings, remove current warnings
* ARROW-3511 - [Gandiva] support input selection vectors for both projector and filter
* ARROW-4071 - [Rust] Add rustfmt as a pre-commit hook
* ARROW-4072 - [Rust] Set default value for PARQUET\_TEST\_DATA
* ARROW-4204 - [Gandiva] implement decimal subtract
* ARROW-4205 - [Gandiva] Implement decimal multiply
* ARROW-4206 - [Gandiva] Implement decimal divide
* ARROW-4271 - [Rust] Move Parquet specific info to Parquet Readme
* ARROW-4273 - [Release] Fix verification script to use cf201901 conda-forge label
* ARROW-4281 - [CI] Use Ubuntu Xenial (16.04) VMs on Travis-CI
* ARROW-4303 - [Gandiva/Python] Build LLVM with RTTI in manylinux1 container
* ARROW-4321 - [CI] Setup conda-forge channel globally in docker containers
* ARROW-4334 - [CI] Setup conda-forge channel globally in travis builds
* ARROW-4358 - [Gandiva][Crossbow] Trusty build broken
* ARROW-4408 - [CPP/Doc] Remove outdated Parquet documentation
* ARROW-4425 - Add link to 'Contributing' page in the top-level Arrow README
* ARROW-4435 - [C#] Add .sln file and minor .csproj fix ups
* ARROW-4518 - [JS] add jsdelivr to package.json
* ARROW-4539 - [Java]List vector child value count not set correctly
* ARROW-4619 - [R]: Fix the autobrew script
* ARROW-4620 - [C#] Add unit tests for "Types" in arrow/csharp
* ARROW-4693 - [CI] Build boost library with multi precision
* ARROW-4751 - [C++] Add pkg-config to conda\_env\_cpp.yml
* ARROW-4756 - [CI] document the procedure to update docker image for manylinux1 builds
* ARROW-4758 - [Flight] Build fails on Mac due to missing Schema\_generated.h
* ARROW-4778 - [C++/Python] manylinux1: Update Thrift to 0.12.0
* ARROW-4786 - [C++/Python] Support better parallelisation in manylinux1 base build
* ARROW-4790 - [Python/Packaging] Update manylinux docker image in crossbow task
* ARROW-4808 - [Java][Vector] Convenience methods for setting decimal vector
* ARROW-4907 - [CI] Add docker container to inspect docker context
* ARROW-4909 - [CI] Use hadolint to lint Dockerfiles
* ARROW-4932 - [GLib] Use G\_DECLARE\_DERIVABLE\_TYPE macro
* ARROW-4951 - [C++] Turn off cpp benchmarks in cpp docker images
* ARROW-4994 - [website] Update Details for ptgoetz
## Test
* ARROW-4320 - [C++] Add tests for non-contiguous tensors
* ARROW-4704 - [CI][GLib] Plasma test is flaky
* ARROW-4724 - [C++] Python not being built nor test under MinGW builds
* ARROW-4768 - [C++][CI] arrow-test-array sometimes gets stuck in MinGW build
* ARROW-4793 - [Ruby] Suppress unused variable warning
* ARROW-4813 - [Ruby] Add tests for #== and #!=
* ARROW-4942 - [Ruby] Remove needless omits
* ARROW-4982 - [GLib][CI] Run tests on AppVeyor
## Wish
* ARROW-3981 - [C++] Rename json.h
# Apache Arrow 0.12.0 (16 January 2019)
## Bug
* ARROW-1847 - [Doc] Document the difference between RecordBatch and Table in an FAQ fashion
* ARROW-1994 - [Python] Test against Pandas master
* ARROW-2026 - [Python] Cast all timestamp resolutions to INT96 use\_deprecated\_int96\_timestamps=True
* ARROW-2038 - [Python] Follow-up bug fixes for s3fs Parquet support
* ARROW-2113 - [Python] Incomplete CLASSPATH with "hadoop" contained in it can fool the classpath setting HDFS logic
* ARROW-2591 - [Python] Segmentation fault when writing empty ListType column to Parquet
* ARROW-2592 - [Python] Error reading old Parquet file due to metadata backwards compatibility issue
* ARROW-2708 - [C++] Internal GetValues function in arrow::compute should check for nullptr
* ARROW-2970 - [Python] NumPyConverter::Visit for Binary/String/FixedSizeBinary can overflow
* ARROW-3058 - [Python] Feather reads fail with unintuitive error when conversion from pandas yields ChunkedArray
* ARROW-3186 - [GLib] mesonbuild failures in Travis CI
* ARROW-3202 - [C++] Build does not succeed on Alpine Linux
* ARROW-3225 - [C++/Python] Pandas object conversion of ListType<DateType> and ListType<TimeType>
* ARROW-3324 - [Parquet] Free more internal resources when writing multiple row groups
* ARROW-3343 - [Java] Java tests fail non-deterministically with memory leak from Flight tests
* ARROW-3405 - [Python] Document CSV reader
* ARROW-3428 - [Python] from\_pandas gives incorrect results when converting floating point to bool
* ARROW-3436 - [C++] Boost version required by Gandiva is too new for Ubuntu 14.04
* ARROW-3437 - [Gandiva][C++] Configure static linking of libgcc, libstdc++ with LDFLAGS
* ARROW-3438 - [Packaging] Escaped bulletpoints in changelog
* ARROW-3445 - [GLib] Parquet GLib doesn't link Arrow GLib
* ARROW-3449 - [C++] Support CMake 3.2 for "out of the box" builds
* ARROW-3466 - [Python] Crash when importing tensorflow and pyarrow
* ARROW-3467 - Building against external double conversion is broken
* ARROW-3470 - [C++] Row-wise conversion tutorial has fallen out of date
* ARROW-3477 - [C++] Testsuite fails on 32 bit arch
* ARROW-3480 - [Website] Install document for Ubuntu is broken
* ARROW-3485 - [C++] Examples fail with Protobuf error
* ARROW-3494 - [C++] re2 conda-forge package not working in toolchain
* ARROW-3516 - [C++] Use unsigned type for difference of pointers in parallel\_memcpy
* ARROW-3517 - [C++] MinGW 32bit build causes g++ segv
* ARROW-3524 - [C++] Fix compiler warnings from ARROW-3409 on clang-6
* ARROW-3527 - [R] Unused variables in R-package C++ code
* ARROW-3528 - [R] Typo in R documentation
* ARROW-3535 - [Python] pip install tensorflow install too new numpy in manylinux1 build
* ARROW-3541 - [Rust] Update BufferBuilder to allow for new bit-packed BooleanArray
* ARROW-3544 - [Gandiva] Populate function registry in multiple compilation units to mitigate long compile times in release mode
* ARROW-3549 - [Rust] Replace i64 with usize for some bit utility functions
* ARROW-3573 - [Rust] with\_bitset does not set valid bits correctly
* ARROW-3580 - [Gandiva][C++] Build error with g++ 8.2.0
* ARROW-3586 - [Python] Segmentation fault when converting empty table to pandas with categoricals
* ARROW-3598 - [Plasma] plasma\_store\_server fails linking with GPU enabled
* ARROW-3613 - [Go] Resize does not correctly update the length
* ARROW-3614 - [R] Handle Type::TIMESTAMP from Arrow to R
* ARROW-3658 - [Rust] validation of offsets buffer is incorrect for \`List<T>\`
* ARROW-3670 - [C++] Use FindBacktrace to find execinfo.h support
* ARROW-3687 - [Rust] Anything measuring array slots should be \`usize\`
* ARROW-3698 - [C++] Segmentation fault when using a large table in Gandiva
* ARROW-3700 - [C++] CSV parser should allow ignoring empty lines
* ARROW-3703 - [Python] DataFrame.to\_parquet crashes if datetime column has time zones
* ARROW-3707 - [C++] test failure with zstd 1.3.7
* ARROW-3711 - [C++] Don't pass CXX\_FLAGS to C\_FLAGS
* ARROW-3712 - [CI] License check regression (RAT failure)
* ARROW-3715 - [C++] gflags\_ep fails to build with CMake 3.13
* ARROW-3716 - [R] Missing cases for ChunkedArray conversion
* ARROW-3728 - [Python] Merging Parquet Files - Pandas Meta in Schema Mismatch
* ARROW-3734 - [C++] Linking static zstd library fails on Arch x86-64
* ARROW-3740 - [C++] Calling ArrayBuilder::Resize with length smaller than current appended length results in invalid state
* ARROW-3742 - Fix pyarrow.types & gandiva cython bindings
* ARROW-3745 - [C++] CMake passes static libraries multiple times to linker
* ARROW-3754 - [Packaging] Zstd configure error on linux package builds
* ARROW-3756 - [CI/Docker/Java] Java tests are failing in docker-compose setup
* ARROW-3762 - [C++] Parquet arrow::Table reads error when overflowing capacity of BinaryArray
* ARROW-3765 - [Gandiva] Segfault when the validity bitmap has not been allocated
* ARROW-3766 - [Python] pa.Table.from\_pandas doesn't use schema ordering
* ARROW-3768 - [Python] set classpath to hdfs not hadoop executable
* ARROW-3790 - [C++] Signed to unsigned integer cast yields incorrect results when type sizes are the same
* ARROW-3792 - [Python] Segmentation fault when writing empty RecordBatches to Parquet
* ARROW-3793 - [C++] TestScalarAppendUnsafe is not testing unsafe appends
* ARROW-3797 - [Rust] BinaryArray::value\_offset incorrect in offset case
* ARROW-3805 - [Gandiva] handle null validity bitmap in if-else expressions
* ARROW-3831 - [C++] arrow::util::Codec::Decompress() doesn't return decompressed data size
* ARROW-3835 - [C++] arrow::io::CompressedOutputStream::raw() implementation is missing
* ARROW-3837 - [C++] gflags link errors on Windows
* ARROW-3866 - [Python] Column metadata is not transferred to tables in pyarrow
* ARROW-3874 - [Gandiva] Cannot build: LLVM not detected correctly
* ARROW-3879 - [C++] cuda-test failure
* ARROW-3888 - [C++] Compilation warnings with gcc 7.3.0
* ARROW-3889 - [Python] creating schema with invalid parameters causes segmentation fault
* ARROW-3890 - [Python] Creating Array with explicit string type fails on Python 2.7
* ARROW-3894 - [Python] Error reading IPC file with no record batches
* ARROW-3898 - parquet-arrow example has compilation errors
* ARROW-3920 - Plasma reference counting not properly done in TensorFlow custom operator.
* ARROW-3931 - Make possible to build regardless of LANG
* ARROW-3936 - Add \_O\_NOINHERIT to the file open flags on Windows
* ARROW-3937 - [Rust] Rust nightly build is failing
* ARROW-3940 - [Python/Documentation] Add required packages to the development instruction
* ARROW-3941 - [R] RecordBatchStreamReader$schema
* ARROW-3942 - [R] Feather api fixes
* ARROW-3953 - Compat with pandas 0.24 rename of MultiIndex labels -> codes
* ARROW-3955 - [GLib] Add (transfer full) to free when no longer needed
* ARROW-3957 - [Python] Better error message when user connects to HDFS cluster with wrong port
* ARROW-3961 - [Python/Documentation] Fix wrong path in the pyarrow README
* ARROW-3969 - [Rust] CI build broken because rustfmt not available on nightly toolchain
* ARROW-3976 - [Ruby] Homebrew donation solicitation on CLI breaking CI builds
* ARROW-3977 - [Gandiva] gandiva cpp tests not running in CI
* ARROW-3979 - [Gandiva] fix all valgrind reported errors
* ARROW-3980 - [C++] Fix CRTP use in json-simple.cc
* ARROW-3989 - [Rust] CSV reader should handle case sensitivity for boolean values
* ARROW-3996 - [C++] Insufficient description on build
* ARROW-4008 - [C++] Integration test executable failure
* ARROW-4011 - [Gandiva] Refer irhelpers.bc in build directory
* ARROW-4019 - [C++] Fix coverity issues
* ARROW-4033 - [C++] thirdparty/download\_dependencies.sh uses tools or options not available in older Linuxes
* ARROW-4034 - [Ruby] Interface for FileOutputStream doesn't respect append=True
* ARROW-4041 - [CI] Python 2.7 run uses Python 3.6
* ARROW-4049 - [C++] Arrow never use glog even though glog is linked.
* ARROW-4052 - [C++] Linker errors with glog and gflags
* ARROW-4053 - [Python/Integration] HDFS Tests failing with I/O operation on closed file
* ARROW-4055 - [Python] Fails to convert pytz.utc with versions 2018.3 and earlier
* ARROW-4058 - [C++] arrow-io-hdfs-test fails when run against HDFS cluster from docker-compose
* ARROW-4065 - [C++] arrowTargets.cmake is broken
* ARROW-4066 - Instructions to create Sphinx documentation
* ARROW-4070 - [C++] ARROW\_BOOST\_VENDORED doesn't work properly with ninja build
* ARROW-4073 - [Python] Parquet test failures on AppVeyor
* ARROW-4074 - [Python] test\_get\_library\_dirs\_win32 fails if libraries installed someplace different from conda or wheel packages
* ARROW-4078 - [CI] Run Travis job where documentation is built when docs/ is changed
* ARROW-4088 - [Python] Table.from\_batches() fails when passed a schema with metadata
* ARROW-4089 - [Plasma] The tutorial is wrong regarding the parameter type of PlasmaClient.Create
* ARROW-4101 - [C++] Binary identity cast not implemented
* ARROW-4106 - [Python] Tests fail to run because hypothesis update broke its API
* ARROW-4109 - [Packaging] Missing glog dependency from arrow-cpp conda recipe
* ARROW-4113 - [R] Version number patch broke build
* ARROW-4114 - [C++][DOCUMENTATION]
* ARROW-4115 - [Gandiva] valgrind complains that boolean output data buffer has uninited data
* ARROW-4118 - [Python] Error with "asv run"
* ARROW-4125 - [Python] ASV benchmarks fail to run if Plasma extension is not built (e.g. on Windows)
* ARROW-4126 - [Go] offset not used when accessing boolean array
* ARROW-4128 - [C++][DOCUMENTATION] Update style guide to reflect some more exceptions
* ARROW-4130 - [Go] offset not used when accessing binary array
* ARROW-4134 - [Packaging] Properly setup timezone in docker tests to prevent ORC adapter's abort
* ARROW-4135 - [Python] Can't reload a pandas dataframe containing a list of datetime.time
* ARROW-4138 - [Python] setuptools\_scm customization does not work for versions above 0.9.0 on Windows
* ARROW-4147 - [JAVA] Reduce heap usage for variable width vectors
* ARROW-4149 - [CI/C++] Parquet test misses ZSTD compression codec in CMake 3.2 nightly builds
* ARROW-4157 - [C++] -Wdocumentation failures with clang 6.0 on Ubuntu 18.04
* ARROW-4171 - [Rust] fix parquet crate release version
* ARROW-4173 - JIRA library name is wrong in error message of dev/merge\_arrow\_pr.py
* ARROW-4178 - [C++] Fix TSan and UBSan errors
* ARROW-4179 - [Python] Tests crashing on all platforms in CI
* ARROW-4185 - [Rust] Appveyor builds are broken
* ARROW-4186 - [C++] BitmapWriters clobber the first byte when length=0
* ARROW-4188 - [Rust] There should be a README in the top level rust directory
* ARROW-4197 - [C++] Emscripten compiler fails building Arrow
* ARROW-4200 - [C++] conda\_env\_\* files cannot be used to create a fresh conda environment on Windows
* ARROW-4209 - [Gandiva] returning IR structs causes issues with windows
* ARROW-4215 - [GLib] Fix typos in documentation
* ARROW-4227 - [GLib] Field in composite data type returns wrong data type
* ARROW-4237 - [Packaging] Fix CMAKE\_INSTALL\_LIBDIR in release verification script
* ARROW-4238 - [Packaging] Fix RC version conflict between crossbow and rake
* ARROW-4246 - [Plasma][Python] PlasmaClient.list doesn't work with CUDA enabled Plasma
* ARROW-4256 - [Release] Update Windows verification script for 0.12 release
* ARROW-4258 - [Python] Safe cast fails from numpy float64 array with nans to integer
* ARROW-4260 - [Python] test\_serialize\_deserialize\_pandas is failing in multiple build entries
## Improvement
* ARROW-1423 - [C++] Create non-owned CudaContext from context handle provided by thirdparty user
* ARROW-1688 - [Java] Fail build on checkstyle warnings
* ARROW-1993 - [Python] Add function for determining implied Arrow schema from pandas.DataFrame
* ARROW-2211 - [C++] Use simpler hash functions for integers
* ARROW-2216 - [CI] CI descriptions and envars are misleading
* ARROW-2475 - [Format] Confusing array length description
* ARROW-2483 - [Rust] use bit-packing for boolean vectors
* ARROW-2504 - [Website] Add ApacheCon NA link
* ARROW-2624 - [Python] Random schema and data generator for Arrow conversion and Parquet testing
* ARROW-2637 - [C++/Python] Build support and instructions for development on Alpine Linux
* ARROW-2670 - [C++/Python] Add Ubuntu 18.04 / gcc7 as a nightly build
* ARROW-2673 - [Python] Add documentation + docstring for ARROW-2661
* ARROW-2684 - [Python] Various documentation improvements
* ARROW-2759 - Export notification socket of Plasma
* ARROW-2803 - [C++] Put hashing function into src/arrow/util
* ARROW-2807 - [Python] Enable memory-mapping to be toggled in get\_reader when reading Parquet files
* ARROW-2808 - [Python] Add unit tests for ProxyMemoryPool, enable new default MemoryPool to be constructed
* ARROW-2919 - [C++] Improve error message when listing empty HDFS file
* ARROW-2968 - [R] Multi-threaded conversion from Arrow table to R data.frame
* ARROW-3038 - [Go] add support for StringArray
* ARROW-3063 - [Go] move list of supported/TODO features to confluence
* ARROW-3070 - [Release] Host binary artifacts for RCs and releases on ASF Bintray account instead of dist/mirror system
* ARROW-3131 - [Go] add test for Go-1.11
* ARROW-3161 - [Packaging] Ensure to run pyarrow unit tests in conda and wheel builds
* ARROW-3169 - [C++] Break array-test.cc and array.cc into multiple compilation units
* ARROW-3199 - [Plasma] Check for EAGAIN in recvmsg and sendmsg
* ARROW-3209 - [C++] Rename libarrow\_gpu to libarrow\_cuda
* ARROW-3230 - [Python] Missing comparisons on ChunkedArray, Table
* ARROW-3233 - [Python] Sphinx documentation for pyarrow.cuda GPU support
* ARROW-3278 - [Python] Retrieve StructType's and StructArray's field by name
* ARROW-3291 - [C++] Convenience API for constructing arrow::io::BufferReader from std::string
* ARROW-3312 - [R] Use same .clang-format file for both R binding C++ code and main C++ codebase
* ARROW-3318 - [C++] Convenience method for reading all batches from an IPC stream or file as arrow::Table
* ARROW-3331 - [C++] Add re2 to ThirdpartyToolchain
* ARROW-3353 - [Packaging] Build python 3.7 wheels
* ARROW-3358 - [Gandiva][C++] Replace usages of gandiva/status.h with arrow/status.h
* ARROW-3362 - [R] Guard against null buffers
* ARROW-3366 - [R] Dockerfile for docker-compose setup
* ARROW-3368 - [Integration/CI/Python] Add dask integration test to docker-compose setup
* ARROW-3402 - [Gandiva][C++] Utilize common bitmap operation implementations in precompiled IR routines
* ARROW-3409 - [C++] Add streaming compression interfaces
* ARROW-3421 - [C++] Add include-what-you-use setup to primary docker-compose.yml
* ARROW-3429 - [Packaging] Add a script to release binaries that use source archive at dist.apache.org
* ARROW-3430 - [Packaging] Add workaround to verify 0.11.0
* ARROW-3431 - [GLib] Include Gemfile to archive
* ARROW-3432 - [Packaging] Variables aren't expanded Subversion commit message
* ARROW-3440 - [Gandiva][C++] Remove outdated cpp/src/gandiva/README.md, add build documentation to cpp/README.md
* ARROW-3441 - [Gandiva][C++] Produce fewer test executables
* ARROW-3442 - [C++] Use dynamic linking for unit tests, ensure coverage working properly with clang
* ARROW-3451 - [Python] Allocate CUDA memory from a CUcontext created by numba.cuda
* ARROW-3455 - [Gandiva][C++] Support pkg-config for Gandiva
* ARROW-3456 - [CI] Reuse docker images and optimize docker-compose containers
* ARROW-3460 - [Packaging] Add a script to rebase master on local release branch
* ARROW-3461 - [Packaging] Add a script to upload RC artifacts as the official release
* ARROW-3462 - [Packaging] Update CHANGELOG for 0.11.0
* ARROW-3463 - [Website] Update for 0.11.0
* ARROW-3465 - [Documentation] Fix gen\_apidocs' docker image
* ARROW-3473 - [Format] Update Layout.md document to clarify use of 64-bit array lengths
* ARROW-3474 - [GLib] Extend gparquet API with get\_schema and read\_column
* ARROW-3479 - [R] Support to write record\_batch as stream
* ARROW-3482 - [C++] Build with JEMALLOC by default
* ARROW-3488 - [Packaging] Separate crossbow task definition files for packaging and tests
* ARROW-3492 - [C++] Build jemalloc in parallel
* ARROW-3493 - [Java] Document BOUNDS\_CHECKING\_ENABLED
* ARROW-3506 - [Packaging] Nightly tests for docker-compose images
* ARROW-3518 - [C++] Detect HOMEBREW\_PREFIX automatically
* ARROW-3521 - [GLib] Run Python using find\_program in meson.build
* ARROW-3530 - [Java/Python] Add conversion for pyarrow.Schema from org.apache…pojo.Schema
* ARROW-3533 - [Python/Documentation] Use sphinx\_rtd\_theme instead of Bootstrap
* ARROW-3539 - [CI/Packaging] Update scripts to build against vendored jemalloc
* ARROW-3542 - [C++] Use unsafe appends when building array from CSV
* ARROW-3545 - [C++/Python] Normalize child/field terminology with StructType
* ARROW-3547 - [R] Protect against Null crash when reading from RecordBatch
* ARROW-3548 - Speed up storing small objects in the object store.
* ARROW-3551 - Change MapD to OmniSci on Powered By page
* ARROW-3556 - [CI] Disable optimizations on Windows
* ARROW-3557 - [Python] Set language\_level in Cython sources
* ARROW-3558 - [Plasma] Remove fatal error when plasma client calls get on an unsealed object that it created.
* ARROW-3559 - Statically link libraries for plasma\_store\_server executable.
* ARROW-3562 - [R] Disallow creation of objects with null shared\_ptr<T>
* ARROW-3563 - [C++] Declare public link dependencies so arrow\_static, plasma\_static automatically pull in transitive dependencies
* ARROW-3566 - Clarify that the type of dictionary encoded field should be the encoded(index) type
* ARROW-3574 - Fix remaining bug with plasma static versus shared libraries.
* ARROW-3576 - [Python] Expose compressed file readers as NativeFile
* ARROW-3577 - [Go] add support for ChunkedArray
* ARROW-3581 - [Gandiva][C++] ARROW\_PROTOBUF\_USE\_SHARED isn't used
* ARROW-3582 - [CI] Gandiva C++ build is always triggered
* ARROW-3584 - [Go] add support for Table
* ARROW-3587 - [Python] Efficient serialization for Arrow Objects (array, table, tensor, etc)
* ARROW-3589 - [Gandiva] Make it possible to compile gandiva without JNI
* ARROW-3591 - [R] Support to collect decimal type
* ARROW-3600 - [Packaging] Support Ubuntu 18.10
* ARROW-3601 - [Rust] Release 0.11.0
* ARROW-3602 - [Gandiva] [Python] Add preliminary Cython bindings for Gandiva
* ARROW-3603 - [Gandiva][C++] Can't build with vendored Boost
* ARROW-3605 - Remove AE library from plasma header files.
* ARROW-3607 - [Java] delete() method via JNI for plasma
* ARROW-3611 - Give error more quickly when pyarrow serialization context is used incorrectly.
* ARROW-3612 - [Go] implement RecordBatch and RecordBatchReader
* ARROW-3615 - [R] Support for NaN
* ARROW-3618 - [Packaging/Documentation] Add \`-c conda-forge\` option to avoid PackagesNotFoundError
* ARROW-3620 - [Python] Document multithreading options in Sphinx and add to api.rst
* ARROW-3621 - [Go] implement TableBatchReader
* ARROW-3622 - [Go] implement Schema.Equal
* ARROW-3623 - [Go] implement Field.Equal
* ARROW-3624 - [Python/C++] Support for zero-sized device buffers
* ARROW-3626 - [Go] add a CSV TableReader
* ARROW-3629 - [Python] Add write\_to\_dataset to Python Sphinx API listing
* ARROW-3632 - [Packaging] Update deb names in dev/tasks/tasks.yml in dev/release/00-prepare.sh
* ARROW-3633 - [Packaging] Update deb names in dev/tasks/tasks.yml for 0.12.0
* ARROW-3634 - [GLib] cuda.cpp compile error
* ARROW-3636 - [C++/Python] Update arrow/python/pyarrow\_api.h
* ARROW-3638 - [C++][Python] Move reading from Feather as Table feature to C++ from Python
* ARROW-3639 - [Packaging] Run gandiva nightly packaging tasks
* ARROW-3640 - [Go] add support for Tensors
* ARROW-3641 - [C++/Python] remove public keyword from Cython api functions
* ARROW-3642 - [C++] Add arrowConfig.cmake generation
* ARROW-3645 - [Python] Document compression support in Sphinx
* ARROW-3646 - [Python] Add convenience factories to create IO streams
* ARROW-3647 - [R] Crash after unloading bit64 package
* ARROW-3648 - [Plasma] Add API to get metadata and data at the same time
* ARROW-3649 - [Rust] Refactor MutableBuffer's resize
* ARROW-3656 - [C++] Allow whitespace in numeric CSV fields
* ARROW-3657 - [R] Require bit64 package
* ARROW-3659 - [C++] Clang Travis build (matrix entry 2) might not actually be using clang
* ARROW-3661 - [Gandiva][GLib] Improve constant name
* ARROW-3666 - [C++] Improve CSV parser performance
* ARROW-3672 - [Go] implement Time32 array
* ARROW-3673 - [Go] implement Time64 array
* ARROW-3674 - [Go] implement Date32 array
* ARROW-3675 - [Go] implement Date64 array
* ARROW-3677 - [Go] implement FixedSizedBinary array
* ARROW-3681 - [Go] add benchmarks for CSV reader
* ARROW-3682 - [Go] unexport encoding/csv.Reader from CSV reader
* ARROW-3683 - [Go] add functional-option style to CSV reader
* ARROW-3684 - [Go] add chunk size option to CSV reader
* ARROW-3693 - [R] Invalid buffer for empty characters with null data
* ARROW-3694 - [Java] Avoid superfluous string creation when logging level is disabled
* ARROW-3695 - [Gandiva] Use add\_arrow\_lib()
* ARROW-3696 - [C++] Add feather::TableWriter::Write(table)
* ARROW-3697 - [Ruby] Add schema#[]
* ARROW-3704 - [Gandiva] Can't build with g++ 8.2.0
* ARROW-3708 - [Packaging] Nightly CentOS builds are failing
* ARROW-3718 - [Gandiva] Remove spurious gtest include
* ARROW-3719 - [GLib] Support read/write table to/from Feather
* ARROW-3720 - [GLib] Use "indices" instead of "indexes"
* ARROW-3721 - [Gandiva] [Python] Support all Gandiva literals
* ARROW-3722 - [C++] Allow specifying column types to CSV reader
* ARROW-3724 - [GLib] Update gitignore
* ARROW-3725 - [GLib] Add field readers to GArrowStructDataType
* ARROW-3727 - [Python] Document use of pyarrow.foreign\_buffer, cuda.foreign\_buffer in Sphinx
* ARROW-3733 - [GLib] Add to\_string() to GArrowTable and GArrowColumn
* ARROW-3736 - [CI/Docker] Ninja test in docker-compose run cpp hangs
* ARROW-3743 - [Ruby] Add support for saving/loading Feather
* ARROW-3744 - [Ruby] Use garrow\_table\_to\_string() in Arrow::Table#to\_s
* ARROW-3746 - [Gandiva] [Python] Make it possible to list all functions registered with Gandiva
* ARROW-3747 - [C++] Flip order of data members in arrow::Decimal128
* ARROW-3748 - [GLib] Add GArrowCSVReader
* ARROW-3749 - [GLib] Typos in documentation and test case name
* ARROW-3751 - [Python] Add more cython bindings for gandiva
* ARROW-3752 - [C++] Remove unused status::ArrowError
* ARROW-3753 - [Gandiva] Remove debug print
* ARROW-3773 - [C++] Remove duplicated AssertArraysEqual code in parquet/arrow/arrow-reader-writer-test.cc
* ARROW-3778 - [C++] Don't put implementations in test-util.h
* ARROW-3781 - [C++] Configure buffer size in arrow::io::BufferedOutputStream
* ARROW-3784 - [R] Array with type fails with x is not a vector
* ARROW-3785 - [C++] Use double-conversion conda package in CI toolchain
* ARROW-3787 - Implement From<ListArray> for BinaryArray
* ARROW-3788 - [Ruby] Add support for CSV parser written in C++
* ARROW-3795 - [R] Support for retrieving NAs from INT64 arrays
* ARROW-3796 - [Rust] Add Example for PrimitiveArrayBuilder
* ARROW-3800 - [C++] Vendor a string\_view backport
* ARROW-3803 - [C++/Python] Split C++ and Python unit test Travis CI jobs, run all C++ tests (including Gandiva) together
* ARROW-3819 - [Packaging] Update conda variant files to conform with feedstock after compiler migration
* ARROW-3821 - [Format/Documentation]: Fix typos and grammar issues in Flight.proto comments
* ARROW-3825 - [Python] The Python README.md does not show how to run the unit test suite
* ARROW-3834 - [Doc] Merge Python & C++ and move to top-level
* ARROW-3836 - [C++] Add PREFIX option to ADD\_ARROW\_BENCHMARK
* ARROW-3839 - [Rust] Add ability to infer schema in CSV reader
* ARROW-3841 - [C++] warning: catching polymorphic type by value
* ARROW-3845 - [Gandiva] [GLib] Add GGandivaNode
* ARROW-3847 - [GLib] Remove unnecessary “\”.
* ARROW-3849 - Leverage Armv8 crc32 extension instructions to accelerate the hash computation for Arm64.
* ARROW-3852 - [C++] used uninitialized warning
* ARROW-3853 - [C++] Implement string to timestamp cast
* ARROW-3854 - [GLib] Deprecate garrow\_gio\_{input,output}\_stream\_get\_raw()
* ARROW-3855 - [Rust] Schema/Field/Datatype should implement serde traits
* ARROW-3856 - [Ruby] Support compressed CSV save/load
* ARROW-3858 - [GLib] Use {class\_name}\_get\_instance\_private
* ARROW-3862 - [C++] Improve dependencies download script
* ARROW-3863 - [GLib] Use travis\_retry with brew bundle command
* ARROW-3865 - [Packaging] Add double-conversion dependency to conda forge recipes and the windows wheel build
* ARROW-3868 - [Rust] Build against nightly Rust in CI
* ARROW-3870 - [C++] Add Peek to InputStream API
* ARROW-3871 - [R] Replace usages of C++ GetValuesSafely with new methods on ArrayData
* ARROW-3878 - [Rust] Improve primitive types
* ARROW-3880 - [Rust] PrimitiveArray<T> should support simple math operations
* ARROW-3883 - [Rust] Update Rust README to reflect new functionality
* ARROW-3884 - [Python] Add LLVM6 to manylinux1 base image
* ARROW-3885 - [Rust] Update version to 0.12.0 and update release instructions on wiki
* ARROW-3886 - [C++] Additional test cases for ARROW-3831
* ARROW-3893 - [C++] Improve adaptive int builder performance
* ARROW-3895 - [Rust] CSV reader should return Result<Option<>> not Option<Result<>>
* ARROW-3905 - [Ruby] Add StructDataType#[]
* ARROW-3906 - [C++] Break builder.cc into multiple compilation units
* ARROW-3908 - [Rust] Update rust dockerfile to use nightly toolchain
* ARROW-3910 - [Python] Set date\_as\_object to True in \*.to\_pandas as default after deduplicating logic implemented
* ARROW-3911 - [Python] Deduplicate datetime.date objects in Table.to\_pandas internals
* ARROW-3913 - [Gandiva] [GLib] Add GGandivaLiteralNode
* ARROW-3914 - [C++/Python/Packaging] Docker-compose setup for Alpine linux
* ARROW-3922 - [C++] improve the performance of bitmap operations
* ARROW-3925 - [Python] Include autoconf in Linux/macOS dependencies in conda environment
* ARROW-3928 - [Python] Add option to deduplicate PyBytes / PyString / PyUnicode objects in Table.to\_pandas conversion path
* ARROW-3929 - [Go] improve memory usage of CSV reader to improve runtime performances
* ARROW-3930 - [C++] Random test data generation is slow
* ARROW-3932 - [Python/Documentation] Include Benchmarks.md in Sphinx docs
* ARROW-3934 - [Gandiva] Don't compile precompiled tests if ARROW\_GANDIVA\_BUILD\_TESTS=off
* ARROW-3950 - [Plasma] Don't force loading the TensorFlow op on import
* ARROW-3952 - [Rust] Specify edition="2018" in Cargo.toml
* ARROW-3958 - [Plasma] Reduce number of IPCs
* ARROW-3960 - [Rust] remove extern crate for Rust 2018
* ARROW-3963 - [Packaging/Docker] Nightly test for building sphinx documentations
* ARROW-3964 - [Go] More readable example for csv.Reader
* ARROW-3967 - [Gandiva] [C++] Make gandiva/node.h public
* ARROW-3971 - [Python] Remove APIs deprecated in 0.11 and prior
* ARROW-3974 - [C++] Combine field\_builders\_ and children\_ members in array/builder.h
* ARROW-3982 - [C++] Allow "binary" input in simple JSON format
* ARROW-3984 - [C++] Exit with error if user hits zstd ExternalProject path
* ARROW-3986 - [C++] Write prose documentation
* ARROW-3988 - [C++] Do not build unit tests by default in build system
* ARROW-3994 - [C++] Remove ARROW\_GANDIVA\_BUILD\_TESTS option
* ARROW-3995 - [CI] Use understandable names in Travis Matrix
* ARROW-3997 - [C++] [Doc] Clarify dictionary encoding integer signedness (and width?)
* ARROW-4002 - [C++][Gandiva] Remove CMake version check
* ARROW-4004 - [GLib] Replace GPU with CUDA
* ARROW-4005 - [Plasma] [GLib] Add gplasma\_client\_disconnect()
* ARROW-4006 - Add CODE\_OF\_CONDUCT.md
* ARROW-4009 - [CI] Run Valgrind and C++ code coverage in different builds
* ARROW-4015 - [Plasma] remove legacy interfaces for plasma manager
* ARROW-4017 - [C++] Check and update vendored libraries
* ARROW-4026 - [C++] Use separate modular $COMPONENT-test targets for unit tests
* ARROW-4029 - [C++] Define and document naming convention for internal / private header files not to be installed
* ARROW-4030 - [CI] Use travis\_terminate to halt builds when a step fails
* ARROW-4035 - [Ruby] Support msys2 mingw dependencies
* ARROW-4037 - [Packaging] Remove workaround to verify 0.11.0
* ARROW-4038 - [Rust] Add array\_ops methods for boolean AND, OR, NOT
* ARROW-4042 - [Rust] Inconsistent method naming between BinaryArray and PrimitiveArray
* ARROW-4048 - [GLib] Return ChunkedArray instead of Array in gparquet\_arrow\_file\_reader\_read\_column
* ARROW-4051 - [Gandiva] [GLib] Add support for null literal
* ARROW-4054 - [Python] Update gtest, flatbuffers and OpenSSL in manylinux1 base image
* ARROW-4069 - [Python] Add tests for casting from binary to utf8
* ARROW-4080 - [Rust] Improving lengthy build times in Appveyor
* ARROW-4082 - [C++] CMake tweaks: allow RelWithDebInfo, improve FindClangTools
* ARROW-4084 - [C++] Simplify Status and stringstream boilerplate
* ARROW-4085 - [GLib] Use "field" for struct data type
* ARROW-4087 - [C++] Make CSV nulls configurable
* ARROW-4093 - [C++] Deprecated method suggests wrong method
* ARROW-4098 - [Python] Deprecate pyarrow.open\_stream,open\_file in favor of pa.ipc.open\_stream/open\_file
* ARROW-4102 - [C++] FixedSizeBinary identity cast not implemented
* ARROW-4103 - [Documentation] Add README to docs/ root
* ARROW-4105 - Add rust-toolchain to enforce user to use nightly toolchain for building
* ARROW-4107 - [Python] Use ninja in pyarrow manylinux1 build
* ARROW-4116 - [Python] Clarify in development.rst that virtualenv cannot be used with miniconda/Anaconda
* ARROW-4122 - [C++] Initialize some uninitialized class members
* ARROW-4127 - [Documentation] Add Docker build instructions
* ARROW-4129 - [Python] Fix syntax problem in benchmark docs
* ARROW-4152 - [GLib] Remove an example to show Torch integration
* ARROW-4155 - [Rust] Implement array\_ops::sum() for PrimitiveArray<T>
* ARROW-4158 - [Dev] Allow maintainers to use a GitHub API token when merging pull requests
* ARROW-4160 - [Rust] Add README and executable files to parquet
* ARROW-4168 - [GLib] Use property to keep GArrowDataType passed in garrow\_field\_new()
* ARROW-4177 - [C++] Add ThreadPool and TaskGroup microbenchmarks
* ARROW-4191 - [C++] Use same CC and AR for jemalloc as for the main sources
* ARROW-4199 - [GLib] Add garrow\_seekable\_input\_stream\_peek()
* ARROW-4207 - [Gandiva] [GLib] Add support for IfNode
* ARROW-4211 - [GLib] Add GArrowFixedSizeBinaryDataType
* ARROW-4216 - [Python] Add CUDA API docs
* ARROW-4228 - [GLib] Add garrow\_list\_data\_type\_get\_field()
* ARROW-4229 - [Packaging] Set crossbow target explicitly to enable building arbitrary arrow repo
* ARROW-4233 - [Packaging] Create a Dockerfile to build source archive
* ARROW-4240 - [Packaging] Documents for Plasma GLib and Gandiva GLib are missing in source archive
* ARROW-4243 - [Python] Test failure with pandas 0.24.0rc1
* ARROW-4249 - [Plasma] Remove reference to logging.h from plasma/common.h
* ARROW-4257 - [Release] Update release verification script to check binaries on Bintray
* ARROW-4269 - [Python] AttributeError: module 'pandas.core' has no attribute 'arrays'
* ARROW-912 - [Python] Account for multiarch systems in development.rst
## New Feature
* ARROW-1019 - [C++] Implement input stream and output stream with Gzip codec
* ARROW-1492 - [C++] Type casting function kernel suite
* ARROW-1696 - [C++] Add codec benchmarks
* ARROW-2712 - [C#] Initial C# .NET library
* ARROW-3020 - [Python] Addition of option to allow empty Parquet row groups
* ARROW-3108 - [C++] arrow::PrettyPrint for Table instances
* ARROW-3126 - [Python] Make Buffered\* IO classes available to Python, incorporate into input\_stream, output\_stream factory functions
* ARROW-3184 - [C++] Add modular build targets, "all" target, and require explicit target when invoking make or ninja
* ARROW-3303 - [C++] Enable example arrays to be written with a simplified JSON representation
* ARROW-3306 - [R] Objects and support functions different kinds of arrow::Buffer
* ARROW-3307 - [R] Convert chunked arrow::Column to R vector
* ARROW-3310 - [R] Create wrapper classes for various Arrow IO interfaces
* ARROW-3340 - [R] support for dates and time classes
* ARROW-3355 - [R] Support for factors
* ARROW-3380 - [Python] Support reading CSV files and more from a gzipped file
* ARROW-3381 - [C++] Implement InputStream for bz2 files
* ARROW-3387 - [C++] Function to cast binary to string/utf8 with UTF8 validation
* ARROW-3398 - [Rust] Update existing Builder to use MutableBuffer internally
* ARROW-3407 - [C++] Add UTF8 conversion modes in CSV reader conversion options
* ARROW-3439 - [R] R language bindings for Feather format
* ARROW-3450 - [R] Wrap MemoryMappedFile class
* ARROW-3490 - [R] streaming arrow objects to output streams
* ARROW-3499 - [R] Expose arrow::ipc::Message type
* ARROW-3504 - [Plasma] Add support for Plasma Client to put/get raw bytes without pyarrow serialization.
* ARROW-3505 - [R] Read record batch and table
* ARROW-3515 - Introduce NumericTensor class
* ARROW-3529 - [Ruby] Import Red Parquet
* ARROW-3536 - [C++] Fast UTF8 validation functions
* ARROW-3537 - [Rust] Implement Tensor Type
* ARROW-3540 - [Rust] Incorporate BooleanArray into PrimitiveArray
* ARROW-3555 - [Plasma] Unify plasma client get function using metadata.
* ARROW-3567 - [Gandiva] [GLib] Add GLib bindings of Gandiva
* ARROW-3583 - [Python/Java] Create RecordBatch from VectorSchemaRoot
* ARROW-3592 - [Python] Get BinaryArray value as zero copy memory view
* ARROW-3608 - [R] Support for time32 and time64 array types
* ARROW-3610 - [C++] Add interface to turn stl\_allocator into arrow::MemoryPool
* ARROW-3630 - [Plasma] [GLib] Add GLib bindings of Plasma
* ARROW-3660 - [C++] Don't unnecessarily lock MemoryMappedFile for resizing in readonly files
* ARROW-3662 - [C++] Add a const overload to MemoryMappedFile::GetSize
* ARROW-3692 - [Gandiva] [Ruby] Add Ruby bindings of Gandiva
* ARROW-3723 - [Plasma] [Ruby] Add Ruby bindings of Plasma
* ARROW-3726 - [Rust] CSV Reader & Writer
* ARROW-3731 - [R] R API for reading and writing Parquet files
* ARROW-3738 - [C++] Add CSV conversion option to parse ISO8601-like timestamp strings
* ARROW-3741 - [R] Add support for arrow::compute::Cast to convert Arrow arrays from one type to another
* ARROW-3755 - [GLib] Support for CompressedInputStream, CompressedOutputStream
* ARROW-3760 - [R] Support Arrow CSV reader
* ARROW-3782 - [C++] Implement BufferedReader for C++
* ARROW-3798 - [GLib] Add support for column type CSV read options
* ARROW-3807 - [R] Missing Field API
* ARROW-3823 - [R] + buffer.complex
* ARROW-3830 - [GLib] Add GArrowCodec
* ARROW-3842 - [R] RecordBatchStreamWriter api
* ARROW-3864 - [GLib] Add support for allow-float-truncate cast option
* ARROW-3900 - [GLib] Add garrow\_mutable\_buffer\_set\_data()
* ARROW-3912 - [Plasma][GLib] Add support for creating and referring objects
* ARROW-3916 - [Python] Support caller-provided filesystem in \`ParquetWriter\` constructor
* ARROW-3924 - [Packaging][Plasma] Add support for Plasma deb/rpm packages
* ARROW-3938 - [Packaging] Stop to refer java/pom.xml to get version information
* ARROW-3945 - [Website] Blog post about Gandiva code donation
* ARROW-3946 - [GLib] Add support for union
* ARROW-3959 - [Rust] Time and Timestamp Support
* ARROW-4028 - [Rust] Merge parquet-rs codebase
* ARROW-4112 - [Packaging][Gandiva] Add support for deb packages
* ARROW-4132 - [GLib] Add more GArrowTable constructors
* ARROW-4141 - [Ruby] Add support for creating schema from raw Ruby objects
* ARROW-4153 - [GLib] Add builder\_append\_value() for consistency
* ARROW-4154 - [GLib] Add GArrowDecimal128DataType
* ARROW-4161 - [GLib] Add GPlasmaClientOptions
* ARROW-4162 - [Ruby] Add support for creating data types from description
* ARROW-4166 - [Ruby] Add support for saving to and loading from buffer
* ARROW-4174 - [Ruby] Add support for building composite array from raw Ruby objects
* ARROW-4175 - [GLib] Add support for decimal compare operators
* ARROW-4183 - [Ruby] Add Arrow::Struct as an element of Arrow::StructArray
* ARROW-4184 - [Ruby] Add Arrow::RecordBatch#to\_table
* ARROW-4214 - [Ruby] Add support for building RecordBatch from raw Ruby objects
* ARROW-45 - [Python] Add unnest/flatten function for List types
* ARROW-554 - [C++] Implement functions to conform unequal dictionaries amongst multiple Arrow arrays
* ARROW-854 - [Format] Support sparse tensor
## Sub-task
* ARROW-3272 - [Java] Document checkstyle deviations from Google style guide
* ARROW-3273 - [Java] checkstyle - fix javadoc style
* ARROW-3323 - [Java] checkstyle - fix naming
* ARROW-3347 - [Rust] Implement PrimitiveArrayBuilder
* ARROW-3568 - [Packaging] Run pyarrow unittests for windows wheels
* ARROW-3569 - [Packaging] Run pyarrow unittests when building conda package
* ARROW-3588 - [Java] checkstyle - fix license
* ARROW-3616 - [Java] checkstyle - fix remaining coding checks
* ARROW-3664 - [Rust] Add benchmark for PrimitiveArrayBuilder
* ARROW-3665 - [Rust] Implement StructArrayBuilder
* ARROW-3713 - [Rust] Implement BinaryArrayBuilder
* ARROW-3891 - [Java] Remove Long.bitCount with simple bitmap operations
* ARROW-3939 - [Rust] Remove macro definition for ListArrayBuilder
* ARROW-3948 - [CI][GLib] Set timeout to Homebrew
* ARROW-4060 - [Rust] Add Parquet/Arrow schema converter
* ARROW-4075 - [Rust] Reuse array builder after calling finish()
* ARROW-4172 - [Rust] more consistent naming in array builders
## Task
* ARROW-2337 - [Scripts] Windows release verification script should use boost DSOs instead of static linkage
* ARROW-2535 - [Python] Provide pre-commit hooks that check flake8
* ARROW-2560 - [Rust] The Rust README should include Rust-specific information on contributing
* ARROW-2653 - [C++] Refactor hash table support
* ARROW-2720 - [C++] Clean up cmake CXX\_STANDARD and PIC flag setting
* ARROW-3194 - [Java] Fix setValueCount in spitAndTransfer for variable width vectors
* ARROW-3383 - [Java] Run Gandiva tests in Travis CI
* ARROW-3384 - [Gandiva] Sync remaining commits from gandiva repo
* ARROW-3385 - [Java] [Gandiva] Deploy gandiva snapshot jars automatically
* ARROW-3427 - [C++] Add Windows support, Unix static libs for double-conversion package in conda-forge
* ARROW-3469 - [Gandiva] add travis entry for gandiva on OSX
* ARROW-3472 - [Gandiva] remove gandiva helpers library
* ARROW-3487 - [Gandiva] simplify NULL\_IF\_NULL functions that can return errors
* ARROW-3489 - [Gandiva] Support for in expressions
* ARROW-3501 - [Gandiva] Enable building with gcc 4.8.x on Ubuntu Trusty, similar distros
* ARROW-3519 - [Gandiva] Add support for functions that can return variable len output
* ARROW-3597 - [Gandiva] gandiva should integrate with ADD\_ARROW\_TEST for tests
* ARROW-3609 - [Gandiva] Move benchmark tests out of unit test
* ARROW-3701 - [Gandiva] Add support for decimal operations
* ARROW-3859 - [Java] Fix ComplexWriter backward incompatible change
* ARROW-3860 - [Gandiva] [C++] Add option to use -static-libstdc++ when building libgandiva\_jni.so
* ARROW-3867 - [Documentation] Uploading binary release artifacts to Bintray
* ARROW-3970 - [Gandiva][C++] Remove unnecessary boost dependencies
* ARROW-3983 - [Gandiva][Crossbow] Use static boost while packaging
* ARROW-3993 - [JS] CI Jobs Failing
* ARROW-4039 - Update link to 'development.rst' page from Python README.md
* ARROW-4043 - [Packaging/Docker] Python tests on alpine miss pytest dependency
* ARROW-4044 - [Packaging/Python] Add hypothesis test dependency to pyarrow conda recipe
* ARROW-4045 - [Packaging/Python] Add hypothesis test dependency to wheel crossbow tests
* ARROW-4100 - [Gandiva][C++] Fix regex to ignore "." character
* ARROW-4148 - [CI/Python] Disable ORC on nightly Alpine builds
* ARROW-4151 - [Rust] Restructure project directories
* ARROW-4210 - [Python] Mention boost-cpp directly in the conda meta.yaml for pyarrow
* ARROW-4239 - [Release] Updating .deb package names in the prepare script failed to run on OSX
* ARROW-4241 - [Packaging] Disable crossbow conda OSX clang builds
* ARROW-4266 - [Python][CI] Disable ORC tests in dask integration test
* ARROW-4270 - [Packaging][Conda] Update xcode version and remove toolchain builds
## Test
* ARROW-4137 - [Rust] Move parquet code into a separate crate
## Wish
* ARROW-3248 - [C++] Arrow tests should have label "arrow"
* ARROW-3260 - [CI] Make linting a separate job
* ARROW-3844 - [C++] Remove ARROW\_USE\_SSE and ARROW\_SSE3
* ARROW-3851 - [C++] "make check-format" is slow
* ARROW-4079 - [C++] Add machine benchmarks
* ARROW-4150 - [C++] Do not return buffers containing nullptr from internal allocations
* ARROW-4156 - [C++] xcodebuild failure for cmake generated project
# Apache Arrow 0.11.0 (08 October 2018)
## Bug
* ARROW-1380 - [C++] Fix "still reachable" valgrind warnings when PLASMA\_VALGRIND=1
* ARROW-1661 - [Python] Python 3.7 support
* ARROW-1799 - [Plasma C++] Make unittest does not create plasma store executable
* ARROW-1996 - [Python] pyarrow.read\_serialized cannot read concatenated records
* ARROW-2027 - [C++] ipc::Message::SerializeTo does not pad the message body
* ARROW-2220 - Change default fix version in merge tool to be the next mainline release version
* ARROW-2310 - Source release scripts fail with Java8
* ARROW-2646 - [C++/Python] Pandas roundtrip for date objects
* ARROW-2776 - [C++] Do not pass -Wno-noexcept-type for compilers that do not support it
* ARROW-2782 - [Python] Ongoing Travis CI failures in Plasma unit tests
* ARROW-2814 - [Python] Unify PyObject\* sequence conversion paths for built-in sequences, NumPy arrays
* ARROW-2854 - [C++/Python] Casting float NaN to int should raise an error on safe cast
* ARROW-2925 - [JS] Documentation failing in docker container
* ARROW-2965 - [Python] Possible uint64 overflow issues in python\_to\_arrow.cc
* ARROW-2966 - [Python] Data type conversion error
* ARROW-2973 - [Python] pitrou/asv.git@customize\_commands does not work with the "new" way of activating conda
* ARROW-2974 - [Python] Replace usages of "source activate" with "conda activate" in CI scripts
* ARROW-2986 - [C++] /EHsc possibly needed for Visual Studio 2015 builds
* ARROW-2992 - [Python] Parquet benchmark failure
* ARROW-3006 - [GLib] .gir/.typelib for GPU aren't installed
* ARROW-3007 - [Packaging] libarrow-gpu10 deb for Ubuntu 18.04 has broken dependencies
* ARROW-3011 - [CI] Remove Slack notification
* ARROW-3012 - [Python] Installation crashes with setuptools\_scm error
* ARROW-3013 - [Website] Fix download links on website for tarballs, checksums
* ARROW-3015 - [Python] Fix documentation typo for pa.uint8
* ARROW-3047 - [C++] cmake downloads and builds ORC even though it's installed
* ARROW-3049 - [C++/Python] ORC reader fails on empty file
* ARROW-3053 - [Python] Pandas decimal conversion segfault
* ARROW-3056 - [Python] Indicate in NativeFile docstrings methods that are part of the RawIOBase API but not implemented
* ARROW-3061 - [Java] headroom does not take into account reservation
* ARROW-3065 - [Python] concat\_tables() failing from bad Pandas Metadata
* ARROW-3083 - [Python] Version in manylinux1 wheel builds is wrong
* ARROW-3093 - [C++] Linking errors with ORC enabled
* ARROW-3095 - [Python] test\_plasma.py fails
* ARROW-3098 - [Python] BufferReader doesn't adhere to the seek protocol
* ARROW-3100 - [CI] C/glib build broken on OS X
* ARROW-3125 - [Python] Update ASV instructions
* ARROW-3132 - Regenerate 0.10.0 changelog
* ARROW-3140 - [Plasma] Plasma fails building with GPU enabled
* ARROW-3141 - [Python] Tensorflow support in pyarrow wheels pins numpy>=1.14
* ARROW-3145 - [C++] Thrift compiler reruns in arrow/dbi/hiveserver2/thrift when using Ninja build
* ARROW-3173 - [Rust] dynamic\_types example does not run
* ARROW-3175 - [Java] Upgrade to official FlatBuffers release (Flatbuffers incompatibility)
* ARROW-3183 - [Python] get\_library\_dirs on Windows can give the wrong directory
* ARROW-3188 - [Python] Table.from\_arrays segfaults if lists and schema are passed
* ARROW-3190 - [C++] "WriteableFile" is misspelled, should be renamed "WritableFile" with deprecation for old name
* ARROW-3206 - [C++] Building with ARROW\_HIVESERVER2=ON with unit tests disabled causes error
* ARROW-3227 - [Python] NativeFile.write shouldn't accept unicode strings
* ARROW-3228 - [Python] Immutability of bytes is ignored
* ARROW-3231 - [Python] Sphinx's autodoc\_default\_flags is now deprecated
* ARROW-3237 - [CI] Update linux packaging filenames in rat exclusion list
* ARROW-3241 - [Plasma] test\_plasma\_list test failure on Ubuntu 14.04
* ARROW-3251 - [C++] Conversion warnings in cast.cc
* ARROW-3256 - [JS] File footer and message metadata is inconsistent
* ARROW-3279 - [C++] Allow linking Arrow tests dynamically on Windows
* ARROW-3299 - [C++] Appveyor builds failing
* ARROW-3322 - [CI] Rust job always runs on AppVeyor
* ARROW-3327 - [Python] manylinux container confusing
* ARROW-3338 - [Python] Crash when schema and columns do not match
* ARROW-3342 - Appveyor builds have stopped triggering on GitHub
* ARROW-3348 - Plasma store dies when an object that a dead client is waiting for gets created.
* ARROW-3354 - [Python] read\_record\_batch interfaces differ in pyarrow and pyarrow.cuda
* ARROW-3369 - [Packaging] Wheel builds are failing due to wheel 0.32 release
* ARROW-3370 - [Packaging] Centos 6 build is failing
* ARROW-3373 - Fix bug in which plasma store can die when client gets multiple objects and object becomes available.
* ARROW-3374 - [Python] Dictionary has out-of-bound index when creating DictionaryArray from Pandas with NaN
* ARROW-3393 - [C++] Fix compiler warning in util/task-group-cc on clang 6
* ARROW-3394 - [Java] Remove duplicate dependency entry in Flight
* ARROW-3403 - [Website] Source tarball link missing from install page
* ARROW-3420 - [C++] Fix outstanding include-what-you-use issues in src/arrow, src/parquet codebases
## Improvement
* ARROW-1521 - [C++] Add Reset method to BufferOutputStream to enable object reuse
* ARROW-1949 - [Python/C++] Add option to Array.from\_pandas and pyarrow.array to perform unsafe casts
* ARROW-1963 - [C++/Python] Create Array from sequence of numpy.datetime64
* ARROW-1968 - [Python] Unit testing setup for ORC files
* ARROW-2165 - enhance AllocatorListener to listen for child allocator addition and removal
* ARROW-2520 - [Rust] CI should also build against nightly Rust
* ARROW-2555 - [Python] Provide an option to convert on coerce\_timestamps instead of error
* ARROW-2583 - [Rust] Buffer should be typeless
* ARROW-2617 - [Rust] Schema should contain fields not columns
* ARROW-2687 - [JS] Example usage in README is outdated
* ARROW-2734 - [Python] Cython api example doesn't work by default on macOS
* ARROW-2799 - [Python] Add safe option to Table.from\_pandas to avoid unsafe casts
* ARROW-2813 - [C++] Strip uninformative lcov output from Travis CI logs
* ARROW-2817 - [C++] Enable libraries to be installed in msys2 on Windows
* ARROW-2840 - [C++] See if stream alignment logic can be simplified
* ARROW-2865 - [C++/Python] Reduce some duplicated code in python/builtin\_convert.cc
* ARROW-2889 - [C++] Add optional argument to ADD\_ARROW\_TEST CMake function to add unit test prefix
* ARROW-2900 - [Python] Improve performance of appending nested NumPy arrays in builtin\_convert.cc
* ARROW-2936 - [Python] Implement Table.cast for casting from one schema to another (if possible)
* ARROW-2952 - [C++] Dockerfile for running include-what-you-use checks
* ARROW-2964 - [Go] wire all currently implemented array types in array.MakeFromData
* ARROW-2971 - [Python] Give more descriptive names to python\_to\_arrow.cc/arrow\_to\_python.cc
* ARROW-2975 - [Plasma] TensorFlow op: Compilation only working if arrow found by pkg-config
* ARROW-2976 - [Python] Directory in pyarrow.get\_library\_dirs() on Travis doesn't contain libarrow.so
* ARROW-2983 - [Packaging] Verify source release and binary artifacts in different scripts
* ARROW-2989 - [C++] Remove deprecated APIs in 0.10.0 and below
* ARROW-2994 - [C++] Only include Python C header directories for Python-related compilation units
* ARROW-2996 - [C++] Fix typo in cpp/.clang-tidy
* ARROW-2998 - [C++] Add variants of AllocateBuffer, AllocateResizeableBuffer that return unique\_ptr<Buffer>
* ARROW-2999 - [Python] Do not run ASV benchmarks in every Travis CI build to improve runtimes
* ARROW-3000 - [Python] Do not build unit tests other than python-test in travis\_script\_python.sh
* ARROW-3005 - [Website] Update website and write blog post for 0.10.0 release announcement
* ARROW-3008 - [Packaging] Verify GPU related modules if available
* ARROW-3009 - [Python] pyarrow.orc uses APIs now prohibited in 0.10.0
* ARROW-3010 - [GLib] Update README to use Bundler
* ARROW-3017 - [C++] Don't throw exception in arrow/util/thread-pool.h
* ARROW-3018 - [Plasma] Improve random ObjectID generation
* ARROW-3019 - [Packaging] Use Bundler to verify Arrow GLib
* ARROW-3021 - [Go] support for List
* ARROW-3022 - [Go] support for Struct
* ARROW-3023 - [C++] Use gold linker in builds if it is available
* ARROW-3024 - [C++] Replace usages of std::mutex with atomics in memory\_pool.cc
* ARROW-3026 - [Plasma] Only run Plasma Python unit tests under valgrind once instead of twice in CI
* ARROW-3027 - [Ruby] Stop "git tag" by "rake release"
* ARROW-3028 - [Python] Trim unneeded work from documentation build in Travis CI
* ARROW-3029 - [Python] pkg\_resources is slow
* ARROW-3031 - [Go] Streamline release of Arrays and Builders
* ARROW-3034 - [Packaging] Source archive can't be extracted by bsdtar on MSYS2
* ARROW-3035 - [Rust] Examples in README.md do not run
* ARROW-3036 - [Go] add support for slicing Arrays
* ARROW-3037 - [Go] add support NullArray
* ARROW-3042 - [Go] add badge to GoDoc in the Go-Arrow README
* ARROW-3043 - [C++] pthread doesn't exist on MinGW
* ARROW-3044 - [Python] Remove all occurrences of cython's legacy property definition syntax
* ARROW-3046 - [GLib] Use rubyish method in test-orc-file-reader.rb
* ARROW-3062 - [Python] Extend fast libtensorflow\_framework.so compatibility workaround to Python 2.7
* ARROW-3064 - [C++] Add option to ADD\_ARROW\_TEST to indicate additional dependencies for particular unit test executables
* ARROW-3067 - [Packaging] Support dev/rc/release .deb/.rpm builds
* ARROW-3068 - [Packaging] Bump version to 0.11.0-SNAPSHOT
* ARROW-3069 - [Release] Stop using SHA1 checksums per ASF policy
* ARROW-3072 - [C++] Use ARROW\_RETURN\_NOT\_OK instead of RETURN\_NOT\_OK in header files
* ARROW-3076 - [Website] Add Google Analytics tags to C++, Python API docs
* ARROW-3088 - [Rust] Use internal \`Result<T>\` type instead of \`Result<T, ArrowError>\`
* ARROW-3105 - [Plasma] Improve flushing error message
* ARROW-3106 - [Website] Update committers and PMC roster on website
* ARROW-3111 - [Java] Enable changing default logging level when running tests
* ARROW-3114 - [Website] Add information about user@ mailing list to website / Community page
* ARROW-3116 - [Plasma] Add "ls" to object store
* ARROW-3117 - [GLib] Add garrow\_chunked\_array\_to\_string()
* ARROW-3127 - [C++] Add Tutorial about Sending Tensor from C++ to Python
* ARROW-3128 - [C++] Support system shared zlib
* ARROW-3129 - [Packaging] Stop to use deprecated BuildRoot and Group in .rpm
* ARROW-3130 - [Go] add initial support for Go modules
* ARROW-3136 - [C++] Clean up arrow:: public API
* ARROW-3142 - [C++] Fetch all libs from toolchain environment
* ARROW-3143 - [C++] CopyBitmap into existing memory
* ARROW-3147 - [C++] MSVC version isn't detected in code page 932
* ARROW-3148 - [C++] MSVC shows C4819 warning on code page 932
* ARROW-3152 - [C++][Packaging] Use dynamic linking for zlib in conda recipes
* ARROW-3157 - [C++] Improve buffer creation for typed data
* ARROW-3158 - [C++] Handle float truncation during casting
* ARROW-3160 - [Python] Improve pathlib.Path support in parquet and filesystem modules
* ARROW-3163 - [Python] Cython dependency is missing in non wheel package
* ARROW-3167 - [CI] Limit clcache cache size
* ARROW-3170 - [C++] Implement "readahead spooler" class for background input buffering
* ARROW-3172 - [Rust] Update documentation for datatypes.rs
* ARROW-3174 - [Rust] run examples as part of CI
* ARROW-3177 - [Rust] Update expected error messages for tests that 'should panic'
* ARROW-3180 - [C++] Add docker-compose setup to simulate Travis CI run locally
* ARROW-3181 - [Packaging] Adjust conda package scripts to account for Parquet codebase migration
* ARROW-3195 - [C++] NumPy initialization error check is missing in test
* ARROW-3211 - [C++] gold linker doesn't work with MinGW-w64
* ARROW-3212 - [C++] Create deterministic IPC metadata
* ARROW-3213 - [C++] Use CMake to build vendored Snappy on Windows
* ARROW-3214 - [C++] Disable insecure warnings with MinGW build
* ARROW-3215 - [C++] Add support for finding libpython on MSYS2
* ARROW-3216 - [C++] libpython isn't linked to libarrow\_python in MinGW build
* ARROW-3217 - [C++] ARROW\_STATIC definition is missing in MinGW build
* ARROW-3218 - [C++] Utilities has needless pthread link in MinGW build
* ARROW-3219 - [C++] Use Win32 API in MinGW
* ARROW-3223 - [GLib] Use the same shared object versioning rule in C++
* ARROW-3229 - [Packaging]: Adjust wheel package scripts to account for Parquet codebase migration
* ARROW-3234 - [C++] Link order is wrong when ARROW\_ORC=on and ARROW\_PROTOBUF\_USE\_SHARED=ON
* ARROW-3235 - [Packaging] Update deb names
* ARROW-3236 - [C++] OutputStream bookkeeping logic when writing IPC file format is incorrect
* ARROW-3240 - [GLib] Add build instructions using Meson
* ARROW-3242 - [C++] Use coarser-grained dispatch to SIMD hash functions
* ARROW-3249 - [Python] Run flake8 on integration\_test.py and crossbow.py
* ARROW-3252 - [C++] Do not hard code the "v" part of versions in thirdparty toolchain
* ARROW-3257 - [C++] Stop to use IMPORTED\_LINK\_INTERFACE\_LIBRARIES
* ARROW-3258 - [GLib] CI is failed on macOS
* ARROW-3259 - [GLib] Rename "writeable" to "writable"
* ARROW-3261 - [Python] Add "field" method to select fields from StructArray
* ARROW-3262 - [Python] Implement \_\_getitem\_\_ with integers on pyarrow.Column
* ARROW-3267 - [Python] Create empty table from schema
* ARROW-3268 - [CI] Reduce conda times on AppVeyor
* ARROW-3269 - [Python] Fix warnings in unit test suite
* ARROW-3270 - [Release] Adjust release verification scripts to recent parquet migration
* ARROW-3274 - [Packaging] Missing glog dependency from conda-forge recipes
* ARROW-3276 - [Packaging] Add support Parquet related Linux packages
* ARROW-3281 - [Java] Make sure that WritableByteChannel in WriteChannel writes out complete bytes
* ARROW-3285 - [GLib] Add arrow\_cpp\_build\_type and arrow\_cpp\_build\_dir Meson options
* ARROW-3286 - [C++] ARROW\_EXPORT for RecordBatchBuilder is missing
* ARROW-3287 - [C++] "redeclared without dllimport attribute after being referenced with dll linkage" with MinGW
* ARROW-3288 - [GLib] Add new API index for 0.11.0
* ARROW-3300 - [Release] Update .deb package names in preparation
* ARROW-3301 - [Website] Update Jekyll and Bootstrap 4
* ARROW-3305 - [JS] Incorrect development documentation link in javascript readme
* ARROW-3309 - [JS] Missing links from DEVELOP.md
* ARROW-3313 - [R] Run clang-format, cpplint checks on R C++ code
* ARROW-3319 - [GLib] Expose AlignStream methods in InputStream, OutputStream classes
* ARROW-3320 - [C++] Improve float parsing performance
* ARROW-3321 - [C++] Improve integer parsing performance
* ARROW-3334 - [Python] Update conda packages to new numpy requirement
* ARROW-3335 - [Python] Add ccache to manylinux1 container
* ARROW-3349 - [C++] Use aligned API in MinGW
* ARROW-3356 - [Python] Document parameters of Table.to\_pandas method
* ARROW-3363 - [C++/Python] Add helper functions to detect scalar Python types
* ARROW-3375 - [Rust] Remove memory\_pool.rs
* ARROW-3376 - [C++] Add double-conversion to cpp/thirdparty/download\_dependencies.sh
* ARROW-3377 - [Gandiva][C++] Remove If statement from bit map set function
* ARROW-3392 - [Python] Support filters in disjunctive normal form in ParquetDataset
* ARROW-3395 - [C++/Python] Add docker container for linting
* ARROW-3397 - [C++] Use relative CMake path for modules
* ARROW-3400 - [Packaging] Add support Parquet GLib related Linux packages
* ARROW-3404 - [C++] Make CSV chunker faster
* ARROW-3411 - [Packaging] dev/release/01-perform.sh doesn't have executable bit
* ARROW-3412 - [Packaging] rat failure in dev/release/02-source.sh
* ARROW-3413 - [Packaging] dev/release/02-source.sh doesn't generate Parquet GLib document
* ARROW-3415 - [Packaging] dev/release/verify-release-candidate.sh fails in "conda activate arrow-test"
* ARROW-3416 - [Packaging] dev/release/02-source.sh must use SHA512 instead of SHA1
* ARROW-3417 - [Packaging] dev/release/verify-release-candidate.sh fails Parquet C++ test
* ARROW-3423 - [Packaging] Remove RC information from deb/rpm
## New Feature
* ARROW-1325 - [R] Bootstrap R bindings subproject
* ARROW-1424 - [Python] Initial bindings for libarrow\_gpu
* ARROW-1491 - [C++] Add casting implementations from strings to numbers or boolean
* ARROW-1563 - [C++] Implement logical unary and binary kernels for boolean arrays
* ARROW-1860 - [C++] Add data structure to "stage" a sequence of IPC messages from in-memory data
* ARROW-249 - [Flight] Define GRPC IDL / wire protocol for messaging with Arrow data
* ARROW-25 - [C++] Implement delimited file scanner / CSV reader
* ARROW-2750 - [MATLAB] Add MATLAB support for reading numeric types from Feather files
* ARROW-2979 - [GLib] Add operator functions in GArrowDecimal128
* ARROW-3050 - [C++] Adopt HiveServer2 client C++ codebase
* ARROW-3075 - [C++] Incorporate apache/parquet-cpp codebase into Arrow C++ codebase and build system
* ARROW-3090 - [Rust] Accompany error messages with assertions
* ARROW-3146 - [C++] Barebones Flight RPC server and client implementations
* ARROW-3182 - [C++] Merge Gandiva codebase
* ARROW-3187 - [Plasma] Make Plasma Log pluggable with glog
* ARROW-3196 - Enable merge\_arrow\_py.py script to merge Parquet patches and set fix versions
* ARROW-3197 - [C++] Add instructions to cpp/README.md about Parquet-only development and Arrow+Parquet
* ARROW-3250 - [C++] Create Buffer implementation that takes ownership for the memory from a std::string via std::move
* ARROW-3282 - [R] initial R functionality
* ARROW-3284 - [R] Adding R Error in Status
* ARROW-3339 - [R] Support for character vectors
* ARROW-3341 - [R] Support for logical vector
* ARROW-3360 - [GLib] Import Parquet bindings
* ARROW-3418 - [C++] Update Parquet snapshot version for release
## Sub-task
* ARROW-2948 - [Packaging] Generate changelog with crossbow
* ARROW-3115 - [Java] Style Checks - Fix import ordering
* ARROW-3171 - [Java] checkstyle - fix line length and indentation
* ARROW-3264 - [Java] checkstyle - fix whitespace
* ARROW-3357 - [Rust] Add a mutable buffer implementation
## Task
* ARROW-2338 - [Scripts] Windows release verification script should create a conda environment
* ARROW-2950 - [C++] Clean up util/bit-util.h
* ARROW-2958 - [C++] Flatbuffers EP fails to compile with GCC 8.1
* ARROW-2960 - [Packaging] Fix verify-release-candidate for binary packages and fix release cutting script for lib64 cmake issue
* ARROW-2991 - [CI] Cut down number of AppVeyor jobs
* ARROW-3001 - [Packaging] Don't modify PATH during rust release verification
* ARROW-3003 - [Doc] Enable Java doc in dev/gen\_apidocs/create\_documents.sh
* ARROW-3045 - [Python] Remove nullcheck from ipc Message and MessageReader
* ARROW-3057 - [INTEGRATION] Fix spark and hdfs dockerfiles
* ARROW-3059 - [C++] Streamline namespace array::test
* ARROW-3060 - [C++] Factor out parsing routines
* ARROW-3109 - [Python] Add Python 3.7 virtualenvs to manylinux1 container
* ARROW-3110 - [C++] Compilation warnings with gcc 7.3.0
* ARROW-3119 - [Packaging] Nightly packaging script fails
* ARROW-3153 - [Packaging] Fix broken nightly package builds introduced with recent cmake changes and orc tests
* ARROW-3350 - [Website] Fix powered by links
* ARROW-3352 - [Packaging] Fix recently failing wheel builds
* ARROW-3371 - [Python] Remove check\_metadata argument for Field.equals docstring
* ARROW-3382 - [C++] Run Gandiva tests in Travis CI
## Wish
* ARROW-3002 - [Python] Implement better DataType hash function
* ARROW-3094 - [Python] Allow lighter construction of pa.Schema / pa.StructType
* ARROW-3099 - [C++] Add benchmark for number parsing
# Apache Arrow 0.10.0 (02 August 2018)
## Bug
* ARROW-2059 - [Python] Possible performance regression in Feather read/write path
* ARROW-2101 - [Python] from\_pandas reads 'str' type as binary Arrow data with Python 2
* ARROW-2122 - [Python] Pyarrow fails to serialize dataframe with timestamp.
* ARROW-2182 - [Python] ASV benchmark setup does not account for C++ library changing
* ARROW-2193 - [Plasma] plasma\_store has runtime dependency on Boost shared libraries when ARROW\_BOOST\_USE\_SHARED=on
* ARROW-2195 - [Plasma] Segfault when retrieving RecordBatch from plasma store
* ARROW-2247 - [Python] Statically-linking boost\_regex in both libarrow and libparquet results in segfault
* ARROW-2273 - Cannot deserialize pandas SparseDataFrame
* ARROW-2300 - [Python] python/testing/test\_hdfs.sh no longer works
* ARROW-2305 - [Python] Cython 0.25.2 compilation failure
* ARROW-2314 - [Python] Union array slicing is defective
* ARROW-2326 - [Python] cannot import pip installed pyarrow on OS X (10.9)
* ARROW-2328 - Writing a slice with feather ignores the offset
* ARROW-2331 - [Python] Fix indexing implementations
* ARROW-2333 - [Python] boost bundling fails in setup.py
* ARROW-2342 - [Python] Aware timestamp type fails pickling
* ARROW-2346 - [Python] PYARROW\_CXXFLAGS doesn't accept multiple options
* ARROW-2349 - [Python] Boost shared library bundling is broken for MSVC
* ARROW-2351 - [C++] StringBuilder::append(vector<string>...) not implemented
* ARROW-2354 - [C++] PyDecimal\_Check() is much too slow
* ARROW-2355 - [Python] Unable to import pyarrow [0.9.0] OSX
* ARROW-2357 - Benchmark PandasObjectIsNull
* ARROW-2368 - DecimalVector#setBigEndian is not padding correctly for negative values
* ARROW-2369 - Large (>~20 GB) files written to Parquet via PyArrow are corrupted
* ARROW-2370 - [GLib] include path is wrong on Meson build
* ARROW-2371 - [GLib] gio-2.0 isn't required on GNU Autotools build
* ARROW-2372 - [Python] ArrowIOError: Invalid argument when reading Parquet file
* ARROW-2375 - [Rust] Buffer should release memory when dropped
* ARROW-2377 - [GLib] Travis-CI failures
* ARROW-2380 - [Python] Correct issues in numpy\_to\_arrow conversion routines
* ARROW-2382 - [Rust] List<T> was not using memory safely
* ARROW-2383 - [C++] Debian packages need to depend on libprotobuf
* ARROW-2387 - [Python] negative decimal values get spurious rescaling error
* ARROW-2391 - [Python] Segmentation fault from PyArrow when mapping Pandas datetime column to pyarrow.date64
* ARROW-2393 - [C++] arrow/status.h does not define ARROW\_CHECK needed for ARROW\_CHECK\_OK
* ARROW-2403 - [C++] arrow::CpuInfo::model\_name\_ destructed twice on exit
* ARROW-2405 - [C++] <functional> is missing in plasma/client.h
* ARROW-2418 - [Rust] List builder fails due to memory not being reserved correctly
* ARROW-2419 - [Site] Website generation depends on local timezone
* ARROW-2420 - [Rust] Memory is never released
* ARROW-2423 - [Python] PyArrow datatypes raise ValueError on equality checks against non-PyArrow objects
* ARROW-2424 - [Rust] Missing import causing broken build
* ARROW-2425 - [Rust] Array::from missing mapping for u8 type
* ARROW-2426 - [CI] glib build failure
* ARROW-2432 - [Python] from\_pandas fails when converting decimals if have None values
* ARROW-2437 - [C++] Change of arrow::ipc::ReadMessage signature breaks ABI compatibility
* ARROW-2441 - [Rust] Builder<T>::slice\_mut assertions are too strict
* ARROW-2443 - [Python] Conversion from pandas of empty categorical fails with ArrowInvalid
* ARROW-2450 - [Python] Saving to parquet fails for empty lists
* ARROW-2452 - [TEST] Spark integration test fails with permission error
* ARROW-2454 - [Python] Empty chunked array slice crashes
* ARROW-2455 - [C++] The bytes\_allocated\_ in CudaContextImpl isn't initialized
* ARROW-2457 - garrow\_array\_builder\_append\_values() won't work for large arrays
* ARROW-2459 - pyarrow: Segfault with pyarrow.deserialize\_pandas
* ARROW-2462 - [C++] Segfault when writing a parquet table containing a dictionary column from Record Batch Stream
* ARROW-2465 - [Plasma] plasma\_store fails to find libarrow\_gpu.so
* ARROW-2466 - [C++] misleading "append" flag to FileOutputStream
* ARROW-2468 - [Rust] Builder::slice\_mut should take mut self
* ARROW-2471 - [Rust] Assertion when pushing value to Builder/ListBuilder with zero capacity
* ARROW-2473 - [Rust] List assertion error with list of zero length
* ARROW-2474 - [Rust] Add windows support for memory pool abstraction
* ARROW-2489 - [Plasma] test\_plasma.py crashes
* ARROW-2491 - [Python] Array.from\_buffers does not work for ListArray
* ARROW-2492 - [Python] Prevent segfault on accidental call of pyarrow.Array
* ARROW-2500 - [Java] IPC Writers/readers are not always setting validity bits correctly
* ARROW-2502 - [Rust] Restore Windows Compatibility
* ARROW-2503 - [Python] Trailing space character in RowGroup statistics of pyarrow.parquet.ParquetFile
* ARROW-2509 - [CI] Intermittent npm failures
* ARROW-2511 - BaseVariableWidthVector.allocateNew is not throwing OOM when it can't allocate memory
* ARROW-2514 - [Python] Inferring / converting nested Numpy array is very slow
* ARROW-2515 - Errors with DictionaryArray inside of ListArray or other DictionaryArray
* ARROW-2518 - [Java] Restore Java unit tests and javadoc test to CI matrix
* ARROW-2530 - [GLib] Out-of-source build is failed
* ARROW-2534 - [C++] libarrow.so leaks zlib symbols
* ARROW-2545 - [Python] Arrow fails linking against statically-compiled Python
* ARROW-2554 - pa.array type inference bug when using NS-timestamp
* ARROW-2561 - [C++] Crash in cuda-test shutdown with coverage enabled
* ARROW-2564 - [C++] Rowwise Tutorial is out of date
* ARROW-2565 - [Plasma] new subscriber cannot receive notifications about existing objects
* ARROW-2570 - [Python] Add support for writing parquet files with LZ4 compression
* ARROW-2571 - [C++] Lz4Codec doesn't properly handle empty data
* ARROW-2575 - [Python] Exclude hidden files when reading Parquet dataset
* ARROW-2578 - [Plasma] Valgrind errors related to std::random\_device
* ARROW-2589 - [Python] test\_parquet.py regression with Pandas 0.23.0
* ARROW-2593 - [Python] TypeError: data type "mixed-integer" not understood
* ARROW-2594 - [Java] Vector reallocation does not properly clear reused buffers
* ARROW-2601 - [Python] MemoryPool bytes\_allocated causes seg
* ARROW-2603 - [Python] from pandas raises ArrowInvalid for date(time) subclasses
* ARROW-2615 - [Rust] Refactor introduced a bug around Arrays of String
* ARROW-2629 - [Plasma] Iterator invalidation for pending\_notifications\_
* ARROW-2630 - [Java] Typo in the document
* ARROW-2632 - [Java] ArrowStreamWriter accumulates ArrowBlock but does not use them
* ARROW-2640 - JS Writer should serialize schema metadata
* ARROW-2643 - [C++] Travis-CI build failure with cpp toolchain enabled
* ARROW-2644 - [Python] parquet binding fails building on AppVeyor
* ARROW-2655 - [C++] Failure with -Werror=conversion on gcc 7.3.0
* ARROW-2657 - Segfault when importing TensorFlow after Pyarrow
* ARROW-2668 - [C++] -Wnull-pointer-arithmetic warning with dlmalloc.c on clang 6.0, Ubuntu 14.04
* ARROW-2669 - [C++] EP\_CXX\_FLAGS not passed on when building gbenchmark
* ARROW-2675 - Arrow build error with clang-10 (Apple Clang / LLVM)
* ARROW-2683 - [Python] Resource Warning (Unclosed File) when using pyarrow.parquet.read\_table()
* ARROW-2690 - [C++] Plasma does not follow style conventions for variable and function names
* ARROW-2691 - [Rust] Travis fails due to formatting diff
* ARROW-2693 - [Python] pa.chunked\_array causes a segmentation fault on empty input
* ARROW-2694 - [Python] ArrayValue string conversion returns the representation instead of the converted python object string
* ARROW-2698 - [Python] Exception when passing a string to Table.column
* ARROW-2711 - [Python/C++] Pandas-Arrow doesn't roundtrip when column of lists has empty first element
* ARROW-2716 - [Python] Make manylinux1 base image independent of Python patch releases
* ARROW-2721 - [C++] Link error with Arrow C++ build with -DARROW\_ORC=ON on CentOS 7
* ARROW-2722 - [Python] ndarray to arrow conversion fails when downcasted from pandas to\_numeric
* ARROW-2723 - [C++] arrow-orc.pc is missing
* ARROW-2726 - [C++] The latest Boost version is wrong
* ARROW-2727 - [Java] Unable to build java/adapters module
* ARROW-2741 - [Python] pa.array from np.datetime[D] and type=pa.date64 produces invalid results
* ARROW-2744 - [Python] Writing to parquet crashes when writing a ListArray of empty lists
* ARROW-2745 - [C++] ORC ExternalProject needs to declare dependency on vendored protobuf
* ARROW-2747 - [CI] [Plasma] huge tables test failure on Travis
* ARROW-2754 - [Python] When installing pyarrow via pip, a debug build is created
* ARROW-2770 - [Packaging] Account for conda-forge compiler migration in conda recipes
* ARROW-2773 - [Python] Corrected parquet docs partition\_cols parameter name
* ARROW-2781 - [Python] Download boost using curl in manylinux1 image
* ARROW-2787 - [Python] Memory Issue passing table from python to c++ via cython
* ARROW-2795 - [Python] Run TensorFlow import workaround only on Linux
* ARROW-2806 - [Python] Inconsistent handling of np.nan
* ARROW-2810 - [Plasma] Plasma public headers leak flatbuffers.h
* ARROW-2812 - [Ruby] StructArray#[] raises NoMethodError
* ARROW-2820 - [Python] RecordBatch.from\_arrays does not validate array lengths are all equal
* ARROW-2823 - [C++] Search for flatbuffers in <root>/lib64
* ARROW-2841 - [Go] Fix recent Go build failures in Travis CI
* ARROW-2850 - [C++/Python] PARQUET\_RPATH\_ORIGIN=ON missing in manylinux1 build
* ARROW-2851 - [C++] Update RAT excludes for new install file names
* ARROW-2852 - [Rust] Mark Array as Sync and Send
* ARROW-2862 - [C++] Ensure thirdparty download directory has been created in thirdparty/download\_thirdparty.sh
* ARROW-2867 - [Python] Incorrect example for Cython usage
* ARROW-2871 - [Python] Array.to\_numpy is invalid for boolean arrays
* ARROW-2872 - [Python] Add pytest mark to opt into TensorFlow-related unit tests
* ARROW-2876 - [Packaging] Crossbow builds can hang if you cloned using SSH
* ARROW-2877 - [Packaging] crossbow submit results in duplicate Travis CI build
* ARROW-2878 - [Packaging] README.md does not mention setting GitHub API token in user's crossbow repo settings
* ARROW-2883 - [Plasma] Compilation warnings
* ARROW-2891 - Preserve schema in write\_to\_dataset
* ARROW-2894 - [Glib] Format tests broken due to recent refactor
* ARROW-2901 - [Java] Build is failing on Java9
* ARROW-2902 - [Python] HDFS Docker integration tests leave around files created by root
* ARROW-2911 - [Python] Parquet binary statistics that end in '\0' truncate last byte
* ARROW-2917 - [Python] Tensor requiring gradient cannot be serialized with pyarrow.serialize
* ARROW-2920 - [Python] Segfault with pytorch 0.4
* ARROW-2926 - [Python] ParquetWriter segfaults in example where passed schema and table schema do not match
* ARROW-2930 - [C++] Trying to set target properties on not existing CMake target
* ARROW-2940 - [Python] Import error with pytorch 0.3
* ARROW-2945 - [Packaging] Update argument check for 02-source.sh
* ARROW-2955 - [Python] Typo in pyarrow's HDFS API result
* ARROW-2963 - [Python] Deadlock during fork-join and use\_threads=True
* ARROW-2978 - [Rust] Travis CI build is failing
* ARROW-2982 - The "--show-progress" option is only supported in wget 1.16 and higher
* ARROW-640 - [Python] Arrow scalar values should have a sensible \_\_hash\_\_ and comparison
## Improvement
* ARROW-1454 - [Python] More informative error message when attempting to write an unsupported Arrow type to Parquet format
* ARROW-1722 - [C++] Add linting script to look for C++/CLI issues
* ARROW-1731 - [Python] Provide for selecting a subset of columns to convert in RecordBatch/Table.from\_pandas
* ARROW-1744 - [Plasma] Provide TensorFlow operator to read tensors from plasma
* ARROW-1858 - [Python] Add documentation about parquet.write\_to\_dataset and related methods
* ARROW-1886 - [Python] Add function to "flatten" structs within tables
* ARROW-1928 - [C++] Add benchmarks comparing performance of internal::BitmapReader/Writer with naive approaches
* ARROW-1954 - [Python] Add metadata accessor to pyarrow.Field
* ARROW-2014 - [Python] Document read\_pandas method in pyarrow.parquet
* ARROW-2060 - [Python] Documentation for creating StructArray using from\_arrays or a sequence of dicts
* ARROW-2061 - [C++] Run ASAN builds in Travis CI
* ARROW-2074 - [Python] Allow type inference for struct arrays
* ARROW-2097 - [Python] Suppress valgrind stdout/stderr in Travis CI builds when there are no errors
* ARROW-2100 - [Python] Drop Python 3.4 support
* ARROW-2140 - [Python] Conversion from Numpy float16 array unimplemented
* ARROW-2141 - [Python] Conversion from Numpy object array to varsize binary unimplemented
* ARROW-2147 - [Python] Type inference doesn't work on lists of Numpy arrays
* ARROW-2222 - [C++] Add option to validate Flatbuffers messages
* ARROW-2224 - [C++] Get rid of boost regex usage
* ARROW-2241 - [Python] Simple script for running all current ASV benchmarks at a commit or tag
* ARROW-2264 - [Python] Efficiently serialize numpy arrays with dtype of unicode fixed length string
* ARROW-2276 - [Python] Tensor could implement the buffer protocol
* ARROW-2281 - [Python] Expose MakeArray to construct arrays from buffers
* ARROW-2285 - [Python] Can't convert Numpy string arrays
* ARROW-2287 - [Python] chunked array not iterable, not indexable
* ARROW-2301 - [Python] Add source distribution publishing instructions to package / release management documentation
* ARROW-2302 - [GLib] Run autotools and meson Linux builds in same Travis CI build entry
* ARROW-2308 - Serialized tensor data should be 64-byte aligned.
* ARROW-2315 - [C++/Python] Add method to flatten a struct array
* ARROW-2322 - Document requirements to run dev/release/01-perform.sh
* ARROW-2325 - [Python] Update setup.py to use Markdown project description
* ARROW-2332 - [Python] Provide API for reading multiple Feather files
* ARROW-2335 - [Go] Move Go README one directory higher
* ARROW-2340 - [Website] Add blog post about Go codebase donation
* ARROW-2341 - [Python] pa.union() mode argument unintuitive
* ARROW-2348 - [GLib] Remove Go example
* ARROW-2350 - Shrink size of spark\_integration Docker container
* ARROW-2376 - [Rust] Travis should run tests for Rust library
* ARROW-2378 - [Rust] Use rustfmt to format source code
* ARROW-2384 - Rust: Use Traits rather than defining methods directly
* ARROW-2388 - [C++] Arrow::StringBuilder::Append() uses null\_bytes not valid\_bytes
* ARROW-2395 - [Python] Correct flake8 errors outside of pyarrow/ directory
* ARROW-2396 - Unify Rust Errors
* ARROW-2397 - Document changes in Tensor encoding in IPC.md.
* ARROW-2400 - [C++] Status destructor is expensive
* ARROW-2402 - [C++] FixedSizeBinaryBuilder::Append lacks "const char\*" overload
* ARROW-2404 - Fix declaration of 'type\_id' hides class member warning in msvc build
* ARROW-2411 - [C++] Add method to append batches of null-terminated strings to StringBuilder
* ARROW-2413 - [Rust] Remove useless use of \`format!\`
* ARROW-2414 - [Documentation] Fix miscellaneous documentation typos
* ARROW-2415 - [Rust] Fix using references in pattern matching
* ARROW-2417 - [Rust] Review APIs for safety
* ARROW-2422 - [Python] Support more filter operators on Hive partitioned Parquet files
* ARROW-2427 - [C++] ReadAt implementations suboptimal
* ARROW-2430 - MVP for branch based packaging automation
* ARROW-2433 - [Rust] Add Builder.push\_slice(&[T])
* ARROW-2434 - [Rust] Add windows support
* ARROW-2435 - [Rust] Add memory pool abstraction.
* ARROW-2436 - [Rust] Add windows CI
* ARROW-2442 - [C++] Disambiguate Builder::Append overloads
* ARROW-2445 - [Rust] Add documentation and make some fields private
* ARROW-2448 - Segfault when plasma client goes out of scope before buffer.
* ARROW-2451 - Handle more dtypes efficiently in custom numpy array serializer.
* ARROW-2453 - [Python] Improve Table column access
* ARROW-2458 - [Plasma] PlasmaClient uses global variable
* ARROW-2463 - [C++] Update flatbuffers to 1.9.0
* ARROW-2469 - Make out arguments last in ReadMessage API.
* ARROW-2470 - [C++] FileGetSize() should not seek
* ARROW-2472 - [Rust] The Schema and Fields types should not have public attributes
* ARROW-2478 - [C++] Introduce a checked\_cast function that performs a dynamic\_cast in debug mode
* ARROW-2480 - [C++] Enable casting the value of a decimal to int32\_t or int64\_t
* ARROW-2481 - [Rust] Move calls to free() into memory.rs
* ARROW-2484 - [C++] Document ABI compliance checking
* ARROW-2485 - [C++] Output diff when run\_clang\_format.py reports a change
* ARROW-2486 - [C++/Python] Provide a Docker image that contains all dependencies for development
* ARROW-2488 - [C++] List Boost 1.67 as supported version
* ARROW-2506 - [Plasma] Build error on macOS
* ARROW-2507 - [Rust] Don't take a reference when not needed
* ARROW-2508 - [Python] pytest API changes make tests fail
* ARROW-2513 - [Python] DictionaryType should give access to index type and dictionary array
* ARROW-2516 - AppVeyor Build Matrix should be specific to the changes made in a PR
* ARROW-2521 - [Rust] Refactor Rust API to use traits and generics
* ARROW-2522 - [C++] Version shared library files
* ARROW-2525 - [GLib] Add garrow\_struct\_array\_flatten()
* ARROW-2526 - [GLib] Update .gitignore
* ARROW-2527 - [GLib] Enable GPU document
* ARROW-2529 - [C++] Update mention of clang-format to 5.0 in the docs
* ARROW-2531 - [C++] Update clang bits to 6.0
* ARROW-2533 - [CI] Fast finish failing AppVeyor builds
* ARROW-2536 - [Rust] ListBuilder uses wrong initial size for offset builder
* ARROW-2539 - [Plasma] Use unique\_ptr instead of raw pointer
* ARROW-2540 - [Plasma] add constructor/destructor to make sure dlfree is called automatically
* ARROW-2541 - [Plasma] Clean up macro usage
* ARROW-2544 - [CI] Run C++ tests with two jobs on Travis-CI
* ARROW-2547 - [Format] Fix off-by-one in List<List<byte>> example
* ARROW-2548 - [Format] Clarify \`List<Char>\` Array example
* ARROW-2549 - [GLib] Apply arrow::StatusCodes changes to GArrowError
* ARROW-2550 - [C++] Add missing status codes into arrow::StatusCode::CodeAsString()
* ARROW-2551 - [Plasma] Improve notification logic
* ARROW-2553 - [Python] Set MACOSX\_DEPLOYMENT\_TARGET in wheel build
* ARROW-2558 - [Plasma] avoid walk through all the objects when a client disconnects
* ARROW-2563 - [Rust] Poor caching in Travis-CI
* ARROW-2567 - [C++/Python] Unit is ignored on comparison of TimestampArrays
* ARROW-2568 - [Python] Expose thread pool size setting to Python, and deprecate "nthreads"
* ARROW-2569 - [C++] Improve thread pool size heuristic
* ARROW-2574 - [CI] Collect and publish Python coverage
* ARROW-2577 - [Plasma] Add ASV benchmarks
* ARROW-2580 - [GLib] Fix abs functions for Decimal128
* ARROW-2582 - [GLib] Add negate functions for Decimal128
* ARROW-2585 - [C++] Add Decimal128::FromBigEndian
* ARROW-2586 - [C++] Make child builders of ListBuilder and StructBuilder shared\_ptr's
* ARROW-2595 - [Plasma] operator[] creates entries in map
* ARROW-2596 - [GLib] Use the default value of GTK-Doc
* ARROW-2597 - [Plasma] remove UniqueIDHasher
* ARROW-2611 - [Python] Python 2 integer serialization
* ARROW-2612 - [Plasma] Fix deprecated PLASMA\_DEFAULT\_RELEASE\_DELAY
* ARROW-2626 - [Python] pandas ArrowInvalid message should include failing column name
* ARROW-2634 - [Go] Add LICENSE additions for Go subproject
* ARROW-2635 - [Ruby] LICENSE.txt isn't suitable
* ARROW-2636 - [Ruby] "Unofficial" package note is missing
* ARROW-2638 - [Python] Prevent calling extension class constructors directly
* ARROW-2639 - [Python] Remove unnecessary \_check\_nullptr methods
* ARROW-2641 - [C++] Investigate spurious memset() calls
* ARROW-2645 - [Java] ArrowStreamWriter accumulates DictionaryBatch ArrowBlocks
* ARROW-2649 - [C++] Add std::generate()-like function for faster bitmap writing
* ARROW-2656 - [Python] Improve ParquetManifest creation time
* ARROW-2662 - [Python] Add to\_pandas / to\_numpy to ChunkedArray
* ARROW-2663 - [Python] Make dictionary\_encode and unique accesible on Column / ChunkedArray
* ARROW-2664 - [Python] Implement \_\_getitem\_\_ / slicing on Buffer
* ARROW-2666 - [Python] numpy.asarray should trigger to\_pandas on Array/ChunkedArray
* ARROW-2672 - [Python] Build ORC extension in manylinux1 wheels
* ARROW-2674 - [Packaging] Start building nightlies
* ARROW-2676 - [Packaging] Deploy build artifacts to github releases
* ARROW-2677 - [Python] Expose Parquet ZSTD compression
* ARROW-2678 - [GLib] Add extra information to common build problems on macOS
* ARROW-2680 - [Python] Add documentation about type inference in Table.from\_pandas
* ARROW-2682 - [CI] Notify in Slack about broken builds
* ARROW-2689 - [Python] Remove references to timestamps\_to\_ms argument from documentation
* ARROW-2692 - [Python] Add test for writing dictionary encoded columns to chunked Parquet files
* ARROW-2695 - [Python] Prevent calling scalar constructors directly
* ARROW-2696 - [JAVA] enhance AllocationListener with an onFailedAllocation() call
* ARROW-2700 - [Python] Add simple examples to Array.cast docstring
* ARROW-2704 - [Java] IPC stream handling should be more friendly to low level processing
* ARROW-2713 - [Packaging] Fix linux package builds
* ARROW-2724 - [Packaging] Determine whether all the expected artifacts are uploaded
* ARROW-2725 - [JAVA] make Accountant.AllocationOutcome publicly visible
* ARROW-2731 - Allow usage of external ORC library
* ARROW-2732 - Update brew packages for macOS
* ARROW-2733 - [GLib] Cast garrow\_decimal128 to gint64
* ARROW-2738 - [GLib] Use Brewfile on installation process
* ARROW-2739 - [GLib] Use G\_DECLARE\_DERIVABLE\_TYPE for GArrowDecimalDataType and GArrowDecimal128ArrayBuilder
* ARROW-2740 - [Python] Add address property to Buffer
* ARROW-2742 - [Python] Allow Table.from\_batches to use Iterator of ArrowRecordBatches
* ARROW-2748 - [GLib] Add garrow\_decimal\_data\_type\_get\_scale() (and \_precision())
* ARROW-2749 - [GLib] Rename \*garrow\_decimal128\_array\_get\_value to \*garrow\_decimal128\_array\_format\_value
* ARROW-2752 - [GLib] Document garrow\_decimal\_data\_type\_new()
* ARROW-2755 - [Python] Allow using Ninja to build extension
* ARROW-2756 - [Python] Remove redundant imports and minor fixes in parquet tests
* ARROW-2758 - [Plasma] Use Scope enum in Plasma
* ARROW-2760 - [Python] Remove legacy property definition syntax from parquet module and test them
* ARROW-2761 - Support set filter operators on Hive partitioned Parquet files
* ARROW-2763 - [Python] Make parquet \_metadata file accessible from ParquetDataset
* ARROW-2780 - [Go] Run code coverage analysis
* ARROW-2794 - [Plasma] Add Delete method for multiple objects
* ARROW-2798 - [Plasma] Use hashing function that takes into account all UniqueID bytes
* ARROW-2802 - [Docs] Move release management guide to project wiki
* ARROW-2804 - [Website] Link to Developer wiki (Confluence) from front page
* ARROW-2805 - [Python] TensorFlow import workaround not working with tensorflow-gpu if CUDA is not installed
* ARROW-2809 - [C++] Decrease verbosity of lint checks in Travis CI
* ARROW-2811 - [Python] Test serialization for determinism
* ARROW-2815 - [CI] Suppress DEBUG logging when building Java library in C++ CI entries
* ARROW-2816 - [Python] Add \_\_iter\_\_ method to NativeFile
* ARROW-2821 - [C++] Only zero memory in BooleanBuilder in one place
* ARROW-2822 - [C++] Zero padding bytes in PoolBuffer::Resize
* ARROW-2827 - [C++] LZ4 and Zstd build may be failed in parallel build
* ARROW-2829 - [GLib] Add GArrowORCFileReader
* ARROW-2830 - [Packaging] Enable parallel build for deb package build again
* ARROW-2833 - [Python] Column.\_\_repr\_\_ will lock up Jupyter with large datasets
* ARROW-2834 - [GLib] Remove "enable\_" prefix from Meson options
* ARROW-2838 - [Python] Speed up null testing with Pandas semantics
* ARROW-2844 - [Packaging] Test OSX wheels after build
* ARROW-2847 - [Packaging] Fix artifact name matching for conda forge packages
* ARROW-2848 - [Packaging] lib\*.deb package name doesn't match so version
* ARROW-2849 - [Ruby] Arrow::Table#load supports ORC
* ARROW-2859 - [Python] Handle objects exporting the buffer protocol in open\_stream, open\_file, and RecordBatch\*Reader APIs
* ARROW-2861 - [Python] Add extra tips about using Parquet to store index-less pandas data
* ARROW-2864 - [Plasma] Add deletion cache to delete objects later
* ARROW-2869 - [Python] Add documentation for Array.to\_numpy
* ARROW-2886 - [Release] An unused variable exists
* ARROW-2890 - [Plasma] Make Python PlasmaClient.release private
* ARROW-2893 - [C++] Remove PoolBuffer class from public API and hide implementation details behind factory functions
* ARROW-2897 - Organize supported Ubuntu versions
* ARROW-2906 - [Website] Remove the link to slack channel
* ARROW-2907 - [GitHub] Improve "How to contribute patches"
* ARROW-2914 - [Integration] Add WindowPandasUDFTests to Spark Integration
* ARROW-2918 - [C++] Improve formatting of Struct pretty prints
* ARROW-2921 - [Release] Update .deb/.rpm changelogs in preparation
* ARROW-2922 - [Release] Make python command name customizable
* ARROW-2923 - [Doc] Add instructions for running Spark integration tests
* ARROW-2937 - [Java] Follow-up changes to ARROW-2704
* ARROW-2943 - [C++] Implement BufferedOutputStream::Flush
* ARROW-2946 - [Packaging] Stop to use PWD in debian/rules
* ARROW-2947 - [Packaging] Remove Ubuntu Artful
* ARROW-2949 - [CI] repo.continuum.io can be flaky in builds
* ARROW-2951 - [CI] Changes in format/ should cause Appveyor builds to run
* ARROW-2953 - [Plasma] Store memory usage
* ARROW-2954 - [Plasma] Store object\_id only once in object table
* ARROW-2985 - [Ruby] Run unit tests in verify-release-candidate.sh
* ARROW-2988 - [Release] More automated release verification on Windows
* ARROW-2990 - [GLib] Fail to build with rpath-ed Arrow C++ on macOS
* ARROW-889 - [C++] Implement arrow::PrettyPrint for ChunkedArray
* ARROW-906 - [C++] Serialize Field metadata to IPC metadata
## New Feature
* ARROW-1018 - [C++] Add option to create FileOutputStream, ReadableFile from OS file descriptor
* ARROW-1163 - [Plasma][Java] Java client for Plasma
* ARROW-1388 - [Python] Add Table.drop method for removing columns
* ARROW-1715 - [Python] Implement pickling for Column, ChunkedArray, RecordBatch, Table
* ARROW-1780 - [Java] JDBC Adapter for Apache Arrow
* ARROW-1964 - [Python] Expose Builder classes
* ARROW-2207 - [GLib] Support decimal type
* ARROW-2267 - Rust bindings
* ARROW-2299 - [Go] Go language implementation
* ARROW-2319 - [C++] Add buffered output class implementing OutputStream interface
* ARROW-2330 - [C++] Optimize delta buffer creation with partially finishable array builders
* ARROW-2344 - [Go] Run Go unit tests in Travis CI
* ARROW-2361 - [Rust] Start native Rust Implementation
* ARROW-2381 - [Rust] Buffer<T> should have an Iterator
* ARROW-2385 - [Rust] Implement to\_json() for Field and DataType
* ARROW-2398 - [Rust] Provide a zero-copy builder for type-safe Buffer<T>
* ARROW-2401 - Support filters on Hive partitioned Parquet files
* ARROW-2407 - [GLib] Add garrow\_string\_array\_builder\_append\_values()
* ARROW-2408 - [Rust] It should be possible to get a &mut[T] from Builder<T>
* ARROW-2440 - [Rust] Implement ListBuilder<T>
* ARROW-2482 - [Rust] support nested types
* ARROW-2493 - [Python] Add support for pickling to buffers and arrays
* ARROW-2537 - [Ruby] Import
* ARROW-2576 - [GLib] Add abs functions for Decimal128.
* ARROW-2604 - [Java] Add method overload for VarCharVector.set(int,String)
* ARROW-2608 - [Java/Python] Add pyarrow.{Array,Field}.from\_jvm / jvm\_buffer
* ARROW-2613 - [Docs] Update the gen\_apidocs docker script
* ARROW-2661 - [Python/C++] Allow passing HDFS Config values via map/dict instead of needing an hdfs-site.xml file
* ARROW-2699 - [C++/Python] Add Table method that replaces a column with a new supplied column
* ARROW-2701 - [C++] Make MemoryMappedFile resizable
* ARROW-2729 - [GLib] Add decimal128 array builder
* ARROW-2751 - [GLib] Add garrow\_table\_replace\_column()
* ARROW-2753 - [GLib] Add garrow\_schema\_\*\_field()
* ARROW-2784 - [C++] MemoryMappedFile::WriteAt allow writing past the end
* ARROW-2790 - [C++] Buffers contain uninitialized memory
* ARROW-2824 - [GLib] Add garrow\_decimal128\_array\_get\_value()
* ARROW-2881 - [Website] Add Community tab to website
* ARROW-530 - C++/Python: Provide subpools for better memory allocation tracking
* ARROW-564 - [Python] Add methods to return vanilla NumPy arrays (plus boolean mask array if there are nulls)
## Sub-task
* ARROW-1868 - [Java] Change vector getMinorType to use MinorType instead of Types.MinorType
* ARROW-1913 - [Java] Fix Javadoc generation bugs with JDK8
* ARROW-2416 - [C++] Support system libprotobuf
* ARROW-2494 - Return status codes from PlasmaClient::Seal
* ARROW-2498 - [Java] Upgrade to JDK 1.8
* ARROW-2717 - [Packaging] Postfix conda artifacts with target arch
* ARROW-2718 - [Packaging] GPG sign downloaded artifacts
## Task
* ARROW-2055 - [Java] Upgrade to Java 8
* ARROW-2334 - [C++] Update boost to 1.66.0
* ARROW-2343 - [Java/Packaging] Run mvn clean in API doc builds
* ARROW-2345 - [Documentation] Fix bundle exec and set sphinx nosidebar to True
* ARROW-2353 - Test correctness of built wheel on AppVeyor
* ARROW-2464 - [Python] Use a python\_version marker instead of a condition
* ARROW-2477 - [Rust] Set up code coverage in CI
* ARROW-2543 - [Rust] CI should cache dependencies for faster builds
* ARROW-2562 - [C++] Upload coverage data to codecov.io
* ARROW-2566 - [CI] Add codecov.io badge to README
* ARROW-2614 - [CI] Remove 'group: deprecated' in Travis
* ARROW-2791 - [Packaging] Build Ubuntu 18.04 packages
* ARROW-2792 - [Packaging] Consider uploading tarballs to avoid naming conflicts
* ARROW-2836 - [Packaging] Expand build matrices to multiple tasks
* ARROW-2837 - [C++] ArrayBuilder::null\_bitmap returns PoolBuffer
* ARROW-2845 - [Packaging] Upload additional debian artifacts
* ARROW-2846 - [Packaging] Update nightly build in crossbow as well as the sample configuration
* ARROW-2855 - [C++] Blog post that outlines the benefits of using jemalloc
* ARROW-2868 - [Packaging] Fix centos-7 build
* ARROW-2875 - [Packaging] Don't attempt to download arrow archive in linux builds
* ARROW-2884 - [Packaging] Options to build packages from apache source archive
* ARROW-2898 - [Packaging] Setuptools\_scm just shipped a new version which fails to parse \`apache-arrow-<version>\` tag
* ARROW-2908 - [Rust] Update version to 0.10.0
* ARROW-2915 - [Packaging] Remove artifact form ubuntu-trusty build
* ARROW-2924 - [Java] mvn release fails when an older maven javadoc plugin is installed
* ARROW-2927 - [Packaging] AppVeyor wheel task is failing on initial checkout
* ARROW-2928 - [Packaging] AppVeyor crossbow conda builds are picking up boost 1.63.0 instead of the installed version
* ARROW-2929 - [C++] ARROW-2826 Breaks parquet-cpp 1.4.0 builds
* ARROW-2934 - [Packaging] Add checksums creation to sign subcommand
* ARROW-2935 - [Packaging] Add verify\_binary\_artifacts function to verify-release-candidate.sh
* ARROW-2944 - [Format] Arrow columnar format docs mentions VectorLayout that does not exist anymore
* ARROW-2962 - [Packaging] Bintray descriptor files are no longer needed
* ARROW-2977 - [Packaging] Release verification script should check rust too
## Test
* ARROW-2557 - [Rust] Add badge for code coverage in README
* ARROW-2895 - [Ruby] CI isn't ran when C++ is changed
* ARROW-2896 - [GLib] export are missing
## Wish
* ARROW-2286 - [Python] Allow subscripting pyarrow.lib.StructValue
* ARROW-2364 - [Plasma] PlasmaClient::Get() could take vector of object ids
* ARROW-2389 - [C++] Add StatusCode::OverflowError
* ARROW-2390 - [C++/Python] CheckPyError() could inspect exception type
* ARROW-2479 - [C++] Have a global thread pool
* ARROW-2499 - [C++] Add iterator facility for Python sequences
* ARROW-2505 - [C++] Disable MSVC warning C4800
* ARROW-2660 - [Python] Experiment with zero-copy pickling
* ARROW-2825 - [C++] Need AllocateBuffer / AllocateResizableBuffer variant with default memory pool
* ARROW-2826 - [C++] Clarification needed between ArrayBuilder::Init(), Resize() and Reserve()
* ARROW-902 - [C++] Build C++ project including thirdparty dependencies from local tarballs
# Apache Arrow 0.9.0 (16 March 2018)
## Bug
* ARROW-1345 - [Python] Conversion from nested NumPy arrays fails on integers other than int64, float32
* ARROW-1646 - [Python] pyarrow.array cannot handle NumPy scalar types
* ARROW-1856 - [Python] Auto-detect Parquet ABI version when using PARQUET\_HOME
* ARROW-1909 - [C++] Bug: Build fails on windows with "-DARROW\_BUILD\_BENCHMARKS=ON"
* ARROW-1912 - [Website] Add org affiliations to committers.html
* ARROW-1919 - Plasma hanging if object id is not 20 bytes
* ARROW-1924 - [Python] Bring back pickle=True option for serialization
* ARROW-1933 - [GLib] Build failure with --with-arrow-cpp-build-dir and GPU enabled Arrow C++
* ARROW-1940 - [Python] Extra metadata gets added after multiple conversions between pd.DataFrame and pa.Table
* ARROW-1941 - Table <-> DataFrame roundtrip failing
* ARROW-1943 - Handle setInitialCapacity() for deeply nested lists of lists
* ARROW-1944 - FindArrow has wrong ARROW\_STATIC\_LIB
* ARROW-1945 - [C++] Fix doxygen documentation of array.h
* ARROW-1946 - Add APIs to decimal vector for writing big endian data
* ARROW-1948 - [Java] ListVector does not handle ipc with all non-null values with none set
* ARROW-1950 - [Python] pandas\_type in pandas metadata incorrect for List types
* ARROW-1953 - [JS] JavaScript builds broken on master
* ARROW-1958 - [Python] Error in pandas conversion for datetimetz row index
* ARROW-1961 - [Python] Writing Parquet file with flavor='spark' loses pandas schema metadata
* ARROW-1966 - [C++] Support JAVA\_HOME paths in HDFS libjvm loading that include the jre directory
* ARROW-1971 - [Python] Add pandas serialization to the default
* ARROW-1972 - Deserialization of buffer objects (and pandas dataframes) segfaults on different processes.
* ARROW-1973 - [Python] Memory leak when converting Arrow tables with array columns to Pandas dataframes.
* ARROW-1976 - [Python] Handling unicode pandas columns on parquet.read\_table
* ARROW-1979 - [JS] JS builds handing in es2015:umd tests
* ARROW-1980 - [Python] Race condition in `write\_to\_dataset`
* ARROW-1982 - [Python] Return parquet statistics min/max as values instead of strings
* ARROW-1991 - [GLib] Docker-based documentation build is broken
* ARROW-1992 - [Python] to\_pandas crashes when using strings\_to\_categoricals on empty string cols on 0.8.0
* ARROW-1997 - [Python] to\_pandas with strings\_to\_categorical fails
* ARROW-1998 - [Python] Table.from\_pandas crashes when data frame is empty
* ARROW-1999 - [Python] from\_numpy\_dtype returns wrong types
* ARROW-2000 - Deduplicate file descriptors when plasma store replies to get request.
* ARROW-2002 - use pyarrow download file will raise queue.Full exceptions sometimes
* ARROW-2003 - [Python] Do not use deprecated kwarg in pandas.core.internals.make\_block
* ARROW-2005 - [Python] pyflakes warnings on Cython files not failing build
* ARROW-2008 - [Python] Type inference for int32 NumPy arrays (expecting list<int32>) returns int64 and then conversion fails
* ARROW-2010 - [C++] Compiler warnings with CHECKIN warning level in ORC adapter
* ARROW-2017 - Array initialization with large (>2**31-1) uint64 values fails
* ARROW-2023 - [C++] Test opening IPC stream reader or file reader on an empty InputStream
* ARROW-2025 - [Python/C++] HDFS Client disconnect closes all open clients
* ARROW-2029 - [Python] Program crash on `HdfsFile.tell` if file is closed
* ARROW-2032 - [C++] ORC ep installs on each call to ninja build (even if no work to do)
* ARROW-2033 - pa.array() doesn't work with iterators
* ARROW-2039 - [Python] pyarrow.Buffer().to\_pybytes() segfaults
* ARROW-2040 - [Python] Deserialized Numpy array must keep ref to underlying tensor
* ARROW-2047 - [Python] test\_serialization.py uses a python executable in PATH rather than that used for a test run
* ARROW-2049 - [Python] Use python -m cython to run Cython, instead of CYTHON\_EXECUTABLE
* ARROW-2062 - [C++] Stalled builds in test\_serialization.py in Travis CI
* ARROW-2070 - [Python] chdir logic in setup.py buggy
* ARROW-2072 - [Python] decimal128.byte\_width crashes
* ARROW-2080 - [Python] Update documentation after ARROW-2024
* ARROW-2085 - HadoopFileSystem.isdir and .isfile should return False if the path doesn't exist
* ARROW-2106 - [Python] pyarrow.array can't take a pandas Series of python datetime objects.
* ARROW-2109 - [C++] Boost 1.66 compilation fails on Windows on linkage stage
* ARROW-2124 - [Python] ArrowInvalid raised if the first item of a nested list of numpy arrays is empty
* ARROW-2128 - [Python] Cannot serialize array of empty lists
* ARROW-2129 - [Python] Segmentation fault on conversion of empty array to Pandas
* ARROW-2131 - [Python] Serialization test fails on Windows when library has been built in place / not installed
* ARROW-2133 - [Python] Segmentation fault on conversion of empty nested arrays to Pandas
* ARROW-2135 - [Python] NaN values silently casted to int64 when passing explicit schema for conversion in Table.from\_pandas
* ARROW-2145 - [Python] Decimal conversion not working for NaN values
* ARROW-2150 - [Python] array equality defaults to identity
* ARROW-2151 - [Python] Error when converting from list of uint64 arrays
* ARROW-2153 - [C++/Python] Decimal conversion not working for exponential notation
* ARROW-2157 - [Python] Decimal arrays cannot be constructed from Python lists
* ARROW-2160 - [C++/Python] Fix decimal precision inference
* ARROW-2161 - [Python] Skip test\_cython\_api if ARROW\_HOME isn't defined
* ARROW-2162 - [Python/C++] Decimal Values with too-high precision are multiplied by 100
* ARROW-2167 - [C++] Building Orc extensions fails with the default BUILD\_WARNING\_LEVEL=Production
* ARROW-2170 - [Python] construct\_metadata fails on reading files where no index was preserved
* ARROW-2171 - [Python] OwnedRef is fragile
* ARROW-2172 - [Python] Incorrect conversion from Numpy array when stride % itemsize != 0
* ARROW-2173 - [Python] NumPyBuffer destructor should hold the GIL
* ARROW-2175 - [Python] arrow\_ep build is triggering during parquet-cpp build in Travis CI
* ARROW-2178 - [JS] Fix JS html FileReader example
* ARROW-2179 - [C++] arrow/util/io-util.h missing from libarrow-dev
* ARROW-2192 - Commits to master should run all builds in CI matrix
* ARROW-2209 - [Python] Partition columns are not correctly loaded in schema of ParquetDataset
* ARROW-2210 - [C++] TestBuffer\_ResizeOOM has a memory leak with jemalloc
* ARROW-2212 - [C++/Python] Build Protobuf in base manylinux 1 docker image
* ARROW-2223 - [JS] installing umd release throws an error
* ARROW-2227 - [Python] Table.from\_pandas does not create chunked\_arrays.
* ARROW-2230 - [Python] JS version number is sometimes picked up
* ARROW-2232 - [Python] pyarrow.Tensor constructor segfaults
* ARROW-2234 - [JS] Read timestamp low bits as Uint32s
* ARROW-2240 - [Python] Array initialization with leading numpy nan fails with exception
* ARROW-2244 - [C++] Slicing NullArray should not cause the null count on the internal data to be unknown
* ARROW-2245 - [Python] Revert static linkage of parquet-cpp in manylinux1 wheel
* ARROW-2246 - [Python] Use namespaced boost in manylinux1 package
* ARROW-2251 - [GLib] Destroying GArrowBuffer while GArrowTensor that uses the buffer causes a crash
* ARROW-2254 - [Python] Local in-place dev versions picking up JS tags
* ARROW-2258 - [C++] Appveyor builds failing on master
* ARROW-2263 - [Python] test\_cython.py fails if pyarrow is not in import path (e.g. with inplace builds)
* ARROW-2265 - [Python] Serializing subclasses of np.ndarray returns a np.ndarray.
* ARROW-2268 - Remove MD5 checksums from release process
* ARROW-2269 - [Python] Cannot build bdist\_wheel for Python
* ARROW-2270 - [Python] ForeignBuffer doesn't tie Python object lifetime to C++ buffer lifetime
* ARROW-2272 - [Python] test\_plasma spams /tmp
* ARROW-2275 - [C++] Buffer::mutable\_data\_ member uninitialized
* ARROW-2280 - [Python] pyarrow.Array.buffers should also include the offsets
* ARROW-2284 - [Python] test\_plasma error on plasma\_store error
* ARROW-2288 - [Python] slicing logic defective
* ARROW-2297 - [JS] babel-jest is not listed as a dev dependency
* ARROW-2304 - [C++] MultipleClients test in io-hdfs-test fails on trunk
* ARROW-2306 - [Python] HDFS test failures
* ARROW-2307 - [Python] Unable to read arrow stream containing 0 record batches
* ARROW-2311 - [Python] Struct array slicing defective
* ARROW-2312 - [JS] verify-release-candidate.sh must be updated to include JS in integration tests
* ARROW-2313 - [GLib] Release builds must define NDEBUG
* ARROW-2316 - [C++] Revert Buffer::mutable\_data member to always inline
* ARROW-2318 - [C++] TestPlasmaStore.MultipleClientTest is flaky (hangs) in release builds
* ARROW-2320 - [C++] Vendored Boost build does not build regex library
## Improvement
* ARROW-1021 - [Python] Add documentation about using pyarrow from other Cython and C++ projects
* ARROW-1035 - [Python] Add ASV benchmarks for streaming columnar deserialization
* ARROW-1463 - [JAVA] Restructure ValueVector hierarchy to minimize compile-time generated code
* ARROW-1579 - [Java] Add dockerized test setup to validate Spark integration
* ARROW-1580 - [Python] Instructions for setting up nightly builds on Linux
* ARROW-1623 - [C++] Add convenience method to construct Buffer from a string that owns its memory
* ARROW-1632 - [Python] Permit categorical conversions in Table.to\_pandas on a per-column basis
* ARROW-1643 - [Python] Accept hdfs:// prefixes in parquet.read\_table and attempt to connect to HDFS
* ARROW-1706 - [Python] StructArray.from\_arrays should handle sequences that are coercible to arrays
* ARROW-1712 - [C++] Add method to BinaryBuilder to reserve space for value data
* ARROW-1835 - [C++] Create Arrow schema from std::tuple types
* ARROW-1861 - [Python] Fix up ASV setup, add developer instructions for writing new benchmarks and running benchmark suite locally
* ARROW-1872 - [Website] Populate hard-coded fields for current release from a YAML file
* ARROW-1927 - [Plasma] Implement delete function
* ARROW-1929 - [C++] Move various Arrow testing utility code from Parquet to Arrow codebase
* ARROW-1937 - [Python] Add documentation for different forms of constructing nested arrays from Python data structures
* ARROW-1942 - [C++] Hash table specializations for small integers
* ARROW-1947 - [Plasma] Change Client Create and Get to use Buffers
* ARROW-1951 - Add memcopy\_threads to serialization context
* ARROW-1962 - [Java] Add reset() to ValueVector interface
* ARROW-1969 - [C++] Do not build ORC adapter by default
* ARROW-1977 - [C++] Update windows dev docs
* ARROW-1978 - [Website] Add more visible link to "Powered By" page to front page, simplify Powered By
* ARROW-2004 - [C++] Add shrink\_to\_fit option in BufferBuilder::Resize
* ARROW-2007 - [Python] Sequence converter for float32 not implemented
* ARROW-2011 - Allow setting the pickler to use in pyarrow serialization.
* ARROW-2012 - [GLib] Support "make distclean"
* ARROW-2018 - [C++] Build instruction on macOS and Homebrew is incomplete
* ARROW-2019 - Control the memory allocated for inner vector in LIST
* ARROW-2024 - [Python] Remove global SerializationContext variables
* ARROW-2028 - [Python] extra\_cmake\_args needs to be passed through shlex.split
* ARROW-2031 - HadoopFileSystem isn't pickleable
* ARROW-2035 - [C++] Update vendored cpplint.py to a Py3-compatible one
* ARROW-2036 - NativeFile should support standard IOBase methods
* ARROW-2042 - [Plasma] Revert API change of plasma::Create to output a MutableBuffer
* ARROW-2043 - [C++] Change description from OS X to macOS
* ARROW-2046 - [Python] Add support for PEP519 - pathlib and similar objects
* ARROW-2048 - [Python/C++] Update Thrift pin to 0.11
* ARROW-2050 - Support `setup.py pytest` to automatically fetch the test dependencies
* ARROW-2064 - [GLib] Add common build problems link to the install section
* ARROW-2065 - Fix bug in SerializationContext.clone().
* ARROW-2068 - [Python] Expose Array's buffers to Python users
* ARROW-2069 - [Python] Document that Plasma is not (yet) supported on Windows
* ARROW-2071 - [Python] Reduce runtime of builds in Travis CI
* ARROW-2073 - [Python] Create StructArray from sequence of tuples given a known data type
* ARROW-2076 - [Python] Display slowest test durations
* ARROW-2083 - Support skipping builds
* ARROW-2084 - [C++] Support newer Brotli static library names
* ARROW-2086 - [Python] Shrink size of arrow\_manylinux1\_x86\_64\_base docker image
* ARROW-2087 - [Python] Binaries of 3rdparty are not stripped in manylinux1 base image
* ARROW-2088 - [GLib] Add GArrowNumericArray
* ARROW-2089 - [GLib] Rename to GARROW\_TYPE\_BOOLEAN for consistency
* ARROW-2090 - [Python] Add context manager methods to ParquetWriter
* ARROW-2093 - [Python] Possibly do not test pytorch serialization in Travis CI
* ARROW-2094 - [Python] Use toolchain libraries and PROTOBUF\_HOME for protocol buffers
* ARROW-2095 - [C++] Suppress ORC EP build logging by default
* ARROW-2096 - [C++] Turn off Boost\_DEBUG to trim build output
* ARROW-2099 - [Python] Support DictionaryArray::FromArrays in Python bindings
* ARROW-2107 - [GLib] Follow arrow::gpu::CudaIpcMemHandle API change
* ARROW-2110 - [Python] Only require pytest-runner on test commands
* ARROW-2111 - [C++] Linting could be faster
* ARROW-2117 - [C++] Pin clang to version 5.0
* ARROW-2118 - [Python] Improve error message when calling parquet.read\_table on an empty file
* ARROW-2120 - Add possibility to use empty \_MSVC\_STATIC\_LIB\_SUFFIX for Thirdparties
* ARROW-2121 - [Python] Consider special casing object arrays in pandas serializers.
* ARROW-2132 - [Doc] Add links / mentions of Plasma store to main README
* ARROW-2137 - [Python] Don't print paths that are ignored when reading Parquet files
* ARROW-2138 - [C++] Have FatalLog abort instead of exiting
* ARROW-2142 - [Python] Conversion from Numpy struct array unimplemented
* ARROW-2143 - [Python] Provide a manylinux1 wheel for cp27m
* ARROW-2146 - [GLib] Implement Slice for ChunkedArray
* ARROW-2154 - [Python] \_\_eq\_\_ unimplemented on Buffer
* ARROW-2155 - [Python] pa.frombuffer(bytearray) returns immutable Buffer
* ARROW-2163 - Install apt dependencies separate from built-in Travis commands, retry on flakiness
* ARROW-2168 - [C++] Build toolchain builds with jemalloc
* ARROW-2169 - [C++] MSVC is complaining about uncaptured variables
* ARROW-2174 - [JS] Export format and schema enums
* ARROW-2177 - [C++] Remove support for specifying negative scale values in DecimalType
* ARROW-2180 - [C++] Remove APIs deprecated in 0.8.0 release
* ARROW-2181 - [Python] Add concat\_tables to API reference, add documentation on use
* ARROW-2184 - [C++] Add static constructor for FileOutputStream returning shared\_ptr to base OutputStream
* ARROW-2185 - Remove CI directives from squashed commit messages
* ARROW-2191 - [C++] Only use specific version of jemalloc
* ARROW-2198 - [Python] Docstring for parquet.read\_table is misleading or incorrect
* ARROW-2199 - [JAVA] Follow up fixes for ARROW-2019. Ensure density driven capacity is never less than 1 and propagate density throughout the vector tree
* ARROW-2203 - [C++] StderrStream class
* ARROW-2204 - [C++] Build fails with TLS error on parquet-cpp clone
* ARROW-2206 - [JS] Add Perspective as a community project
* ARROW-2218 - [Python] PythonFile should infer mode when not given
* ARROW-2231 - [CI] Use clcache on AppVeyor
* ARROW-2238 - [C++] Detect clcache in cmake configuration
* ARROW-2250 - plasma\_store process should cleanup on INT and TERM signals
* ARROW-2261 - [GLib] Can't share the same memory in GArrowBuffer safely
* ARROW-2279 - [Python] Better error message if lib cannot be found
* ARROW-2282 - [Python] Create StringArray from buffers
* ARROW-2283 - [C++] Support Arrow C++ installed in /usr detection by pkg-config
* ARROW-2289 - [GLib] Add Numeric, Integer and FloatingPoint data types
* ARROW-2291 - [C++] README missing instructions for libboost-regex-dev
* ARROW-2292 - [Python] More consistent / intuitive name for pyarrow.frombuffer
* ARROW-2321 - [C++] Release verification script fails with if CMAKE\_INSTALL\_LIBDIR is not $ARROW\_HOME/lib
* ARROW-764 - [C++] Improve performance of CopyBitmap, add benchmarks
## New Feature
* ARROW-1394 - [Plasma] Add optional extension for allocating memory on GPUs
* ARROW-1705 - [Python] Create StructArray from sequence of dicts given a known data type
* ARROW-1757 - [C++] Add DictionaryArray::FromArrays alternate ctor that can check or sanitized "untrusted" indices
* ARROW-1832 - [JS] Implement JSON reader for integration tests
* ARROW-1920 - Add support for reading ORC files
* ARROW-1926 - [GLib] Add garrow\_timestamp\_data\_type\_get\_unit()
* ARROW-1930 - [C++] Implement Slice for ChunkedArray and Column
* ARROW-1931 - [C++] w4996 warning due to std::tr1 failing builds on Visual Studio 2017
* ARROW-1965 - [GLib] Add garrow\_array\_builder\_get\_value\_data\_type() and garrow\_array\_builder\_get\_value\_type()
* ARROW-1970 - [GLib] Add garrow\_chunked\_array\_get\_value\_data\_type() and garrow\_chunked\_array\_get\_value\_type()
* ARROW-2166 - [GLib] Implement Slice for Column
* ARROW-2176 - [C++] Extend DictionaryBuilder to support delta dictionaries
* ARROW-2190 - [GLib] Add add/remove field functions for RecordBatch.
* ARROW-2205 - [Python] Option for integer object nulls
* ARROW-2252 - [Python] Create buffer from address, size and base
* ARROW-2253 - [Python] Support \_\_eq\_\_ on scalar values
* ARROW-2262 - [Python] Support slicing on pyarrow.ChunkedArray
* ARROW-232 - C++/Parquet: Support writing chunked arrays as part of a table
* ARROW-633 - [Java] Add support for FixedSizeBinary type
* ARROW-634 - Add integration tests for FixedSizeBinary
* ARROW-969 - [C++/Python] Add add/remove field functions for RecordBatch
## Sub-task
* ARROW-1815 - [Java] Rename MapVector to StructVector
## Task
* ARROW-2052 - Unify OwnedRef and ScopedRef
* ARROW-2054 - Compilation warnings
* ARROW-2108 - [Python] Update instructions for ASV
* ARROW-2114 - [Python] Pull latest docker manylinux1 image
* ARROW-2123 - [JS] Upgrade to TS 2.7.1
* ARROW-2134 - [CI] Make Travis commit inspection more robust
* ARROW-2149 - [Python] reorganize test\_convert\_pandas.py
* ARROW-2156 - [CI] Isolate Sphinx dependencies
* ARROW-2197 - Document "undefined symbol" issue and workaround
* ARROW-2239 - [C++] Update build docs for Windows
* ARROW-2309 - [C++] Use std::make\_unsigned
## Test
* ARROW-1589 - [C++] Fuzzing for certain input formats
# Apache Arrow 0.8.0 (12 December 2017)
## Bug
* ARROW-1282 - Large memory reallocation by Arrow causes hang in jemalloc
* ARROW-1341 - [C++] Deprecate arrow::MakeTable in favor of new ctor from ARROW-1334
* ARROW-1347 - [JAVA] List null type should use consistent name for inner field
* ARROW-1398 - [Python] No support reading columns of type decimal(19,4)
* ARROW-1409 - [Format] Use for "page" attribute in Buffer in metadata
* ARROW-1540 - [C++] Fix valgrind warnings in cuda-test if possible
* ARROW-1541 - [C++] Race condition with arrow\_gpu
* ARROW-1543 - [C++] row\_wise\_conversion example doesn't correspond to ListBuilder constructor arguments
* ARROW-1555 - [Python] write\_to\_dataset on s3
* ARROW-1584 - [PYTHON] serialize\_pandas on empty dataframe
* ARROW-1585 - serialize\_pandas round trip fails on integer columns
* ARROW-1586 - [PYTHON] serialize\_pandas roundtrip loses columns name
* ARROW-1609 - Plasma: Build fails with Xcode 9.0
* ARROW-1615 - CXX flags for development more permissive than Travis CI builds
* ARROW-1617 - [Python] Do not use symlinks in python/cmake\_modules
* ARROW-1620 - Python: Download Boost in manylinux1 build from bintray
* ARROW-1624 - [C++] Follow up fixes / tweaks to compiler warnings for Plasma / LLVM 4.0, add to readme
* ARROW-1625 - [Serialization] Support OrderedDict properly
* ARROW-1629 - [C++] Fix problematic code paths identified by infer tool
* ARROW-1633 - [Python] numpy "unicode" arrays not understood
* ARROW-1640 - Resolve OpenSSL issues in Travis CI
* ARROW-1647 - [Plasma] Potential bug when reading/writing messages.
* ARROW-1653 - [Plasma] Use static cast to avoid compiler warning.
* ARROW-1656 - [C++] Endianness Macro is Incorrect on Windows And Mac
* ARROW-1657 - [C++] Multithreaded Read Test Failing on Arch Linux
* ARROW-1658 - [Python] Out of bounds dictionary indices causes segfault after converting to pandas
* ARROW-1663 - [Java] Follow up on ARROW-1347 and make schema backward compatible
* ARROW-1670 - [Python] Speed up deserialization code path
* ARROW-1672 - [Python] Failure to write Feather bytes column
* ARROW-1673 - [Python] NumPy boolean arrays get converted to uint8 arrays on NdarrayToTensor roundtrip
* ARROW-1676 - [C++] Correctly truncate oversized validity bitmaps when writing Feather format
* ARROW-1678 - [Python] Incorrect serialization of numpy.float16
* ARROW-1680 - [Python] Timestamp unit change not done in from\_pandas() conversion
* ARROW-1686 - Documentation generation script creates "apidocs" directory under site/java
* ARROW-1693 - [JS] Error reading dictionary-encoded integration test files
* ARROW-1695 - [Serialization] Fix reference counting of numpy arrays created in custom serialializer
* ARROW-1698 - [JS] File reader attempts to load the same dictionary batch more than once
* ARROW-1704 - [GLib] Go example in test suite is broken
* ARROW-1708 - [JS] Linter problem breaks master build
* ARROW-1709 - [C++] Decimal.ToString is incorrect for negative scale
* ARROW-1711 - [Python] flake8 checks still not failing builds
* ARROW-1714 - [Python] No named pd.Series name serialized as u'None'
* ARROW-1720 - [Python] Segmentation fault while trying to access an out-of-bound chunk
* ARROW-1723 - Windows: \_\_declspec(dllexport) specified when building arrow static library
* ARROW-1730 - [Python] Incorrect result from pyarrow.array when passing timestamp type
* ARROW-1732 - [Python] RecordBatch.from\_pandas fails on DataFrame with no columns when preserve\_index=False
* ARROW-1735 - [C++] Cast kernels cannot write into sliced output array
* ARROW-1738 - [Python] Wrong datetime conversion when pa.array with unit
* ARROW-1739 - [Python] Fix usages of assertRaises causing broken build
* ARROW-1742 - C++: clang-format is not detected correct on OSX anymore
* ARROW-1743 - [Python] Table to\_pandas fails when index contains categorical column
* ARROW-1745 - Compilation failure on Mac OS in plasma tests
* ARROW-1749 - [C++] Handle range of Decimal128 values that require 39 digits to be displayed
* ARROW-1751 - [Python] Pandas 0.21.0 introduces a breaking API change for MultiIndex construction
* ARROW-1754 - [Python] Fix buggy Parquet roundtrip when an index name is the same as a column name
* ARROW-1756 - [Python] Observed int32 overflow in Feather write/read path
* ARROW-1762 - [C++] unittest failure for language environment
* ARROW-1764 - [Python] Add -c conda-forge for Windows dev installation instructions
* ARROW-1766 - [GLib] Fix failing builds on OSX
* ARROW-1768 - [Python] Fix suppressed exception in ParquetWriter.\_\_del\_\_
* ARROW-1770 - [GLib] Fix GLib compiler warning
* ARROW-1771 - [C++] ARROW-1749 Breaks Public API test in parquet-cpp
* ARROW-1776 - [C++] arrow::gpu::CudaContext::bytes\_allocated() isn't defined
* ARROW-1778 - [Python] Link parquet-cpp statically, privately in manylinux1 wheels
* ARROW-1781 - [CI] OSX Builds on Travis-CI time out often
* ARROW-1788 - Plasma store crashes when trying to abort objects for disconnected client
* ARROW-1791 - Integration tests generate date[DAY] values outside of reasonable range
* ARROW-1793 - [Integration] fix a typo for README.md
* ARROW-1800 - [C++] Fix and simplify random\_decimals
* ARROW-1805 - [Python] ignore non-parquet files when exploring dataset
* ARROW-1811 - [C++/Python] Rename all Decimal based APIs to Decimal128
* ARROW-1812 - Plasma store modifies hash table while iterating during client disconnect
* ARROW-1829 - [Plasma] Clean up eviction policy bookkeeping
* ARROW-1830 - [Python] Error when loading all the files in a directory
* ARROW-1836 - [C++] Fix C4996 warning from arrow/util/variant.h on MSVC builds
* ARROW-1840 - [Website] The installation command failed on Windows10 anaconda environment.
* ARROW-1845 - [Python] Expose Decimal128Type
* ARROW-1852 - [Plasma] Make retrieving manager file descriptor const
* ARROW-1853 - [Plasma] Fix off-by-one error in retry processing
* ARROW-1863 - [Python] PyObjectStringify could render bytes-like output for more types of objects
* ARROW-1865 - [C++] Adding a column to an empty Table fails
* ARROW-1869 - Fix typo in LowCostIdentityHashMap
* ARROW-1871 - [Python/C++] Appending Python Decimals with different scales requires rescaling
* ARROW-1873 - [Python] Segmentation fault when loading total 2GB of parquet files
* ARROW-1877 - Incorrect comparison in JsonStringArrayList.equals
* ARROW-1879 - [Python] Dask integration tests are not skipped if dask is not installed
* ARROW-1881 - [Python] setuptools\_scm picks up JS version tags
* ARROW-1882 - [C++] Reintroduce DictionaryBuilder
* ARROW-1883 - [Python] BUG: Table.to\_pandas metadata checking fails if columns are not present
* ARROW-1889 - [Python] --exclude is not available in older git versions
* ARROW-1890 - [Python] Masking for date32 arrays not working
* ARROW-1891 - [Python] NaT date32 values are only converted to nulls if from\_pandas is used
* ARROW-1892 - [Python] Unknown list item type: binary
* ARROW-1893 - [Python] test\_primitive\_serialization fails on Python 2.7.3
* ARROW-1895 - [Python] Add field\_name to pandas index metadata
* ARROW-1897 - [Python] Incorrect numpy\_type for pandas metadata of Categoricals
* ARROW-1904 - [C++] Deprecate PrimitiveArray::raw\_values
* ARROW-1906 - [Python] Creating a pyarrow.Array with timestamp of different unit is not casted
* ARROW-1908 - [Python] Construction of arrow table from pandas DataFrame with duplicate column names crashes
* ARROW-1910 - CPP README Brewfile link incorrect
* ARROW-1914 - [C++] make -j may fail to build with -DARROW\_GPU=on
* ARROW-1915 - [Python] Parquet tests should be optional
* ARROW-1916 - [Java] Do not exclude java/dev/checkstyle from source releases
* ARROW-1917 - [GLib] Must set GI\_TYPELIB\_PATH in verify-release-candidate.sh
* ARROW-226 - [C++] libhdfs: feedback to help determining cause of failure in opening file path
* ARROW-641 - [C++] Do not build/run io-hdfs-test if ARROW\_HDFS=off
## Improvement
* ARROW-1087 - [Python] add get\_include to expose directory containing header files
* ARROW-1134 - [C++] Allow C++/CLI projects to build with Arrow
* ARROW-1178 - [Python] Create alternative to Table.from\_pandas that yields a list of RecordBatch objects with a given chunk size
* ARROW-1226 - [C++] Improve / correct doxygen function documentation in arrow::ipc
* ARROW-1371 - [Website] Add "Powered By" page to the website
* ARROW-1455 - [Python] Add Dockerfile for validating Dask integration outside of usual CI
* ARROW-1488 - [C++] Implement ArrayBuilder::Finish in terms of internal::ArrayData
* ARROW-1498 - [GitHub] Add CONTRIBUTING.md and ISSUE\_TEMPLATE.md
* ARROW-1503 - [Python] Add serialization callbacks for pandas objects in pyarrow.serialize
* ARROW-1522 - [C++] Support pyarrow.Buffer as built-in type in pyarrow.serialize
* ARROW-1523 - [C++] Add helper data struct with methods for reading a validity bitmap possibly having a non-zero offset
* ARROW-1524 - [C++] More graceful solution for handling non-zero offsets on inputs and outputs in compute library
* ARROW-1525 - [C++] Change functions in arrow/compare.h to not return Status
* ARROW-1526 - [Python] Unit tests to exercise code path in PARQUET-1100
* ARROW-1535 - [Python] Enable sdist source tarballs to build assuming that Arrow C++ libraries are available on the host system
* ARROW-1538 - [C++] Support Ubuntu 14.04 in .deb packaging automation
* ARROW-1539 - [C++] Remove functions deprecated as of 0.7.0 and prior releases
* ARROW-1556 - [C++] Incorporate AssertArraysEqual function from PARQUET-1100 patch
* ARROW-1588 - [C++/Format] Harden Decimal Format
* ARROW-1593 - [PYTHON] serialize\_pandas should pass through the preserve\_index keyword
* ARROW-1594 - [Python] Enable multi-threaded conversions in Table.from\_pandas
* ARROW-1600 - [C++] Zero-copy Buffer constructor from std::string
* ARROW-1602 - [C++] Add IsValid/IsNotNull method to arrow::Array
* ARROW-1603 - [C++] Add BinaryArray method to get a value as a std::string
* ARROW-1604 - [Python] Support common type aliases in cast(...) and various type= arguments
* ARROW-1605 - [Python] pyarrow.array should be able to yield smaller integer types without an explicit cast
* ARROW-1607 - [C++] Implement DictionaryBuilder for Decimals
* ARROW-1613 - [Java] ArrowReader should not close the input ReadChannel
* ARROW-1616 - [Python] Add "write" method to RecordBatchStreamWriter that dispatches to write\_table/write\_back as appropriate
* ARROW-1626 - Add make targets to run the inter-procedural static analysis tool called "infer".
* ARROW-1627 - [JAVA] Reduce heap usage(Phase 2) - memory footprint in AllocationManager.BufferLedger
* ARROW-1630 - [Serialization] Support Python datetime objects
* ARROW-1635 - Add release management guide for PMCs
* ARROW-1641 - [C++] Do not include <mutex> in public headers
* ARROW-1651 - [JS] Lazy row accessor in Table
* ARROW-1652 - [JS] Separate Vector into BatchVector and CompositeVector
* ARROW-1654 - [Python] pa.DataType cannot be pickled
* ARROW-1662 - Move OSX Dependency management into brew bundle Brewfiles
* ARROW-1665 - [Serialization] Support more custom datatypes in the default serialization context
* ARROW-1666 - [GLib] Enable gtk-doc on Travis CI Mac environment
* ARROW-1671 - [C++] Change arrow::MakeArray to not return Status
* ARROW-1675 - [Python] Use RecordBatch.from\_pandas in FeatherWriter.write
* ARROW-1677 - [Blog] Add blog post on Ray and Arrow Python serialization
* ARROW-1679 - [GLib] Add garrow\_record\_batch\_reader\_read\_next()
* ARROW-1683 - [Python] Restore "TimestampType" to pyarrow namespace
* ARROW-1684 - [Python] Simplify user API for reading nested Parquet columns
* ARROW-1689 - [Python] Categorical Indices Should Be Zero-Copy
* ARROW-1691 - [Java] Conform Java Decimal type implementation to format decisions in ARROW-1588
* ARROW-1701 - [Serialization] Support zero copy PyTorch Tensor serialization
* ARROW-1702 - Update jemalloc in manylinux1 build
* ARROW-1703 - [C++] Vendor exact version of jemalloc we depend on
* ARROW-1707 - Update dev README after movement to GitBox
* ARROW-1716 - [Format/JSON] Use string integer value for Decimals in JSON
* ARROW-1721 - [Python] Support null mask in places where it isn't supported in numpy\_to\_arrow.cc
* ARROW-1724 - [Packaging] Support Ubuntu 17.10
* ARROW-1725 - [Packaging] Upload .deb for Ubuntu 17.10
* ARROW-1726 - [GLib] Add setup description to verify C GLib build
* ARROW-1727 - [Format] Expand Arrow streaming format to permit new dictionaries and deltas / additions to existing dictionaries
* ARROW-1728 - [C++] Run clang-format checks in Travis CI
* ARROW-1737 - [GLib] Use G\_DECLARE\_DERIVABLE\_TYPE
* ARROW-1746 - [Python] Add build dependencies for Arch Linux
* ARROW-1747 - [C++] Don't export symbols of statically linked libraries
* ARROW-1750 - [C++] Remove the need for arrow/util/random.h
* ARROW-1753 - [Python] Provide for matching subclasses with register\_type in serialization context
* ARROW-1755 - [C++] Add build options for MSVC to use static runtime libraries
* ARROW-1758 - [Python] Remove pickle=True option for object serialization
* ARROW-1763 - [Python] DataType should be hashable
* ARROW-1765 - [Doc] Use dependencies from conda in C++ docker build
* ARROW-1785 - [Format/C++/Java] Remove VectorLayout metadata from Flatbuffers metadata
* ARROW-1787 - [Python] Support reading parquet files into DataFrames in a backward compatible way
* ARROW-1794 - [C++/Python] Rename DecimalArray to Decimal128Array
* ARROW-1801 - [Docs] Update install instructions to use red-data-tools repos
* ARROW-1808 - [C++] Make RecordBatch interface virtual to permit record batches that lazy-materialize columns
* ARROW-1809 - [GLib] Use .xml instead of .sgml for GTK-Doc main file
* ARROW-1810 - [Plasma] Remove test shell scripts
* ARROW-1817 - Configure JsonFileReader to read NaN for floats
* ARROW-1826 - [JAVA] Avoid branching at cell level (copyFrom)
* ARROW-1828 - [C++] Implement hash kernel specialization for BooleanType
* ARROW-1834 - [Doc] Build documentation in separate build folders
* ARROW-1838 - [C++] Use compute::Datum uniformly for input argument to kernels
* ARROW-1841 - [JS] Update text-encoding-utf-8 and tslib for node ESModules support
* ARROW-1849 - [GLib] Add input checks to GArrowRecordBatch
* ARROW-1850 - [C++] Use const void* in Writable::Write instead of const uint8\_t*
* ARROW-1854 - [Python] Improve performance of serializing object dtype ndarrays
* ARROW-1855 - [GLib] Add workaround for build failure on macOS
* ARROW-1864 - [Java] Upgrade Netty to 4.1.x
* ARROW-1884 - [C++] Make JsonReader/JsonWriter classes internal APIs
* ARROW-1901 - [Python] Support recursive mkdir for DaskFilesystem
* ARROW-1902 - [Python] Remove mkdir race condition from write\_to\_dataset
* ARROW-1905 - [Python] Add more functions for checking exact types in pyarrow.types
* ARROW-1911 - Add Graphistry to Arrow JS proof points
* ARROW-905 - [Docs] Add Dockerfile for reproducible documentation generation
* ARROW-942 - Support integration testing on Python 2.7
* ARROW-950 - [Site] Add Google Analytics tag
## New Feature
* ARROW-1032 - [JS] Support custom\_metadata
* ARROW-1047 - [Java] Add generalized stream writer and reader interfaces that are decoupled from IO / message framing
* ARROW-1114 - [C++] Create Record Batch Builder class as a reusable and efficient way to transpose row-by-row data to columns
* ARROW-1250 - [Python] Define API for user type checking of array types
* ARROW-1482 - [C++] Implement casts between date32 and date64
* ARROW-1483 - [C++] Implement casts between time32 and time64
* ARROW-1484 - [C++] Implement (safe and unsafe) casts between timestamps and times of different units
* ARROW-1486 - [C++] Decide if arrow::RecordBatch needs to be copyable
* ARROW-1487 - [C++] Implement casts from List<A> to List<B>, where a cast function is defined from any A to B
* ARROW-1559 - [C++] Kernel implementations for "unique" (compute distinct elements of array)
* ARROW-1573 - [C++] Implement stateful kernel function that uses DictionaryBuilder to compute dictionary indices
* ARROW-1575 - [Python] Add pyarrow.column factory function
* ARROW-1577 - [JS] Package release script for NPM modules
* ARROW-1631 - [C++] Add GRPC to ThirdpartyToolchain.cmake
* ARROW-1637 - [C++] IPC round-trip for null type
* ARROW-1648 - C++: Add cast from Dictionary[NullType] to NullType
* ARROW-1649 - C++: Print number of nulls in PrettyPrint for NullArray
* ARROW-1667 - [GLib] Support Meson
* ARROW-1685 - [GLib] Add GArrowTableReader
* ARROW-1690 - [GLib] Add garrow\_array\_is\_valid()
* ARROW-1697 - [GitHub] Add ISSUE\_TEMPLATE.md
* ARROW-1718 - [Python] Implement casts from timestamp to date32/date64 and support in Array.from\_pandas
* ARROW-1734 - C++/Python: Add cast function on Column-level
* ARROW-1736 - [GLib] Add GArrowCastOptions:allow-time-truncate
* ARROW-1748 - [GLib] Add GArrowRecordBatchBuilder
* ARROW-1752 - [Packaging] Add GPU packages for Debian and Ubuntu
* ARROW-1767 - [C++] Support file reads and writes over 2GB on Windows
* ARROW-1772 - [C++] Add public-api-test module in style of parquet-cpp
* ARROW-1773 - [C++] Add casts from date/time types to compatible signed integers
* ARROW-1775 - Ability to abort created but unsealed Plasma objects
* ARROW-1777 - [C++] Add static ctor ArrayData::Make for nicer syntax in places
* ARROW-1782 - [Python] Expose compressors as pyarrow.compress, pyarrow.decompress
* ARROW-1783 - [Python] Convert SerializedPyObject to/from sequence of component buffers with minimal memory allocation / copying
* ARROW-1784 - [Python] Read and write pandas.DataFrame in pyarrow.serialize by decomposing the BlockManager rather than coercing to Arrow format
* ARROW-1802 - [GLib] Add Arrow GPU support
* ARROW-1806 - [GLib] Add garrow\_record\_batch\_writer\_write\_table()
* ARROW-1844 - [C++] Basic benchmark suite for hash kernels
* ARROW-1857 - [Python] Add switch for boost linkage with static parquet in wheels
* ARROW-1859 - [GLib] Add GArrowDictionaryDataType
* ARROW-1862 - [GLib] Add GArrowDictionaryArray
* ARROW-1874 - [GLib] Add garrow\_array\_unique()
* ARROW-1878 - [GLib] Add garrow\_array\_dictionary\_encode()
* ARROW-480 - [Python] Add accessors for Parquet column statistics
* ARROW-504 - [Python] Add adapter to write pandas.DataFrame in user-selected chunk size to streaming format
* ARROW-507 - [C++/Python] Construct List container from offsets and values subarrays
* ARROW-541 - [JS] Implement JavaScript-compatible implementation
* ARROW-571 - [Python] Add APIs to build Parquet files incrementally from Arrow tables
* ARROW-587 - Add JIRA fix version to merge tool
* ARROW-609 - [C++] Function for casting from days since UNIX epoch to int64 date
* ARROW-838 - [Python] Efficient construction of arrays from non-pandas 1D NumPy arrays
* ARROW-972 - [Python] Add test cases and basic APIs for UnionArray
## Sub-task
* ARROW-1471 - [JAVA] Document requirements and non/requirements for ValueVector updates
* ARROW-1472 - [JAVA] Design updated ValueVector Object Hierarchy
* ARROW-1473 - [JAVA] Create Prototype Code Hierarchy (Implementation Phase 1)
* ARROW-1474 - [JAVA] ValueVector hierarchy (Implementation Phase 2)
* ARROW-1476 - [JAVA] Implement final ValueVector updates
* ARROW-1710 - [Java] Remove non-nullable vectors in new vector class hierarchy
* ARROW-1717 - [Java] Remove public static helper method in vector classes for JSONReader/Writer
* ARROW-1719 - [Java] Remove accessor/mutator
* ARROW-1779 - [Java] Integration test breaks without zeroing out validity vectors
* ARROW-1819 - [Java] Remove legacy vector classes
* ARROW-1867 - [Java] Add BitVector APIs from old vector class
* ARROW-1885 - [Java] Restore previous MapVector class names
## Task
* ARROW-1369 - Support boolean types in the javascript arrow reader library
* ARROW-1818 - Examine Java Dependencies
* ARROW-1827 - [Java] Add checkstyle config file and header file
## Test
* ARROW-1549 - [JS] Integrate auto-generated Arrow test files
* ARROW-1821 - Add integration test case to explicitly check for optional validity buffer
* ARROW-1839 - [C++/Python] Add Decimal Parquet Read/Write Tests
# Apache Arrow 0.7.1 (27 September 2017)
## Bug
* ARROW-1497 - [Java] JsonFileReader doesn't set value count for some vectors
* ARROW-1500 - [C++] Result of ftruncate ignored in MemoryMappedFile::Create
* ARROW-1536 - [C++] Do not transitively depend on libboost\_system
* ARROW-1542 - [C++] Windows release verification script should not modify conda environment
* ARROW-1544 - [JS] Export Vector type definitions
* ARROW-1545 - Int64Builder should not need int64() as arg
* ARROW-1550 - [Python] Fix flaky test on Windows
* ARROW-1554 - [Python] Document that pip wheels depend on MSVC14 runtime
* ARROW-1557 - [PYTHON] pyarrow.Table.from\_arrays doesn't validate names length
* ARROW-1591 - C++: Xcode 9 is not correctly detected
* ARROW-1595 - [Python] Fix package dependency issues causing build failures
* ARROW-1601 - [C++] READ\_NEXT\_BITSET reads one byte past the last byte on last iteration
* ARROW-1606 - Python: Windows wheels don't include .lib files.
* ARROW-1610 - C++/Python: Only call python-prefix if the default PYTHON\_LIBRARY is not present
* ARROW-1611 - Crash in BitmapReader when length is zero
## Improvement
* ARROW-1537 - [C++] Support building with full path install\_name on macOS
* ARROW-1546 - [GLib] Support GLib 2.40 again
* ARROW-1578 - [C++/Python] Run lint checks in Travis CI to fail for linting issues as early as possible
* ARROW-1608 - Support Release verification script on macOS
* ARROW-1612 - [GLib] add how to install for mac os to README
## New Feature
* ARROW-1548 - [GLib] Support build append in builder
* ARROW-1592 - [GLib] Add GArrowUIntArrayBuilder
## Test
* ARROW-1529 - [GLib] Fix failure on macOS on Travis CI
## Wish
* ARROW-559 - Script to easily verify release in all languages
# Apache Arrow 0.7.0 (12 September 2017)
## Bug
* ARROW-1302 - C++: ${MAKE} variable not set sometimes on older MacOS installations
* ARROW-1354 - [Python] Segfault in Table.from\_pandas with Mixed-Type Categories
* ARROW-1357 - [Python] Data corruption in reading multi-file parquet dataset
* ARROW-1363 - [C++] IPC writer sends buffer layout for dictionary rather than indices
* ARROW-1365 - [Python] Remove usage of removed jemalloc\_memory\_pool in Python API docs
* ARROW-1373 - [Java] Implement get<type>Buffer() methods at the ValueVector interface
* ARROW-1375 - [C++] Visual Studio 2017 Appveyor builds failing
* ARROW-1379 - [Java] maven dependency issues - both unused and undeclared
* ARROW-1407 - Dictionaries can only hold a maximum of 4096 indices
* ARROW-1411 - [Python] Booleans in Float Columns cause Segfault
* ARROW-1414 - [GLib] Cast after status check
* ARROW-1421 - [Python] pyarrow.serialize cannot serialize a Python dict input
* ARROW-1426 - [Website] The title element of the top page is empty
* ARROW-1429 - [Python] Error loading parquet file with \_metadata from HDFS
* ARROW-1430 - [Python] flake8 warnings are not failing CI builds
* ARROW-1434 - [C++/Python] pyarrow.Array.from\_pandas does not support datetime64[D] arrays
* ARROW-1435 - [Python] PyArrow not propagating timezone information from Parquet to Python
* ARROW-1439 - [Packaging] Automate updating RPM in RPM build
* ARROW-1443 - [Java] Bug on ArrowBuf.setBytes with unsliced ByteBuffers
* ARROW-1444 - BitVector.splitAndTransfer copies last byte incorrectly
* ARROW-1446 - Python: Writing more than 2^31 rows from pandas dataframe causes row count overflow error
* ARROW-1450 - [Python] Raise proper error if custom serialization handler fails
* ARROW-1452 - [C++] Make UNUSED macro name more unique so it does not conflict with thirdparty projects
* ARROW-1453 - [Python] Implement WriteTensor for non-contiguous tensors
* ARROW-1458 - [Python] Document that HadoopFileSystem.mkdir with create\_parents=False has no effect
* ARROW-1459 - [Python] PyArrow fails to load partitioned parquet files with non-primitive types
* ARROW-1461 - [C++] Disable builds using LLVM apt packages temporarily
* ARROW-1467 - [JAVA]: Fix reset() and allocateNew() in Nullable Value Vectors template
* ARROW-1490 - [Java] Allow Travis CI failures for JDK9 for now
* ARROW-1493 - [C++] Flush the output stream at the end of each PrettyPrint function
* ARROW-1495 - [C++] Store shared\_ptr to boxed arrays in RecordBatch
* ARROW-1507 - [C++] arrow/compute/api.h can't be used without arrow/array.h
* ARROW-1512 - [Docs] NumericArray has no member named 'raw\_data'
* ARROW-1514 - [C++] Fix a typo in document
* ARROW-1527 - Fix Travis JDK9 build
* ARROW-1531 - [C++] Return ToBytes by value from Decimal128
* ARROW-1532 - [Python] Referencing an Empty Schema causes a SegFault
* ARROW-407 - BitVector.copyFromSafe() should re-allocate if necessary instead of returning false
* ARROW-801 - [JAVA] Provide direct access to underlying buffer memory addresses in consistent way without generating garbage or a large amount of indirections
## Improvement
* ARROW-1307 - [Python] Add pandas serialization section + Feather API to Sphinx docs
* ARROW-1317 - [Python] Add function to set Hadoop CLASSPATH
* ARROW-1331 - [Java] Refactor tests
* ARROW-1339 - [C++] Use boost::filesystem for handling of platform-specific file path encodings
* ARROW-1344 - [C++] Calling BufferOutputStream::Write after calling Finish crashes
* ARROW-1348 - [C++/Python] Add release verification script for Windows
* ARROW-1351 - Automate updating CHANGELOG.md as part of release scripts
* ARROW-1352 - [Integration] Improve print formatting for producer, consumer line
* ARROW-1355 - Make arrow buildable with java9
* ARROW-1356 - [Website] Add new committers
* ARROW-1358 - Update source release scripts to account for new SHA checksum policy
* ARROW-1359 - [Python] Add Parquet writer option to normalize field names for use in Spark
* ARROW-1366 - [Python] Add instructions for starting the Plasma store when installing pyarrow from wheels
* ARROW-1372 - [Plasma] Support for storing data in huge pages
* ARROW-1376 - [C++] RecordBatchStreamReader::Open API is inconsistent with writer
* ARROW-1381 - [Python] Improve performance of SerializedPyObject.to\_buffer
* ARROW-1383 - [C++] Support std::vector<bool> in builder vector appends
* ARROW-1384 - [C++] Add convenience function for serializing a record batch to an IPC message
* ARROW-1386 - [C++] Unpin CMake version in MSVC build toolchain
* ARROW-1395 - [C++] Remove APIs deprecated as of 0.5.0 and later versions
* ARROW-1397 - [Packaging] Use Docker instead of Vagrant
* ARROW-1401 - [C++] Add extra debugging context to failures in RETURN\_NOT\_OK in debug builds
* ARROW-1402 - [C++] Possibly deprecate public APIs that use MutableBuffer
* ARROW-1404 - [Packaging] Build .deb and .rpm on Travis CI
* ARROW-1405 - [Python] Add logging option for verbose memory allocations
* ARROW-1406 - [Python] Harden user API for generating serialized schema and record batch messages as memoryview-compatible objects
* ARROW-1408 - [C++] Refactor and make IPC read / write APIs more consistent, add appropriate deprecations
* ARROW-1410 - Plasma object store occasionally pauses for a long time
* ARROW-1412 - [Plasma] Add higher level API for putting and getting Python objects
* ARROW-1413 - [C++] Add include-what-you-use configuration
* ARROW-1416 - [Format] Clarify example array in memory layout documentation
* ARROW-1418 - [Python] Introduce SerializationContext to register custom serialization callbacks
* ARROW-1419 - [GLib] Suppress sign-conversion warning on Clang
* ARROW-1427 - [GLib] Add a link to readme of Arrow GLib
* ARROW-1428 - [C++] Append steps to clone source code to README.mb
* ARROW-1432 - [C++] Build bundled jemalloc functions with private prefix
* ARROW-1433 - [C++] Simplify implementation of Array::Slice
* ARROW-1438 - [Plasma] Pull SerializationContext through PlasmaClient put and get
* ARROW-1441 - [Site] Add Ruby to Flexible section
* ARROW-1442 - [Website] Add pointer to nightly conda packages on /install
* ARROW-1447 - [C++] Round of include-what-you-use include cleanups
* ARROW-1448 - [Packaging] Support uploading built .deb and .rpm to Bintray
* ARROW-1449 - Implement Decimal using only Int128
* ARROW-1451 - [C++] Create arrow/io/api.h
* ARROW-1460 - [C++] Upgrade clang-format used to LLVM 4.0
* ARROW-1466 - [C++] Support DecimalArray in arrow::PrettyPrint
* ARROW-1468 - [C++] Append to PrimitiveBuilder from std::vector<CTYPE>
* ARROW-1480 - [Python] Improve performance of serializing sets
* ARROW-1494 - [C++] Document that shared\_ptr returned by RecordBatch::column needs to be retained
* ARROW-1499 - [Python] Consider adding option to parquet.write\_table that sets options for maximum Spark compatibility
* ARROW-1505 - [GLib] Simplify arguments check
* ARROW-1506 - [C++] Support pkg-config for compute modules
* ARROW-1508 - C++: Add support for FixedSizeBinaryType in DictionaryBuilder
* ARROW-1511 - [C++] Deprecate arrow::MakePrimitiveArray
* ARROW-1513 - C++: Add cast from Dictionary to plain arrays
* ARROW-1515 - [GLib] Detect version directly
* ARROW-1516 - [GLib] Update document
* ARROW-1517 - Remove unnecessary temporary in DecimalUtil::ToString function
* ARROW-1519 - [C++] Move DecimalUtil functions to methods on the Int128 class
* ARROW-1528 - [GLib] Resolve include dependency
* ARROW-1530 - [C++] Install arrow/util/parallel.h
* ARROW-594 - [Python] Provide interface to write pyarrow.Table to a stream
* ARROW-786 - [Format] In-memory format for 128-bit Decimals, handling of sign bit
* ARROW-837 - [Python] Expose buffer allocation, FixedSizeBufferWriter
* ARROW-941 - [Docs] Improve "cold start" integration testing instructions
## New Feature
* ARROW-1156 - [Python] pyarrow.Array.from\_pandas should take a type parameter
* ARROW-1238 - [Java] Add JSON read/write support for decimals for integration tests
* ARROW-1364 - [C++] IPC reader and writer specialized for GPU device memory
* ARROW-1377 - [Python] Add function to assist with benchmarking Parquet scan performance
* ARROW-1387 - [C++] Set up GPU leaf library build toolchain
* ARROW-1392 - [C++] Implement reader and writer IO interfaces for GPU buffers
* ARROW-1396 - [C++] Add PrettyPrint function for Schemas, which also outputs any dictionaries
* ARROW-1399 - [C++] Add CUDA build version in a public header to help prevent ABI conflicts
* ARROW-1400 - [Python] Ability to create partitions when writing to Parquet
* ARROW-1415 - [GLib] Support date32 and date64
* ARROW-1417 - [Python] Allow more generic filesystem objects to be passed to ParquetDataset
* ARROW-1462 - [GLib] Support time array
* ARROW-1479 - [JS] Expand JavaScript implementation
* ARROW-1481 - [C++] Expose type casts as generic callable object that can write into pre-allocated memory
* ARROW-1504 - [GLib] Support timestamp
* ARROW-1510 - [C++] Support cast
* ARROW-229 - [C++] Implement safe casts for primitive types
* ARROW-592 - [C++] Provide .deb and .rpm packages
* ARROW-695 - Integration tests for Decimal types
* ARROW-696 - [C++] Add JSON read/write support for decimals for integration tests
* ARROW-759 - [Python] Implement a transient list serialization function that can handle a mix of scalars, lists, ndarrays, dicts
* ARROW-989 - [Python] Write pyarrow.Table to FileWriter or StreamWriter
## Test
* ARROW-1390 - [Python] Extend tests for python serialization
# Apache Arrow 0.6.0 (14 August 2017)
## Bug
* ARROW-1192 - [JAVA] Improve splitAndTransfer performance for List and Union vectors
* ARROW-1195 - [C++] CpuInfo doesn't get cache size on Windows
* ARROW-1204 - [C++] lz4 ExternalProject fails in Visual Studio 2015
* ARROW-1225 - [Python] pyarrow.array does not attempt to convert bytes to UTF8 when passed a StringType
* ARROW-1237 - [JAVA] Expose the ability to set lastSet
* ARROW-1239 - issue with current version of git-commit-id-plugin
* ARROW-1240 - security: upgrade logback to address CVE-2017-5929
* ARROW-1242 - [Java] security - upgrade Jackson to mitigate 3 CVE vulnerabilities
* ARROW-1245 - [Integration] Java Integration Tests Disabled
* ARROW-1248 - [Python] C linkage warnings in Clang with public Cython API
* ARROW-1249 - [JAVA] Expose the fillEmpties function from Nullable<Varlength>Vector.mutator
* ARROW-1263 - [C++] CpuInfo should be able to get CPU features on Windows
* ARROW-1265 - [Plasma] Plasma store memory leak warnings in Python test suite
* ARROW-1267 - [Java] Handle zero length case in BitVector.splitAndTransfer
* ARROW-1269 - [Packaging] Add Windows wheel build scripts from ARROW-1068 to arrow-dist
* ARROW-1275 - [C++] Default static library prefix for Snappy should be "\_static"
* ARROW-1276 - Cannot serialize empty DataFrame to parquet
* ARROW-1283 - [Java] VectorSchemaRoot should be able to be closed() more than once
* ARROW-1285 - PYTHON: NotImplemented exception creates empty parquet file
* ARROW-1287 - [Python] Emulate "whence" argument of seek in NativeFile
* ARROW-1290 - [C++] Use array capacity doubling in arrow::BufferBuilder
* ARROW-1291 - [Python] `pa.RecordBatch.from_pandas` doesn't accept DataFrame with numeric column names
* ARROW-1294 - [C++] New Appveyor build failures
* ARROW-1296 - [Java] templates/FixValueVectors reset() method doesn't set allocationSizeInBytes correctly
* ARROW-1300 - [JAVA] Fix ListVector Tests
* ARROW-1306 - [Python] Encoding? issue with error reporting for `parquet.read_table`
* ARROW-1308 - [C++] ld tries to link `arrow_static` even when -DARROW_BUILD_STATIC=off
* ARROW-1309 - [Python] Error inferring List type in `Array.from_pandas` when inner values are all None
* ARROW-1310 - [JAVA] Revert ARROW-886
* ARROW-1312 - [C++] Set default value to `ARROW_JEMALLOC` to OFF until ARROW-1282 is resolved
* ARROW-1326 - [Python] Fix Sphinx build in Travis CI
* ARROW-1327 - [Python] Failing to release GIL in `MemoryMappedFile._open` causes deadlock
* ARROW-1328 - [Python] `pyarrow.Table.from_pandas` option `timestamps_to_ms` changes column values
* ARROW-1330 - [Plasma] Turn on plasma tests on manylinux1
* ARROW-1335 - [C++] `PrimitiveArray::raw_values` has inconsistent semantics re: offsets compared with subclasses
* ARROW-1338 - [Python] Investigate non-deterministic core dump on Python 2.7, Travis CI builds
* ARROW-1340 - [Java] NullableMapVector field doesn't maintain metadata
* ARROW-1342 - [Python] Support strided array of lists
* ARROW-1343 - [Format/Java/C++] Ensuring encapsulated stream / IPC message sizes are always a multiple of 8
* ARROW-1350 - [C++] Include Plasma source tree in source distribution
* ARROW-187 - [C++] Decide on how pedantic we want to be about exceptions
* ARROW-276 - [JAVA] Nullable Value Vectors should extend BaseValueVector instead of BaseDataValueVector
* ARROW-573 - [Python/C++] Support ordered dictionaries data, pandas Categorical
* ARROW-884 - [C++] Exclude internal classes from documentation
* ARROW-932 - [Python] Fix compiler warnings on MSVC
* ARROW-968 - [Python] RecordBatch [i:j] syntax is incomplete
## Improvement
* ARROW-1093 - [Python] Fail Python builds if flake8 yields warnings
* ARROW-1121 - [C++] Improve error message when opening OS file fails
* ARROW-1140 - [C++] Allow optional build of plasma
* ARROW-1149 - [Plasma] Create Cython client library for Plasma
* ARROW-1173 - [Plasma] Blog post for Plasma
* ARROW-1211 - [C++] Consider making `default_memory_pool()` the default for builder classes
* ARROW-1213 - [Python] Enable s3fs to be used with ParquetDataset and reader/writer functions
* ARROW-1219 - [C++] Use more vanilla Google C++ formatting
* ARROW-1224 - [Format] Clarify language around buffer padding and alignment in IPC
* ARROW-1230 - [Plasma] Install libraries and headers
* ARROW-1243 - [Java] security: upgrade all libraries to latest stable versions
* ARROW-1251 - [Python/C++] Revise build documentation to account for latest build toolchain
* ARROW-1253 - [C++] Use pre-built toolchain libraries where prudent to speed up CI builds
* ARROW-1255 - [Plasma] Check plasma flatbuffer messages with the flatbuffer verifier
* ARROW-1257 - [Plasma] Plasma documentation
* ARROW-1258 - [C++] Suppress dlmalloc warnings on Clang
* ARROW-1259 - [Plasma] Speed up Plasma tests
* ARROW-1260 - [Plasma] Use factory method to create Python PlasmaClient
* ARROW-1264 - [Plasma] Don't exit the Python interpreter if the plasma client can't connect to the store
* ARROW-1274 - [C++] `add_compiler_export_flags()` throws warning with CMake >= 3.3
* ARROW-1288 - Clean up many ASF license headers
* ARROW-1289 - [Python] Add `PYARROW_BUILD_PLASMA` option like Parquet
* ARROW-1301 - [C++/Python] Add remaining supported libhdfs UNIX-like filesystem APIs
* ARROW-1303 - [C++] Support downloading Boost
* ARROW-1315 - [GLib] Status check of arrow::ArrayBuilder::Finish() is missing
* ARROW-1323 - [GLib] Add `garrow_boolean_array_get_values()`
* ARROW-1333 - [Plasma] Sorting example for DataFrames in plasma
* ARROW-1334 - [C++] Instantiate arrow::Table from vector of Array objects (instead of Columns)
## New Feature
* ARROW-1076 - [Python] Handle nanosecond timestamps more gracefully when writing to Parquet format
* ARROW-1104 - Integrate in-memory object store from Ray
* ARROW-1246 - [Format] Add Map logical type to metadata
* ARROW-1268 - [Website] Blog post on Arrow integration with Spark
* ARROW-1281 - [C++/Python] Add Docker setup for running HDFS tests and other tests we may not run in Travis CI
* ARROW-1305 - [GLib] Add GArrowIntArrayBuilder
* ARROW-1336 - [C++] Add arrow::schema factory function
* ARROW-439 - [Python] Add option in `to_pandas` conversions to yield Categorical from String/Binary arrays
* ARROW-622 - [Python] Investigate alternatives to `timestamps_to_ms` argument in pandas conversion
## Task
* ARROW-1270 - [Packaging] Add Python wheel build scripts for macOS to arrow-dist
* ARROW-1272 - [Python] Add script to arrow-dist to generate and upload manylinux1 Python wheels
* ARROW-1273 - [Python] Add convenience functions for reading only Parquet metadata or effective Arrow schema from a particular Parquet file
* ARROW-1297 - 0.6.0 Release
* ARROW-1304 - [Java] Fix checkstyle checks warning
## Test
* ARROW-1241 - [C++] Visual Studio 2017 Appveyor build job
# Apache Arrow 0.5.0 (23 July 2017)
## Bug
* ARROW-1074 - `from_pandas` doesn't convert ndarray to list
* ARROW-1079 - [Python] Empty "private" directories should be ignored by Parquet interface
* ARROW-1081 - C++: arrow::test::TestBase::MakePrimitive doesn't fill `null_bitmap`
* ARROW-1096 - [C++] Memory mapping file over 4GB fails on Windows
* ARROW-1097 - Reading tensor needs file to be opened in writeable mode
* ARROW-1098 - Document Error?
* ARROW-1101 - UnionListWriter is not implementing all methods on interface ScalarWriter
* ARROW-1103 - [Python] Utilize pandas metadata from common `_metadata` Parquet file if it exists
* ARROW-1107 - [JAVA] NullableMapVector getField() should return nullable type
* ARROW-1108 - Check if ArrowBuf is empty buffer in getActualConsumedMemory() and getPossibleConsumedMemory()
* ARROW-1109 - [JAVA] transferOwnership fails when readerIndex is not 0
* ARROW-1110 - [JAVA] make union vector naming consistent
* ARROW-1111 - [JAVA] Make aligning buffers optional, and allow -1 for unknown null count
* ARROW-1112 - [JAVA] Set lastSet for VarLength and List vectors when loading
* ARROW-1113 - [C++] gflags EP build gets triggered (as a no-op) on subsequent calls to make or ninja build
* ARROW-1115 - [C++] Use absolute path for ccache
* ARROW-1117 - [Docs] Minor issues in GLib README
* ARROW-1124 - [Python] pyarrow needs to depend on numpy>=1.10 (not 1.9)
* ARROW-1125 - Python: `Table.from_pandas` doesn't work anymore on partial schemas
* ARROW-1128 - [Docs] command to build a wheel is not properly rendered
* ARROW-1129 - [C++] Fix Linux toolchain build regression from ARROW-742
* ARROW-1131 - Python: Parquet unit tests are always skipped
* ARROW-1132 - [Python] Unable to write pandas DataFrame w/MultiIndex containing duplicate values to parquet
* ARROW-1136 - [C++/Python] Segfault on empty stream
* ARROW-1138 - Travis: Use OpenJDK7 instead of OracleJDK7
* ARROW-1139 - [C++] dlmalloc doesn't allow arrow to be built with clang 4 or gcc 7.1.1
* ARROW-1141 - on import get libjemalloc.so.2: cannot allocate memory in static TLS block
* ARROW-1143 - C++: Fix comparison of NullArray
* ARROW-1144 - [C++] Remove unused variable
* ARROW-1150 - [C++] AdaptiveIntBuilder compiler warning on MSVC
* ARROW-1152 - [Cython] `read_tensor` should work with a readable file
* ARROW-1155 - segmentation fault when run pa.Int16Value()
* ARROW-1157 - C++/Python: Decimal templates are not correctly exported on OSX
* ARROW-1159 - [C++] Static data members cannot be accessed from inline functions in Arrow headers by thirdparty users
* ARROW-1162 - Transfer Between Empty Lists Should Not Invoke Callback
* ARROW-1166 - Errors in Struct type's example and missing reference in Layout.md
* ARROW-1167 - [Python] Create chunked BinaryArray in `Table.from_pandas` when a column's data exceeds 2GB
* ARROW-1168 - [Python] pandas metadata may contain "mixed" data types
* ARROW-1169 - C++: jemalloc externalproject doesn't build with CMake's ninja generator
* ARROW-1170 - C++: `ARROW_JEMALLOC=OFF` breaks linking on unittest
* ARROW-1174 - [GLib] Investigate root cause of ListArray glib test failure
* ARROW-1177 - [C++] Detect int32 overflow in ListBuilder::Append
* ARROW-1179 - C++: Add missing virtual destructors
* ARROW-1180 - [GLib] `garrow_tensor_get_dimension_name()` returns invalid address
* ARROW-1181 - [Python] Parquet test fail if not enabled
* ARROW-1182 - C++: Specify `BUILD_BYPRODUCTS` for zlib and zstd
* ARROW-1186 - [C++] Enable option to build arrow with minimal dependencies needed to build Parquet library
* ARROW-1188 - Segfault when trying to serialize a DataFrame with Null-only Categorical Column
* ARROW-1190 - VectorLoader corrupts vectors with duplicate names
* ARROW-1191 - [JAVA] Implement getField() method for the complex readers
* ARROW-1194 - Getting record batch size with `pa.get_record_batch_size` returns a size that is too small for pandas DataFrame.
* ARROW-1197 - [GLib] `record_batch.hpp` Inclusion is missing
* ARROW-1200 - [C++] DictionaryBuilder should use signed integers for indices
* ARROW-1201 - [Python] Incomplete Python types cause a core dump when repr-ing
* ARROW-1203 - [C++] Disallow BinaryBuilder to append byte strings larger than the maximum value of `int32_t`
* ARROW-1205 - C++: Reference to type objects in ArrayLoader may cause segmentation faults.
* ARROW-1206 - [C++] Enable MSVC builds to work with some compression library support disabled
* ARROW-1208 - [C++] Toolchain build with ZSTD library from conda-forge failure
* ARROW-1215 - [Python] Class methods in API reference
* ARROW-1216 - Numpy arrays cannot be created from Arrow Buffers on Python 2
* ARROW-1218 - Arrow doesn't compile if all compression libraries are deactivated
* ARROW-1222 - [Python] pyarrow.array returns NullArray for array of unsupported Python objects
* ARROW-1223 - [GLib] Fix function name that returns wrapped object
* ARROW-1235 - [C++] macOS linker failure with operator<< and std::ostream
* ARROW-1236 - Library paths in exported pkg-config file are incorrect
* ARROW-601 - Some logical types not supported when loading Parquet
* ARROW-784 - Cleaning up thirdparty toolchain support in Arrow on Windows
* ARROW-992 - [Python] In place development builds do not have a `__version__`
## Improvement
* ARROW-1041 - [Python] Support `read_pandas` on a directory of Parquet files
* ARROW-1100 - [Python] Add "mode" property to NativeFile instances
* ARROW-1102 - Make MessageSerializer.serializeMessage() public
* ARROW-1120 - [Python] Write support for int96
* ARROW-1137 - Python: Ensure Pandas roundtrip of all-None column
* ARROW-1148 - [C++] Raise minimum CMake version to 3.2
* ARROW-1151 - [C++] Add gcc branch prediction to status check macro
* ARROW-1160 - C++: Implement DictionaryBuilder
* ARROW-1165 - [C++] Refactor PythonDecimalToArrowDecimal to not use templates
* ARROW-1185 - [C++] Clean up arrow::Status implementation, add `warn_unused_result` attribute for clang
* ARROW-1187 - Serialize a DataFrame with None column
* ARROW-1193 - [C++] Support pkg-config for `arrow_python.so`
* ARROW-1196 - [C++] Appveyor separate jobs for Debug/Release builds from sources; Build with conda toolchain; Build with NMake Makefiles Generator
* ARROW-1199 - [C++] Introduce mutable POD struct for generic array data
* ARROW-1202 - Remove semicolons from status macros
* ARROW-1217 - [GLib] Add GInputStream based arrow::io::RandomAccessFile
* ARROW-1220 - [C++] Standardize usage of `*_HOME` cmake script variables for 3rd party libs
* ARROW-1221 - [C++] Pin clang-format version
* ARROW-1229 - [GLib] Follow Reader API change (get -> read)
* ARROW-742 - Handling exceptions during execution of `std::wstring_convert`
* ARROW-834 - [Python] Support creating Arrow arrays from Python iterables
* ARROW-915 - Struct Array reads limited support
* ARROW-935 - [Java] Build Javadoc in Travis CI
* ARROW-960 - [Python] Add source build guide for macOS + Homebrew
* ARROW-962 - [Python] Add schema attribute to FileReader
* ARROW-966 - [Python] `pyarrow.list_` should also accept Field instance
* ARROW-978 - [Python] Use sphinx-bootstrap-theme for Sphinx documentation
## New Feature
* ARROW-1048 - Allow user `LD_LIBRARY_PATH` to be used with source release script
* ARROW-1073 - C++: Adaptive integer builder
* ARROW-1095 - [Website] Add Arrow icon asset
* ARROW-111 - [C++] Add static analyzer to tool chain to verify checking of Status returns
* ARROW-1122 - [Website] Guest blog post on Arrow + ODBC from turbodbc
* ARROW-1123 - C++: Make jemalloc the default allocator
* ARROW-1135 - Upgrade Travis CI clang builds to use LLVM 4.0
* ARROW-1142 - [C++] Move over compression library toolchain from parquet-cpp
* ARROW-1145 - [GLib] Add `get_values()`
* ARROW-1154 - [C++] Migrate more computational utility code from parquet-cpp
* ARROW-1183 - [Python] Implement time type conversions in `to_pandas`
* ARROW-1198 - Python: Add public C++ API to unwrap PyArrow object
* ARROW-1212 - [GLib] Add `garrow_binary_array_get_offsets_buffer()`
* ARROW-1214 - [Python] Add classes / functions to enable stream message components to be handled outside of the stream reader class
* ARROW-1227 - [GLib] Support GOutputStream
* ARROW-460 - [C++] Implement JSON round trip for DictionaryArray
* ARROW-462 - [C++] Implement in-memory conversions between non-nested primitive types and DictionaryArray equivalent
* ARROW-575 - Python: Auto-detect nested lists and nested numpy arrays in Pandas
* ARROW-597 - [Python] Add convenience function to yield DataFrame from any object that a StreamReader or FileReader can read from
* ARROW-599 - [C++] Add LZ4 codec to 3rd-party toolchain
* ARROW-600 - [C++] Add ZSTD codec to 3rd-party toolchain
* ARROW-692 - Java<->C++ Integration tests for dictionary-encoded vectors
* ARROW-693 - [Java] Add JSON support for dictionary vectors
## Task
* ARROW-1052 - Arrow 0.5.0 release
## Test
* ARROW-1228 - [GLib] Test file name should be the same name as target class
* ARROW-1233 - [C++] Validate cmake script resolving of 3rd party linked libs from correct location in toolchain build
# Apache Arrow 0.4.1 (9 June 2017)
## Bug
* ARROW-1039 - Python: `pyarrow.Filesystem.read_parquet` causing error if nthreads>1
* ARROW-1050 - [C++] Export arrow::ValidateArray
* ARROW-1051 - [Python] If pyarrow.parquet fails to import due to a shared library ABI conflict, the `test_parquet.py` tests silently do not run
* ARROW-1056 - [Python] Parquet+HDFS test failure due to writing pandas index
* ARROW-1057 - Fix cmake warning and msvc debug asserts
* ARROW-1062 - [GLib] Examples use old API
* ARROW-1066 - remove warning on feather for pandas >= 0.20.1
* ARROW-1070 - [C++] Feather files for date/time types should be written with the physical types
* ARROW-1075 - [GLib] Build error on macOS
* ARROW-1085 - [java] Follow up on template cleanup. Missing method for IntervalYear
* ARROW-1086 - [Python] pyarrow 0.4.0 on pypi is missing pxd files
* ARROW-1088 - [Python] `test_unicode_filename` test fails when unicode filenames aren't supported by system
* ARROW-1090 - [Python] `build_ext` usability
* ARROW-1091 - Decimal scale and precision are flipped
* ARROW-1092 - More Decimal and scale flipped follow-up
* ARROW-1094 - [C++] Incomplete buffer reads in arrow::io::ReadableFile should exactly truncate returned buffer
* ARROW-424 - [C++] Threadsafety in arrow/io/hdfs.h
## Improvement
* ARROW-1020 - [Format] Add additional language to Schema.fbs to clarify naive vs. localized Timestamp values
* ARROW-1034 - [Python] Enable creation of binary wheels on Windows / MSVC
* ARROW-1049 - [java] vector template cleanup
* ARROW-1063 - [Website] Blog post and website updates for 0.4.0 release
* ARROW-1078 - [Python] Account for PARQUET-967
* ARROW-1080 - C++: Add tutorial about converting to/from row-wise representation
* ARROW-897 - [GLib] Build arrow-glib as a separate build in the Travis CI build matrix
* ARROW-986 - [Format] Update IPC.md to account for dictionary batches
* ARROW-990 - [JS] Add tslint support for linting TypeScript
## Task
* ARROW-1068 - [Python] Create external repo with appveyor.yml configured for building Python wheel installers
* ARROW-1069 - Add instructions for publishing maven artifacts
* ARROW-1084 - Implementations of BufferAllocator should handle Netty's OutOfDirectMemoryError
## Test
* ARROW-1060 - [Python] Add unit test for ARROW-1053
* ARROW-1082 - [GLib] Add CI on macOS
# Apache Arrow 0.4.0 (22 May 2017)
## Bug
* ARROW-1003 - [C++] Hdfs and java dlls fail to load when built for Windows with MSVC
* ARROW-1004 - ArrowInvalid: Invalid: Python object of type float is not None and is not a string, bool, or date object
* ARROW-1017 - Python: `Table.to_pandas` leaks memory
* ARROW-1023 - Python: Fix bundling of arrow-cpp for macOS
* ARROW-1033 - [Python] pytest discovers `scripts/test_leak.py`
* ARROW-1046 - [Python] Conform DataFrame metadata to pandas spec
* ARROW-1053 - [Python] Memory leak with RecordBatchFileReader
* ARROW-1054 - [Python] Test suite fails on pandas 0.19.2
* ARROW-1061 - [C++] Harden decimal parsing against invalid strings
* ARROW-1064 - ModuleNotFoundError: No module named 'pyarrow._parquet'
* ARROW-813 - [Python] setup.py sdist must also bundle dependent cmake modules
* ARROW-824 - Date and Time Vectors should reflect timezone-less semantics
* ARROW-856 - CmakeError by Unknown compiler.
* ARROW-881 - [Python] Reconstruct Pandas DataFrame indexes using `custom_metadata`
* ARROW-909 - libjemalloc.so.2: cannot open shared object file:
* ARROW-939 - Fix division by zero for zero-dimensional Tensors
* ARROW-940 - [JS] Generate multiple sets of artifacts
* ARROW-944 - Python: Compat broken for pandas==0.18.1
* ARROW-948 - [GLib] Update C++ header file list
* ARROW-952 - Compilation error on macOS with clang-802.0.42
* ARROW-958 - [Python] Conda build guide still needs `ARROW_HOME`, `PARQUET_HOME`
* ARROW-979 - [Python] Fix `setuptools_scm` version when release tag is not in the master timeline
* ARROW-991 - [Python] `PyArray_SimpleNew` should not be used with `NPY_DATETIME`
* ARROW-995 - [Website] 0.3 release announce has a typo in reference
* ARROW-998 - [Doc] File format documents incorrect schema location
## Improvement
* ARROW-1000 - [GLib] Move install document to Website
* ARROW-1001 - [GLib] Unify writer files
* ARROW-1002 - [C++] It is not necessary to add padding after the magic header in the FileWriter implementation
* ARROW-1010 - [Website] Only show English posts in /blog/
* ARROW-1016 - Python: Include C++ headers (optionally) in wheels
* ARROW-1022 - [Python] Add nthreads option to Feather read method
* ARROW-1024 - Python: Update build time numpy version to 1.10.1
* ARROW-1025 - [Website] Improve changelog on website
* ARROW-1027 - [Python] Allow negative indexing in fields/columns on pyarrow Table and Schema objects
* ARROW-1028 - [Python] Documentation updates after ARROW-1008
* ARROW-1029 - [Python] Fix --with-parquet build on Windows, add unit tests to Appveyor
* ARROW-1030 - Python: Account for library versioning in parquet-cpp
* ARROW-1037 - [GLib] Follow reader name change
* ARROW-1038 - [GLib] Follow writer name change
* ARROW-1040 - [GLib] Follow tensor IO
* ARROW-182 - [C++] Remove Array::Validate virtual function and make a separate method
* ARROW-376 - Python: Convert non-range Pandas indices (optionally) to Arrow
* ARROW-532 - [Python] Expand pyarrow.parquet documentation for 0.3 release
* ARROW-579 - Python: Provide redistributable pyarrow wheels on OSX
* ARROW-891 - [Python] Expand Windows build instructions to not require looking at separate C++ docs
* ARROW-899 - [Docs] Add CHANGELOG for 0.3.0
* ARROW-901 - [Python] Write FixedSizeBinary to Parquet
* ARROW-913 - [Python] Only link jemalloc to the Cython extension where it's needed
* ARROW-923 - [Docs] Generate Changelog for website with JIRA links
* ARROW-929 - Move KEYS file to SVN, remove from git
* ARROW-943 - [GLib] Support running unit tests with source archive
* ARROW-945 - [GLib] Add a Lua example to show Torch integration
* ARROW-946 - [GLib] Use "new" instead of "open" for constructor name
* ARROW-947 - [Python] Improve execution time of manylinux1 build
* ARROW-953 - Use cmake / curl from conda-forge in CI builds
* ARROW-954 - Make it possible to compile Arrow with header-only boost
* ARROW-961 - [Python] Rename InMemoryOutputStream to BufferOutputStream
* ARROW-970 - [Python] Accidentally calling pyarrow.Table() should not segfault process
* ARROW-982 - [Website] Improve website front copy to highlight serialization efficiency benefits
* ARROW-984 - [GLib] Add Go examples
* ARROW-985 - [GLib] Update package information
* ARROW-988 - [JS] Add entry to Travis CI matrix
* ARROW-993 - [GLib] Add missing error checks in Go examples
* ARROW-996 - [Website] Add 0.3 release announce in Japanese
## New Feature
* ARROW-1008 - [C++] Define abstract interface for stream iteration
* ARROW-1011 - [Format] Clarify requirements around buffer padding in validity bitmaps
* ARROW-1014 - 0.4.0 release
* ARROW-1031 - [GLib] Support pretty print
* ARROW-1044 - [GLib] Support Feather
* ARROW-29 - C++: Add re2 as optional 3rd-party toolchain dependency
* ARROW-446 - [Python] Document NativeFile interfaces, HDFS client in Sphinx
* ARROW-482 - [Java] Provide API access to `custom_metadata` Field attribute in IPC setting
* ARROW-596 - [Python] Add convenience function to convert pandas.DataFrame to pyarrow.Buffer containing a file or stream representation
* ARROW-714 - [C++] Add `import_pyarrow` C API in the style of NumPy for thirdparty C++ users
* ARROW-819 - [Python] Define public Cython API
* ARROW-872 - [JS] Read streaming format
* ARROW-873 - [JS] Implement fixed width list type
* ARROW-874 - [JS] Read dictionary-encoded vectors
* ARROW-963 - [GLib] Add equal
* ARROW-967 - [GLib] Support initializing array with buffer
* ARROW-977 - [java] Add Timezone aware timestamp vectors
## Task
* ARROW-1015 - [Java] Implement schema-level metadata
* ARROW-629 - [JS] Add unit test suite
* ARROW-956 - remove pandas pre-0.20.0 compat
* ARROW-957 - [Doc] Add HDFS and Windows documents to doxygen output
* ARROW-997 - [Java] Implement transfer in FixedSizeListVector
# Apache Arrow 0.3.0 (5 May 2017)
## Bug
* ARROW-109 - [C++] Investigate recursive data types limit in flatbuffers
* ARROW-208 - Add checkstyle policy to java project
* ARROW-347 - Add method to pass CallBack when creating a transfer pair
* ARROW-413 - DATE type is not specified clearly
* ARROW-431 - [Python] Review GIL release and acquisition in `to_pandas` conversion
* ARROW-443 - [Python] Support for converting from strided pandas data in `Table.from_pandas`
* ARROW-451 - [C++] Override DataType::Equals for other types with additional metadata
* ARROW-454 - pojo.Field doesn't implement hashCode()
* ARROW-526 - [Format] Update IPC.md to account for File format changes and Streaming format
* ARROW-565 - [C++] Examine "Field::dictionary" member
* ARROW-570 - Determine Java tools JAR location from project metadata
* ARROW-584 - [C++] Fix compiler warnings exposed with -Wconversion
* ARROW-588 - [C++] Fix compiler warnings on 32-bit platforms
* ARROW-595 - [Python] StreamReader.schema returns None
* ARROW-604 - Python: boxed Field instances are missing the reference to DataType
* ARROW-613 - [JS] Implement random-access file format
* ARROW-617 - Time type is not specified clearly
* ARROW-619 - Python: Fix typos in setup.py args and `LD_LIBRARY_PATH`
* ARROW-623 - segfault with `__repr__` of empty Field
* ARROW-624 - [C++] Restore MakePrimitiveArray function
* ARROW-627 - [C++] Compatibility macros for exported extern template class declarations
* ARROW-628 - [Python] Install nomkl metapackage when building parquet-cpp for faster Travis builds
* ARROW-630 - [C++] IPC unloading for BooleanArray does not account for offset
* ARROW-636 - [C++] Add Boost / other system requirements to C++ README
* ARROW-639 - [C++] Invalid offset in slices
* ARROW-642 - [Java] Remove temporary file in java/tools
* ARROW-644 - Python: Cython should be a setup-only requirement
* ARROW-652 - Remove trailing f in merge script output
* ARROW-654 - [C++] Support timezone metadata in file/stream formats
* ARROW-668 - [Python] Convert nanosecond timestamps to pandas.Timestamp when converting from TimestampValue
* ARROW-671 - [GLib] License file isn't installed
* ARROW-673 - [Java] Support additional Time metadata
* ARROW-677 - [java] Fix checkstyle jcl-over-slf4j conflict issue
* ARROW-678 - [GLib] Fix dependencies
* ARROW-680 - [C++] Multiarch support impacts user-supplied install prefix
* ARROW-682 - Add self-validation checks in integration tests
* ARROW-683 - [C++] Support date32 (DateUnit::DAY) in IPC metadata, rename date to date64
* ARROW-686 - [C++] Account for time metadata changes, add time32 and time64 types
* ARROW-689 - [GLib] Install header files and documents to wrong directories
* ARROW-691 - [Java] Encode dictionary Int type in message format
* ARROW-697 - [Java] Raise appropriate exceptions when encountering large (> `INT32_MAX`) record batches
* ARROW-699 - [C++] Arrow dynamic libraries are missed on run of unit tests on Windows
* ARROW-702 - Fix BitVector.copyFromSafe to reAllocate instead of returning false
* ARROW-703 - Fix issue where setValueCount(0) doesn’t work in the case that we’ve shipped vectors across the wire
* ARROW-704 - Fix bad import caused by conflicting changes
* ARROW-709 - [C++] Restore type comparator for DecimalType
* ARROW-713 - [C++] Fix linking issue with ipc benchmark
* ARROW-715 - Python: Explicit pandas import makes it a hard requirement
* ARROW-716 - error building arrow/python
* ARROW-720 - [java] arrow should not have a dependency on slf4j bridges in compile
* ARROW-723 - Arrow freezes on write if `chunk_size=0`
* ARROW-726 - [C++] PyBuffer dtor may segfault if constructor passed an object not exporting buffer protocol
* ARROW-732 - Schema comparison bugs in struct and union types
* ARROW-736 - [Python] Mixed-type object DataFrame columns should not silently coerce to an Arrow type by default
* ARROW-738 - [Python] Fix manylinux1 packaging
* ARROW-739 - Parallel build fails non-deterministically.
* ARROW-740 - FileReader fails for large objects
* ARROW-747 - [C++] Fix spurious warning caused by passing dl to `add_dependencies`
* ARROW-749 - [Python] Delete incomplete binary files when writing fails
* ARROW-753 - [Python] Unit tests in arrow/python fail to link on some OS X platforms
* ARROW-756 - [C++] Do not pass -fPIC when compiling with MSVC
* ARROW-757 - [C++] MSVC build fails on googletest when using NMake
* ARROW-762 - Kerberos Problem with PyArrow
* ARROW-776 - [GLib] Cast type is wrong
* ARROW-777 - [Java] Resolve getObject behavior per changes / discussion in ARROW-729
* ARROW-778 - Modify merge tool to work on Windows
* ARROW-781 - [Python/C++] Increase reference count for base object?
* ARROW-783 - Integration tests fail for length-0 record batch
* ARROW-787 - [GLib] Fix compilation errors caused by ARROW-758
* ARROW-793 - [GLib] Wrong indent
* ARROW-794 - [C++] Check whether data is contiguous in ipc::WriteTensor
* ARROW-797 - [Python] Add updated pyarrow. public API listing in Sphinx docs
* ARROW-800 - [C++] Boost headers being transitively included in pyarrow
* ARROW-805 - listing empty HDFS directory returns an error instead of returning empty list
* ARROW-809 - C++: Writing sliced record batch to IPC writes the entire array
* ARROW-812 - Pip install pyarrow on mac failed.
* ARROW-817 - [C++] Fix incorrect code comment from ARROW-722
* ARROW-821 - [Python] Extra file `_table_api.h` generated during Python build process
* ARROW-822 - [Python] StreamWriter fails to open with socket as sink
* ARROW-826 - Compilation error on Mac with `-DARROW_PYTHON=on`
* ARROW-829 - Python: Parquet: Dictionary encoding is deactivated if column-wise compression was selected
* ARROW-830 - Python: jemalloc is not anymore publicly exposed
* ARROW-839 - [C++] Portable alternative to `PyDate_to_ms` function
* ARROW-847 - C++: `BUILD_BYPRODUCTS` not specified anymore for gtest
* ARROW-852 - Python: Also set Arrow Library PATHS when detection was done through pkg-config
* ARROW-853 - [Python] It is no longer necessary to modify the RPATH of the Cython extensions on many environments
* ARROW-858 - Remove dependency on boost regex
* ARROW-866 - [Python] Error from file object destructor
* ARROW-867 - [Python] Miscellaneous pyarrow MSVC fixes
* ARROW-875 - Nullable variable length vector fillEmpties() fills an extra value
* ARROW-879 - compat with pandas 0.20.0
* ARROW-882 - [C++] On Windows statically built lib file overwrites lib file of shared build
* ARROW-886 - VariableLengthVectors don't reAlloc offsets
* ARROW-887 - [format] For backward compatibility, new unit fields must have default values matching previous implied unit
* ARROW-888 - BitVector transfer() does not transfer ownership
* ARROW-895 - Nullable variable length vector lastSet not set correctly
* ARROW-900 - [Python] UnboundLocalError in ParquetDatasetPiece
* ARROW-903 - [GLib] Remove a needless "."
* ARROW-914 - [C++/Python] Fix Decimal ToBytes
* ARROW-922 - Allow Flatbuffers and RapidJSON to be used locally on Windows
* ARROW-928 - Update CMAKE script to detect unsupported msvc compilers versions
* ARROW-933 - [Python] `arrow_python` bindings have debug print statement
* ARROW-934 - [GLib] Glib sources missing from result of 02-source.sh
* ARROW-936 - Fix release README
* ARROW-938 - Fix Apache Rat errors from source release build
## Improvement
* ARROW-316 - Finalize Date type
* ARROW-542 - [Java] Implement dictionaries in stream/file encoding
* ARROW-563 - C++: Support non-standard gcc version strings
* ARROW-566 - Python: Deterministic position of libarrow in manylinux1 wheels
* ARROW-569 - [C++] Set version for .pc
* ARROW-577 - [C++] Refactor StreamWriter and FileWriter to have private implementations
* ARROW-580 - C++: Also provide `jemalloc_X` targets if only a static or shared version is found
* ARROW-582 - [Java] Add Date/Time Support to JSON File
* ARROW-589 - C++: Use system provided shared jemalloc if static is unavailable
* ARROW-593 - [C++] Rename ReadableFileInterface to RandomAccessFile
* ARROW-612 - [Java] Field toString should show nullable flag status
* ARROW-615 - Move ByteArrayReadableSeekableByteChannel to vector.util package
* ARROW-631 - [GLib] Import C API (C++ API wrapper) based on GLib from https://github.com/kou/arrow-glib
* ARROW-646 - Cache miniconda packages
* ARROW-647 - [C++] Don't require Boost static libraries to support CentOS 7
* ARROW-648 - [C++] Support multiarch on Debian
* ARROW-650 - [GLib] Follow ReadableFileInterface -> RandomAccessFile change
* ARROW-651 - [C++] Set shared library version for .deb packages
* ARROW-655 - Implement DecimalArray
* ARROW-662 - [Format] Factor Flatbuffer schema metadata into a Schema.fbs
* ARROW-664 - Make C++ Arrow serialization deterministic
* ARROW-674 - [Java] Support additional Timestamp timezone metadata
* ARROW-675 - [GLib] Update package metadata
* ARROW-676 - [java] move from MinorType to FieldType in ValueVectors to carry all the relevant type bits
* ARROW-679 - [Format] Change RecordBatch and Field length members from int to long
* ARROW-681 - [C++] Build Arrow on Windows with dynamically linked boost
* ARROW-684 - Python: More informative message when parquet-cpp but not parquet-arrow is available
* ARROW-688 - [C++] Use `CMAKE_INSTALL_INCLUDEDIR` for consistency
* ARROW-690 - Only send JIRA updates to issues@arrow.apache.org
* ARROW-700 - Add headroom interface for allocator.
* ARROW-706 - [GLib] Add package install document
* ARROW-707 - Python: All none-Pandas column should be converted to NullArray
* ARROW-708 - [C++] Some IPC code simplification, perf analysis
* ARROW-712 - [C++] Implement Array::Accept as inline visitor
* ARROW-719 - [GLib] Support prepared source archive release
* ARROW-724 - Add "How to Contribute" section to README
* ARROW-725 - [Format] Constant length list type
* ARROW-727 - [Python] Write memoryview-compatible objects in NativeFile.write with zero copy
* ARROW-728 - [C++/Python] Add arrow::Table function for removing a column
* ARROW-731 - [C++] Add shared library related versions to .pc
* ARROW-741 - [Python] Add Python 3.6 to Travis CI
* ARROW-743 - [C++] Consolidate unit tests for code in array.h
* ARROW-744 - [GLib] Re-add an assertion to `garrow_table_new()` test
* ARROW-745 - [C++] Allow use of system cpplint
* ARROW-746 - [GLib] Add `garrow_array_get_data_type()`
* ARROW-751 - [Python] Rename all Cython extensions to "private" status with leading underscore
* ARROW-752 - [Python] Construct pyarrow.DictionaryArray from boxed pyarrow array objects
* ARROW-754 - [GLib] Add `garrow_array_is_null()`
* ARROW-755 - [GLib] Add `garrow_array_get_value_type()`
* ARROW-758 - [C++] Fix compiler warnings on MSVC x64
* ARROW-761 - [Python] Add function to compute the total size of tensor payloads, including metadata and padding
* ARROW-763 - C++: Use `python-config` to find libpythonX.X.dylib
* ARROW-765 - [Python] Make generic ArrowException subclass value error
* ARROW-769 - [GLib] Support building without installed Arrow C++
* ARROW-770 - [C++] Move clang-tidy/format config files back to C++ source tree
* ARROW-774 - [GLib] Remove needless LICENSE.txt copy
* ARROW-775 - [Java] add simple constructors to value vectors
* ARROW-779 - [C++/Python] Raise exception if old metadata encountered
* ARROW-782 - [C++] Change struct to class for objects that meet the criteria in the Google style guide
* ARROW-788 - Possible nondeterminism in Tensor serialization code
* ARROW-795 - [C++] Combine `libarrow/libarrow_io/libarrow_ipc`
* ARROW-802 - [GLib] Add read examples
* ARROW-803 - [GLib] Update package repository URL
* ARROW-804 - [GLib] Update build document
* ARROW-806 - [GLib] Support add/remove a column from table
* ARROW-807 - [GLib] Update "Since" tag
* ARROW-808 - [GLib] Remove needless ignore entries
* ARROW-810 - [GLib] Remove io/ipc prefix
* ARROW-811 - [GLib] Add GArrowBuffer
* ARROW-815 - [Java] Allow for expanding underlying buffer size after allocation
* ARROW-816 - [C++] Use conda packages for RapidJSON, Flatbuffers to speed up builds
* ARROW-818 - [Python] Review public pyarrow. API completeness and update docs
* ARROW-820 - [C++] Build dependencies for Parquet library without arrow support
* ARROW-825 - [Python] Generalize `pyarrow.from_pylist` to accept any object implementing the PySequence protocol
* ARROW-827 - [Python] Variety of Parquet improvements to support Dask integration
* ARROW-828 - [CPP] Document new requirement (libboost-regex-dev) in README.md
* ARROW-832 - [C++] Upgrade thirdparty gtest to 1.8.0
* ARROW-833 - [Python] "Quickstart" build / environment setup guide for Python developers
* ARROW-841 - [Python] Add pyarrow build to Appveyor
* ARROW-844 - [Format] Revise format/README.md to reflect progress reaching a more complete specification
* ARROW-845 - [Python] Sync FindArrow.cmake changes from parquet-cpp
* ARROW-846 - [GLib] Add GArrowTensor, GArrowInt8Tensor and GArrowUInt8Tensor
* ARROW-848 - [Python] Improvements / fixes to conda quickstart guide
* ARROW-849 - [C++] Add optional `$ARROW_BUILD_TOOLCHAIN` environment variable option for configuring build environment
* ARROW-857 - [Python] Automate publishing Python documentation to arrow-site
* ARROW-860 - [C++] Decide if typed Tensor subclasses are worthwhile
* ARROW-861 - [Python] Move DEVELOPMENT.md to Sphinx docs
* ARROW-862 - [Python] Improve source build instructions in README
* ARROW-863 - [GLib] Use GBytes to implement zero-copy
* ARROW-864 - [GLib] Unify Array files
* ARROW-868 - [GLib] Use GBytes to reduce copy
* ARROW-871 - [GLib] Unify DataType files
* ARROW-876 - [GLib] Unify ArrayBuffer files
* ARROW-877 - [GLib] Add `garrow_array_get_null_bitmap()`
* ARROW-878 - [GLib] Add `garrow_binary_array_get_buffer()`
* ARROW-892 - [GLib] Fix GArrowTensor document
* ARROW-893 - Add GLib document to Web site
* ARROW-894 - [GLib] Add GArrowPoolBuffer
* ARROW-896 - [Docs] Add Jekyll plugin for including rendered Jupyter notebooks on website
* ARROW-898 - [C++] Expand metadata support to field level, provide for sharing instances of KeyValueMetadata
* ARROW-904 - [GLib] Simplify error check codes
* ARROW-907 - C++: Convenience construct Table from schema and arrays
* ARROW-908 - [GLib] Unify OutputStream files
* ARROW-910 - [C++] Write 0-length EOS indicator at end of stream
* ARROW-916 - [GLib] Add GArrowBufferOutputStream
* ARROW-917 - [GLib] Add GArrowBufferReader
* ARROW-918 - [GLib] Use GArrowBuffer for read
* ARROW-919 - [GLib] Use "id" to get type enum value from GArrowDataType
* ARROW-920 - [GLib] Add Lua examples
* ARROW-925 - [GLib] Fix GArrowBufferReader test
* ARROW-930 - javadoc generation fails with java 8
* ARROW-931 - [GLib] Reconstruct input stream
## New Feature
* ARROW-231 - C++: Add typed Resize to PoolBuffer
* ARROW-281 - [C++] IPC/RPC support on Win32 platforms
* ARROW-341 - [Python] Making libpyarrow available to third parties
* ARROW-452 - [C++/Python] Merge "Feather" file format implementation
* ARROW-459 - [C++] Implement IPC round trip for DictionaryArray, dictionaries shared across record batches
* ARROW-483 - [C++/Python] Provide access to `custom_metadata` Field attribute in IPC setting
* ARROW-491 - [C++] Add FixedWidthBinary type
* ARROW-493 - [C++] Allow in-memory array over 2^31 -1 elements but require splitting at IPC / RPC boundaries
* ARROW-502 - [C++/Python] Add MemoryPool implementation that logs allocation activity to std::cout
* ARROW-510 - Add integration tests for date and time types
* ARROW-520 - [C++] Add STL-compliant allocator that hooks into an arrow::MemoryPool
* ARROW-528 - [Python] Support `_metadata` or `_common_metadata` files when reading Parquet directories
* ARROW-534 - [C++] Add IPC tests for date/time types
* ARROW-539 - [Python] Support reading Parquet datasets with standard partition directory schemes
* ARROW-550 - [Format] Add a TensorMessage type
* ARROW-552 - [Python] Add scalar value support for Dictionary type
* ARROW-557 - [Python] Explicitly opt in to HDFS unit tests
* ARROW-568 - [C++] Add default implementations for TypeVisitor, ArrayVisitor methods that return NotImplemented
* ARROW-574 - Python: Add support for nested Python lists in Pandas conversion
* ARROW-576 - [C++] Complete round trip Union file/stream IPC tests
* ARROW-578 - [C++] Add CMake option to add custom $CXXFLAGS
* ARROW-598 - [Python] Add support for converting pyarrow.Buffer to a memoryview with zero copy
* ARROW-603 - [C++] Add RecordBatch::Validate method that at least checks that schema matches the array metadata
* ARROW-605 - [C++] Refactor generic ArrayLoader class, support work for Feather merge
* ARROW-606 - [C++] Upgrade to flatbuffers 1.6.0
* ARROW-608 - [Format] Days since epoch date type
* ARROW-610 - [C++] Win32 compatibility in file.cc
* ARROW-616 - [C++] Remove -g flag in release builds
* ARROW-618 - [Python] Implement support for DatetimeTZ custom type from pandas
* ARROW-620 - [C++] Add date/time support to JSON reader/writer for integration testing
* ARROW-621 - [C++] Implement an "inline visitor" template that enables visitor-pattern-like code without virtual function dispatch
* ARROW-625 - [C++] Add time unit to TimeType::ToString
* ARROW-626 - [Python] Enable pyarrow.BufferReader to read from any Python object implementing the buffer/memoryview protocol
* ARROW-632 - [Python] Add support for FixedWidthBinary type
* ARROW-635 - [C++] Add JSON read/write support for FixedWidthBinary
* ARROW-637 - [Format] Add time zone metadata to Timestamp type
* ARROW-656 - [C++] Implement IO interface that can read and write to a fixed-size mutable buffer
* ARROW-657 - [Python] Write and read tensors (with zero copy) into shared memory
* ARROW-658 - [C++] Implement in-memory arrow::Tensor objects
* ARROW-659 - [C++] Add multithreaded memcpy implementation (for hardware where it helps)
* ARROW-660 - [C++] Restore function that can read a complete encapsulated record batch message
* ARROW-661 - [C++] Add a Flatbuffer metadata type that supports array data over 2^31 - 1 elements
* ARROW-663 - [Java] Support additional Time metadata + vector value accessors
* ARROW-669 - [Python] Attach proper tzinfo when computing boxed scalars for TimestampArray
* ARROW-687 - [C++] Build and run full test suite in Appveyor
* ARROW-698 - [C++] Add options to StreamWriter/FileWriter to permit large record batches
* ARROW-701 - [Java] Support additional Date metadata
* ARROW-710 - [Python] Enable Feather APIs to read and write using Python file-like objects
* ARROW-717 - [C++] IPC zero-copy round trips for arrow::Tensor
* ARROW-718 - [Python] Expose arrow::Tensor with conversions to/from NumPy arrays
* ARROW-722 - [Python] pandas conversions for new date and time types/metadata
* ARROW-729 - [Java] Add vector type for 32-bit date as days since UNIX epoch
* ARROW-733 - [C++/Format] Change name of Fixed Width Binary to Fixed Size Binary for consistency
* ARROW-734 - [Python] Support for pyarrow on Windows / MSVC
* ARROW-735 - [C++] Developer instruction document for MSVC on Windows
* ARROW-737 - [C++] Support obtaining mutable slices of mutable buffers
* ARROW-768 - [Java] Change the "boxed" object representation of date and time types
* ARROW-771 - [Python] Add APIs for reading individual Parquet row groups
* ARROW-773 - [C++] Add function to create arrow::Table with column appended to existing table
* ARROW-865 - [Python] Verify Parquet roundtrips for new date/time types
* ARROW-880 - [GLib] Add `garrow_primitive_array_get_buffer()`
* ARROW-890 - [GLib] Add GArrowMutableBuffer
* ARROW-926 - Update KEYS to include wesm
## Task
* ARROW-52 - Set up project blog
* ARROW-670 - Arrow 0.3 release
* ARROW-672 - [Format] Bump metadata version for 0.3 release
* ARROW-748 - [Python] Pin runtime library versions in conda-forge packages to force upgrades
* ARROW-798 - [Docs] Publish Format Markdown documents somehow on arrow.apache.org
* ARROW-869 - [JS] Rename directory to js/
* ARROW-95 - Scaffold Main Documentation using asciidoc
* ARROW-98 - Java: API documentation
## Test
* ARROW-836 - Test for timedelta compat with pandas
* ARROW-927 - C++/Python: Add manylinux1 builds to Travis matrix
# Apache Arrow 0.2.0 (15 February 2017)
## Bug
* ARROW-112 - [C++] Style fix for constants/enums
* ARROW-202 - [C++] Integrate with appveyor ci for windows support and get arrow building on windows
* ARROW-220 - [C++] Build conda artifacts in a build environment with better cross-linux ABI compatibility
* ARROW-224 - [C++] Address static linking of boost dependencies
* ARROW-230 - Python: Do not name modules like native ones (i.e. rename pyarrow.io)
* ARROW-239 - [Python] HdfsFile.read called with no arguments should read remainder of file
* ARROW-261 - [C++] Refactor BinaryArray/StringArray classes to not inherit from ListArray
* ARROW-275 - Add tests for UnionVector in Arrow File
* ARROW-294 - [C++] Do not use fopen / fclose / etc. methods for memory mapped file implementation
* ARROW-322 - [C++] Do not build HDFS IO interface optionally
* ARROW-323 - [Python] Opt-in to PyArrow parquet build rather than skipping silently on failure
* ARROW-334 - [Python] OS X rpath issues on some configurations
* ARROW-337 - UnionListWriter.list() is doing more than it should, this can cause data corruption
* ARROW-339 - Make `merge_arrow_pr` script work with Python 3
* ARROW-340 - [C++] Opening a writeable file on disk that already exists does not truncate to zero
* ARROW-342 - Set Python version on release
* ARROW-345 - libhdfs integration doesn't work for Mac
* ARROW-346 - Python API Documentation
* ARROW-348 - [Python] CMake build type should be configurable on the command line
* ARROW-349 - Six is missing as a requirement in the python setup.py
* ARROW-351 - Time type has no unit
* ARROW-354 - Cannot compare an array of empty strings to another
* ARROW-357 - Default Parquet `chunk_size` of 64k is too small
* ARROW-358 - [C++] libhdfs can be in non-standard locations in some Hadoop distributions
* ARROW-362 - Python: Calling `to_pandas` on a table read from Parquet leaks memory
* ARROW-371 - Python: Table with null timestamp becomes float in pandas
* ARROW-375 - columns parameter in `parquet.read_table()` raises KeyError for valid column
* ARROW-384 - Align Java and C++ RecordBatch data and metadata layout
* ARROW-386 - [Java] Respect case of struct / map field names
* ARROW-387 - [C++] arrow::io::BufferReader does not permit shared memory ownership in zero-copy reads
* ARROW-390 - C++: CMake fails on json-integration-test with `ARROW_BUILD_TESTS=OFF`
* ARROW-392 - Fix string/binary integration tests
* ARROW-393 - [JAVA] JSON file reader fails to set the buffer size on String data vector
* ARROW-395 - Arrow file format writes record batches in reverse order.
* ARROW-398 - [Java] Java file format requires bitmaps of all 1's to be written when there are no nulls
* ARROW-399 - [Java] ListVector.loadFieldBuffers ignores the ArrowFieldNode length metadata
* ARROW-400 - [Java] ArrowWriter writes length 0 for Struct types
* ARROW-401 - [Java] Floating point vectors should do an approximate comparison in integration tests
* ARROW-402 - [Java] "refCnt gone negative" error in integration tests
* ARROW-403 - [JAVA] UnionVector: Creating a transfer pair doesn't transfer the schema to destination vector
* ARROW-404 - [Python] Closing an HdfsClient while there are still open file handles results in a crash
* ARROW-405 - [C++] Be less stringent about finding include/hdfs.h in `HADOOP_HOME`
* ARROW-406 - [C++] Large HDFS reads must utilize the set file buffer size when making RPCs
* ARROW-408 - [C++/Python] Remove defunct conda recipes
* ARROW-414 - [Java] "Buffer too large to resize to ..." error
* ARROW-420 - Align Date implementation between Java and C++
* ARROW-421 - [Python] Zero-copy buffers read by pyarrow::PyBytesReader must retain a reference to the parent PyBytes to avoid premature garbage collection issues
* ARROW-422 - C++: IPC should depend on `rapidjson_ep` if RapidJSON is vendored
* ARROW-429 - git-archive SHA-256 checksums are changing
* ARROW-433 - [Python] Date conversion is locale-dependent
* ARROW-434 - Segfaults and encoding issues in Python Parquet reads
* ARROW-435 - C++: Spelling mistake in `if(RAPIDJSON_VENDORED)`
* ARROW-437 - [C++] clang compiler warnings from overridden virtual functions
* ARROW-445 - C++: `arrow_ipc` is built before `arrow/ipc/Message_generated.h` was generated
* ARROW-447 - Python: Align scalar/pylist string encoding with pandas' one.
* ARROW-455 - [C++] BufferOutputStream dtor does not call Close()
* ARROW-469 - C++: Add option so that resize doesn't decrease the capacity
* ARROW-481 - [Python] Fix Python 2.7 regression in patch for PARQUET-472
* ARROW-486 - [C++] arrow::io::MemoryMappedFile can't be casted to arrow::io::FileInterface
* ARROW-487 - Python: ConvertTableToPandas segfaults if ObjectBlock::Write fails
* ARROW-494 - [C++] When MemoryMappedFile is destructed, memory is unmapped even if buffer references still exist
* ARROW-499 - Update file serialization to use streaming serialization format
* ARROW-505 - [C++] Fix compiler warnings in release mode
* ARROW-511 - [Python] List[T] conversions not implemented for single arrays
* ARROW-513 - [C++] Fix Appveyor build
* ARROW-519 - [C++] Missing vtable in libarrow.dylib on Xcode 6.4
* ARROW-523 - Python: Account for changes in PARQUET-834
* ARROW-533 - [C++] arrow::TimestampArray / TimeArray has a broken constructor
* ARROW-535 - [Python] Add type mapping for `NPY_LONGLONG`
* ARROW-537 - [C++] StringArray/BinaryArray comparisons may be incorrect when values with non-zero length are null
* ARROW-540 - [C++] Fix build in aftermath of ARROW-33
* ARROW-543 - C++: Lazily computed `null_counts` counts number of non-null entries
* ARROW-544 - [C++] ArrayLoader::LoadBinary fails for length-0 arrays
* ARROW-545 - [Python] Ignore files without .parq or .parquet prefix when reading directory of files
* ARROW-548 - [Python] Add nthreads option to `pyarrow.Filesystem.read_parquet`
* ARROW-551 - C++: Construction of Column with nullptr Array segfaults
* ARROW-556 - [Integration] Can not run Integration tests if different cpp build path
* ARROW-561 - Update java & python dependencies to improve downstream packaging experience
## Improvement
* ARROW-189 - C++: Use ExternalProject to build thirdparty dependencies
* ARROW-191 - Python: Provide infrastructure for manylinux1 wheels
* ARROW-328 - [C++] Return `shared_ptr` by value instead of const-ref?
* ARROW-330 - [C++] CMake functions to simplify shared / static library configuration
* ARROW-333 - Make writers update their internal schema even when no data is written.
* ARROW-335 - Improve Type apis and toString() by encapsulating flatbuffers better
* ARROW-336 - Run Apache Rat in Travis builds
* ARROW-338 - [C++] Refactor IPC vector "loading" and "unloading" to be based on cleaner visitor pattern
* ARROW-350 - Add Kerberos support to HDFS shim
* ARROW-355 - Add tests for serialising arrays of empty strings to Parquet
* ARROW-356 - Add documentation about reading Parquet
* ARROW-360 - C++: Add method to shrink PoolBuffer using realloc
* ARROW-361 - Python: Support reading a column-selection from Parquet files
* ARROW-365 - Python: Provide `Array.to_pandas()`
* ARROW-366 - [java] implement Dictionary vector
* ARROW-374 - Python: clarify unicode vs. binary in API
* ARROW-379 - Python: Use `setuptools_scm`/`setuptools_scm_git_archive` to provide the version number
* ARROW-380 - [Java] optimize null count when serializing vectors.
* ARROW-382 - Python: Extend API documentation
* ARROW-396 - Python: Add pyarrow.schema.Schema.equals
* ARROW-409 - Python: Change `pyarrow.Table.dataframe_from_batches` API to create Table instead
* ARROW-411 - [Java] Move Intergration.compare and Intergration.compareSchemas to a public utils class
* ARROW-423 - C++: Define `BUILD_BYPRODUCTS` in external project to support non-make CMake generators
* ARROW-425 - Python: Expose a C function to convert arrow::Table to pyarrow.Table
* ARROW-426 - Python: Conversion from pyarrow.Array to a Python list
* ARROW-430 - Python: Better version handling
* ARROW-432 - [Python] Avoid unnecessary memory copy in `to_pandas` conversion by using low-level pandas internals APIs
* ARROW-450 - Python: Fixes for PARQUET-818
* ARROW-457 - Python: Better control over memory pool
* ARROW-458 - Python: Expose jemalloc MemoryPool
* ARROW-463 - C++: Support jemalloc 4.x
* ARROW-466 - C++: ExternalProject for jemalloc
* ARROW-468 - Python: Conversion of nested data in pd.DataFrames to/from Arrow structures
* ARROW-474 - Create an Arrow streaming file format
* ARROW-479 - Python: Test for expected schema in Pandas conversion
* ARROW-485 - [Java] Users are required to initialize VariableLengthVectors.offsetVector before calling VariableLengthVectors.mutator.getSafe
* ARROW-490 - Python: Update manylinux1 build scripts
* ARROW-524 - [java] provide apis to access nested vectors and buffers
* ARROW-525 - Python: Add more documentation to the package
* ARROW-529 - Python: Add jemalloc and Python 3.6 to manylinux1 build
* ARROW-546 - Python: Account for changes in PARQUET-867
* ARROW-553 - C++: Faster valid bitmap building
## New Feature
* ARROW-108 - [C++] Add IPC round trip for union types
* ARROW-221 - Add switch for writing Parquet 1.0 compatible logical types
* ARROW-227 - [C++/Python] Hook `arrow_io` generic reader / writer interface into `arrow_parquet`
* ARROW-228 - [Python] Create an Arrow-cpp-compatible interface for reading bytes from Python file-like objects
* ARROW-243 - [C++] Add "driver" option to HdfsClient to choose between libhdfs and libhdfs3 at runtime
* ARROW-303 - [C++] Also build static libraries for leaf libraries
* ARROW-312 - [Python] Provide Python API to read/write the Arrow IPC file format
* ARROW-317 - [C++] Implement zero-copy Slice method on arrow::Buffer that retains reference to parent
* ARROW-33 - C++: Implement zero-copy array slicing
* ARROW-332 - [Python] Add helper function to convert RecordBatch to pandas.DataFrame
* ARROW-363 - Set up Java/C++ integration test harness
* ARROW-369 - [Python] Add ability to convert multiple record batches at once to pandas
* ARROW-373 - [C++] Implement C++ version of JSON file format for testing
* ARROW-377 - Python: Add support for conversion of Pandas.Categorical
* ARROW-381 - [C++] Simplify primitive array type builders to use a default type singleton
* ARROW-383 - [C++] Implement C++ version of ARROW-367 integration test validator
* ARROW-389 - Python: Write Parquet files to pyarrow.io.NativeFile objects
* ARROW-394 - Add integration tests for boolean, list, struct, and other basic types
* ARROW-410 - [C++] Add Flush method to arrow::io::OutputStream
* ARROW-415 - C++: Add Equals implementation to compare Tables
* ARROW-416 - C++: Add Equals implementation to compare Columns
* ARROW-417 - C++: Add Equals implementation to compare ChunkedArrays
* ARROW-418 - [C++] Consolidate array container and builder code, remove arrow/types
* ARROW-419 - [C++] Promote util/{status.h, buffer.h, memory-pool.h} to top level of arrow/ source directory
* ARROW-427 - [C++] Implement dictionary-encoded array container
* ARROW-428 - [Python] Deserialize from Arrow record batches to pandas in parallel using a thread pool
* ARROW-438 - [Python] Concatenate Table instances with equal schemas
* ARROW-440 - [C++] Support pkg-config
* ARROW-441 - [Python] Expose Arrow's file and memory map classes as NativeFile subclasses
* ARROW-442 - [Python] Add public Python API to inspect Parquet file metadata
* ARROW-444 - [Python] Avoid unnecessary memory copies from use of `PyBytes_*` C APIs
* ARROW-449 - Python: Conversion from pyarrow.{Table,RecordBatch} to a Python dict
* ARROW-456 - C++: Add jemalloc based MemoryPool
* ARROW-461 - [Python] Implement conversion between arrow::DictionaryArray and pandas.Categorical
* ARROW-467 - [Python] Run parquet-cpp unit tests in Travis CI
* ARROW-470 - [Python] Add "FileSystem" abstraction to access directories of files in a uniform way
* ARROW-471 - [Python] Enable ParquetFile to pass down separately-obtained file metadata
* ARROW-472 - [Python] Expose parquet::{SchemaDescriptor, ColumnDescriptor}::Equals
* ARROW-475 - [Python] High level support for reading directories of Parquet files (as a single Arrow table) from supported file system interfaces
* ARROW-476 - [Integration] Add integration tests for Binary / Varbytes type
* ARROW-477 - [Java] Add support for second/microsecond/nanosecond timestamps in-memory and in IPC/JSON layer
* ARROW-478 - [Python] Accept a PyBytes object in the pyarrow.io.BufferReader ctor
* ARROW-484 - Add more detail about what of technology can be found in the Arrow implementations to README
* ARROW-495 - [C++] Add C++ implementation of streaming serialized format
* ARROW-497 - [Java] Integration test harness for streaming format
* ARROW-498 - [C++] Integration test harness for streaming format
* ARROW-503 - [Python] Interface to streaming binary format
* ARROW-508 - [C++] Make file/memory-mapped file interfaces threadsafe
* ARROW-509 - [Python] Add support for PARQUET-835 (parallel column reads)
* ARROW-512 - C++: Add method to check for primitive types
* ARROW-514 - [Python] Accept pyarrow.io.Buffer as input to StreamReader, FileReader classes
* ARROW-515 - [Python] Add StreamReader/FileReader methods that read all record batches as a Table
* ARROW-521 - [C++/Python] Track peak memory use in default MemoryPool
* ARROW-531 - Python: Document jemalloc, extend Pandas section, add Getting Involved
* ARROW-538 - [C++] Set up AddressSanitizer (ASAN) builds
* ARROW-547 - [Python] Expose Array::Slice and RecordBatch::Slice
* ARROW-81 - [Format] Add a Category logical type (distinct from dictionary-encoding)
## Task
* ARROW-268 - [C++] Flesh out union implementation to have all required methods for IPC
* ARROW-327 - [Python] Remove conda builds from Travis CI processes
* ARROW-353 - Arrow release 0.2
* ARROW-359 - Need to document `ARROW_LIBHDFS_DIR`
* ARROW-367 - [java] converter csv/json <=> Arrow file format for Integration tests
* ARROW-368 - Document use of `LD_LIBRARY_PATH` when using Python
* ARROW-372 - Create JSON arrow file format for integration tests
* ARROW-506 - Implement Arrow Echo server for integration testing
* ARROW-527 - clean drill-module.conf file
* ARROW-558 - Add KEYS files
* ARROW-96 - C++: API documentation using Doxygen
* ARROW-97 - Python: API documentation via sphinx-apidoc
# Apache Arrow 0.1.0 (7 October 2016)
## Bug
* ARROW-103 - Missing patterns from .gitignore
* ARROW-104 - Update Layout.md based on discussion on the mailing list
* ARROW-105 - Unit tests fail if assertions are disabled
* ARROW-113 - TestValueVector test fails if cannot allocate 2GB of memory
* ARROW-16 - Building cpp issues on XCode 7.2.1
* ARROW-17 - Set some vector fields to default access level for Drill compatibility
* ARROW-18 - Fix bug with decimal precision and scale
* ARROW-185 - [C++] Make sure alignment and memory padding conform to spec
* ARROW-188 - Python: Add numpy as install requirement
* ARROW-193 - For the instruction, typos "int his" should be "in this"
* ARROW-194 - C++: Allow read-only memory mapped source
* ARROW-200 - [Python] Convert Values String looks like it has incorrect error handling
* ARROW-209 - [C++] Broken builds: llvm.org apt repos are unavailable
* ARROW-210 - [C++] Tidy up the type system a little bit
* ARROW-211 - Several typos/errors in Layout.md examples
* ARROW-217 - Fix Travis w.r.t conda 4.1.0 changes
* ARROW-219 - [C++] Passed `CMAKE_CXX_FLAGS` are being dropped, fix compiler warnings
* ARROW-223 - Do not link against libpython
* ARROW-225 - [C++/Python] master Travis CI build is broken
* ARROW-244 - [C++] Some global APIs of IPC module should be visible to the outside
* ARROW-246 - [Java] UnionVector doesn't call allocateNew() when creating its vectorType
* ARROW-247 - [C++] Missing explicit destructor in RowBatchReader causes an incomplete type error
* ARROW-250 - Fix for ARROW-246 may cause memory leaks
* ARROW-259 - Use flatbuffer fields in java implementation
* ARROW-265 - Negative decimal values have wrong padding
* ARROW-266 - [C++] Fix the broken build
* ARROW-274 - Make the MapVector nullable
* ARROW-278 - [Format] Struct type name consistency in implementations and metadata
* ARROW-283 - [C++] Update `arrow_parquet` to account for API changes in PARQUET-573
* ARROW-284 - [C++] Triage builds by disabling Arrow-Parquet module
* ARROW-287 - [java] Make nullable vectors use a BitVecor instead of UInt1Vector for bits
* ARROW-297 - Fix Arrow pom for release
* ARROW-304 - NullableMapReaderImpl.isSet() always returns true
* ARROW-308 - UnionListWriter.setPosition() should not call startList()
* ARROW-309 - Types.getMinorTypeForArrowType() does not work for Union type
* ARROW-313 - XCode 8.0 breaks builds
* ARROW-314 - JSONScalar is unnecessary and unused.
* ARROW-320 - ComplexCopier.copy(FieldReader, FieldWriter) should not start a list if reader is not set
* ARROW-321 - Fix Arrow licences
* ARROW-36 - Remove fixVersions from patch tool (until we have them)
* ARROW-46 - Port DRILL-4410 to Arrow
* ARROW-5 - Error when run maven install
* ARROW-51 - Move ValueVector test from Drill project
* ARROW-55 - Python: fix legacy Python (2.7) tests and add to Travis CI
* ARROW-62 - Format: Are the nulls bits 0 or 1 for null values?
* ARROW-63 - C++: ctest fails if Python 3 is the active Python interpreter
* ARROW-65 - Python: FindPythonLibsNew does not work in a virtualenv
* ARROW-69 - Change permissions for assignable users
* ARROW-72 - FindParquet searches for non-existent header
* ARROW-75 - C++: Fix handling of empty strings
* ARROW-77 - C++: conform null bit interpretation to match ARROW-62
* ARROW-80 - Segmentation fault on len(Array) for empty arrays
* ARROW-88 - C++: Refactor given PARQUET-572
* ARROW-93 - XCode 7.3 breaks builds
* ARROW-94 - Expand list example to clarify null vs empty list
## Improvement
* ARROW-10 - Fix mismatch of javadoc names and method parameters
* ARROW-15 - Fix a naming typo for memory.AllocationManager.AllocationOutcome
* ARROW-190 - Python: Provide installable sdist builds
* ARROW-199 - [C++] Refine third party dependency
* ARROW-206 - [C++] Expose an equality API for arrays that compares a range of slots on two arrays
* ARROW-212 - [C++] Clarify the fact that PrimitiveArray is now abstract class
* ARROW-213 - Exposing static arrow build
* ARROW-218 - Add option to use GitHub API token via environment variable when merging PRs
* ARROW-234 - [C++] Build with libhdfs support in `arrow_io` in conda builds
* ARROW-238 - C++: InternalMemoryPool::Free() should throw an error when there is insufficient allocated memory
* ARROW-245 - [Format] Clarify Arrow's relationship with big endian platforms
* ARROW-252 - Add implementation guidelines to the documentation
* ARROW-253 - Int types should only have width of 8*2^n (8, 16, 32, 64)
* ARROW-254 - Remove Bit type as it is redundant with boolean
* ARROW-255 - Finalize Dictionary representation
* ARROW-256 - Add versioning to the arrow spec.
* ARROW-257 - Add a typeids Vector to Union type
* ARROW-264 - Create an Arrow File format
* ARROW-270 - [Format] Define more generic Interval logical type
* ARROW-271 - Update Field structure to be more explicit
* ARROW-279 - rename vector module to arrow-vector for consistency
* ARROW-280 - [C++] Consolidate file and shared memory IO interfaces
* ARROW-285 - Allow for custom flatc compiler
* ARROW-286 - Build thirdparty dependencies in parallel
* ARROW-289 - Install test-util.h
* ARROW-290 - Specialize alloc() in ArrowBuf
* ARROW-292 - [Java] Upgrade Netty to 4.041
* ARROW-299 - Use absolute namespace in macros
* ARROW-305 - Add compression and `use_dictionary` options to Parquet interface
* ARROW-306 - Add option to pass cmake arguments via environment variable
* ARROW-315 - Finalize timestamp type
* ARROW-319 - Add canonical Arrow Schema json representation
* ARROW-324 - Update arrow metadata diagram
* ARROW-325 - make TestArrowFile not dependent on timezone
* ARROW-50 - C++: Enable library builds for 3rd-party users without having to build thirdparty googletest
* ARROW-54 - Python: rename package to "pyarrow"
* ARROW-64 - Add zsh support to C++ build scripts
* ARROW-66 - Maybe some missing steps in installation guide
* ARROW-68 - Update `setup_build_env` and third-party script to be more user-friendly
* ARROW-71 - C++: Add script to run clang-tidy on codebase
* ARROW-73 - Support CMake 2.8
* ARROW-78 - C++: Add constructor for DecimalType
* ARROW-79 - Python: Add benchmarks
* ARROW-8 - Set up Travis CI
* ARROW-85 - C++: memcmp can be avoided in Equal when comparing with the same Buffer
* ARROW-86 - Python: Implement zero-copy Arrow-to-Pandas conversion
* ARROW-87 - Implement Decimal schema conversion for all ways supported in Parquet
* ARROW-89 - Python: Add benchmarks for Arrow<->Pandas conversion
* ARROW-9 - Rename some unchanged "Drill" to "Arrow"
* ARROW-91 - C++: First draft of an adapter class for parquet-cpp's ParquetFileReader that produces Arrow table/row batch objects
## New Feature
* ARROW-100 - [C++] Computing RowBatch size
* ARROW-106 - Add IPC round trip for string types (string, char, varchar, binary)
* ARROW-107 - [C++] add ipc round trip for struct types
* ARROW-13 - Add PR merge tool similar to that used in Parquet
* ARROW-19 - C++: Externalize memory allocations and add a MemoryPool abstract interface to builder classes
* ARROW-197 - [Python] Add conda dev recipe for pyarrow
* ARROW-2 - Post Simple Website
* ARROW-20 - C++: Add null count member to Array containers, remove nullable member
* ARROW-201 - C++: Initial ParquetWriter implementation
* ARROW-203 - Python: Basic filename based Parquet read/write
* ARROW-204 - [Python] Automate uploading conda build artifacts for libarrow and pyarrow
* ARROW-21 - C++: Add in-memory schema metadata container
* ARROW-214 - C++: Add String support to Parquet I/O
* ARROW-215 - C++: Support other integer types in Parquet I/O
* ARROW-22 - C++: Add schema adapter routines for converting flat Parquet schemas to in-memory Arrow schemas
* ARROW-222 - [C++] Create prototype file-like interface to HDFS (via libhdfs) and begin defining more general IO interface for Arrow data adapters
* ARROW-23 - C++: Add logical "Column" container for chunked data
* ARROW-233 - [C++] Add visibility defines for limiting shared library symbol visibility
* ARROW-236 - [Python] Enable Parquet read/write to work with HDFS file objects
* ARROW-237 - [C++] Create Arrow specializations of Parquet allocator and read interfaces
* ARROW-24 - C++: Add logical "Table" container
* ARROW-242 - C++/Python: Support Timestamp Data Type
* ARROW-26 - C++: Add developer instructions for building parquet-cpp integration
* ARROW-262 - [Format] Add a new format document for metadata and logical types for messaging and IPC / on-wire/file representations
* ARROW-267 - [C++] C++ implementation of file-like layout for RPC / IPC
* ARROW-28 - C++: Add google/benchmark to the 3rd-party build toolchain
* ARROW-293 - [C++] Implementations of IO interfaces for operating system files
* ARROW-296 - [C++] Remove `arrow_parquet` C++ module and related parts of build system
* ARROW-3 - Post Initial Arrow Format Spec
* ARROW-30 - Python: pandas/NumPy to/from Arrow conversion routines
* ARROW-301 - [Format] Add some form of user field metadata to IPC schemas
* ARROW-302 - [Python] Add support to use the Arrow file format with file-like objects
* ARROW-31 - Python: basic PyList <-> Arrow marshaling code
* ARROW-318 - [Python] Revise README to reflect current state of project
* ARROW-37 - C++: Represent boolean array data in bit-packed form
* ARROW-4 - Initial Arrow CPP Implementation
* ARROW-42 - Python: Add to Travis CI build
* ARROW-43 - Python: Add rudimentary console `__repr__` for array types
* ARROW-44 - Python: Implement basic object model for scalar values (i.e. results of `arrow_arr[i]`)
* ARROW-48 - Python: Add Schema object wrapper
* ARROW-49 - Python: Add Column and Table wrapper interface
* ARROW-53 - Python: Fix RPATH and add source installation instructions
* ARROW-56 - Format: Specify LSB bit ordering in bit arrays
* ARROW-57 - Format: Draft data headers IDL for data interchange
* ARROW-58 - Format: Draft type metadata ("schemas") IDL
* ARROW-59 - Python: Boolean data support for builtin data structures
* ARROW-60 - C++: Struct type builder API
* ARROW-67 - C++: Draft type metadata conversion to/from IPC representation
* ARROW-7 - Add Python library build toolchain
* ARROW-70 - C++: Add "lite" DCHECK macros used in parquet-cpp
* ARROW-76 - Revise format document to include null count, defer non-nullable arrays to the domain of metadata
* ARROW-82 - C++: Implement IPC exchange for List types
* ARROW-90 - Apache Arrow cpp code does not support power architecture
* ARROW-92 - C++: Arrow to Parquet Schema conversion
## Task
* ARROW-1 - Import Initial Codebase
* ARROW-101 - Fix java warnings emitted by java compiler
* ARROW-102 - travis-ci support for java project
* ARROW-11 - Mirror JIRA activity to dev@arrow.apache.org
* ARROW-14 - Add JIRA components
* ARROW-251 - [C++] Expose APIs for getting code and message of the status
* ARROW-272 - Arrow release 0.1
* ARROW-298 - create release scripts
* ARROW-35 - Add a short call-to-action / how-to-get-involved to the main README.md
## Test
* ARROW-260 - TestValueVector.testFixedVectorReallocation and testVariableVectorReallocation are flaky
* ARROW-83 - Add basic test infrastructure for DecimalType
| {
"content_hash": "efc7f07bb4b54e29a32284372edb7b94",
"timestamp": "",
"source": "github",
"line_count": 5074,
"max_line_length": 182,
"avg_line_length": 67.72506897910918,
"alnum_prop": 0.7506380279189959,
"repo_name": "renesugar/arrow",
"id": "b071083932df164ad4c6071fad8662541c3fea4d",
"size": "343730",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CHANGELOG.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "73655"
},
{
"name": "Awk",
"bytes": "3683"
},
{
"name": "Batchfile",
"bytes": "34928"
},
{
"name": "C",
"bytes": "428011"
},
{
"name": "C#",
"bytes": "517100"
},
{
"name": "C++",
"bytes": "10120156"
},
{
"name": "CMake",
"bytes": "450430"
},
{
"name": "Dockerfile",
"bytes": "54234"
},
{
"name": "Emacs Lisp",
"bytes": "1825"
},
{
"name": "FreeMarker",
"bytes": "2271"
},
{
"name": "Go",
"bytes": "838776"
},
{
"name": "HTML",
"bytes": "3427"
},
{
"name": "Java",
"bytes": "3527648"
},
{
"name": "JavaScript",
"bytes": "102332"
},
{
"name": "Lua",
"bytes": "8771"
},
{
"name": "M4",
"bytes": "9093"
},
{
"name": "MATLAB",
"bytes": "36600"
},
{
"name": "Makefile",
"bytes": "49970"
},
{
"name": "Meson",
"bytes": "39653"
},
{
"name": "Objective-C",
"bytes": "12125"
},
{
"name": "PLpgSQL",
"bytes": "56995"
},
{
"name": "Perl",
"bytes": "3799"
},
{
"name": "Python",
"bytes": "2152367"
},
{
"name": "R",
"bytes": "272554"
},
{
"name": "Ruby",
"bytes": "862884"
},
{
"name": "Rust",
"bytes": "2208433"
},
{
"name": "Shell",
"bytes": "376434"
},
{
"name": "TSQL",
"bytes": "29787"
},
{
"name": "Thrift",
"bytes": "138360"
},
{
"name": "TypeScript",
"bytes": "1157378"
}
],
"symlink_target": ""
} |
import Modal from 'flarum/components/Modal';
import LogInModal from 'flarum/components/LogInModal';
import avatar from 'flarum/helpers/avatar';
import Button from 'flarum/components/Button';
import LogInButtons from 'flarum/components/LogInButtons';
import extractText from 'flarum/utils/extractText';
/**
* The `SignUpModal` component displays a modal dialog with a sign-up form.
*
* ### Props
*
* - `username`
* - `email`
* - `password`
* - `token` An email token to sign up with.
*/
export default class SignUpModal extends Modal {
  init() {
    super.init();

    /**
     * The value of the username input.
     *
     * @type {Function} Mithril prop (getter/setter function).
     */
    this.username = m.prop(this.props.username || '');

    /**
     * The value of the email input.
     *
     * @type {Function} Mithril prop (getter/setter function).
     */
    this.email = m.prop(this.props.email || '');

    /**
     * The value of the password input.
     *
     * @type {Function} Mithril prop (getter/setter function).
     */
    this.password = m.prop(this.props.password || '');

    /**
     * The user that has been signed up and that should be welcomed.
     * Set by onsubmit() after a successful registration whose account is
     * not yet activated; remains null until then.
     *
     * @type {null|User}
     */
    this.welcomeUser = null;
  }

  /**
   * Get the CSS classes for the modal element. The success modifier is
   * applied once a user has signed up, which styles the welcome overlay.
   *
   * @return {String}
   */
  className() {
    return 'Modal--small SignUpModal' + (this.welcomeUser ? ' SignUpModal--success' : '');
  }

  /**
   * Get the translated modal title.
   *
   * @return {String}
   */
  title() {
    return app.translator.trans('core.forum.sign_up.title');
  }

  /**
   * Render the modal's content: a body section followed by a footer section.
   *
   * @return {Array} Virtual DOM children.
   */
  content() {
    return [
      <div className="Modal-body">
        {this.body()}
      </div>,
      <div className="Modal-footer">
        {this.footer()}
      </div>
    ];
  }

  /**
   * Render the sign-up form. When an email token was supplied via props,
   * the external log-in buttons and the password field are omitted, and the
   * email input is disabled (its value came from the token's email). After a
   * successful sign-up, a welcome overlay is appended on top of the form.
   *
   * @return {Array} Virtual DOM children.
   */
  body() {
    const body = [
      // External log-in providers are irrelevant when completing a
      // token-based (email-confirmed) sign-up.
      this.props.token ? '' : <LogInButtons/>,

      <div className="Form Form--centered">
        <div className="Form-group">
          <input className="FormControl" name="username" placeholder={extractText(app.translator.trans('core.forum.sign_up.username_placeholder'))}
            value={this.username()}
            onchange={m.withAttr('value', this.username)}
            disabled={this.loading} />
        </div>
        <div className="Form-group">
          <input className="FormControl" name="email" type="email" placeholder={extractText(app.translator.trans('core.forum.sign_up.email_placeholder'))}
            value={this.email()}
            onchange={m.withAttr('value', this.email)}
            disabled={this.loading || (this.props.token && this.props.email)} />
        </div>
        {this.props.token ? '' : (
          <div className="Form-group">
            <input className="FormControl" name="password" type="password" placeholder={extractText(app.translator.trans('core.forum.sign_up.password_placeholder'))}
              value={this.password()}
              onchange={m.withAttr('value', this.password)}
              disabled={this.loading} />
          </div>
        )}
        <div className="Form-group">
          <Button
            className="Button Button--primary Button--block"
            type="submit"
            loading={this.loading}>
            {app.translator.trans('core.forum.sign_up.submit_button')}
          </Button>
        </div>
      </div>
    ];

    if (this.welcomeUser) {
      const user = this.welcomeUser;

      // Mithril `config` hook: fade the welcome panel in only on its first
      // render; do nothing on subsequent redraws.
      const fadeIn = (element, isInitialized) => {
        if (isInitialized) return;
        $(element).hide().fadeIn();
      };

      body.push(
        <div className="SignUpModal-welcome" style={{background: user.color()}} config={fadeIn}>
          <div className="darkenBackground">
            <div className="container">
              {avatar(user)}
              <h3>{app.translator.trans('core.forum.sign_up.welcome_text', {user})}</h3>
              <p>{app.translator.trans('core.forum.sign_up.confirmation_message', {email: <strong>{user.email()}</strong>})}</p>
              <p>
                <Button className="Button Button--primary" onclick={this.hide.bind(this)}>
                  {app.translator.trans('core.forum.sign_up.dismiss_button')}
                </Button>
              </p>
            </div>
          </div>
        </div>
      );
    }

    return body;
  }

  /**
   * Render the footer: a link that switches to the log-in modal.
   *
   * @return {Array} Virtual DOM children.
   */
  footer() {
    return [
      <p className="SignUpModal-logIn">
        {app.translator.trans('core.forum.sign_up.log_in_text', {a: <a onclick={this.logIn.bind(this)}/>})}
      </p>
    ];
  }

  /**
   * Open the log in modal, prefilling it with an email/username/password if
   * the user has entered one.
   *
   * @public
   */
  logIn() {
    const props = {
      // LogInModal takes a single identifier; prefer the email, falling back
      // to the username the user typed here.
      email: this.email() || this.username(),
      password: this.password()
    };

    app.modal.show(new LogInModal(props));
  }

  /**
   * Focus the first input that still needs filling in: the email field when
   * a username was prefilled (and no email was), otherwise the username
   * field.
   */
  onready() {
    if (this.props.username && !this.props.email) {
      this.$('[name=email]').select();
    } else {
      this.$('[name=username]').select();
    }
  }

  /**
   * Handle form submission by POSTing the sign-up data to the forum's
   * /register endpoint. Errors are routed to the inherited onerror handler;
   * on failure the modal simply stops loading.
   *
   * @param {Event} e The submit event.
   */
  onsubmit(e) {
    e.preventDefault();

    // Show the loading state on the submit button while the request runs.
    this.loading = true;

    const data = this.submitData();

    app.request({
      url: app.forum.attribute('baseUrl') + '/register',
      method: 'POST',
      data,
      errorHandler: this.onerror.bind(this)
    }).then(
      payload => {
        const user = app.store.pushPayload(payload);

        // If the user's new account has been activated, then we can assume
        // that they have been logged in too. Thus, we will reload the page.
        // Otherwise, we will show a message asking them to check their email.
        if (user.isActivated()) {
          window.location.reload();
        } else {
          this.welcomeUser = user;
          this.loaded();
        }
      },
      this.loaded.bind(this)
    );
  }

  /**
   * Get the data that should be submitted in the sign-up request. With an
   * email token, the token stands in for the password.
   *
   * @return {Object}
   * @public
   */
  submitData() {
    const data = {
      username: this.username(),
      email: this.email()
    };

    if (this.props.token) {
      data.token = this.props.token;
    } else {
      data.password = this.password();
    }

    return data;
  }
}
| {
"content_hash": "ff1d42e121ccdda07f6468216955d994",
"timestamp": "",
"source": "github",
"line_count": 221,
"max_line_length": 165,
"avg_line_length": 26.493212669683256,
"alnum_prop": 0.5627668659265584,
"repo_name": "kirkbushell/core",
"id": "f1bc716beca93d8ebcd368f4945d9799b4170e37",
"size": "5855",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "js/forum/src/components/SignUpModal.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "154291"
},
{
"name": "HTML",
"bytes": "572"
},
{
"name": "JavaScript",
"bytes": "2128190"
},
{
"name": "PHP",
"bytes": "626815"
},
{
"name": "Shell",
"bytes": "280"
}
],
"symlink_target": ""
} |
Pympler is a development tool to measure, monitor and analyze the
memory behavior of Python objects in a running Python application.
By pympling a Python application, detailed insight in the size and
the lifetime of Python objects can be obtained. Undesirable or
unexpected runtime behavior like memory bloat and other "pymples"
can easily be identified.
Pympler integrates three previously separate modules into a single,
comprehensive profiling tool. The :ref:`asizeof <asizeof>` module
provides basic size information for one or several Python objects,
module :ref:`muppy <muppy>` is used for on-line monitoring of a Python
application and module :ref:`Class Tracker <classtracker>` provides
off-line analysis of the lifetime of selected Python objects.
A web profiling frontend exposes process statistics, garbage
visualisation and class tracker statistics.
Requirements
------------
Pympler is written entirely in Python, with no dependencies on external
libraries. It integrates `Bottle <http://bottlepy.org>`_ and
`Highcharts <http://www.highcharts.com>`_. Pympler has been tested with
Python 2.5, 2.6, 2.7, 3.1, 3.2 and 3.3.
Pympler is platform independent and has been tested on various Linux
distributions (32bit and 64bit), Windows XP, Windows 7 and MacOS X.
Download
--------
If you have *pip* installed, the easiest way to get Pympler is::
pip install pympler
Alternatively, download Pympler releases from the `Python Package Index
<https://pypi.python.org/pypi/Pympler>`_ or `check out the latest
development revision
<https://github.com/pympler/pympler>`_ with git. Please
see the README file for installation instructions.
Target Audience
---------------
Every Python developer interested in analyzing the memory consumption
of his or her Python program should find a suitable, readily usable
facility in Pympler.
Usage Examples
--------------
``pympler.asizeof`` can be used to investigate how much memory certain Python
objects consume. You can use one of the :ref:`asizeof <asizeof>` functions to
get the size of these objects and all associated referents::
>>> from pympler import asizeof
>>> obj = [1, 2, (3, 4), 'text']
>>> asizeof.asizeof(obj)
176
>>> print asizeof.asized(obj, detail=1).format()
[1, 2, (3, 4), 'text'] size=176 flat=48
(3, 4) size=64 flat=32
'text' size=32 flat=32
1 size=16 flat=16
2 size=16 flat=16
Graham has been notified that his Python script leaks memory. Looking at
the garbage collector debug output does not reveal where the leaks come
from. Thus he decides to use the :ref:`muppy <muppy>` module to see which actions
result in an increased memory usage. Graham discovers that whenever
his script iterates over the input set, a new dict object is created.
With the help of the `muppy` module he can identify where these new
dicts are referenced and eliminates the leak.
Helen maintains a complex application that is taking up a large amount
of memory. She would like to reduce the memory footprint of her
program by optimizing or restructuring her code. She has a number of
optimization candidates and she would like to know if optimizing one
of them would likely reduce the total memory footprint. Helen uses
the :ref:`Class Tracker <classtracker>` to track and profile her
candidate classes. The results tell her which class instances take up
the largest shares of memory and are therefore best suited for
optimization attempts. After trying to optimize her code she runs the
program again and compares the profiling results to quantify the
improvements.
History
-------
Pympler was founded in August 2008 by Jean Brouwers, Ludwig Haehne,
and Robert Schuppenies with the goal of providing a complete and
stand-alone memory profiling solution for Python.
| {
"content_hash": "b30fe4cdcde018a17271461fd7d2bb95",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 82,
"avg_line_length": 38.02,
"alnum_prop": 0.7614413466596528,
"repo_name": "swiftstack/pympler",
"id": "cee15784c83853fd92a1bb64adabf1b682441bd1",
"size": "3802",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/source/intro.rst",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "15427"
},
{
"name": "Python",
"bytes": "476253"
}
],
"symlink_target": ""
} |
package org.mp.em4so.agentManager;
import java.util.Collections;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.mp.em4so.exceptions.DocNotSpecifiedException;
import org.mp.em4so.model.actuating.Action;
import org.mp.em4so.model.actuating.Activity;
import org.mp.em4so.model.actuating.ExecutionInstance;
import org.mp.em4so.model.agent.Goal;
import org.mp.em4so.model.agent.Role;
import org.mp.em4so.model.common.Element;
import org.mp.em4so.model.common.Service;
import org.mp.em4so.model.network.Host;
import org.mp.em4so.model.reasoning.Scenario;
import org.mp.em4so.model.sensing.Observation;
import org.mp.em4so.model.sensing.Property;
import org.mp.em4so.utils.CouchDBUtils;
import org.mp.em4so.utils.SOMFileConfigUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.almende.eve.capabilities.CapabilityBuilder;
import com.almende.eve.state.State;
import com.almende.eve.state.couch.CouchStateConfig;
import com.almende.eve.state.memory.MemoryStateConfig;
/**
 * Central access point to an agent's memory.
 *
 * Long term memory is backed by a per-agent CouchDB database, queried through
 * {@link CouchDBUtils}; short term memory is an in-process Eve {@link State}.
 * Most query methods are thin wrappers that build a table of view options
 * (key, queryType, listType, reduce/group flags) and delegate to CouchDB
 * design-document views.
 */
public class MemoryManager {

    /** Class logger. */
    private final Logger LOG = LoggerFactory.getLogger(MemoryManager.class
            .getName());

    /** The agent's Eve state (long term memory). */
    private State myState;

    /** CouchDB connection settings for the long term memory. */
    private CouchStateConfig params;

    /** Named state documents. */
    private Hashtable<String, State> documents;

    /** Shared singleton instance. */
    private static MemoryManager mm;

    /** Low level CouchDB query helper. */
    private CouchDBUtils cdb;

    /** Configuration of the in-memory short term state. */
    private MemoryStateConfig stmParams;

    /** The in-memory short term state. */
    private State stmState;

    /**
     * Initialize short term memory as a plain in-memory Eve state.
     *
     * @param agentId the agent id, used as the state id
     */
    public void initializeShortTermMemory(String agentId){
        stmParams = new MemoryStateConfig();
        stmParams.setId(agentId);
        stmState = new CapabilityBuilder<State>()
                .withConfig(stmParams)
                .build();
    }

    /**
     * Gets the role responsible for the first activity of the given role.
     *
     * @param role the role whose first activity is used as the lookup key
     * @return the responsible role, or the query result ({@code null} when
     *         nothing matches)
     */
    public Role getResponsibleRole(Role role){
        Hashtable <String,String> args = null;
        try {
            args = new Hashtable<String,String>();
            args.put("queryType", "byactivity");
            args.put("listType", "single");
            args.put("reduce", "false");
            args.put("group", "false");
            args.put("doc", "role");
            args.put("key", role.getActivity().get(0).getId());
            role = cdb.querySingle(Role.class,args);
        } catch (DocNotSpecifiedException e) {
            // Consistent with the rest of the class: log instead of printStackTrace.
            LOG.error(e.getMessage(),e);
        }
        return role;
    }

    /**
     * Gets the host that plays the given role.
     *
     * @param role the role
     * @return the role's host, or {@code null} when none is found
     */
    public Host getRoleHost(Role role){
        Host host = null;
        Hashtable <String,String> args = null;
        try {
            args = new Hashtable<String,String>();
            args.put("queryType", "byrole");
            args.put("listType", "single");
            args.put("reduce", "true");
            args.put("group", "true");
            args.put("key", role.getId());
            host = cdb.querySingle(Host.class,args);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return host;
    }

    /**
     * Gets the next goal.
     *
     * @return the next goal; currently always {@code null} — dynamic goal
     *         selection is not implemented yet
     */
    public Goal getNextGoal() {
        // TODO Dynamic adding/selection of goals is not implemented yet.
        Goal goal = null;
        return goal;
    }

    /**
     * Gets all goals stored in long term memory.
     *
     * @return a map of goals keyed by id, or {@code null} on query failure
     */
    public Map<String,Goal> getGoals() {
        Hashtable <String,String> args = null;
        Map<String, Goal> goals = null;
        try {
            args = new Hashtable<String,String>();
            args.put("queryType", "all");
            args.put("listType", "all");
            args.put("reduce", "false");
            args.put("group", "false");
            goals = cdb.query(args, Goal.class);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return goals;
    }

    /**
     * Gets the scenario associated with a goal.
     *
     * @param goal the goal whose id is used as the lookup key
     * @return the scenario, or {@code null} when none is found
     */
    public Scenario getScenario(Goal goal){
        Hashtable <String,String> args = null;
        Scenario scenario = null;
        try {
            args = new Hashtable<String,String>();
            args.put("queryType", "all");
            args.put("listType", "single");
            args.put("key", goal.getId());
            args.put("reduce", "false");
            args.put("group", "false");
            scenario = cdb.querySingleCollection(Scenario.class, args);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return scenario;
    }

    /**
     * Instantiates a new memory manager using the knowledge-base settings
     * from {@link SOMFileConfigUtils}. The database name is the configured
     * KB prefix followed by the agent id.
     *
     * @param agentId the agent id
     */
    MemoryManager(String agentId) {
        this(SOMFileConfigUtils.getKBUrl(),SOMFileConfigUtils.getKBUser(),SOMFileConfigUtils.getKBPass(),SOMFileConfigUtils.getKBPrefix()+agentId,agentId);
    }

    /**
     * Instantiates a new memory manager against an explicit CouchDB endpoint.
     *
     * @param url the CouchDB server url
     * @param user the database user
     * @param pass the database password
     * @param db the database name
     * @param agentId the agent id
     */
    MemoryManager(String url, String user, String pass, String db, String agentId) {
        params = new CouchStateConfig();
        params.setUrl(url);
        params.setUsername(user);
        params.setPassword(pass);
        /* default value, therefore optional: */
        params.setDatabase(db); // Database name
        params.setId(agentId);
        documents = new Hashtable<String, State>();
        // NOTE(review): the built State is never retained; the build() call is
        // kept for its side effects of initialising the CouchDB-backed state.
        // Confirm whether it was meant to be assigned to myState.
        State st = new CapabilityBuilder<State>().withConfig(params).build();
        cdb = new CouchDBUtils(url+"/"+db, user,
                pass);
        LOG.trace(agentId+": Initialized Long term memory");
        initializeShortTermMemory(agentId);
        LOG.trace(agentId+": Initialized Short term memory");
    }

    /**
     * Gets all property documents of this agent.
     *
     * @return the list of properties
     */
    public List<Property> getMyProperties() {
        List<Property> properties = cdb.queryAll("property", Property.class);
        LOG.trace("Getting properties {}",properties);
        return properties;
    }

    /**
     * Gets all roles, including their documents.
     *
     * @return the roles, or {@code null} on query failure
     */
    public List<Role> getRoles() {
        Hashtable<String, Object> args = null;
        List<Role> roles=null;
        try {
            args = new Hashtable<String, Object>();
            args.put("includeDocs", "true");
            roles = cdb.query(Role.class, args);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return roles;
    }

    /**
     * Gets the agent's long term state.
     *
     * @return the state
     */
    public State getMyState() {
        return myState;
    }

    /**
     * Gets the CouchDB state configuration.
     *
     * @return the configuration
     */
    public CouchStateConfig getParams() {
        return params;
    }

    /**
     * Gets the named state documents.
     *
     * @return the documents
     */
    public Hashtable<String, State> getDocuments() {
        return documents;
    }

    /**
     * Gets the shared instance.
     *
     * @return the shared MemoryManager
     */
    public static MemoryManager getMm() {
        return mm;
    }

    /**
     * Sets the agent's long term state.
     *
     * @param myState the new state
     */
    public void setMyState(State myState) {
        this.myState = myState;
    }

    /**
     * Sets the CouchDB state configuration.
     *
     * @param params the new configuration
     */
    public void setParams(CouchStateConfig params) {
        this.params = params;
    }

    /**
     * Sets the named state documents.
     *
     * @param documents the documents
     */
    public void setDocuments(Hashtable<String, State> documents) {
        this.documents = documents;
    }

    /**
     * Sets the shared instance.
     *
     * @param mm the new shared MemoryManager
     */
    public static void setMm(MemoryManager mm) {
        MemoryManager.mm = mm;
    }

    /**
     * Gets the CouchDB query helper.
     *
     * @return the helper
     */
    public CouchDBUtils getCdb() {
        return cdb;
    }

    /**
     * Sets the CouchDB query helper.
     *
     * @param cdb the new helper
     */
    public void setCdb(CouchDBUtils cdb) {
        this.cdb = cdb;
    }

    /**
     * Gets the most recent observations, grouped (reduced) per property.
     *
     * @param propertyKey the property key, or {@code null} for all properties
     * @return the recent observations, or {@code null} on failure
     */
    public List<Observation> getRecentObservations(String propertyKey) {
        Hashtable<String, String> args = null;
        List<Observation> results = null;
        try {
            args = new Hashtable<String, String>();
            args.put("reduce", "true");
            args.put("group", "true");
            if(propertyKey!=null)args.put("key", propertyKey);
            results = cdb.query(Observation.class, args);
        } catch (Exception e) {
            // Use the class logger (with stack trace) instead of building a
            // throwaway logger per call.
            LOG.error(e.getMessage(), e);
        }
        return results;
    }

    /**
     * Gets all observations, unreduced.
     *
     * @return all observations, or {@code null} on failure
     */
    public List<Observation> getAllObservations() {
        Hashtable<String, String> args = null;
        List<Observation> results = null;
        try {
            args = new Hashtable<String, String>();
            args.put("reduce", "false");
            args.put("group", "false");
            results = cdb.query(Observation.class, args);
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
        }
        return results;
    }

    /**
     * Gets the activities (with their actions) declared by a role.
     *
     * @param role the role; may be {@code null}
     * @return the role's activities with actions attached, or {@code null}
     *         when the role declares none
     */
    public List<Activity> getActivitiesByRol(Role role){
        List<Activity> activities = null;
        Hashtable<String, Object> args=null;
        List<String> strKeys = null;
        try {
            if(role!=null){
                activities = role.getActivity();
            }
            // Guard: with no role, or a role without activities, there is
            // nothing to query (the unguarded original threw an NPE here).
            if(activities == null){
                return null;
            }
            args = new Hashtable<String, Object>();
            //TODO Improvement: to send attribute to get in order to pass it to the query
            strKeys = new LinkedList<String>();
            for(Activity a: activities){
                strKeys.add(a.getId());
            }
            args.put("keys", strKeys);
            args.put("queryType", "all_n_actions");
            activities = cdb.query(Activity.class, args);
            if(activities != null){
                // Action lists may be consumed concurrently by executors.
                for(Activity a: activities){
                    a.setActions(Collections.synchronizedList(a.getActions()));
                }
            }
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return activities;
    }

    /**
     * Gets a single action by key.
     *
     * @param key the action key
     * @return the first matching action, or {@code null} when none is found
     */
    public Action getAction(String key){
        List<Action> actions = null;
        Hashtable<String, String> args = new Hashtable<String,String>();
        args.put("key", key);
        Action result = null;
        try {
            actions = cdb.query(Action.class, args);
            if (actions != null && actions.size()>0)
                result = actions.get(0);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return result;
    }

    /**
     * Re-reads an activity (with its actions) from long term memory.
     *
     * @param activity the activity whose id is used as the lookup key
     * @return the refreshed activity when exactly one match is found;
     *         otherwise the argument unchanged
     */
    public Activity getActivity(Activity activity){
        List<Activity> result = null;
        Hashtable<String, String> args = new Hashtable<String,String>();
        try {
            args.put("key", activity.getId());
            args.put("queryType", "all_n_actions");
            args.put("listType", "all_n_actions");
            result = cdb.query(Activity.class,args);
            if(result!=null && result.size()==1){
                activity = result.get(0);
            }
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return activity;
    }

    /**
     * Gets hosts providing any of a service's categories, excluding the
     * given urls.
     *
     * @param service the service whose categories are the lookup keys
     * @param avoidUrls host urls to exclude from the result
     * @return hosts keyed by id, or {@code null} on failure
     */
    public HashMap<String,Host> getCategoryProviders(Service service, List<String> avoidUrls){
        HashMap<String,Host> results=null;
        Hashtable<String,Object> args = new Hashtable<String,Object>();
        args.put("keys",service.getCategories());
        args.put("reduce", "false");
        args.put("group", "false");
        args.put("queryType", "categories_host");
        args.put("listType", "remote_host");
        args.put("doc", "host");
        args.put("params1",avoidUrls);
        try {
            results = cdb.query(args,Host.class);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return results;
    }

    /**
     * Gets all known hosts, excluding the given urls.
     *
     * @param avoidUrls host urls to exclude from the result
     * @return hosts keyed by id, or {@code null} on failure
     */
    public HashMap<String,Host> getKnownProviders(List<String> avoidUrls){
        HashMap<String,Host> results=null;
        Hashtable<String,Object> args = new Hashtable<String,Object>();
        args.put("reduce", "false");
        args.put("group", "false");
        args.put("queryType", "host_url");
        args.put("listType", "remote_host");
        args.put("doc", "host");
        args.put("params1",avoidUrls);
        try {
            results = cdb.query(args, Host.class);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return results;
    }

    /**
     * Gets the ranked providers of a service's categories.
     *
     * @param service the service whose categories are the lookup keys
     * @return the providers, or {@code null} on failure
     */
    public List<Service> getProviders(Service service){//TO review to send to smart object host
        List<Service> results=null;
        Hashtable<String,Object> args = new Hashtable<String,Object>();
        args.put("keys",service.getCategories());
        args.put("reduce", "true");
        args.put("group", "true");
        args.put("queryType", "categories_ranking");
        args.put("listType", "all");
        try {
            results = cdb.query(Service.class, args);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return results;
    }

    /**
     * Gets the short term state.
     *
     * @return the short term state
     */
    public State getStmState() {
        return stmState;
    }

    /**
     * Sets the short term state.
     *
     * @param stmState the new short term state
     */
    public void setStmState(State stmState) {
        this.stmState = stmState;
    }

    /**
     * Gets the ranked services of a given kind.
     *
     * @param type the service kind used as the lookup key
     * @return the matching services, or {@code null} on failure
     */
    public List<Service> getServicesByType(String type){
        List<Service> results=null;
        Hashtable<String,Object> args = new Hashtable<String,Object>();
        args.put("key", type);
        args.put("reduce", "true");
        args.put("group", "true");
        args.put("queryType", "kind_ranking");
        args.put("listType", "all");
        try {
            results = cdb.query(Service.class, args);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return results;
    }

    /**
     * Looks a service up, by id when available, otherwise by its contract
     * string {@code [name, result, [argType, ...]]}.
     *
     * @param service the service carrying the id or contract fields
     * @return the matching service, or the argument / {@code null} depending
     *         on the query result
     */
    public Service getService(Service service){
        String strContract = null;
        Hashtable<String,Object> args = new Hashtable<String,Object>();
        if(service.getId()!=null){
            args.put("key", service.getId());
            args.put("queryType", "byid");
        }else if(service.getName()!= null ){
            strContract = "[\""+service.getName()+"\",\""+service.getResult()+"\",[";
            // Guard empty argTypes: mirrors getExecutionInstance and avoids the
            // StringIndexOutOfBoundsException the unguarded substring/lastIndexOf
            // threw for services without arguments.
            if(service.getArgTypes()!=null && !service.getArgTypes().isEmpty()){
                for(String arg:service.getArgTypes().keySet()){
                    strContract += "\""+service.getArgTypes().get(arg) +"\",";
                }
                strContract = strContract.substring(0,strContract.lastIndexOf(","));
            }
            strContract += "]]";
            args.put("queryType", "bycontract");
            args.put("key", strContract);
        }
        args.put("reduce", "false");
        args.put("group", "false");
        args.put("listType", "single");
        try {
            service = cdb.querySingle(Service.class, args);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return service;
    }

    /**
     * Gets sensor services with their properties, as a table.
     *
     * @param type the service kind (currently unused by the query options)
     * @return services keyed by id, or {@code null} on failure
     */
    public HashMap<String,Service> getServicesPropertiesByType(String type){
        HashMap<String,Service> results=null;
        Hashtable<String,Object> args = new Hashtable<String,Object>();
        args.put("reduce", "false");
        args.put("group", "false");
        args.put("queryType", "sensor_properties");
        args.put("listType", "all_table");
        try {
            results = cdb.query(args, Service.class);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return results;
    }

    /**
     * Generic lookup of a list of documents of a given kind.
     *
     * @param <T> the document type
     * @param clazz the document class
     * @param document the document kind (design doc name)
     * @param query the view (queryType) to use
     * @param docId the lookup key
     * @return the matching documents, or {@code null} on failure
     */
    public <T> List<T> getDocuments(Class<T> clazz, String document,String query,String docId){
        List<T> result=null;
        try {
            Hashtable<String,Object> args = new Hashtable<String,Object>();
            args.put("key", docId);
            args.put("doc", document);
            args.put("queryType", query);
            args.put("listType", "all");
            result = cdb.<T>queryDocs(clazz,args);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return result;
    }

    /**
     * Generic lookup of a single document of a given kind.
     *
     * @param <T> the document type
     * @param clazz the document class
     * @param document the document kind (design doc name)
     * @param query the view (queryType) to use
     * @param docId the lookup key
     * @return the matching document, or {@code null} on failure
     */
    public <T>T getDocument(Class<T> clazz, String document,String query,String docId){
        T result=null;
        try {
            Hashtable<String,Object> args = new Hashtable<String,Object>();
            args.put("key", docId);
            args.put("doc", document);
            args.put("queryType", query);
            args.put("listType", "single");
            result = cdb.<T>queryDoc(clazz,args);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return result;
    }

    /**
     * Checks the access policy of a service for a given caller.
     *
     * @param service the requested service
     * @param senderUrl the caller's url
     * @return the policy result as a string; {@code "false"} by default
     */
    public String getAccessRightsService(Service service, String senderUrl){
        String result = "false";
        try {
            Hashtable<String,String> args = new Hashtable<String,String>();
            args.put("key", service.getName());
            args.put("param1", senderUrl);
            args.put("doc", "accesspolicy");
            args.put("queryType","all" );
            args.put("listType", "single_url");
            result = cdb.queryDoc(String.class,args);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return result;
    }

    /**
     * Gets the execution instance matching a service contract, resolving its
     * host document as well.
     *
     * @param service the service carrying name/result/argTypes
     * @return the execution instance, or {@code null} when none matches (in
     *         which case the service's execution instance is cleared)
     */
    public ExecutionInstance getExecutionInstance(Service service){
        String strContract;
        ExecutionInstance ei = null;
        try {
            strContract = "[\""+service.getName()+"\",\""+service.getResult()+"\",[";
            if(service.getArgTypes()!=null && !service.getArgTypes().isEmpty()){
                for(String arg:service.getArgTypes().keySet()){
                    strContract += "\""+service.getArgTypes().get(arg) +"\",";
                }
                strContract = strContract.substring(0,strContract.lastIndexOf(","));
            }
            strContract+= "]]";
            Hashtable<String,String> args = new Hashtable<String,String>();
            args.put("key", strContract);
            args.put("queryType","exin_bycontract" );
            args.put("reduce", "true");
            args.put("group", "true");
            args.put("listType", "exin_single");
            args.put("doc","service");
            ei = cdb.queryDoc(ExecutionInstance.class,args);
            if(ei!=null){
                // Replace the embedded host stub with its full document.
                ei.setHost(
                        getHost(ei.getHost())
                        );
            }else
                service.setExecutionInstance(null);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return ei;
    }

    /**
     * Gets a host document by id.
     *
     * @param host the host whose id is used as the lookup key
     * @return the host document, or {@code null} when none is found
     */
    public Host getHost(Host host){
        Host found = null;
        try {
            Hashtable<String,String> args = new Hashtable<String,String>();
            args.put("key", host.getId());
            args.put("queryType","all" );
            args.put("reduce", "false");
            args.put("group", "false");
            args.put("listType", "single");
            found = cdb.queryDoc(Host.class,args);
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return found;
    }

    /**
     * Gets a host document by trying each of the host's urls in turn.
     *
     * @param host the host carrying candidate urls
     * @return the first matching host document, or {@code null}
     */
    public Host getHostFromUrl(Host host){
        Host found = null;
        Hashtable<String,String> args = null;
        try {
            if(host!=null && host.getUrls()!=null&&!host.getUrls().isEmpty()){
                args = new Hashtable<String,String>();
                args.put("reduce", "false");
                args.put("group", "false");
                args.put("listType", "single_host_url");
                for(String url:host.getUrls()){
                    args.put("key", url);
                    args.put("queryType","host_url" );
                    LOG.trace("Looking for existing host with url: {}",url);
                    found = cdb.queryDoc(Host.class,args);
                    if(found!=null) break;
                }
            }
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return found;
    }

    /**
     * Gets a property element by scope and property name (or kind).
     *
     * @param element the element carrying scope plus name or kind
     * @return the matching element, or {@code null}
     */
    public Element getPropertyValue(Element element) {
        Element found = null;
        Hashtable<String, Object> args = null;
        String query = null;
        String key = null;
        try {
            if (element != null) {
                args = new Hashtable<String, Object>();
                args.put("reduce", "false");
                args.put("group", "false");
                // View name and composite key are built incrementally:
                // by_sco_prop for a named property, by_sco_kind for a kind.
                query = "by_sco";
                key = "[\"" + element.getScope() + "\"";
                if (element.getName() != null) {
                    query += "_prop";
                    key += ",\"" + element.getName() + "\"";
                } else if (element.getKind() != null) {
                    query += "_kind";
                    key += ",\"" + element.getKind() + "\"";
                }
                args.put("listType", "single_property");
                key += "]";
                args.put("queryType", query);
                args.put("key", key);
                args.put("doc", "property");
                LOG.trace("Looking for property {}.{} ->{}", element.getScope(), element.getName(),
                        args.get("listType"));
                found = cdb.queryDoc(Element.class, args);
            }
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(), e);
        }
        return found;
    }

    /**
     * Gets an element by scope and name/kind, optionally restricted to one or
     * several attribute names. When the "level" attribute is requested, the
     * property's most recent observation is fetched as well.
     *
     * @param element the element carrying scope, name/kind and attribute names
     * @return the matching element, or {@code null}
     */
    public Element getElement(Element element) {
        Element found = null;
        Hashtable<String, Object> args = null;
        List<String> params = null;
        String levelValue = null;
        Observation o = null;
        String query = null;
        String key = null;
        try {
            if (element != null) {
                args = new Hashtable<String, Object>();
                args.put("reduce", "false");
                args.put("group", "false");
                query = "by_sco";
                key = "[\"" + element.getScope() + "\"";
                if (element.getName() != null) {
                    query += "_prop";
                    key += ",\"" + element.getName() + "\"";
                } else if (element.getKind() != null) {
                    query += "_kind";
                    key += ",\"" + element.getKind() + "\"";
                }
                if (element.getAttributeName() != null) {
                    args.put("listType", "single");
                    key += ",\"" + element.getAttributeName() + "\"";
                    query += "_attr";
                } else if (element.getAttrNames() != null) {
                    args.put("listType", "multi_attribute");
                    params = new LinkedList<String>();
                    for (int i = 0; i < element.getAttrNames().length; i++) {
                        params.add(element.getAttrNames()[i]);
                        if (element.getAttrNames()[i].equals("level")) {
                            o = getRecentObservation(element.getName());
                        }
                    }
                    args.put("params1", params);
                }
                key += "]";
                args.put("queryType", query);
                args.put("key", key);
                args.put("doc", "property");
                LOG.trace("Looking for element {}.{} ->{}", element.getScope(), element.getName(),
                        args.get("listType"));
                found = cdb.queryDoc(Element.class, args);
                if (found != null && o != null) {
                    // Fix: the original tested getAttributes().equals(null), which
                    // is never true (and NPEs when the map is null); == null is
                    // what was intended.
                    if (found.getAttributes() == null)
                        found.setAttributes(new HashMap<String, String>());
                    // TODO(review): levelValue is never assigned from the
                    // observation 'o'; confirm which observation field should
                    // populate the "level" attribute.
                    found.getAttributes().put("level", levelValue);
                }
            }
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(), e);
        }
        return found;
    }

    /**
     * Gets a pending message whose key is the element's value.
     *
     * @param element the element carrying the message key in its value
     * @return the pending message element, or {@code null}
     */
    public Element getPendingMessage(Element element){
        Element found = null;
        Hashtable<String,String> args = null;
        try {
            if(element!=null){
                args = new Hashtable<String,String>();
                args.put("reduce", "true");
                args.put("group", "true");
                args.put("listType", "single");
                args.put("queryType","pending_message" );
                args.put("doc", "message");
                args.put("key",element.getValue() );
                LOG.trace("Looking for message: {}",element.getValue());
                found = cdb.queryDoc(Element.class,args);
            }
        } catch (DocNotSpecifiedException e) {
            LOG.error(e.getMessage(),e);
        }
        return found;
    }

    /**
     * Gets the most recent observation for a property.
     *
     * @param propertyKey the property key
     * @return the first recent observation, or {@code null} when none exists
     */
    public Observation getRecentObservation(String propertyKey){
        List<Observation> results = getRecentObservations(propertyKey);
        if(results!=null && results.size()>0) return results.get(0);
        else return null;
    }
}
| {
"content_hash": "d0de0fbb013f795948a9762850bc21be",
"timestamp": "",
"source": "github",
"line_count": 1097,
"max_line_length": 149,
"avg_line_length": 24.23792160437557,
"alnum_prop": 0.6078077400428749,
"repo_name": "uol-cs-multiot/em4so-java",
"id": "9ce5e4bf44c70c8798933174927d06a06de9bc35",
"size": "26680",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/src/main/java/org/mp/em4so/agentManager/MemoryManager.java",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "343337"
},
{
"name": "JavaScript",
"bytes": "30394"
},
{
"name": "Shell",
"bytes": "795"
}
],
"symlink_target": ""
} |
<?xml version="1.0" ?><!DOCTYPE TS><TS language="ca@valencia" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About WorldCurrencyCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>WorldCurrencyCoin</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The WorldCurrencyCoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
        <translation>Feu doble clic per a editar l'adreça o l'etiqueta</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
        <translation>Crea una nova adreça</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copieu l'adreça seleccionada al porta-retalls del sistema</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your WorldCurrencyCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a WorldCurrencyCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified WorldCurrencyCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>Eliminar</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-58"/>
<source>WorldCurrencyCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+280"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+242"/>
<source>Synchronizing with network...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-308"/>
<source>&Overview</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Show information about WorldCurrencyCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+250"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-247"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Send coins to a WorldCurrencyCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Modify configuration options for WorldCurrencyCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-200"/>
<source>WorldCurrencyCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+178"/>
<source>&About WorldCurrencyCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>&File</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>WorldCurrencyCoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+70"/>
<source>%n active connection(s) to WorldCurrencyCoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-284"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+288"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid WorldCurrencyCoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. WorldCurrencyCoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid WorldCurrencyCoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>WorldCurrencyCoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start WorldCurrencyCoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start WorldCurrencyCoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the WorldCurrencyCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the WorldCurrencyCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting WorldCurrencyCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show WorldCurrencyCoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting WorldCurrencyCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the WorldCurrencyCoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the WorldCurrencyCoin-Qt help message to get a list with possible WorldCurrencyCoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>WorldCurrencyCoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>WorldCurrencyCoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the WorldCurrencyCoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the WorldCurrencyCoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 BC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>123.456 BC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a WorldCurrencyCoin address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid WorldCurrencyCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a WorldCurrencyCoin address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this WorldCurrencyCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified WorldCurrencyCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a WorldCurrencyCoin address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter WorldCurrencyCoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 25 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>WorldCurrencyCoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or worldcurrencycoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: worldcurrencycoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: worldcurrencycoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 22937 or testnet: 32937)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 22938 or testnet: 32938)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong WorldCurrencyCoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=worldcurrencycoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "WorldCurrencyCoin Alert" admin@foo.com
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. WorldCurrencyCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>WorldCurrencyCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of WorldCurrencyCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart WorldCurrencyCoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. WorldCurrencyCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS> | {
"content_hash": "ee4b5cd3bb291f42ecbe2e98d37ee6ba",
"timestamp": "",
"source": "github",
"line_count": 3275,
"max_line_length": 394,
"avg_line_length": 32.922442748091605,
"alnum_prop": 0.5850622791478469,
"repo_name": "ANTIICO/WorldCurrencyCoin",
"id": "d86645f1c2c9e16842eaf67beb8cd5cbdbecbdf4",
"size": "107825",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/qt/locale/bitcoin_ca@valencia.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "51312"
},
{
"name": "C",
"bytes": "34471"
},
{
"name": "C++",
"bytes": "2585820"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50620"
},
{
"name": "Makefile",
"bytes": "121776"
},
{
"name": "NSIS",
"bytes": "6077"
},
{
"name": "Objective-C",
"bytes": "858"
},
{
"name": "Objective-C++",
"bytes": "3537"
},
{
"name": "Python",
"bytes": "41580"
},
{
"name": "QMake",
"bytes": "13988"
},
{
"name": "Roff",
"bytes": "12684"
},
{
"name": "Shell",
"bytes": "9083"
}
],
"symlink_target": ""
} |
<?php
namespace Droid\Test\Plugin\Mysql\Command;
use RuntimeException;
use Symfony\Component\Console\Application;
use Symfony\Component\Console\Tester\CommandTester;
use Droid\Plugin\Mysql\Command\MysqlMasterInfoCommand;
use Droid\Plugin\Mysql\Db\Client;
use Droid\Plugin\Mysql\Db\ClientException;
use Droid\Plugin\Mysql\Db\Config;
class MysqlMasterInfoCommandTest extends \PHPUnit_Framework_TestCase
{
    /** @var Client|\PHPUnit_Framework_MockObject_MockObject mocked DB client */
    protected $client;

    /** @var Config|\PHPUnit_Framework_MockObject_MockObject mocked client config */
    protected $config;

    /** @var Application console application hosting the command under test */
    protected $app;

    /** @var CommandTester runner for the command under test */
    protected $tester;

    protected function setUp()
    {
        $this->config = $this
            ->getMockBuilder(Config::class)
            ->getMock()
        ;
        $this->client = $this
            ->getMockBuilder(Client::class)
            ->disableOriginalConstructor()
            ->getMock()
        ;
        $this
            ->client
            ->method('getConfig')
            ->willReturn($this->config)
        ;

        $command = new MysqlMasterInfoCommand($this->client);
        $this->app = new Application;
        $this->app->add($command);
        $this->tester = new CommandTester($command);
    }

    /**
     * Execute the mysql:master-info command with the standard fixture
     * arguments, merged with any test-specific extra input.
     *
     * @param array $extraInput additional command arguments or options
     */
    private function executeCommand(array $extraInput = array())
    {
        $this->tester->execute(array_merge(
            array(
                'command' => $this->app->find('mysql:master-info'),
                'url' => 'mysql://db_user:passw0rd@db_host/',
                'master_hostname' => '203.0.113.0',
                'replication_username' => 'repln_user',
                'replication_password' => 's3kr3t',
            ),
            $extraInput
        ));
    }

    /**
     * Expect the Config mock to receive the fixture connection URL
     * exactly once.
     */
    private function expectConnectionUrlIsConfigured()
    {
        $this
            ->config
            ->expects($this->once())
            ->method('setConnectionUrl')
            ->with('mysql://db_user:passw0rd@db_host/')
        ;
    }

    /**
     * @expectedException RuntimeException
     * @expectedExceptionMessage You must specify both (or neither) --log-name and --log-position
     */
    public function testCommandThrowsRuntimeExceptionWhenLogNameWithoutLogPos()
    {
        $this->executeCommand(array('--log-name' => 'mysql-bin.log'));
    }

    /**
     * @expectedException RuntimeException
     * @expectedExceptionMessage You must specify both (or neither) --log-name and --log-position
     */
    public function testCommandThrowsRuntimeExceptionWhenLogPosWithoutLogName()
    {
        $this->executeCommand(array('--log-position' => 174));
    }

    public function testCommandConfiguresClientWithConnectionUrl()
    {
        $this->expectConnectionUrlIsConfigured();
        $this
            ->client
            ->method('execute')
            ->willReturn(true)
        ;

        $this->executeCommand();
    }

    /**
     * @expectedException RuntimeException
     * @expectedExceptionMessage I cannot execute the CHANGE MASTER query
     */
    public function testCommandThrowsRuntimeExceptionWhenQueryFailsToExecute()
    {
        $this->expectConnectionUrlIsConfigured();
        $this
            ->client
            ->method('execute')
            ->willThrowException(new ClientException)
        ;

        $this->executeCommand();
    }

    public function testCommandReportsThatItFailedToCompleteSuccessfully()
    {
        $this->expectConnectionUrlIsConfigured();
        $this
            ->client
            ->method('execute')
            ->willReturn(false)
        ;

        $this->executeCommand();

        $this->assertRegExp(
            '/^I cannot configure the slave with the master information/',
            $this->tester->getDisplay()
        );
    }

    public function testCommandReportsThatItCompletedSuccessfully()
    {
        $this->expectConnectionUrlIsConfigured();
        $this
            ->client
            ->method('execute')
            ->willReturn(true)
        ;

        $this->executeCommand();

        $this->assertRegExp(
            '/^I have successfully configured the slave with the master information/',
            $this->tester->getDisplay()
        );
    }
}
| {
"content_hash": "978c10e9d2f846447144acaa29b9756e",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 97,
"avg_line_length": 30.467032967032967,
"alnum_prop": 0.5406672678088368,
"repo_name": "droid-php/droid-mysql",
"id": "bde3df4fe84fffaa3fe64b887ac7d01ebb400aed",
"size": "5545",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/Command/MysqlMasterInfoCommandTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "103003"
}
],
"symlink_target": ""
} |
/*jslint nomen: false, strict: false */
/*global define: false */
define(['env!env/print'], function (print) {

    // Leveled logger: messages with a severity below the configured
    // `level` are discarded; everything else flows through _print,
    // which prepends the optional prefix before calling _sysPrint.
    var logger = {
        TRACE: 0,
        INFO: 1,
        WARN: 2,
        ERROR: 3,
        SILENT: 4,

        level: 0,
        logPrefix: "",

        // Set the minimum severity that will be emitted.
        logLevel: function (level) {
            this.level = level;
        },

        trace: function (message) {
            if (this.level > this.TRACE) {
                return;
            }
            this._print(message);
        },

        info: function (message) {
            if (this.level > this.INFO) {
                return;
            }
            this._print(message);
        },

        warn: function (message) {
            if (this.level > this.WARN) {
                return;
            }
            this._print(message);
        },

        error: function (message) {
            if (this.level > this.ERROR) {
                return;
            }
            this._print(message);
        },

        // Prepend the prefix (when one is configured) and hand off.
        _print: function (message) {
            var prefix = this.logPrefix ? this.logPrefix + " " : "";
            this._sysPrint(prefix + message);
        },

        // Lowest-level output hook; delegates to the environment's print.
        _sysPrint: function (message) {
            print(message);
        }
    };

    return logger;
});
| {
"content_hash": "5be27e67bbca353d872b16d64062c64f",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 85,
"avg_line_length": 22.51851851851852,
"alnum_prop": 0.40625,
"repo_name": "quantumlicht/collarbone",
"id": "5fdf7e15081ec800d2e6d4166546e9b29e55999d",
"size": "1416",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public/js/libs/rjs/build/jslib/logger.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1060737"
},
{
"name": "CoffeeScript",
"bytes": "96"
},
{
"name": "JavaScript",
"bytes": "5791376"
},
{
"name": "Python",
"bytes": "3630"
},
{
"name": "Ruby",
"bytes": "9730"
},
{
"name": "Shell",
"bytes": "2421"
}
],
"symlink_target": ""
} |
-- Select GROUP_USER_RELATIONSHIP rows matching both key columns.
-- Bind parameters (in order): 1) EMPLOYEE_ID, 2) SECTION_CODE.
SELECT * FROM GROUP_USER_RELATIONSHIP
WHERE
	 EMPLOYEE_ID = ?
	 AND SECTION_CODE = ?
;
| {
"content_hash": "49bdefa706de90ed83260b9214b49615",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 37,
"avg_line_length": 17.2,
"alnum_prop": 0.6976744186046512,
"repo_name": "support-project/knowledge",
"id": "781f5d0cc20e9e43e88d89a1a8e29cd5e32dfdf8",
"size": "86",
"binary": false,
"copies": "1",
"ref": "refs/heads/v1",
"path": "src/test/resources/org/support/project/ormapping/gen/dao/sql/GroupUserRelationshipDao/GroupUserRelationshipDao_physical_select_on_key.sql",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "31886"
},
{
"name": "Dockerfile",
"bytes": "255"
},
{
"name": "HTML",
"bytes": "11097"
},
{
"name": "Java",
"bytes": "6586273"
},
{
"name": "JavaScript",
"bytes": "220782"
},
{
"name": "Shell",
"bytes": "706"
}
],
"symlink_target": ""
} |
"""
A context manager to perform a series of tasks on a set of resources.
:class:`TaskManager` is a context manager, created on-demand to allow
synchronized access to a node and its resources.
The :class:`TaskManager` will, by default, acquire an exclusive lock on
a node for the duration that the TaskManager instance exists. You may
create a TaskManager instance without locking by passing "shared=True"
when creating it, but certain operations on the resources held by such
an instance of TaskManager will not be possible. Requiring this exclusive
lock guards against parallel operations interfering with each other.
A shared lock is useful when performing non-interfering operations,
such as validating the driver interfaces.
An exclusive lock is stored in the database to coordinate between
:class:`ironic.conductor.manager` instances, that are typically deployed on
different hosts.
:class:`TaskManager` methods, as well as driver methods, may be decorated to
determine whether their invocation requires an exclusive lock.
The TaskManager instance exposes certain node resources and properties as
attributes that you may access:
task.context
The context passed to TaskManager()
task.shared
False if Node is locked, True if it is not locked. (The
'shared' kwarg arg of TaskManager())
task.node
The Node object
task.ports
Ports belonging to the Node
task.driver
The Driver for the Node, or the Driver based on the
'driver_name' kwarg of TaskManager().
Example usage:
::
with task_manager.acquire(context, node_id, purpose='power on') as task:
task.driver.power.power_on(task.node)
If you need to execute task-requiring code in a background thread, the
TaskManager instance provides an interface to handle this for you, making
sure to release resources when the thread finishes (successfully or if
an exception occurs). Common use of this is within the Manager like so:
::
with task_manager.acquire(context, node_id, purpose='some work') as task:
<do some work>
task.spawn_after(self._spawn_worker,
utils.node_power_action, task, new_state)
All exceptions that occur in the current GreenThread as part of the
spawn handling are re-raised. You can specify a hook to execute custom
code when such exceptions occur. For example, the hook is a more elegant
solution than wrapping the "with task_manager.acquire()" with a
try..except block. (Note that this hook does not handle exceptions
raised in the background thread.):
::
def on_error(e):
if isinstance(e, Exception):
...
with task_manager.acquire(context, node_id, purpose='some work') as task:
<do some work>
task.set_spawn_error_hook(on_error)
task.spawn_after(self._spawn_worker,
utils.node_power_action, task, new_state)
"""
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import timeutils
import retrying
import six
from ironic.common import driver_factory
from ironic.common import exception
from ironic.common.i18n import _LW
from ironic.common import states
from ironic import objects
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
def require_exclusive_lock(f):
    """Decorator to require an exclusive lock.

    Decorated functions must take a :class:`TaskManager` as the first
    parameter. Decorated class methods should take a :class:`TaskManager`
    as the first parameter after "self".
    """
    @six.wraps(f)
    def wrapper(*args, **kwargs):
        # Unit tests frequently pass a Mock in place of a real TaskManager,
        # so locate the task argument defensively instead of assuming a
        # fixed position: prefer args[1] only when it really is a
        # TaskManager (i.e. the decorated callable is a bound method).
        task = args[0]
        if len(args) > 1 and isinstance(args[1], TaskManager):
            task = args[1]
        if task.shared:
            # An exclusive (non-shared) lock is mandatory for this call.
            raise exception.ExclusiveLockRequired()
        return f(*args, **kwargs)
    return wrapper
def acquire(context, node_id, shared=False, driver_name=None,
            purpose='unspecified action'):
    """Convenience wrapper that locks a node and returns a TaskManager.

    :param context: request context.
    :param node_id: ID or UUID of the node to lock.
    :param shared: take a shared lock when True, an exclusive one when
        False (the default).
    :param driver_name: name of the driver to load, or None to use the
        node's current driver.
    :param purpose: human-readable purpose recorded in debug logs.
    :returns: a new :class:`TaskManager` holding the requested lock.
    """
    lock_opts = {'shared': shared, 'driver_name': driver_name,
                 'purpose': purpose}
    return TaskManager(context, node_id, **lock_opts)
class TaskManager(object):
    """Context manager for tasks.
    This class wraps the locking, driver loading, and acquisition
    of related resources (eg, Node and Ports) when beginning a unit of work.
    """
    def __init__(self, context, node_id, shared=False, driver_name=None,
                 purpose='unspecified action'):
        """Create a new TaskManager.
        Acquire a lock on a node. The lock can be either shared or
        exclusive. Shared locks may be used for read-only or
        non-disruptive actions only, and must be considerate to what
        other threads may be doing on the same node at the same time.
        :param context: request context
        :param node_id: ID or UUID of node to lock.
        :param shared: Boolean indicating whether to take a shared or exclusive
                       lock. Default: False.
        :param driver_name: The name of the driver to load, if different
                            from the Node's current driver.
        :param purpose: human-readable purpose to put to debug logs.
        :raises: DriverNotFound
        :raises: NodeNotFound
        :raises: NodeLocked
        """
        self._spawn_method = None
        self._on_error_method = None
        self.context = context
        self.node = None
        self.node_id = node_id
        self.shared = shared
        # Each task gets its own copy of the provision state machine, so
        # advancing it below never affects other concurrent tasks.
        self.fsm = states.machine.copy()
        self._purpose = purpose
        self._debug_timer = timeutils.StopWatch()
        try:
            LOG.debug("Attempting to get %(type)s lock on node %(node)s (for "
                      "%(purpose)s)",
                      {'type': 'shared' if shared else 'exclusive',
                       'node': node_id, 'purpose': purpose})
            if not self.shared:
                # Exclusive: reserve the node in the DB (with retries) --
                # this also loads self.node as a side effect.
                self._lock()
            else:
                # Shared: no DB reservation, just load the node.
                self._debug_timer.restart()
                self.node = objects.Node.get(context, node_id)
            self.ports = objects.Port.list_by_node_id(context, self.node.id)
            self.driver = driver_factory.get_driver(driver_name or
                                                    self.node.driver)
            # NOTE(deva): this handles the Juno-era NOSTATE state
            #             and should be deleted after Kilo is released
            if self.node.provision_state is states.NOSTATE:
                self.node.provision_state = states.AVAILABLE
                self.node.save()
            self.fsm.initialize(start_state=self.node.provision_state,
                                target_state=self.node.target_provision_state)
        except Exception:
            # Any failure during setup must release whatever was acquired
            # before re-raising, or the node could stay locked forever.
            with excutils.save_and_reraise_exception():
                self.release_resources()
    def _lock(self):
        # Reserve the node in the database, retrying on NodeLocked, and
        # record how long the reservation took.
        self._debug_timer.restart()
        # NodeLocked exceptions can be annoying. Let's try to alleviate
        # some of that pain by retrying our lock attempts. The retrying
        # module expects a wait_fixed value in milliseconds.
        @retrying.retry(
            retry_on_exception=lambda e: isinstance(e, exception.NodeLocked),
            stop_max_attempt_number=CONF.conductor.node_locked_retry_attempts,
            wait_fixed=CONF.conductor.node_locked_retry_interval * 1000)
        def reserve_node():
            self.node = objects.Node.reserve(self.context, CONF.host,
                                             self.node_id)
            LOG.debug("Node %(node)s successfully reserved for %(purpose)s "
                      "(took %(time).2f seconds)",
                      {'node': self.node_id, 'purpose': self._purpose,
                       'time': self._debug_timer.elapsed()})
            self._debug_timer.restart()
        reserve_node()
    def upgrade_lock(self):
        """Upgrade a shared lock to an exclusive lock.
        Also reloads node object from the database.
        Does nothing if lock is already exclusive.
        """
        if self.shared:
            LOG.debug('Upgrading shared lock on node %(uuid)s for %(purpose)s '
                      'to an exclusive one (shared lock was held %(time).2f '
                      'seconds)',
                      {'uuid': self.node.uuid, 'purpose': self._purpose,
                       'time': self._debug_timer.elapsed()})
            self._lock()
            self.shared = False
    def spawn_after(self, _spawn_method, *args, **kwargs):
        """Call this to spawn a thread to complete the task.
        The specified method will be called when the TaskManager instance
        exits.
        :param _spawn_method: a method that returns a GreenThread object
        :param args: args passed to the method.
        :param kwargs: additional kwargs passed to the method.
        """
        # Only recorded here; the actual spawn happens in __exit__ when the
        # task completed without raising.
        self._spawn_method = _spawn_method
        self._spawn_args = args
        self._spawn_kwargs = kwargs
    def set_spawn_error_hook(self, _on_error_method, *args, **kwargs):
        """Create a hook to handle exceptions when spawning a task.
        Create a hook that gets called upon an exception being raised
        from spawning a background thread to do a task.
        :param _on_error_method: a callable object, its first parameter
            should accept the Exception object that was raised.
        :param args: additional args passed to the callable object.
        :param kwargs: additional kwargs passed to the callable object.
        """
        self._on_error_method = _on_error_method
        self._on_error_args = args
        self._on_error_kwargs = kwargs
    def release_resources(self):
        """Unlock a node and release resources.
        If an exclusive lock is held, unlock the node. Reset attributes
        to make it clear that this instance of TaskManager should no
        longer be accessed.
        """
        if not self.shared:
            try:
                if self.node:
                    objects.Node.release(self.context, CONF.host, self.node.id)
            except exception.NodeNotFound:
                # squelch the exception if the node was deleted
                # within the task's context.
                pass
        if self.node:
            LOG.debug("Successfully released %(type)s lock for %(purpose)s "
                      "on node %(node)s (lock was held %(time).2f sec)",
                      {'type': 'shared' if self.shared else 'exclusive',
                       'purpose': self._purpose, 'node': self.node.uuid,
                       'time': self._debug_timer.elapsed()})
        # Null out the resource references so later accidental use of this
        # task fails loudly instead of operating on stale objects.
        self.node = None
        self.driver = None
        self.ports = None
        self.fsm = None
    def _thread_release_resources(self, t):
        """Thread.link() callback to release resources."""
        self.release_resources()
    def process_event(self, event, callback=None, call_args=None,
                      call_kwargs=None, err_handler=None, target_state=None):
        """Process the given event for the task's current state.
        :param event: the name of the event to process
        :param callback: optional callback to invoke upon event transition
        :param call_args: optional \*args to pass to the callback method
        :param call_kwargs: optional \**kwargs to pass to the callback method
        :param err_handler: optional error handler to invoke if the
            callback fails, eg. because there are no workers available
            (err_handler should accept arguments node, prev_prov_state, and
            prev_target_state)
        :param target_state: if specified, the target provision state for the
            node. Otherwise, use the target state from the fsm
        :raises: InvalidState if the event is not allowed by the associated
                 state machine
        """
        # Advance the state model for the given event. Note that this doesn't
        # alter the node in any way. This may raise InvalidState, if this event
        # is not allowed in the current state.
        self.fsm.process_event(event, target_state=target_state)
        # stash current states in the error handler if callback is set,
        # in case we fail to get a worker from the pool
        if err_handler and callback:
            self.set_spawn_error_hook(err_handler, self.node,
                                      self.node.provision_state,
                                      self.node.target_provision_state)
        self.node.provision_state = self.fsm.current_state
        self.node.target_provision_state = self.fsm.target_state
        # set up the async worker
        if callback:
            # clear the error if we're going to start work in a callback
            self.node.last_error = None
            if call_args is None:
                call_args = ()
            if call_kwargs is None:
                call_kwargs = {}
            self.spawn_after(callback, *call_args, **call_kwargs)
        # publish the state transition by saving the Node
        self.node.save()
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        # Only spawn the deferred worker when the `with` body succeeded and
        # a spawn method was registered via spawn_after().
        if exc_type is None and self._spawn_method is not None:
            # Spawn a worker to complete the task
            # The linked callback below will be called whenever:
            #   - background task finished with no errors.
            #   - background task has crashed with exception.
            #   - callback was added after the background task has
            #     finished or crashed. While eventlet currently doesn't
            #     schedule the new thread until the current thread blocks
            #     for some reason, this is true.
            # All of the above are asserted in tests such that we'll
            # catch if eventlet ever changes this behavior.
            thread = None
            try:
                thread = self._spawn_method(*self._spawn_args,
                                            **self._spawn_kwargs)
                # NOTE(comstud): Trying to use a lambda here causes
                # the callback to not occur for some reason. This
                # also makes it easier to test.
                thread.link(self._thread_release_resources)
                # Don't unlock! The unlock will occur when the
                # thread finishes.
                return
            except Exception as e:
                with excutils.save_and_reraise_exception():
                    try:
                        # Execute the on_error hook if set
                        if self._on_error_method:
                            self._on_error_method(e, *self._on_error_args,
                                                  **self._on_error_kwargs)
                    except Exception:
                        LOG.warning(_LW("Task's on_error hook failed to "
                                        "call %(method)s on node %(node)s"),
                                    {'method': self._on_error_method.__name__,
                                     'node': self.node.uuid})
                    if thread is not None:
                        # This means the link() failed for some
                        # reason. Nuke the thread.
                        thread.cancel()
                        self.release_resources()
        self.release_resources()
| {
"content_hash": "d4de02c3a87165e48593d2e4df2e7723",
"timestamp": "",
"source": "github",
"line_count": 395,
"max_line_length": 79,
"avg_line_length": 40.549367088607596,
"alnum_prop": 0.605731410376475,
"repo_name": "hpproliant/ironic",
"id": "0cf24ee88ae598fd22de56c50097994089526b81",
"size": "16690",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ironic/conductor/task_manager.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "3716155"
}
],
"symlink_target": ""
} |
package de.matthiasmann.twl;
import de.matthiasmann.twl.utils.XMLParser;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmlpull.v1.XmlPullParserFactory;
import org.xmlpull.v1.XmlSerializer;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * An immutable InputMap class. It maps key strokes to action names.
*
* @author Matthias Mann
*/
public final class InputMap {
    private static final InputMap EMPTY_MAP = new InputMap(new KeyStroke[0]);
    private final KeyStroke[] keyStrokes;
    /**
     * Creates an InputMap over the given key strokes.
     *
     * NOTE(review): the array is stored without a defensive copy (unlike
     * {@link #getKeyStrokes()}, which clones) — callers must not mutate it
     * after construction.
     *
     * @param keyStrokes the key strokes backing this map
     */
    public InputMap(KeyStroke[] keyStrokes) {
        this.keyStrokes = keyStrokes;
    }
    /**
     * Maps the given key event to an action.
     * @param event the key event
     * @return the action or null if no mapping was found
     */
    public String mapEvent(Event event) {
        if(event.isKeyEvent()) {
            // Convert the event's modifiers once, then test each stroke.
            int mappedEventModifiers = KeyStroke.convertModifier(event);
            for(KeyStroke ks : keyStrokes) {
                if(ks.match(event, mappedEventModifiers)) {
                    return ks.getAction();
                }
            }
        }
        return null;
    }
    /**
     * Creates a new InputMap containing both the current and the new KeyStrokes.
     * If the new key strokes contain already mapped key strokes then the new mappings will replace the old mappings.
     *
     * @param newKeyStrokes the new key strokes.
     * @return the InputMap containing the resulting mapping
     */
    public InputMap addKeyStrokes(LinkedHashSet<KeyStroke> newKeyStrokes) {
        int size = newKeyStrokes.size();
        if(size == 0) {
            return this;
        }
        // New strokes go first; old strokes are appended only when not
        // overridden, so new mappings win over old ones.
        KeyStroke[] combined = new KeyStroke[keyStrokes.length + size];
        newKeyStrokes.toArray(combined); // copy new key strokes
        for(KeyStroke ks : keyStrokes) {
            if(!newKeyStrokes.contains(ks)) { // append old ones if they have not been replaced
                combined[size++] = ks;
            }
        }
        return new InputMap(shrink(combined, size));
    }
    /**
     * Creates a new InputMap containing both the current and the new KeyStrokes from another InputMap.
     * If the new key strokes contain already mapped key strokes then the new mappings will replace the old mappings.
     *
     * @param map the other InputMap containing the new key strokes.
     * @return the InputMap containing the resulting mapping
     */
    public InputMap addKeyStrokes(InputMap map) {
        if(map == this || map.keyStrokes.length == 0) {
            return this;
        }
        if(keyStrokes.length == 0) {
            return map;
        }
        return addKeyStrokes(new LinkedHashSet<KeyStroke>(Arrays.asList(map.keyStrokes)));
    }
    /**
     *
     * Creates a new InputMap containing both the current and the new KeyStroke.
     * If the specified key stroke is already mapped then the new mapping will replace the old mapping.
     *
     * @param keyStroke the new key stroke.
     * @return the InputMap containing the resulting mapping
     */
    public InputMap addKeyStroke(KeyStroke keyStroke) {
        LinkedHashSet<KeyStroke> newKeyStrokes = new LinkedHashSet<KeyStroke>(1, 1.0f);
        newKeyStrokes.add(keyStroke);
        return addKeyStrokes(newKeyStrokes);
    }
    /**
     * Remove key strokes from this mapping
     *
     * @param keyStrokes the key strokes to remove
     * @return the InputMap containing the resulting mapping
     */
    public InputMap removeKeyStrokes(Set<KeyStroke> keyStrokes) {
        if(keyStrokes.isEmpty()) {
            return this;
        }
        int size = 0;
        KeyStroke[] result = new KeyStroke[this.keyStrokes.length];
        for(KeyStroke ks : this.keyStrokes) {
            if(!keyStrokes.contains(ks)) { // append old ones if it has not been removed
                result[size++] = ks;
            }
        }
        return new InputMap(shrink(result, size));
    }
    /**
     * Returns all key strokes in this InputMap.
     * @return all key strokes in this InputMap.
     */
    public KeyStroke[] getKeyStrokes() {
        return keyStrokes.clone();
    }
    /**
     * Returns an empty input mapping
     * @return an empty input mapping
     */
    public static InputMap empty() {
        return EMPTY_MAP;
    }
    /**
     * Parses a stand-alone {@code <inputMapDef>} XML file
     *
     * @param url the URL to the XML file
     * @return the parsed key strokes
     * @throws IOException if an IO related error occurred
     */
    public static InputMap parse(URL url) throws IOException {
        try {
            XMLParser xmlp = new XMLParser(url);
            try {
                xmlp.require(XmlPullParser.START_DOCUMENT, null, null);
                xmlp.nextTag();
                xmlp.require(XmlPullParser.START_TAG, null, "inputMapDef");
                xmlp.nextTag();
                LinkedHashSet<KeyStroke> keyStrokes = parseBody(xmlp);
                xmlp.require(XmlPullParser.END_TAG, null, "inputMapDef");
                return new InputMap(keyStrokes.toArray(new KeyStroke[keyStrokes.size()]));
            } finally {
                xmlp.close();
            }
        } catch(XmlPullParserException ex) {
            // Re-wrap as IOException to keep the public API XML-pull-free.
            throw (IOException)(new IOException("Can't parse XML").initCause(ex));
        }
    }
    /**
     * Writes this input map into a XML file which can be parsed by {@link #parse(java.net.URL)}.
     * The encoding is UTF8
     *
     * @param os the output where the XML will be written to
     * @throws IOException if an IO error occurred
     * @see #parse(java.net.URL)
     */
    public void writeXML(OutputStream os) throws IOException {
        try {
            XmlPullParserFactory factory = XmlPullParserFactory.newInstance();
            XmlSerializer serializer = factory.newSerializer();
            serializer.setOutput(os, "UTF8");
            serializer.startDocument("UTF8", Boolean.TRUE);
            serializer.text("\n");
            serializer.startTag(null, "inputMapDef");
            for(KeyStroke ks : keyStrokes) {
                serializer.text("\n ");
                serializer.startTag(null, "action");
                serializer.attribute(null, "name", ks.getAction());
                serializer.text(ks.getStroke());
                serializer.endTag(null, "action");
            }
            serializer.text("\n");
            serializer.endTag(null, "inputMapDef");
            serializer.endDocument();
        } catch(XmlPullParserException ex) {
            throw (IOException)(new IOException("Can't generate XML").initCause(ex));
        }
    }
    /**
     * Parses the child elements of the current XML tag as input map.
     * This method is only public so that it can be called from ThemeManager.
     *
     * @param xmlp the XML parser
     * @return the found key strokes
     * @throws XmlPullParserException if a parser error occurred
     * @throws IOException if an IO error occurred
     */
    public static LinkedHashSet<KeyStroke> parseBody(XMLParser xmlp) throws XmlPullParserException, IOException {
        LinkedHashSet<KeyStroke> newStrokes = new LinkedHashSet<KeyStroke>();
        while(!xmlp.isEndTag()) {
            xmlp.require(XmlPullParser.START_TAG, null, "action");
            String name = xmlp.getAttributeNotNull("name");
            String key = xmlp.nextText();
            try {
                KeyStroke ks = KeyStroke.parse(key, name);
                if(!newStrokes.add(ks)) {
                    // Duplicate strokes are kept out of the set but only logged,
                    // not treated as a parse error.
                    Logger.getLogger(InputMap.class.getName()).log(Level.WARNING, "Duplicate key stroke: {0}", ks.getStroke());
                }
            } catch (IllegalArgumentException ex) {
                throw xmlp.error("can't parse Keystroke", ex);
            }
            xmlp.require(XmlPullParser.END_TAG, null, "action");
            xmlp.nextTag();
        }
        return newStrokes;
    }
    // Trims the backing array to `size` entries; returns the original array
    // unchanged when it is already the right length.
    private static KeyStroke[] shrink(KeyStroke[] keyStrokes, int size) {
        if(size != keyStrokes.length) {
            KeyStroke[] tmp = new KeyStroke[size];
            System.arraycopy(keyStrokes, 0, tmp, 0, size);
            keyStrokes = tmp;
        }
        return keyStrokes;
    }
}
| {
"content_hash": "361c00272a59ddd7853871e534035200",
"timestamp": "",
"source": "github",
"line_count": 238,
"max_line_length": 127,
"avg_line_length": 35.45798319327731,
"alnum_prop": 0.6080104277758028,
"repo_name": "ColaMachine/MyBlock",
"id": "a3bc0878e787c5f7587b3f6f13e2768354566b8c",
"size": "10041",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/de/matthiasmann/twl/InputMap.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "86"
},
{
"name": "GLSL",
"bytes": "72019"
},
{
"name": "HTML",
"bytes": "10204"
},
{
"name": "Java",
"bytes": "6785515"
},
{
"name": "Lex",
"bytes": "4243"
},
{
"name": "TeX",
"bytes": "19738"
}
],
"symlink_target": ""
} |
// KeyEventArgs.cs
#region Using Statements
using System;
using Microsoft.Xna.Framework.Input;
#endregion
namespace Sandbox.Events
{
    /// <summary>
    /// Input event arguments carrying the keyboard key that triggered the event.
    /// </summary>
    public class KeyEventArgs : InputEventArgs
    {
        /// <summary>The XNA keyboard key associated with this event.</summary>
        public Keys Key { get; set; }
        /// <summary>
        /// Creates key event arguments stamped with the given time.
        /// </summary>
        /// <param name="p_Timestamp">Timestamp forwarded to the base event args.</param>
        public KeyEventArgs(TimeSpan p_Timestamp) : base(p_Timestamp) { }
    }
}
"content_hash": "007be2b149f01208769e77ee86d286e7",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 73,
"avg_line_length": 17.333333333333332,
"alnum_prop": 0.6858974358974359,
"repo_name": "jasonwnorris/Sandbox",
"id": "8e3c32539498fc93bf8b01732ea620715762569f",
"size": "314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Sandbox/Source/Events/KeyEventArgs.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "195652"
}
],
"symlink_target": ""
} |
CTL_CODE( DEVICE_FILE_TYPE, 0x900, METHOD_BUFFERED, FILE_READ_ACCESS )
// IOCTL code for reading a model-specific register (MSR) from the driver.
#define IOCTL_READ_MSR \
	CTL_CODE( DEVICE_FILE_TYPE, 0x901, METHOD_BUFFERED, FILE_READ_ACCESS )
// Fix: ISO C forbids extra tokens after #endif ("extra tokens at end of
// #endif directive" under GCC); the guard name must be a comment.
#endif // IOCTLCODES_H
| {
"content_hash": "b8511785138db3a5746bebd09ff57991",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 74,
"avg_line_length": 28.285714285714285,
"alnum_prop": 0.7171717171717171,
"repo_name": "BerntA/tfo-code",
"id": "a8f50d0b6a100cc40ebf1a59132a09438ad9ad05",
"size": "864",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "public/tier0/IOCTLCodes.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "238"
},
{
"name": "Batchfile",
"bytes": "9897"
},
{
"name": "C",
"bytes": "1255315"
},
{
"name": "C++",
"bytes": "39642849"
},
{
"name": "GLSL",
"bytes": "126492"
},
{
"name": "Makefile",
"bytes": "28908"
},
{
"name": "Objective-C",
"bytes": "72895"
},
{
"name": "Objective-C++",
"bytes": "369"
},
{
"name": "Perl",
"bytes": "93035"
},
{
"name": "Perl 6",
"bytes": "1820"
},
{
"name": "Shell",
"bytes": "1362"
}
],
"symlink_target": ""
} |
package com.netflix.spinnaker.clouddriver.kubernetes.artifact;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.spi.json.JacksonJsonNodeJsonProvider;
import com.jayway.jsonpath.spi.mapper.JacksonMappingProvider;
import com.netflix.spinnaker.clouddriver.kubernetes.description.manifest.KubernetesManifest;
import com.netflix.spinnaker.kork.artifacts.model.Artifact;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Collection;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.ParametersAreNonnullByDefault;
import lombok.Value;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Applies a set of {@link Replacer}s to a Kubernetes manifest: binds matching
 * artifacts into the manifest's JSON document and reports which artifacts were
 * replaced, or scans a manifest for the artifacts it references.
 */
@ParametersAreNonnullByDefault
public class ArtifactReplacer {
  private static final Logger log = LoggerFactory.getLogger(ArtifactReplacer.class);
  private static final ObjectMapper mapper = new ObjectMapper();
  // JsonPath configured to operate directly on Jackson JsonNode trees, so the
  // manifest can round-trip through ObjectMapper without conversion loss.
  private static final Configuration configuration =
      Configuration.builder()
          .jsonProvider(new JacksonJsonNodeJsonProvider())
          .mappingProvider(new JacksonMappingProvider())
          .build();
  private final ImmutableList<Replacer> replacers;
  public ArtifactReplacer(Collection<Replacer> replacers) {
    this.replacers = ImmutableList.copyOf(replacers);
  }
  // Keeps artifacts that have a non-empty type and are either non-Kubernetes
  // or match both the target namespace and account.
  private static ImmutableList<Artifact> filterArtifacts(
      @Nonnull String namespace, @Nonnull String account, List<Artifact> artifacts) {
    return artifacts.stream()
        .filter(a -> !Strings.isNullOrEmpty(a.getType()))
        .filter(nonKubernetes().or(namespaceMatches(namespace).and(accountMatches(account))))
        .collect(toImmutableList());
  }
  private static Predicate<Artifact> nonKubernetes() {
    return a -> !a.getType().startsWith("kubernetes/");
  }
  private static Predicate<Artifact> namespaceMatches(@Nonnull String namespace) {
    return a -> Strings.nullToEmpty(a.getLocation()).equals(namespace);
  }
  private static Predicate<Artifact> accountMatches(@Nonnull String account) {
    return a -> {
      String artifactAccount = Strings.nullToEmpty((String) a.getMetadata("account"));
      // If the artifact fails to provide an account, assume this was unintentional and match
      // anyways
      return artifactAccount.isEmpty() || artifactAccount.equals(account);
    };
  }
  /**
   * Runs every configured replacer over the manifest and returns the rewritten
   * manifest together with the set of artifacts that were actually bound.
   */
  @Nonnull
  public ReplaceResult replaceAll(
      String dockerImageBinding,
      KubernetesManifest input,
      List<Artifact> artifacts,
      @Nonnull String namespace,
      @Nonnull String account) {
    log.debug("Doing replacement on {} using {}", input, artifacts);
    DocumentContext document;
    try {
      document = JsonPath.using(configuration).parse(mapper.writeValueAsString(input));
    } catch (JsonProcessingException e) {
      throw new UncheckedIOException("Malformed manifest", e);
    }
    ImmutableList<Artifact> filteredArtifacts = filterArtifacts(namespace, account, artifacts);
    ImmutableSet.Builder<Artifact> replacedArtifacts = ImmutableSet.builder();
    for (Replacer replacer : replacers) {
      // Replacers mutate `document` in place; collect what each one bound.
      ImmutableCollection<Artifact> replaced =
          replacer.replaceArtifacts(dockerImageBinding, document, filteredArtifacts);
      replacedArtifacts.addAll(replaced);
    }
    try {
      return new ReplaceResult(
          mapper.readValue(document.jsonString(), KubernetesManifest.class),
          replacedArtifacts.build());
    } catch (IOException e) {
      throw new UncheckedIOException("Malformed manifest", e);
    }
  }
  /**
   * Returns every artifact any replacer can find in the manifest. Replacers
   * that fail on a partially-defined manifest are skipped, not fatal.
   */
  @Nonnull
  public ImmutableSet<Artifact> findAll(KubernetesManifest input) {
    DocumentContext document;
    try {
      document = JsonPath.using(configuration).parse(mapper.writeValueAsString(input));
    } catch (JsonProcessingException e) {
      throw new UncheckedIOException("Malformed manifest", e);
    }
    return replacers.stream()
        .flatMap(
            r -> {
              try {
                return r.getArtifacts(document);
              } catch (Exception e) {
                // This happens when a manifest isn't fully defined (e.g. not all properties are
                // there)
                log.debug(
                    "Failure converting artifacts for {} using {} (skipping)",
                    input.getFullResourceName(),
                    r,
                    e);
                return Stream.empty();
              }
            })
        .collect(toImmutableSet());
  }
  /** Immutable pair of the rewritten manifest and the artifacts bound into it. */
  @Value
  public static class ReplaceResult {
    private final KubernetesManifest manifest;
    private final ImmutableSet<Artifact> boundArtifacts;
  }
}
| {
"content_hash": "64e7440d579963db74a6dd655734db8e",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 96,
"avg_line_length": 37.093525179856115,
"alnum_prop": 0.7174166020170675,
"repo_name": "spinnaker/clouddriver",
"id": "f93273d57327f400e63a30feb8b091b7d3652225",
"size": "5752",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clouddriver-kubernetes/src/main/java/com/netflix/spinnaker/clouddriver/kubernetes/artifact/ArtifactReplacer.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groovy",
"bytes": "7641380"
},
{
"name": "Java",
"bytes": "7248003"
},
{
"name": "Kotlin",
"bytes": "282069"
},
{
"name": "Shell",
"bytes": "3066"
},
{
"name": "Slim",
"bytes": "2423"
}
],
"symlink_target": ""
} |
import { ModuleWithProviders } from "@angular/core";
import { Routes, RouterModule } from "@angular/router";
import { ChangePasswordComponent } from "./change-password.component";
import { AuthGuard } from "../auth-guard.service";
// Route table for the change-password feature; AuthGuard (via canActivate)
// restricts the screen to authenticated users.
const changePasswordRoutes: Routes = [
    { path: "changePassword", component: ChangePasswordComponent, canActivate: [AuthGuard] },
];
// Child routing module for this feature, registered with RouterModule.forChild.
export const changePasswordRouting: ModuleWithProviders = RouterModule.forChild(changePasswordRoutes);
| {
"content_hash": "476f8748d31cf71cb7bf7051be9e9efa",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 102,
"avg_line_length": 47.1,
"alnum_prop": 0.772823779193206,
"repo_name": "racketometer/frontend-application",
"id": "a4930d5bbb319196368910b5ce02c9c98c45f7be",
"size": "471",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/change-password/change-password.routing.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6746"
},
{
"name": "HTML",
"bytes": "14063"
},
{
"name": "JavaScript",
"bytes": "2276"
},
{
"name": "Shell",
"bytes": "309"
},
{
"name": "TypeScript",
"bytes": "62940"
}
],
"symlink_target": ""
} |
package org.apache.camel.component.schematron;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.transform.Templates;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.sax.SAXSource;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import net.sf.saxon.TransformerFactoryImpl;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.component.schematron.constant.Constants;
import org.apache.camel.component.schematron.processor.ClassPathURIResolver;
import org.apache.camel.component.schematron.processor.TemplatesFactory;
import org.apache.camel.support.DefaultExchange;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Schematron Producer Unit Test.
*/
public class SchematronProducerTest extends CamelTestSupport {
    private static SchematronProducer producer;
    @BeforeClass
    public static void setUP() {
        // Build one shared producer whose endpoint holds rules pre-compiled
        // from the classpath resource sch/schematron-1.sch, resolved via the
        // Schematron templates root directory.
        SchematronEndpoint endpoint = new SchematronEndpoint();
        TransformerFactory fac = new TransformerFactoryImpl();
        fac.setURIResolver(new ClassPathURIResolver(Constants.SCHEMATRON_TEMPLATES_ROOT_DIR, endpoint.getUriResolver()));
        Templates templates = TemplatesFactory.newInstance().getTemplates(ClassLoader.getSystemResourceAsStream("sch/schematron-1.sch"), fac);
        endpoint.setRules(templates);
        producer = new SchematronProducer(endpoint);
    }
    // Valid XML payload (as stream) must yield a SUCCESS validation status header.
    @Test
    public void testProcessValidXML() throws Exception {
        Exchange exc = new DefaultExchange(context, ExchangePattern.InOut);
        exc.getIn().setBody(ClassLoader.getSystemResourceAsStream("xml/article-1.xml"));
        // process xml payload
        producer.process(exc);
        // assert
        assertTrue(exc.getOut().getHeader(Constants.VALIDATION_STATUS).equals(Constants.SUCCESS));
    }
    // Invalid XML payload (as stream) must yield a FAILED validation status header.
    @Test
    public void testProcessInValidXML() throws Exception {
        Exchange exc = new DefaultExchange(context, ExchangePattern.InOut);
        exc.getIn().setBody(ClassLoader.getSystemResourceAsStream("xml/article-2.xml"));
        // process xml payload
        producer.process(exc);
        // assert
        assertTrue(exc.getOut().getHeader(Constants.VALIDATION_STATUS).equals(Constants.FAILED));
    }
    // Same valid document, but supplied as a SAXSource instead of a stream.
    @Test
    public void testProcessValidXMLAsSource() throws Exception {
        Exchange exc = new DefaultExchange(context, ExchangePattern.InOut);
        exc.getIn().setBody(new SAXSource(getXMLReader(), new InputSource(ClassLoader.getSystemResourceAsStream("xml/article-1.xml"))));
        // process xml payload
        producer.process(exc);
        // assert
        assertTrue(exc.getOut().getHeader(Constants.VALIDATION_STATUS).equals(Constants.SUCCESS));
    }
    // Same invalid document, supplied as a SAXSource.
    @Test
    public void testProcessInValidXMLAsSource() throws Exception {
        Exchange exc = new DefaultExchange(context, ExchangePattern.InOut);
        exc.getIn().setBody(new SAXSource(getXMLReader(), new InputSource(ClassLoader.getSystemResourceAsStream("xml/article-2.xml"))));
        // process xml payload
        producer.process(exc);
        // assert
        assertTrue(exc.getOut().getHeader(Constants.VALIDATION_STATUS).equals(Constants.FAILED));
    }
    // Non-validating SAX reader used to wrap test documents in a SAXSource.
    // NOTE(review): external entity resolution is not disabled here; that is
    // fine for trusted classpath test resources but should be confirmed
    // before reusing this helper on untrusted input.
    private static XMLReader getXMLReader() throws ParserConfigurationException, SAXException {
        final SAXParserFactory fac = SAXParserFactory.newInstance();
        fac.setValidating(false);
        final SAXParser parser = fac.newSAXParser();
        XMLReader reader = parser.getXMLReader();
        return reader;
    }
}
| {
"content_hash": "dbea1c11ba975b59fd670deddf4dbd21",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 142,
"avg_line_length": 36.86138613861386,
"alnum_prop": 0.7373086220789685,
"repo_name": "Fabryprog/camel",
"id": "80938aed45c18bae28d7bda7b1484a0a567b2106",
"size": "4525",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "components/camel-schematron/src/test/java/org/apache/camel/component/schematron/SchematronProducerTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Apex",
"bytes": "6521"
},
{
"name": "Batchfile",
"bytes": "2353"
},
{
"name": "CSS",
"bytes": "17204"
},
{
"name": "Elm",
"bytes": "10852"
},
{
"name": "FreeMarker",
"bytes": "8015"
},
{
"name": "Groovy",
"bytes": "14479"
},
{
"name": "HTML",
"bytes": "909437"
},
{
"name": "Java",
"bytes": "82182194"
},
{
"name": "JavaScript",
"bytes": "102432"
},
{
"name": "Makefile",
"bytes": "513"
},
{
"name": "Shell",
"bytes": "17240"
},
{
"name": "TSQL",
"bytes": "28835"
},
{
"name": "Tcl",
"bytes": "4974"
},
{
"name": "Thrift",
"bytes": "6979"
},
{
"name": "XQuery",
"bytes": "546"
},
{
"name": "XSLT",
"bytes": "271473"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "f7d424d42ae3dbc78f54d65ee7b13dec",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.307692307692308,
"alnum_prop": 0.6940298507462687,
"repo_name": "mdoering/backbone",
"id": "34a4e9c21ca0840a72acbbaf680bd622856c5634",
"size": "177",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Protozoa/Granuloreticulosea/Foraminiferida/Caucasinidae/Fursenkoina/Fursenkoina pontoni/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_232) on Tue Sep 15 08:52:40 UTC 2020 -->
<title>NonNullFields (Spring Framework 5.1.18.RELEASE API)</title>
<meta name="date" content="2020-09-15">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="NonNullFields (Spring Framework 5.1.18.RELEASE API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/NonNullFields.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">Spring Framework</div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../org/springframework/lang/NonNullApi.html" title="annotation in org.springframework.lang"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../org/springframework/lang/Nullable.html" title="annotation in org.springframework.lang"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/springframework/lang/NonNullFields.html" target="_top">Frames</a></li>
<li><a href="NonNullFields.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Field | </li>
<li>Required | </li>
<li>Optional</li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Element</li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">org.springframework.lang</div>
<h2 title="Annotation Type NonNullFields" class="title">Annotation Type NonNullFields</h2>
</div>
<div class="contentContainer">
<div class="description">
<ul class="blockList">
<li class="blockList">
<hr>
<br>
<pre><a href="https://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Target.html?is-external=true" title="class or interface in java.lang.annotation">@Target</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Target.html?is-external=true#value--" title="class or interface in java.lang.annotation">value</a>=<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/annotation/ElementType.html?is-external=true#PACKAGE" title="class or interface in java.lang.annotation">PACKAGE</a>)
<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Retention.html?is-external=true" title="class or interface in java.lang.annotation">@Retention</a>(<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Retention.html?is-external=true#value--" title="class or interface in java.lang.annotation">value</a>=<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/annotation/RetentionPolicy.html?is-external=true#RUNTIME" title="class or interface in java.lang.annotation">RUNTIME</a>)
<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Documented.html?is-external=true" title="class or interface in java.lang.annotation">@Documented</a>
<a href="https://docs.oracle.com/javase/8/docs/api/javax/annotation/Nonnull.html?is-external=true" title="class or interface in javax.annotation">@Nonnull</a>
<a href="https://docs.oracle.com/javase/8/docs/api/javax/annotation/meta.TypeQualifierDefault.html?is-external=true" title="class or interface in javax.annotation">@TypeQualifierDefault</a>(value=<a href="https://docs.oracle.com/javase/8/docs/api/java/lang/annotation/ElementType.html?is-external=true#FIELD" title="class or interface in java.lang.annotation">FIELD</a>)
public @interface <span class="memberNameLabel">NonNullFields</span></pre>
<div class="block">A common Spring annotation to declare that fields are to be considered as
non-nullable by default for a given package.
<p>Leverages JSR-305 meta-annotations to indicate nullability in Java to common
tools with JSR-305 support and used by Kotlin to infer nullability of Spring API.
<p>Should be used at package level in association with <a href="../../../org/springframework/lang/Nullable.html" title="annotation in org.springframework.lang"><code>Nullable</code></a>
annotations at field level.</div>
<dl>
<dt><span class="simpleTagLabel">Since:</span></dt>
<dd>5.0</dd>
<dt><span class="simpleTagLabel">Author:</span></dt>
<dd>Sebastien Deleuze</dd>
<dt><span class="seeLabel">See Also:</span></dt>
<dd><a href="../../../org/springframework/lang/NonNullApi.html" title="annotation in org.springframework.lang"><code>NonNullApi</code></a>,
<a href="../../../org/springframework/lang/Nullable.html" title="annotation in org.springframework.lang"><code>Nullable</code></a>,
<a href="../../../org/springframework/lang/NonNull.html" title="annotation in org.springframework.lang"><code>NonNull</code></a></dd>
</dl>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/NonNullFields.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">Spring Framework</div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../org/springframework/lang/NonNullApi.html" title="annotation in org.springframework.lang"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../org/springframework/lang/Nullable.html" title="annotation in org.springframework.lang"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/springframework/lang/NonNullFields.html" target="_top">Frames</a></li>
<li><a href="NonNullFields.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Field | </li>
<li>Required | </li>
<li>Optional</li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Element</li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| {
"content_hash": "c0eb094eaa7c91ae9a28f457ec4ab0fb",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 528,
"avg_line_length": 45,
"alnum_prop": 0.6761229314420804,
"repo_name": "akhr/java",
"id": "22059e855db333dd62b26b5892f3f51aa127b1fd",
"size": "8460",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Spring/jars/spring-framework-5.1.18.RELEASE/docs/javadoc-api/org/springframework/lang/NonNullFields.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "93017"
},
{
"name": "HTML",
"bytes": "203648040"
},
{
"name": "Java",
"bytes": "1237949"
},
{
"name": "JavaScript",
"bytes": "827"
},
{
"name": "Shell",
"bytes": "59"
}
],
"symlink_target": ""
} |
@extends('layouts.master')
@section('title')
Chocoburbujas: Estetica Canina, Boutique y Veterinaria
@endsection
@section('content')
{{-- Customer login screen: left panel links to registration; right panel holds the sign-in form. --}}
<!--Esta pantalla es de login-->
<div class="men">
	<div class="container">
		<div class="register">
			<div class="col-md-6 login-left">
				<h3>Nuevos Clientes</h3>
				{{-- TODO: "nostros" in the strings below looks like a typo for "nosotros"; runtime text, left unchanged here. --}}
				<p>Para que pueda comprar nuestros productos. Por favor rellene nuestro formulario y ¡afiliese con nostros! </p>
				<a class="acount-btn" href="{{route('cliente.register')}}">Crear una cuenta</a>
			</div>
			<div class="col-md-6 login-right">
				<h3>Clientes Registrados</h3>
				<p>Si tienes una cuenta con nostros, inicia sesión.</p>
				{{-- Validation errors flashed back by the login controller, if any. --}}
				@if(count($errors)>0)
					<div class="alert alert-danger">
						@foreach($errors-> all() as $error)
							<p>{{$error}}</p>
						@endforeach
					</div>
				@endif
				{{-- NOTE(review): the form has no action/method and the submit link (#btnLogin) sits outside it;
				     submission is presumably handled by js/loginC.js (loaded in the scripts section) — confirm. --}}
				<form>
					{!! csrf_field()!!}
					<div>
						<span>Correo Electronico o Usuario<label>*</label></span>
						<input type="email" name="email" id="email">
					</div>
					<div>
						<span>Contraseña<label>*</label></span>
						<input type="password" name="password" id="password">
					</div>
					<a class="forgot" href="#">¿Olvidaste tu contraseña?</a>
					<br>
				</form>
				<a href="#" class="acount-btn" id="btnLogin">Iniciar Sesión</a>
			</div>
			<div class="clearfix"> </div>
		</div>
	</div>
</div>
@endsection
@section('scripts')
{{-- Client-side login handler; wires up #btnLogin. --}}
<script type="text/javascript" src="{{asset('js/loginC.js')}}"></script>
@endsection
"content_hash": "c8022478b518f3fae111640154981d85",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 118,
"avg_line_length": 30.22,
"alnum_prop": 0.5916611515552614,
"repo_name": "SoftTecnologias/chocoburbujas",
"id": "0735439f354111e64054cdc50905a0bac6898ca6",
"size": "1517",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resources/views/cliente/login.blade.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2075354"
},
{
"name": "JavaScript",
"bytes": "2400368"
},
{
"name": "PHP",
"bytes": "297153"
}
],
"symlink_target": ""
} |
package com.nesscomputing.amqp.rabbitmq;
import org.junit.Ignore;
import com.nesscomputing.amqp.AbstractTestStrangeStuff;
import com.nesscomputing.amqp.AmqpProvider;
import com.nesscomputing.amqp.RabbitMQProvider;
/**
 * Runs the shared "strange stuff" AMQP test suite against the RabbitMQ provider.
 *
 * <p>Currently {@code @Ignore}d, so JUnit skips the whole class
 * (presumably because it needs a live broker — TODO confirm).
 */
@Ignore
public class TestStrangeStuffRabbitMQ extends AbstractTestStrangeStuff
{
    /** Provider under test; {@code final} because it is never reassigned. */
    private final AmqpProvider provider = new RabbitMQProvider();

    /** @return the RabbitMQ provider exercised by the inherited tests. */
    @Override
    protected AmqpProvider getProvider()
    {
        return provider;
    }
}
| {
"content_hash": "159b5e21819dbb52948661e5d69ba413",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 70,
"avg_line_length": 22.7,
"alnum_prop": 0.7863436123348018,
"repo_name": "NessComputing/components-ness-amqp",
"id": "63adad6505782fa028ba7bff38df7897b4bce462",
"size": "1058",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java/com/nesscomputing/amqp/rabbitmq/TestStrangeStuffRabbitMQ.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "100378"
}
],
"symlink_target": ""
} |
# Rails application template for tog_vault: installs the plugins it depends
# on, exposes the plugin's routes, then generates and applies migrations and
# copies the plugin's static resources into the app.
plugin 'fckeditor', :git => "git://github.com/molpe/fckeditor.git"
plugin 'acts_as_tree', :git => "git://github.com/rails/acts_as_tree.git"
plugin 'tog_vault', :git => "git://github.com/tog/tog_vault.git"
# Mount the routes shipped inside the tog_vault plugin.
route "map.routes_from_plugin 'tog_vault'"
# Refresh tog migrations, run them, then copy the plugin's assets.
generate "update_tog_migration"
rake "db:migrate"
rake "tog:plugins:copy_resources PLUGIN=tog_vault"
| {
"content_hash": "39111c4c0a1669ea949432d42a7fd74f",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 72,
"avg_line_length": 32.09090909090909,
"alnum_prop": 0.7110481586402266,
"repo_name": "tog/tog_vault",
"id": "fa1238c5419677575cf8f162de49cc2f7fd73622",
"size": "353",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tog_vault_template.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "5335"
},
{
"name": "Ruby",
"bytes": "15587"
}
],
"symlink_target": ""
} |
<?php
require_once("base/Problems_Tags.dao.base.php");
require_once("base/Problems_Tags.vo.base.php");
/** Page-level DocBlock .
*
* @author alanboy
* @package docs
*
*/
/** ProblemsTags Data Access Object (DAO).
  *
  * This class contains all the database manipulation needed to permanently
  * store and retrieve instances of {@link ProblemsTags } objects.
  * @author alanboy
  * @access public
  * @package docs
  *
  */
class ProblemsTagsDAO extends ProblemsTagsDAOBase
{
	/**
	 * Fetch the tags attached to a problem, with their public flag.
	 *
	 * @param Problems $problem     problem whose tags are listed (uses problem_id)
	 * @param mixed    $public_only when truthy, restricts rows to tags marked public
	 * @return array rows with keys 'name' and 'public' (as returned by $conn->GetAll)
	 */
	public static function getProblemTags(Problems $problem, $public_only) {
		$sql = '
			SELECT
				t.name, pt.public
			FROM
				Problems_Tags pt
			INNER JOIN
				Tags t on t.tag_id = pt.tag_id
			WHERE
				pt.problem_id = ?';
		$params = array($problem->problem_id);
		if ($public_only) {
			// Narrow to publicly visible tags only.
			$sql .= ' AND pt.public = 1';
		}
		$sql .= ';';
		// $conn is the ADOdb-style connection shared via a global (project convention).
		global $conn;
		return $conn->GetAll($sql, $params);
	}
}
| {
"content_hash": "448ae3b43550143832002104b617bb7c",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 91,
"avg_line_length": 22.19047619047619,
"alnum_prop": 0.6469957081545065,
"repo_name": "rendon/omegaup",
"id": "c27774ed142e3d909d40c181f832a519d280fb47",
"size": "932",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "frontend/server/libs/dao/Problems_Tags.dao.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "3315"
},
{
"name": "CSS",
"bytes": "116353"
},
{
"name": "JavaScript",
"bytes": "740448"
},
{
"name": "Makefile",
"bytes": "630"
},
{
"name": "PHP",
"bytes": "2610860"
},
{
"name": "Python",
"bytes": "31491"
},
{
"name": "Ruby",
"bytes": "1011"
},
{
"name": "Scala",
"bytes": "8896"
},
{
"name": "Shell",
"bytes": "25379"
}
],
"symlink_target": ""
} |
package com.hutong.supersdk.sdk.modeltools.efan;
/**
 * Simple holder for the EFun SDK configuration values: the application key
 * and the payment key. Plain mutable bean with getter/setter access.
 */
public class EFunSDKInfo {

    /** Application key issued for the EFun SDK. */
    private String appKey;
    /** Payment key issued for the EFun SDK. */
    private String payKey;

    public String getAppKey() {
        return this.appKey;
    }

    public void setAppKey(String key) {
        this.appKey = key;
    }

    public String getPayKey() {
        return this.payKey;
    }

    public void setPayKey(String key) {
        this.payKey = key;
    }
}
| {
"content_hash": "18a6a2d7b23b88db12bcffa870dc639d",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 48,
"avg_line_length": 15.291666666666666,
"alnum_prop": 0.7111716621253406,
"repo_name": "bigshiliu/test",
"id": "c3e7d2ee5c0bd5988211a87c37756ad088b29bc4",
"size": "367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "com.hutong.supersdk/src/main/java/com/hutong/supersdk/sdk/modeltools/efan/EFunSDKInfo.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1646"
},
{
"name": "Java",
"bytes": "1232410"
}
],
"symlink_target": ""
} |
<?php
namespace Lexik\Bundle\FormFilterBundle\Filter\ORM\Filters;
use Doctrine\ORM\QueryBuilder;
use Lexik\Bundle\FormFilterBundle\Filter\ORM\ORMFilter;
use Lexik\Bundle\FormFilterBundle\Filter\ORM\Expr;
use Lexik\Bundle\FormFilterBundle\Filter\Extension\Type\BooleanFilterType;
/**
 * Applies a yes/no filter condition on a query builder field.
 *
 * @author Cédric Girard <c.girard@lexik.fr>
 */
class BooleanFilter extends ORMFilter
{
    /**
     * {@inheritdoc}
     */
    public function getName()
    {
        return 'filter_boolean';
    }

    /**
     * {@inheritdoc}
     *
     * Adds an equality condition on $field when a value was submitted:
     * 1 if the submitted value equals BooleanFilterType::VALUE_YES, 0 otherwise.
     */
    protected function apply(QueryBuilder $filterBuilder, Expr $expr, $field, array $values)
    {
        if (empty($values['value'])) {
            // Nothing submitted for this filter: leave the query untouched.
            return;
        }

        $boolValue = (BooleanFilterType::VALUE_YES == $values['value']) ? 1 : 0;
        $filterBuilder->andWhere($expr->eq($field, $boolValue));
    }
}
| {
"content_hash": "152570a2212869bce95249540355eb65",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 92,
"avg_line_length": 24.055555555555557,
"alnum_prop": 0.648960739030023,
"repo_name": "aleksilb/LexikFormFilterBundle",
"id": "09646654461ee27a8f06083f8b09ce4fcdfd1c96",
"size": "867",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Filter/ORM/Filters/BooleanFilter.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "101933"
}
],
"symlink_target": ""
} |
package com.amazonaws.services.securityhub.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.securityhub.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * JSON unmarshaller for {@code ResourceNotFoundException}.
 *
 * <p>Auto-generated by the AWS SDK code generator; walks the JSON token
 * stream and populates the exception's {@code Code} member.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ResourceNotFoundExceptionUnmarshaller extends EnhancedJsonErrorUnmarshaller {

    // Singleton: obtain instances via getInstance().
    private ResourceNotFoundExceptionUnmarshaller() {
        super(com.amazonaws.services.securityhub.model.ResourceNotFoundException.class, "ResourceNotFoundException");
    }

    @Override
    public com.amazonaws.services.securityhub.model.ResourceNotFoundException unmarshallFromContext(JsonUnmarshallerContext context) throws Exception {
        com.amazonaws.services.securityhub.model.ResourceNotFoundException resourceNotFoundException = new com.amazonaws.services.securityhub.model.ResourceNotFoundException(
                null);

        // Remember where this object started so we know when we have closed back out of it.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            // A JSON null means there is no payload to unmarshall.
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // Only the "Code" member is mapped; any other fields are skipped over.
                if (context.testExpression("Code", targetDepth)) {
                    context.nextToken();
                    resourceNotFoundException.setCode(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the parser has returned to (or above) the starting depth.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return resourceNotFoundException;
    }

    private static ResourceNotFoundExceptionUnmarshaller instance;

    // Lazy unsynchronized init — benign race: construction is cheap and idempotent.
    public static ResourceNotFoundExceptionUnmarshaller getInstance() {
        if (instance == null)
            instance = new ResourceNotFoundExceptionUnmarshaller();
        return instance;
    }
}
| {
"content_hash": "ef97855bd050772782a23baec31b79bd",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 174,
"avg_line_length": 38.32835820895522,
"alnum_prop": 0.6748442367601246,
"repo_name": "aws/aws-sdk-java",
"id": "533db6ea30d11be32bcb8010c9815c2d1d5c3aae",
"size": "3148",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-java-sdk-securityhub/src/main/java/com/amazonaws/services/securityhub/model/transform/ResourceNotFoundExceptionUnmarshaller.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.6.0_27) on Thu Jan 23 20:22:09 EST 2014 -->
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>Uses of Class org.apache.solr.response.TextResponseWriter (Solr 4.6.1 API)</title>
<meta name="date" content="2014-01-23">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.solr.response.TextResponseWriter (Solr 4.6.1 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>PREV</li>
<li>NEXT</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/apache/solr/response//class-useTextResponseWriter.html" target="_top">FRAMES</a></li>
<li><a href="TextResponseWriter.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.apache.solr.response.TextResponseWriter" class="title">Uses of Class<br>org.apache.solr.response.TextResponseWriter</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.apache.solr.response">org.apache.solr.response</a></td>
<td class="colLast">
<div class="block">
API and implementations of <a href="../../../../../org/apache/solr/response/QueryResponseWriter.html" title="interface in org.apache.solr.response"><code>QueryResponseWriter</code></a> for formating Solr request responses</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#org.apache.solr.schema">org.apache.solr.schema</a></td>
<td class="colLast">
<div class="block">
<a href="../../../../../org/apache/solr/schema/IndexSchema.html" title="class in org.apache.solr.schema"><code>IndexSchema</code></a> and <a href="../../../../../org/apache/solr/schema/FieldType.html" title="class in org.apache.solr.schema"><code>FieldType</code></a> implementations for powering schema.xml</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.apache.solr.response">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> in <a href="../../../../../org/apache/solr/response/package-summary.html">org.apache.solr.response</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing subclasses, and an explanation">
<caption><span>Subclasses of <a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> in <a href="../../../../../org/apache/solr/response/package-summary.html">org.apache.solr.response</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>class </code></td>
<td class="colLast"><code><strong><a href="../../../../../org/apache/solr/response/SchemaXmlWriter.html" title="class in org.apache.solr.response">SchemaXmlWriter</a></strong></code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>class </code></td>
<td class="colLast"><code><strong><a href="../../../../../org/apache/solr/response/XMLWriter.html" title="class in org.apache.solr.response">XMLWriter</a></strong></code> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.apache.solr.schema">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> in <a href="../../../../../org/apache/solr/schema/package-summary.html">org.apache.solr.schema</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../org/apache/solr/schema/package-summary.html">org.apache.solr.schema</a> with parameters of type <a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">BCDIntField.</span><code><strong><a href="../../../../../org/apache/solr/schema/BCDIntField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">TextField.</span><code><strong><a href="../../../../../org/apache/solr/schema/TextField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UUIDField.</span><code><strong><a href="../../../../../org/apache/solr/schema/UUIDField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">FloatField.</span><code><strong><a href="../../../../../org/apache/solr/schema/FloatField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">CollationField.</span><code><strong><a href="../../../../../org/apache/solr/schema/CollationField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">AbstractSpatialFieldType.</span><code><strong><a href="../../../../../org/apache/solr/schema/AbstractSpatialFieldType.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">BoolField.</span><code><strong><a href="../../../../../org/apache/solr/schema/BoolField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">ShortField.</span><code><strong><a href="../../../../../org/apache/solr/schema/ShortField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code>
<div class="block"><strong>Deprecated.</strong> </div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">ExternalFileField.</span><code><strong><a href="../../../../../org/apache/solr/schema/ExternalFileField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">SortableDoubleField.</span><code><strong><a href="../../../../../org/apache/solr/schema/SortableDoubleField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code>
<div class="block"><strong>Deprecated.</strong> </div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">IntField.</span><code><strong><a href="../../../../../org/apache/solr/schema/IntField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">CurrencyField.</span><code><strong><a href="../../../../../org/apache/solr/schema/CurrencyField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> field)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PreAnalyzedField.</span><code><strong><a href="../../../../../org/apache/solr/schema/PreAnalyzedField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DoubleField.</span><code><strong><a href="../../../../../org/apache/solr/schema/DoubleField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">SortableFloatField.</span><code><strong><a href="../../../../../org/apache/solr/schema/SortableFloatField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code>
<div class="block"><strong>Deprecated.</strong> </div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">GeoHashField.</span><code><strong><a href="../../../../../org/apache/solr/schema/GeoHashField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">StrField.</span><code><strong><a href="../../../../../org/apache/solr/schema/StrField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">SchemaField.</span><code><strong><a href="../../../../../org/apache/solr/schema/SchemaField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> val)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">TrieField.</span><code><strong><a href="../../../../../org/apache/solr/schema/TrieField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">SortableLongField.</span><code><strong><a href="../../../../../org/apache/solr/schema/SortableLongField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code>
<div class="block"><strong>Deprecated.</strong> </div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">RandomSortField.</span><code><strong><a href="../../../../../org/apache/solr/schema/RandomSortField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DateField.</span><code><strong><a href="../../../../../org/apache/solr/schema/DateField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code>
<div class="block"><strong>Deprecated.</strong> </div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">SortableIntField.</span><code><strong><a href="../../../../../org/apache/solr/schema/SortableIntField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code>
<div class="block"><strong>Deprecated.</strong> </div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PointType.</span><code><strong><a href="../../../../../org/apache/solr/schema/PointType.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>abstract void</code></td>
<td class="colLast"><span class="strong">FieldType.</span><code><strong><a href="../../../../../org/apache/solr/schema/FieldType.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code>
<div class="block">calls back to TextResponseWriter to write the field value</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">ByteField.</span><code><strong><a href="../../../../../org/apache/solr/schema/ByteField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code>
<div class="block"><strong>Deprecated.</strong> </div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">BinaryField.</span><code><strong><a href="../../../../../org/apache/solr/schema/BinaryField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">LatLonType.</span><code><strong><a href="../../../../../org/apache/solr/schema/LatLonType.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">LongField.</span><code><strong><a href="../../../../../org/apache/solr/schema/LongField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">TrieDateField.</span><code><strong><a href="../../../../../org/apache/solr/schema/TrieDateField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">EnumField.</span><code><strong><a href="../../../../../org/apache/solr/schema/EnumField.html#write(org.apache.solr.response.TextResponseWriter, java.lang.String, org.apache.lucene.index.IndexableField)">write</a></strong>(<a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">TextResponseWriter</a> writer,
<a href="http://download.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
<a href="http://lucene.apache.org/core/4_6_1/core/org/apache/lucene/index/IndexableField.html?is-external=true" title="class or interface in org.apache.lucene.index">IndexableField</a> f)</code>
<div class="block">calls back to TextResponseWriter to write the field value</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../org/apache/solr/response/TextResponseWriter.html" title="class in org.apache.solr.response">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>PREV</li>
<li>NEXT</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/apache/solr/response//class-useTextResponseWriter.html" target="_top">FRAMES</a></li>
<li><a href="TextResponseWriter.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>
<i>Copyright © 2000-2014 Apache Software Foundation. All Rights Reserved.</i>
<script src='../../../../../prettify.js' type='text/javascript'></script>
<script type='text/javascript'>
(function(){
var oldonload = window.onload;
if (typeof oldonload != 'function') {
window.onload = prettyPrint;
} else {
window.onload = function() {
oldonload();
prettyPrint();
}
}
})();
</script>
</small></p>
</body>
</html>
| {
"content_hash": "9dc92677249f8f5ce2939ee13c6c25f4",
"timestamp": "",
"source": "github",
"line_count": 403,
"max_line_length": 444,
"avg_line_length": 90.77667493796525,
"alnum_prop": 0.6981931498236886,
"repo_name": "arnaud71/webso-db",
"id": "9d9d2ee96bc7098261e8318ad2cb0e40f82c3dc1",
"size": "36583",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/solr-core/org/apache/solr/response/class-use/TextResponseWriter.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "358410"
},
{
"name": "JavaScript",
"bytes": "1852502"
},
{
"name": "Shell",
"bytes": "16381"
},
{
"name": "XSLT",
"bytes": "99846"
}
],
"symlink_target": ""
} |
# Start coverage tracking BEFORE requiring the code under test; SimpleCov
# (driven by Coveralls) can only instrument files loaded after it starts.
require 'coveralls'
Coveralls.wear!

require 'vcr'
require 'test/unit'
require 'zenodo-client'

# Record HTTP interactions once into test/cassettes and replay them on
# subsequent runs so tests don't hit the live Zenodo API.
VCR.configure do |config|
  config.cassette_library_dir = "test/cassettes"
  config.hook_into :webmock
end
| {
"content_hash": "542d6eb915550a15c10f0269fcc29cd3",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 48,
"avg_line_length": 18.454545454545453,
"alnum_prop": 0.7586206896551724,
"repo_name": "seek4science/zenodo-client",
"id": "0600e00406c56a91e59bc23df7a1811c83640d5c",
"size": "203",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_helper.rb",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Ruby",
"bytes": "10367"
}
],
"symlink_target": ""
} |
package org.hswebframework.web.authorization;
import org.hswebframework.web.authorization.token.*;
import org.hswebframework.web.id.IDGenerator;
import org.junit.Assert;
import org.redisson.Redisson;
import org.redisson.api.LocalCachedMapOptions;
import org.redisson.api.RedissonClient;
import org.redisson.codec.FstCodec;
import org.redisson.codec.SerializationCodec;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
/**
 * Manual integration check for {@link DefaultUserTokenManager} backed by Redis
 * (via Redisson maps/sets). Run as a plain {@code main}; requires a reachable
 * Redis instance with tokens for user "admin" already present (or uncomment the
 * seeding block below).
 */
public class RedisUserTokenManagerTests {
    static DefaultUserTokenManager userTokenManager;
    // A fresh random token; not signed in by default — only used if seeding is enabled.
    static String token = IDGenerator.MD5.generate();
    private static Logger logger = LoggerFactory.getLogger("hsweb.session");

    public static void main(String[] args) throws InterruptedException {
        RedissonClient client = Redisson.create();
        try {
            // Token storage and user->tokens index live in Redis, Java-serialized.
            ConcurrentMap<String, SimpleUserToken> repo = client.getMap("hsweb.user-token", new SerializationCodec());
            ConcurrentMap<String, Set<String>> userRepo = client.getMap("hsweb.user-token-u", new SerializationCodec());
            userTokenManager = new DefaultUserTokenManager(repo, userRepo) {
                @Override
                protected Set<String> getUserToken(String userId) {
                    // Ensure the index entry exists, then return a live Redis-backed set
                    // keyed per user so membership updates are visible across processes.
                    userRepo.computeIfAbsent(userId,u->new HashSet<>());
                    return client.getSet("hsweb.user-token-"+userId, new SerializationCodec());
                }
            };
            // Reject logins from a second location instead of evicting the first.
            userTokenManager.setAllopatricLoginMode(AllopatricLoginMode.deny);
//            userTokenManager=new DefaultUserTokenManager();
//            userRepo.clear();
//            repo.clear();
//            for (int i = 0; i < 1000; i++) {
//                userTokenManager.signIn(IDGenerator.MD5.generate(), "sessionId", "admin", 60*3600*1000);
//            }
//            userTokenManager.signIn(IDGenerator.MD5.generate(), "sessionId", "admin2", 60*3600*1000);
            // Order matters: verify tokens exist, dump totals, sign out, dump again.
            testGet();
            testGetAll();
            testSignOut();
            testGetAll();
        } finally {
            client.shutdown();
        }
    }

    // Removes every token belonging to user "admin".
    public static void testSignOut(){
        userTokenManager.signOutByUserId("admin");
    }

    // Asserts that "admin" currently has at least one token (requires seeded data).
    public static void testGet() {
        List<UserToken> userToken = userTokenManager.getByUserId("admin");
        Assert.assertTrue(!userToken.isEmpty());
    }

    // Logs aggregate counts and prints every logged-in token.
    public static void testGetAll() {
        logger.warn("total user : " + userTokenManager.totalUser());
        logger.warn("total token : " + userTokenManager.totalToken());
        userTokenManager.allLoggedUser(token -> System.out.println(token.getToken()));
    }
}
| {
"content_hash": "81af56809efea502490faa584ffa00a1",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 120,
"avg_line_length": 34.2625,
"alnum_prop": 0.659248449470996,
"repo_name": "asiaon123/hsweb-framework",
"id": "38dd2ad3137c1333a34149bffc236a2d0095058e",
"size": "2741",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hsweb-authorization/hsweb-authorization-basic/src/test/java/org/hswebframework/web/authorization/RedisUserTokenManagerTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "7395"
},
{
"name": "Java",
"bytes": "1680165"
},
{
"name": "JavaScript",
"bytes": "59474"
}
],
"symlink_target": ""
} |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.roots.impl;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.fileTypes.impl.FileTypeAssocTable;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.UnloadedModuleDescription;
import com.intellij.openapi.module.impl.ModuleManagerEx;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.impl.libraries.LibraryEx;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.Couple;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileWithId;
import com.intellij.util.*;
import com.intellij.util.containers.Stack;
import com.intellij.util.containers.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.model.fileTypes.FileNameMatcherFactory;
import java.util.HashMap;
import java.util.HashSet;
import java.util.*;
class RootIndex {
static final Comparator<OrderEntry> BY_OWNER_MODULE = (o1, o2) -> {
String name1 = o1.getOwnerModule().getName();
String name2 = o2.getOwnerModule().getName();
return name1.compareTo(name2);
};
  private static final Logger LOG = Logger.getInstance(RootIndex.class);
  private static final FileTypeRegistry ourFileTypes = FileTypeRegistry.getInstance();
  // Package prefix per root; a null VALUE is legal (root known, but no prefix) —
  // see getPackageName(), which distinguishes "absent" from "present with null".
  private final Map<VirtualFile, String> myPackagePrefixByRoot;
  // DirectoryInfo computed for every known root (content, source, library, excluded).
  private final Map<VirtualFile, DirectoryInfo> myRootInfos;
  // True when at least one registered root is not a directory.
  private final boolean myHasNonDirectoryRoots;
  // Ids of files already proven to be neither roots nor ignored; lets
  // getInfoForFile() skip the map lookup on the hot path.
  private final ConcurrentBitSet myNonInterestingIds = ConcurrentBitSet.create();
  @NotNull private final Project myProject;
  private final RootFileSupplier myRootSupplier;
  final PackageDirectoryCache myPackageDirectoryCache;
  // Built lazily; guarded by "synchronized" in getOrderEntryGraph().
  private OrderEntryGraph myOrderEntryGraph;
  private final DirectoryIndexAnalyticsReporter.BuildRequestKind myBuildRequestKind;
  /** Convenience constructor using the default {@link RootFileSupplier}. */
  RootIndex(@NotNull Project project, DirectoryIndexAnalyticsReporter.BuildRequestKind buildRequestKind) {
    this(project, RootFileSupplier.INSTANCE, buildRequestKind);
  }
  /**
   * Builds the full root index under a read action: collects all roots via
   * {@link #buildRootInfo}, computes a {@link DirectoryInfo} and package prefix
   * for each, then wires up the package-directory cache. Must not be called for
   * the default project or before modules are loaded.
   */
  RootIndex(@NotNull Project project,
            @NotNull RootFileSupplier rootSupplier,
            DirectoryIndexAnalyticsReporter.BuildRequestKind buildRequestKind) {
    myProject = project;
    myRootSupplier = rootSupplier;
    myBuildRequestKind = buildRequestKind;
    ApplicationManager.getApplication().assertReadAccessAllowed();
    if (project.isDefault()) {
      LOG.error("Directory index may not be queried for default project");
    }
    ModuleManager manager = ModuleManager.getInstance(project);
    if (manager instanceof ModuleManagerEx) {
      LOG.assertTrue(((ModuleManagerEx)manager).areModulesLoaded(), "Directory index can only be queried after project initialization");
    }
    DirectoryIndexAnalyticsReporter.ActivityReporter activityReporter = logActivityStarted(DirectoryIndexAnalyticsReporter.BuildPart.MAIN);
    try {
      final RootInfo info = buildRootInfo(project, activityReporter);
      DirectoryIndexAnalyticsReporter.PhaseReporter phase = activityReporter.reportFinalizingPhaseStarted();
      Set<VirtualFile> allRoots = info.getAllRoots();
      MultiMap<String, VirtualFile> rootsByPackagePrefix = MultiMap.create(allRoots.size(), 0.75f);
      myRootInfos = new HashMap<>(allRoots.size());
      myHasNonDirectoryRoots = ContainerUtil.exists(allRoots, r -> !r.isDirectory());
      myPackagePrefixByRoot = new HashMap<>(allRoots.size());
      List<List<VirtualFile>> hierarchies = new ArrayList<>(allRoots.size());
      for (VirtualFile root : allRoots) {
        // hierarchy == null means the root is under an ignored folder (see getHierarchy).
        List<VirtualFile> hierarchy = getHierarchy(root, allRoots, info);
        hierarchies.add(hierarchy);
        Pair<DirectoryInfo, String> pair = hierarchy != null
                                           ? calcDirectoryInfoAndPackagePrefix(root, hierarchy, info)
                                           : new Pair<>(NonProjectDirectoryInfo.IGNORED, null);
        myRootInfos.put(root, pair.first);
        String packagePrefix = pair.second;
        rootsByPackagePrefix.putValue(packagePrefix, root);
        myPackagePrefixByRoot.put(root, packagePrefix);
      }
      // These two passes need the complete myRootInfos map, so they run after the loop.
      storeContentsBeneathExcluded(allRoots, hierarchies);
      storeOutsideProjectRootsButHasContentInside();
      myPackageDirectoryCache = new PackageDirectoryCache(rootsByPackagePrefix) {
        @Override
        protected boolean isPackageDirectory(@NotNull VirtualFile dir, @NotNull String packageName) {
          return getInfoForFile(dir).isInProject(dir) && packageName.equals(getPackageName(dir));
        }
      };
      phase.reportPhaseFinished();
    }
    finally {
      activityReporter.reportFinished();
    }
  }
  /** Starts analytics reporting for one index-building activity of the given part. */
  private DirectoryIndexAnalyticsReporter.ActivityReporter logActivityStarted(DirectoryIndexAnalyticsReporter.BuildPart part) {
    return DirectoryIndexAnalyticsReporter.reportStarted(myProject, myBuildRequestKind, part);
  }
  /**
   * For every root, marks all its ancestors that are not themselves known roots
   * with OUTSIDE_PROJECT_ROOTS_BUT_HAS_CONTENT_BENEATH, so lookups above project
   * roots can tell "nothing here, but content exists below" apart from plain
   * non-project directories.
   */
  private void storeOutsideProjectRootsButHasContentInside() {
    nextRoot:
    for (VirtualFile root : new ArrayList<>(myRootInfos.keySet())) {
      // First pass: skip this root entirely if any ancestor already has a real info;
      // stop early if an ancestor is already marked (everything above it is too).
      for (VirtualFile v = root.getParent(); v != null; v = v.getParent()) {
        DirectoryInfo info = myRootInfos.get(v);
        if (info == NonProjectDirectoryInfo.OUTSIDE_PROJECT_ROOTS_BUT_HAS_CONTENT_BENEATH) {
          break;
        }
        if (info != null) continue nextRoot;
      }
      // mark all [root.parent .. disk root] as OUTSIDE_PROJECT_ROOTS_BUT_HAS_CONTENT_BENEATH
      for (VirtualFile v = root.getParent(); v != null; v = v.getParent()) {
        DirectoryInfo info = myRootInfos.get(v);
        if (info == NonProjectDirectoryInfo.OUTSIDE_PROJECT_ROOTS_BUT_HAS_CONTENT_BENEATH) {
          break;
        }
        myRootInfos.put(v, NonProjectDirectoryInfo.OUTSIDE_PROJECT_ROOTS_BUT_HAS_CONTENT_BENEATH);
      }
    }
  }
  /**
   * Links each content root that sits directly under an excluded root to that
   * excluded root's info, so the excluded directory "knows" about live content
   * beneath it. {@code hierarchies} is positionally parallel to {@code allRoots}
   * (hence the iteration-order dependency noted below).
   */
  private void storeContentsBeneathExcluded(@NotNull Set<? extends VirtualFile> allRoots, @NotNull List<? extends List<VirtualFile>> hierarchies) {
    // exploit allRoots being LinkedHashSet
    int i = 0;
    for (VirtualFile root : allRoots) {
      List<VirtualFile> hierarchy = hierarchies.get(i++);
      if (hierarchy == null) continue;
      // calculate bits "hasContentBeneath" and "hasExcludedBeneath" for which we need all other DirectoryInfos built
      DirectoryInfo dirInfo = myRootInfos.get(root);
      assert dirInfo != null;
      boolean hasContent = !isExcluded(dirInfo) && dirInfo.getContentRoot() != null;
      if (hasContent) {
        // start with the strict parent and update parent excluded dir info
        VirtualFile parentRoot = hierarchy.size() >= 2 ? hierarchy.get(1) : null;
        if (parentRoot != null) {
          DirectoryInfo parentInfo = myRootInfos.get(parentRoot);
          if (isExcluded(parentInfo)) {
            addContentBeneathExcludedInfo(parentInfo, parentRoot, dirInfo);
          }
        }
      }
    }
  }
  /**
   * Records {@code childInfo} (a content-root info) in the "content beneath" list of
   * its excluded parent's info, upgrading a plain NonProjectDirectoryInfo to a
   * WithBeneathInfo wrapper on first use. Branch order matters: WithBeneathInfo is
   * checked before the broader NonProjectDirectoryInfo case.
   */
  private void addContentBeneathExcludedInfo(@NotNull DirectoryInfo parentExcludedInfo,
                                             @NotNull VirtualFile parentFile,
                                             @NotNull DirectoryInfo childInfo) {
    List<DirectoryInfoImpl> beneathInfo;
    if (parentExcludedInfo instanceof NonProjectDirectoryInfo.WithBeneathInfo) {
      beneathInfo = ((NonProjectDirectoryInfo.WithBeneathInfo)parentExcludedInfo).myContentInfosBeneath;
    }
    else if (parentExcludedInfo instanceof NonProjectDirectoryInfo) {
      // First child under this excluded dir: wrap the info and republish it in the map.
      NonProjectDirectoryInfo.WithBeneathInfo newInfo = new NonProjectDirectoryInfo.WithBeneathInfo((NonProjectDirectoryInfo)parentExcludedInfo);
      myRootInfos.put(parentFile, newInfo);
      beneathInfo = newInfo.myContentInfosBeneath;
    }
    else if (parentExcludedInfo instanceof DirectoryInfoImpl) {
      beneathInfo = ((DirectoryInfoImpl)parentExcludedInfo).myContentInfosBeneath;
    }
    else {
      throw new RuntimeException("unknown info: "+parentExcludedInfo);
    }
    beneathInfo.add((DirectoryInfoImpl)childInfo);
  }
private static boolean isExcluded(@NotNull DirectoryInfo info) {
return info instanceof DirectoryInfoImpl && info.isExcluded(((DirectoryInfoImpl)info).getRoot())
|| info instanceof NonProjectDirectoryInfo && ((NonProjectDirectoryInfo)info).isExcluded();
}
  /** Delegates low-memory handling to the package-directory cache so it can shrink. */
  void onLowMemory() {
    myPackageDirectoryCache.onLowMemory();
  }
  /**
   * Collects every kind of root in the project into a fresh {@link RootInfo}:
   * module content/source/excluded roots, library and SDK class/source roots,
   * additional-library-provider roots, exclude-policy exclusions, and unloaded
   * module content roots. Each phase is reported to {@code activity}.
   */
  @NotNull
  private RootInfo buildRootInfo(@NotNull Project project, DirectoryIndexAnalyticsReporter.ActivityReporter activity) {
    final RootInfo info = new RootInfo();
    ModuleManager moduleManager = ModuleManager.getInstance(project);
    boolean includeProjectJdk = true;
    DirectoryIndexAnalyticsReporter.PhaseReporter phase = activity.reportWorkspacePhaseStarted();
    for (final Module module : moduleManager.getModules()) {
      final ModuleRootManager moduleRootManager = ModuleRootManager.getInstance(module);
      for (ContentEntry contentEntry : moduleRootManager.getContentEntries()) {
        for (VirtualFile excludeRoot : contentEntry.getExcludeFolderFiles()) {
          if (!ensureValid(excludeRoot, contentEntry)) continue;
          info.excludedFromModule.put(excludeRoot, module);
        }
        VirtualFile contentRoot = myRootSupplier.getContentRoot(contentEntry);
        if (contentRoot != null && ensureValid(contentRoot, module)) {
          // First module claiming a content root wins; later duplicates are ignored.
          if (!info.contentRootOf.containsKey(contentRoot)) {
            info.contentRootOf.put(contentRoot, module);
          }
          List<String> patterns = contentEntry.getExcludePatterns();
          if (!patterns.isEmpty()) {
            FileTypeAssocTable<Boolean> table = new FileTypeAssocTable<>();
            for (String pattern : patterns) {
              table.addAssociation(FileNameMatcherFactory.getInstance().createMatcher(pattern), Boolean.TRUE);
            }
            info.excludeFromContentRootTables.put(contentRoot, table);
          }
        }
        // Init module sources
        for (final SourceFolder sourceFolder : contentEntry.getSourceFolders()) {
          VirtualFile sourceFolderRoot = myRootSupplier.getSourceRoot(sourceFolder);
          if (sourceFolderRoot != null && ensureValid(sourceFolderRoot, sourceFolder)) {
            info.sourceFolders.put(sourceFolderRoot, sourceFolder);
            info.classAndSourceRoots.add(sourceFolderRoot);
            info.sourceRootOf.putValue(sourceFolderRoot, module);
            info.packagePrefix.put(sourceFolderRoot, sourceFolder.getPackagePrefix());
          }
        }
      }
      for (OrderEntry orderEntry : moduleRootManager.getOrderEntries()) {
        if (orderEntry instanceof LibraryOrSdkOrderEntry) {
          LibraryOrSdkOrderEntry entry = (LibraryOrSdkOrderEntry)orderEntry;
          VirtualFile[] sourceRoots = myRootSupplier.getLibraryRoots(entry, OrderRootType.SOURCES);
          VirtualFile[] classRoots = myRootSupplier.getLibraryRoots(entry, OrderRootType.CLASSES);
          fillIndexWithLibraryRoots(info, entry, sourceRoots, classRoots);
          if (orderEntry instanceof LibraryOrderEntry) {
            Library library = ((LibraryOrderEntry)orderEntry).getLibrary();
            if (library != null) {
              for (VirtualFile root : myRootSupplier.getExcludedRoots((LibraryEx) library)) {
                if (!ensureValid(root, library)) continue;
                info.excludedFromLibraries.putValue(root, library);
              }
              for (VirtualFile root : sourceRoots) {
                if (!ensureValid(root, library)) continue;
                info.sourceOfLibraries.putValue(root, library);
              }
              for (VirtualFile root : classRoots) {
                if (!ensureValid(root, library)) continue;
                info.classOfLibraries.putValue(root, library);
              }
            }
          }
          else {
            // A non-library LibraryOrSdkOrderEntry here is an SDK entry, so SDK roots
            // were already filled above; skip the separate project-JDK phase below.
            includeProjectJdk = false;
          }
        }
      }
    }
    phase.reportPhaseFinished();
    if (includeProjectJdk) {
      phase = activity.reportSdkPhaseStarted();
      Sdk sdk = ProjectRootManager.getInstance(project).getProjectSdk();
      if (sdk != null) {
        fillIndexWithLibraryRoots(info, sdk, myRootSupplier.getSdkRoots(sdk, OrderRootType.SOURCES),
                                  myRootSupplier.getSdkRoots(sdk, OrderRootType.CLASSES));
      }
      phase.reportPhaseFinished();
    }
    phase = activity.reportAdditionalLibrariesPhaseStarted();
    for (AdditionalLibraryRootsProvider provider : AdditionalLibraryRootsProvider.EP_NAME.getExtensionList()) {
      Collection<SyntheticLibrary> libraries = provider.getAdditionalProjectLibraries(project);
      for (SyntheticLibrary library : libraries) {
        for (VirtualFile sourceRoot : library.getSourceRoots()) {
          sourceRoot = myRootSupplier.correctRoot(sourceRoot, library, provider);
          if (sourceRoot == null) continue;
          info.libraryOrSdkSources.add(sourceRoot);
          info.classAndSourceRoots.add(sourceRoot);
          if (library instanceof JavaSyntheticLibrary) {
            info.packagePrefix.put(sourceRoot, "");
          }
          info.sourceOfLibraries.putValue(sourceRoot, library);
        }
        for (VirtualFile classRoot : library.getBinaryRoots()) {
          classRoot = myRootSupplier.correctRoot(classRoot, library, provider);
          if (classRoot == null) continue;
          info.libraryOrSdkClasses.add(classRoot);
          info.classAndSourceRoots.add(classRoot);
          if (library instanceof JavaSyntheticLibrary) {
            info.packagePrefix.put(classRoot, "");
          }
          info.classOfLibraries.putValue(classRoot, library);
        }
        for (VirtualFile file : library.getExcludedRoots()) {
          file = myRootSupplier.correctRoot(file, library, provider);
          if (file == null) continue;
          info.excludedFromLibraries.putValue(file, library);
        }
      }
    }
    phase.reportPhaseFinished();
    phase = activity.reportExclusionPolicyPhaseStarted();
    for (DirectoryIndexExcludePolicy policy : DirectoryIndexExcludePolicy.EP_NAME.getExtensions(project)) {
      List<VirtualFile> files = ContainerUtil.mapNotNull(policy.getExcludeUrlsForProject(), myRootSupplier::findFileByUrl);
      info.excludedFromProject.addAll(ContainerUtil.filter(files, file -> RootFileSupplier.ensureValid(file, project, policy)));
      Function<Sdk, List<VirtualFile>> fun = policy.getExcludeSdkRootsStrategy();
      if (fun != null) {
        // Exclude only those SDK roots that are not also class roots of some used SDK.
        Set<Sdk> sdks = new HashSet<>();
        for (Module m : ModuleManager.getInstance(myProject).getModules()) {
          Sdk sdk = ModuleRootManager.getInstance(m).getSdk();
          if (sdk != null) {
            sdks.add(sdk);
          }
        }
        Set<VirtualFile> roots = new HashSet<>();
        for (Sdk sdk: sdks) {
          roots.addAll(Arrays.asList(sdk.getRootProvider().getFiles(OrderRootType.CLASSES)));
        }
        for (Sdk sdk: sdks) {
          for (VirtualFile file : fun.fun(sdk)) {
            if (!roots.contains(file)) {
              ContainerUtil.addIfNotNull(info.excludedFromSdkRoots, myRootSupplier.correctRoot(file, sdk, policy));
            }
          }
        }
      }
    }
    for (UnloadedModuleDescription description : moduleManager.getUnloadedModuleDescriptions()) {
      for (VirtualFile contentRoot : myRootSupplier.getUnloadedContentRoots(description)) {
        if (ensureValid(contentRoot, description)) {
          info.contentRootOfUnloaded.put(contentRoot, description.getName());
        }
      }
    }
    phase.reportPhaseFinished();
    return info;
  }
private static void fillIndexWithLibraryRoots(RootInfo info, Object container, VirtualFile[] sourceRoots, VirtualFile[] classRoots) {
// Init library sources
for (final VirtualFile sourceRoot : sourceRoots) {
if (!ensureValid(sourceRoot, container)) continue;
info.classAndSourceRoots.add(sourceRoot);
info.libraryOrSdkSources.add(sourceRoot);
info.packagePrefix.put(sourceRoot, "");
}
// init library classes
for (final VirtualFile classRoot : classRoots) {
if (!ensureValid(classRoot, container)) continue;
info.classAndSourceRoots.add(classRoot);
info.libraryOrSdkClasses.add(classRoot);
info.packagePrefix.put(classRoot, "");
}
}
  /** Shorthand for {@link RootFileSupplier#ensureValid} passing {@code null} as the last argument. */
  private static boolean ensureValid(@NotNull VirtualFile file, @NotNull Object container) {
    return RootFileSupplier.ensureValid(file, container, null);
  }
  /**
   * Lazily builds and caches the reverse order-entry graph. Synchronized so
   * concurrent callers construct it at most once; rebuilds RootInfo because the
   * one from the constructor is not retained.
   */
  @NotNull
  private synchronized OrderEntryGraph getOrderEntryGraph() {
    if (myOrderEntryGraph == null) {
      DirectoryIndexAnalyticsReporter.ActivityReporter activityReporter =
        logActivityStarted(DirectoryIndexAnalyticsReporter.BuildPart.ORDER_ENTRY_GRAPH);
      try {
        RootInfo rootInfo = buildRootInfo(myProject, activityReporter);
        DirectoryIndexAnalyticsReporter.PhaseReporter phase = activityReporter.reportFinalizingPhaseStarted();
        Couple<MultiMap<VirtualFile, OrderEntry>> pair = initLibraryClassSourceRoots();
        myOrderEntryGraph = new OrderEntryGraph(myProject, rootInfo, pair.first, pair.second);
        phase.reportPhaseFinished();
      }
      finally {
        activityReporter.reportFinished();
      }
    }
    return myOrderEntryGraph;
  }
  /**
   * A reverse dependency graph of (library, jdk, module, module source) -> (module).
   * <p>
   * <p>Each edge carries with it the associated OrderEntry that caused the dependency.
   * Lookups (order entries for a file, unloaded modules depending on a module) are
   * served from bounded SLRU caches.
   */
  private static class OrderEntryGraph {
    private static class Edge {
      private final Module myKey;
      private final ModuleOrderEntry myOrderEntry; // Order entry from myKey -> the node containing the edge
      private final boolean myRecursive; // Whether this edge should be descended into during graph walk

      Edge(@NotNull Module key, @NotNull ModuleOrderEntry orderEntry, boolean recursive) {
        myKey = key;
        myOrderEntry = orderEntry;
        myRecursive = recursive;
      }

      @Override
      public String toString() {
        return myOrderEntry.toString();
      }
    }

    private static final class Node {
      private final Module myKey;
      private final List<Edge> myEdges = new ArrayList<>();
      // Names of unloaded modules that directly depend on myKey; null when none.
      private Set<String> myUnloadedDependentModules;

      private Node(@NotNull Module key) {
        myKey = key;
      }

      @Override
      public String toString() {
        return myKey.toString();
      }
    }

    private static class Graph {
      private final Map<Module, Node> myNodes;

      Graph(int moduleCount) {
        myNodes = new HashMap<>(moduleCount);
      }
    }

    private final Project myProject;
    private final RootInfo myRootInfo;
    private final Set<VirtualFile> myAllRoots;
    private final Graph myGraph;
    private final MultiMap<VirtualFile, Node> myRoots; // Map of roots to their root nodes, eg. library jar -> library node
    private final SynchronizedSLRUCache<VirtualFile, List<OrderEntry>> myCache;
    private final SynchronizedSLRUCache<Module, Set<String>> myDependentUnloadedModulesCache;
    private final MultiMap<VirtualFile, OrderEntry> myLibClassRootEntries;
    private final MultiMap<VirtualFile, OrderEntry> myLibSourceRootEntries;

    OrderEntryGraph(@NotNull Project project, @NotNull RootInfo rootInfo,
                    MultiMap<VirtualFile, OrderEntry> libClassRootEntries, MultiMap<VirtualFile, OrderEntry> libSourceRootEntries) {
      myProject = project;
      myRootInfo = rootInfo;
      myAllRoots = myRootInfo.getAllRoots();
      // Cache sizes scale with project size but never drop below a small floor.
      int cacheSize = Math.max(25, myAllRoots.size() / 100 * 2);
      myCache = new SynchronizedSLRUCache<>(cacheSize, cacheSize) {
        @NotNull
        @Override
        public List<OrderEntry> createValue(@NotNull VirtualFile key) {
          return collectOrderEntries(key);
        }
      };
      int dependentUnloadedModulesCacheSize = ModuleManager.getInstance(project).getModules().length / 2;
      myDependentUnloadedModulesCache =
        new SynchronizedSLRUCache<>(dependentUnloadedModulesCacheSize, dependentUnloadedModulesCacheSize) {
          @NotNull
          @Override
          public Set<String> createValue(@NotNull Module key) {
            return collectDependentUnloadedModules(key);
          }
        };
      Pair<Graph, MultiMap<VirtualFile, Node>> pair = initGraphRoots();
      myGraph = pair.getFirst();
      myRoots = pair.getSecond();
      myLibClassRootEntries = libClassRootEntries;
      myLibSourceRootEntries = libSourceRootEntries;
    }

    /**
     * Builds the reverse graph: for each module dependency, an edge from the
     * depended-on module's node back to the depending module, plus a mapping from
     * each exported class/source root to the node that exports it.
     */
    @NotNull
    private Pair<Graph, MultiMap<VirtualFile, Node>> initGraphRoots() {
      ModuleManager moduleManager = ModuleManager.getInstance(myProject);
      Module[] modules = moduleManager.getModules();
      Graph graph = new Graph(modules.length);
      MultiMap<VirtualFile, Node> roots = new MultiMap<>();
      for (final Module module : modules) {
        final ModuleRootManager moduleRootManager = ModuleRootManager.getInstance(module);
        List<OrderEnumerationHandler> handlers = OrderEnumeratorBase.getCustomHandlers(module);
        for (OrderEntry orderEntry : moduleRootManager.getOrderEntries()) {
          if (orderEntry instanceof ModuleOrderEntry) {
            ModuleOrderEntry moduleOrderEntry = (ModuleOrderEntry)orderEntry;
            final Module depModule = moduleOrderEntry.getModule();
            if (depModule != null) {
              Node node = graph.myNodes.get(depModule);
              OrderEnumerator en = OrderEnumerator.orderEntries(depModule).exportedOnly();
              if (node == null) {
                // First time we see depModule: create its node and index its exported roots.
                node = new Node(depModule);
                graph.myNodes.put(depModule, node);
                VirtualFile[] importedClassRoots = en.classes().usingCache().getRoots();
                for (VirtualFile importedClassRoot : importedClassRoots) {
                  roots.putValue(importedClassRoot, node);
                }
                VirtualFile[] importedSourceRoots = en.sources().usingCache().getRoots();
                for (VirtualFile sourceRoot : importedSourceRoots) {
                  roots.putValue(sourceRoot, node);
                }
              }
              boolean shouldRecurse = en.recursively().shouldRecurse(moduleOrderEntry, handlers);
              node.myEdges.add(new Edge(module, moduleOrderEntry, shouldRecurse));
            }
          }
        }
      }
      // Record which unloaded modules depend on each loaded module.
      for (UnloadedModuleDescription description : moduleManager.getUnloadedModuleDescriptions()) {
        for (String depName : description.getDependencyModuleNames()) {
          Module depModule = moduleManager.findModuleByName(depName);
          if (depModule != null) {
            Node node = graph.myNodes.get(depModule);
            if (node == null) {
              node = new Node(depModule);
              graph.myNodes.put(depModule, node);
            }
            if (node.myUnloadedDependentModules == null) {
              node.myUnloadedDependentModules = new LinkedHashSet<>();
            }
            node.myUnloadedDependentModules.add(description.getName());
          }
        }
      }
      return Pair.create(graph, roots);
    }

    /** Cached front-end for {@link #collectOrderEntries}. */
    @NotNull
    private List<OrderEntry> getOrderEntries(@NotNull VirtualFile file) {
      return myCache.get(file);
    }

    /**
     * Traverses the graph from the given file, collecting all encountered order entries.
     */
    @NotNull
    private List<OrderEntry> collectOrderEntries(@NotNull VirtualFile file) {
      List<VirtualFile> roots = getHierarchy(file, myAllRoots, myRootInfo);
      if (roots == null) {
        return Collections.emptyList();
      }
      // DFS over recursive edges, starting from the nodes owning any root above the file.
      Stack<Node> stack = new Stack<>(roots.size());
      for (VirtualFile root : roots) {
        Collection<Node> nodes = myRoots.get(root);
        for (Node node : nodes) {
          stack.push(node);
        }
      }
      Set<Node> seen = new HashSet<>(stack.size());
      List<OrderEntry> result = new ArrayList<>(stack.size());
      while (!stack.isEmpty()) {
        Node node = stack.pop();
        if (!seen.add(node)) {
          continue;
        }
        for (Edge edge : node.myEdges) {
          result.add(edge.myOrderEntry);
          if (edge.myRecursive) {
            Node targetNode = myGraph.myNodes.get(edge.myKey);
            if (targetNode != null) {
              stack.push(targetNode);
            }
          }
        }
      }
      // Add library and module-source entries that are not reachable through the graph.
      Pair<VirtualFile, List<Condition<? super VirtualFile>>> libraryClassRootInfo = myRootInfo.findLibraryRootInfo(roots, false);
      Pair<VirtualFile, List<Condition<? super VirtualFile>>> librarySourceRootInfo = myRootInfo.findLibraryRootInfo(roots, true);
      result.addAll(myRootInfo.getLibraryOrderEntries(roots,
                                                      Pair.getFirst(libraryClassRootInfo),
                                                      Pair.getFirst(librarySourceRootInfo),
                                                      myLibClassRootEntries, myLibSourceRootEntries));
      VirtualFile moduleContentRoot = myRootInfo.findNearestContentRoot(roots);
      if (moduleContentRoot != null) {
        ContainerUtil.addIfNotNull(result, myRootInfo.getModuleSourceEntry(roots, moduleContentRoot, myLibClassRootEntries));
      }
      result.sort(BY_OWNER_MODULE);
      return ContainerUtil.immutableList(result);
    }

    /** Cached front-end for {@link #collectDependentUnloadedModules}. */
    @NotNull
    Set<String> getDependentUnloadedModules(@NotNull Module module) {
      return myDependentUnloadedModulesCache.get(module);
    }

    /**
     * @return names of unloaded modules which directly or transitively via exported dependencies depend on the specified module
     */
    @NotNull
    private Set<String> collectDependentUnloadedModules(@NotNull Module module) {
      Node start = myGraph.myNodes.get(module);
      if (start == null) return Collections.emptySet();
      Deque<Node> stack = new ArrayDeque<>();
      stack.push(start);
      Set<Node> seen = new HashSet<>();
      Set<String> result = null;  // allocated lazily; most walks find nothing
      while (!stack.isEmpty()) {
        Node node = stack.pop();
        if (!seen.add(node)) {
          continue;
        }
        if (node.myUnloadedDependentModules != null) {
          if (result == null) {
            result = new LinkedHashSet<>(node.myUnloadedDependentModules);
          }
          else {
            result.addAll(node.myUnloadedDependentModules);
          }
        }
        for (Edge edge : node.myEdges) {
          if (edge.myRecursive) {
            Node targetNode = myGraph.myNodes.get(edge.myKey);
            if (targetNode != null) {
              stack.push(targetNode);
            }
          }
        }
      }
      return result != null ? result : Collections.emptySet();
    }
  }
@NotNull
private Couple<MultiMap<VirtualFile, OrderEntry>> initLibraryClassSourceRoots() {
MultiMap<VirtualFile, OrderEntry> libClassRootEntries = new MultiMap<>();
MultiMap<VirtualFile, OrderEntry> libSourceRootEntries = new MultiMap<>();
for (final Module module : ModuleManager.getInstance(myProject).getModules()) {
final ModuleRootManager moduleRootManager = ModuleRootManager.getInstance(module);
for (OrderEntry orderEntry : moduleRootManager.getOrderEntries()) {
if (orderEntry instanceof LibraryOrSdkOrderEntry) {
final LibraryOrSdkOrderEntry entry = (LibraryOrSdkOrderEntry)orderEntry;
for (final VirtualFile sourceRoot : myRootSupplier.getLibraryRoots(entry, OrderRootType.SOURCES)) {
libSourceRootEntries.putValue(sourceRoot, orderEntry);
}
for (final VirtualFile classRoot : myRootSupplier.getLibraryRoots(entry, OrderRootType.CLASSES)) {
libClassRootEntries.putValue(classRoot, orderEntry);
}
}
}
}
return Couple.of(libClassRootEntries, libSourceRootEntries);
}
  /**
   * Returns the DirectoryInfo governing {@code file} by walking up its parents until
   * a known root (or ignored folder) is found. For a non-directory file, the file
   * itself is checked first, then the walk starts from its parent. Files with ids
   * use the myNonInterestingIds bit set to skip map lookups on repeat queries.
   */
  @NotNull
  DirectoryInfo getInfoForFile(@NotNull VirtualFile file) {
    if (!file.isValid()) {
      return NonProjectDirectoryInfo.INVALID;
    }
    if (!file.isDirectory()) {
      DirectoryInfo info = getOwnFileInfo(file);
      if (info != null) return info;
      file = file.getParent();
    }
    if (file instanceof VirtualFileWithId) {
      // Fast path: consult the "not interesting" bit set before touching the map.
      for (VirtualFile each = file; each != null; each = each.getParent()) {
        int id = ((VirtualFileWithId)each).getId();
        if (!myNonInterestingIds.get(id)) {
          DirectoryInfo info = handleInterestingId(id, each);
          if (info != null) return info;
        }
      }
    }
    else {
      for (VirtualFile each = file; each != null; each = each.getParent()) {
        DirectoryInfo info = getOwnInfo(each);
        if (info != null) return info;
      }
    }
    return NonProjectDirectoryInfo.NOT_UNDER_PROJECT_ROOTS;
  }
@Nullable
private DirectoryInfo getOwnFileInfo(@NotNull VirtualFile file) {
if (myHasNonDirectoryRoots) {
return file instanceof VirtualFileWithId
? getOwnInfo(((VirtualFileWithId)file).getId(), file)
: getOwnInfo(file);
}
return ourFileTypes.isFileIgnored(file) ? NonProjectDirectoryInfo.IGNORED : null;
}
@Nullable
private DirectoryInfo getOwnInfo(int id, VirtualFile file) {
return myNonInterestingIds.get(id) ? null : handleInterestingId(id, file);
}
@Nullable
private DirectoryInfo getOwnInfo(@NotNull VirtualFile file) {
DirectoryInfo info = myRootInfos.get(file);
if (info != null) {
return info;
}
if (ourFileTypes.isFileIgnored(file)) {
return NonProjectDirectoryInfo.IGNORED;
}
return null;
}
  /**
   * Looks up the info for a file whose id is not yet in the "not interesting" set;
   * records the id there when the file is neither a root nor ignored so future
   * queries short-circuit.
   */
  @Nullable
  private DirectoryInfo handleInterestingId(int id, @NotNull VirtualFile file) {
    DirectoryInfo info = myRootInfos.get(file);
    if (info == null && ourFileTypes.isFileIgnored(file)) {
      info = NonProjectDirectoryInfo.IGNORED;
    }
    if (info == null) {
      // NOTE(review): LOG.error guarded by isDebugEnabled() is unusual — presumably
      // the suspicious-id report is wanted only with debug logging on; confirm.
      if ((id > 500_000_000 || id < 0) && LOG.isDebugEnabled()) {
        LOG.error("Invalid id: " + id + " for " + file + " of " + file.getClass());
      }
      myNonInterestingIds.set(id);
    }
    return info;
  }
  /**
   * Returns all project directories corresponding to {@code packageName}, optionally
   * filtering out pure library-source directories.
   */
  @NotNull
  Query<VirtualFile> getDirectoriesByPackageName(@NotNull final String packageName, final boolean includeLibrarySources) {
    // Note that this method is used in upsource as well, hence, don't reduce this method's visibility.
    List<VirtualFile> result = myPackageDirectoryCache.getDirectoriesByPackageName(packageName);
    if (!includeLibrarySources) {
      result = ContainerUtil.filter(result, file -> {
        DirectoryInfo info = getInfoForFile(file);
        return info.isInProject(file) && (!info.isInLibrarySource(file) || info.isInModuleSource(file) || info.hasLibraryClassRoot());
      });
    }
    return new CollectionQuery<>(result);
  }
  /**
   * Returns the package name for {@code dir}: the stored prefix if the directory is
   * a known root, otherwise the parent's package plus the directory name; null for
   * non-directories and ignored folders.
   */
  @Nullable
  String getPackageName(@NotNull final VirtualFile dir) {
    if (dir.isDirectory()) {
      if (ourFileTypes.isFileIgnored(dir)) {
        return null;
      }
      // containsKey + get rather than a single get: the map may store a null prefix
      // for a root, and that stored null must win over recursing to the parent.
      if (myPackagePrefixByRoot.containsKey(dir)) {
        return myPackagePrefixByRoot.get(dir);
      }
      final VirtualFile parent = dir.getParent();
      if (parent != null) {
        return getPackageNameForSubdir(getPackageName(parent), dir.getName());
      }
    }
    return null;
  }
/**
 * Appends {@code subdirName} to {@code parentPackageName} with a dot separator.
 * A {@code null} parent yields {@code null}; an empty parent yields the bare
 * subdirectory name (root package).
 */
private static String getPackageNameForSubdir(@Nullable String parentPackageName, @NotNull String subdirName) {
  if (parentPackageName == null) {
    return null;
  }
  if (parentPackageName.isEmpty()) {
    return subdirName;
  }
  return parentPackageName + "." + subdirName;
}
/**
 * @return list of all super-directories which are marked as some kind of root, or {@code null} if {@code deepDir} is under the ignored folder (with no nested roots)
 */
@Nullable("returns null only if dir is under ignored folder")
private static List<VirtualFile> getHierarchy(@NotNull VirtualFile deepDir, @NotNull Set<? extends VirtualFile> allRoots, @NotNull RootInfo info) {
  List<VirtualFile> hierarchy = new ArrayList<>();
  boolean hasContentRoots = false;
  // Walk from the deepest directory up to the filesystem root.
  for (VirtualFile dir = deepDir; dir != null; dir = dir.getParent()) {
    hasContentRoots |= info.contentRootOf.get(dir) != null;
    // An ignored ancestor kills the whole hierarchy — unless a content root
    // was already seen below it, in which case the ignore does not apply.
    if (!hasContentRoots && ourFileTypes.isFileIgnored(dir)) {
      return null;
    }
    // Collect only ancestors that are roots of some kind; result is ordered
    // from the deepest root upwards.
    if (allRoots.contains(dir)) {
      hierarchy.add(dir);
    }
  }
  return hierarchy;
}
/**
 * Mutable aggregate of every root known to the index (module content roots,
 * library/SDK class and source roots, exclusions, package prefixes), keyed by
 * {@link VirtualFile}. Queried while computing per-directory {@link DirectoryInfo}.
 */
private static class RootInfo {
  // getDirectoriesByPackageName used to be in this order, some clients might rely on that
  @NotNull private final Set<VirtualFile> classAndSourceRoots = new LinkedHashSet<>();

  // Roots contributed by libraries and SDKs, split by kind.
  @NotNull private final Set<VirtualFile> libraryOrSdkSources = new HashSet<>();
  @NotNull private final Set<VirtualFile> libraryOrSdkClasses = new HashSet<>();

  // Content root -> owning loaded module; or -> name of the unloaded module.
  @NotNull private final Map<VirtualFile, Module> contentRootOf = new HashMap<>();
  @NotNull private final Map<VirtualFile, String> contentRootOfUnloaded = new HashMap<>();

  // Source root -> modules that own it, and its SourceFolder descriptor.
  @NotNull private final MultiMap<VirtualFile, Module> sourceRootOf = MultiMap.createSet();
  @NotNull private final Map<VirtualFile, SourceFolder> sourceFolders = new HashMap<>();

  // Per-root library bookkeeping; values are Library or SyntheticLibrary instances.
  @NotNull private final MultiMap<VirtualFile, /*Library|SyntheticLibrary*/ Object> excludedFromLibraries = MultiMap.createSet();
  @NotNull private final MultiMap<VirtualFile, /*Library|SyntheticLibrary*/ Object> classOfLibraries = MultiMap.createSet();
  @NotNull private final MultiMap<VirtualFile, /*Library|SyntheticLibrary*/ Object> sourceOfLibraries = MultiMap.createSet();

  // Exclusion records of various origins.
  @NotNull private final Set<VirtualFile> excludedFromProject = new HashSet<>();
  @NotNull private final Set<VirtualFile> excludedFromSdkRoots = new HashSet<>();
  @NotNull private final Map<VirtualFile, Module> excludedFromModule = new HashMap<>();
  @NotNull private final Map<VirtualFile, FileTypeAssocTable<Boolean>> excludeFromContentRootTables = new HashMap<>();

  // Package prefixes registered for source roots.
  @NotNull private final Map<VirtualFile, String> packagePrefix = new HashMap<>();

  /** Union of all files that act as a root of any kind. */
  @NotNull
  Set<VirtualFile> getAllRoots() {
    Set<VirtualFile> result = new LinkedHashSet<>();
    result.addAll(classAndSourceRoots);
    result.addAll(contentRootOf.keySet());
    result.addAll(contentRootOfUnloaded.keySet());
    result.addAll(excludedFromLibraries.keySet());
    result.addAll(excludedFromModule.keySet());
    result.addAll(excludedFromProject);
    result.addAll(excludedFromSdkRoots);
    return result;
  }

  /**
   * Returns nearest content root for a file by its parent directories hierarchy. If the file is excluded (i.e. located under an excluded
   * root and there are no source roots on the path to the excluded root) returns {@code null}.
   */
  @Nullable
  private VirtualFile findNearestContentRoot(@NotNull List<? extends VirtualFile> hierarchy) {
    Collection<Module> sourceRootOwners = null;
    boolean underExcludedSourceRoot = false;
    // hierarchy is ordered from the file upwards, so the first match is the nearest.
    for (VirtualFile root : hierarchy) {
      Module module = contentRootOf.get(root);
      Module excludedFrom = excludedFromModule.get(root);
      if (module != null) {
        // A content root may carry filename-pattern exclusions of its own.
        FileTypeAssocTable<Boolean> table = excludeFromContentRootTables.get(root);
        if (table != null && isExcludedByPattern(root, hierarchy, table)) {
          excludedFrom = module;
        }
      }
      // Accept the content root unless this very module excluded it — except when
      // a source root of the same module was passed below an exclusion.
      if (module != null && (excludedFrom != module || underExcludedSourceRoot && sourceRootOwners.contains(module))) {
        return root;
      }
      if (excludedFrom != null || excludedFromProject.contains(root) || contentRootOfUnloaded.containsKey(root)) {
        // Excluded with no source root seen so far -> truly excluded.
        if (sourceRootOwners == null) {
          return null;
        }
        underExcludedSourceRoot = true;
      }
      if (!underExcludedSourceRoot && sourceRootOf.containsKey(root)) {
        Collection<Module> modulesForSourceRoot = sourceRootOf.get(root);
        if (!modulesForSourceRoot.isEmpty()) {
          // Accumulate owners of every source root passed on the way up.
          sourceRootOwners = sourceRootOwners == null ? modulesForSourceRoot : ContainerUtil.union(sourceRootOwners, modulesForSourceRoot);
        }
      }
    }
    return null;
  }

  /**
   * True if any file name in {@code hierarchy}, up to and including
   * {@code contentRoot}, matches an exclusion pattern from {@code table}.
   */
  private static boolean isExcludedByPattern(@NotNull VirtualFile contentRoot,
                                             @NotNull List<? extends VirtualFile> hierarchy,
                                             @NotNull FileTypeAssocTable<Boolean> table) {
    for (VirtualFile file : hierarchy) {
      if (table.findAssociatedFileType(file.getNameSequence()) != null) {
        return true;
      }
      // Patterns only apply below their content root; stop there.
      if (file.equals(contentRoot)) {
        break;
      }
    }
    return false;
  }

  /** Nearest ancestor that is a content root (loaded or unloaded), ignoring exclusions. */
  @Nullable
  private VirtualFile findNearestContentRootForExcluded(@NotNull List<? extends VirtualFile> hierarchy) {
    for (VirtualFile root : hierarchy) {
      if (contentRootOf.containsKey(root) || contentRootOfUnloaded.containsKey(root)) {
        return root;
      }
    }
    return null;
  }

  /**
   * @return root and set of libraries that provided it
   */
  @Nullable
  private Pair<VirtualFile, List<Condition<? super VirtualFile>>> findLibraryRootInfo(@NotNull List<? extends VirtualFile> hierarchy,
                                                                                      boolean source) {
    Set</*Library|SyntheticLibrary*/ Object> librariesToIgnore = createLibrarySet();
    for (VirtualFile root : hierarchy) {
      // Exclusions accumulated below shadow library roots found above.
      librariesToIgnore.addAll(excludedFromLibraries.get(root));
      if (source && libraryOrSdkSources.contains(root)) {
        List<Condition<? super VirtualFile>> found = findInLibraryProducers(root, sourceOfLibraries, librariesToIgnore);
        if (found != null) return Pair.create(root, found);
      }
      else if (!source && libraryOrSdkClasses.contains(root)) {
        List<Condition<? super VirtualFile>> found = findInLibraryProducers(root, classOfLibraries, librariesToIgnore);
        if (found != null) return Pair.create(root, found);
      }
    }
    return null;
  }

  /** Identity-ish set keyed on library name to cheapen hashCode for Library instances. */
  @NotNull
  private static Set</*Library|SyntheticLibrary*/ Object> createLibrarySet() {
    return CollectionFactory.createCustomHashingStrategySet(new HashingStrategy<>() {
      @Override
      public int hashCode(Object object) {
        // reduce complexity of hashCode calculation to speed it up
        return Objects.hashCode(object instanceof Library ? ((Library)object).getName() : object);
      }
      @Override
      public boolean equals(Object o1, Object o2) {
        return Objects.equals(o1, o2);
      }
    });
  }

  /**
   * Exclusion conditions of the libraries providing {@code root}. Returns an
   * empty list when no library registered the root, {@code null} when every
   * producing library is ignored or excludes the root itself.
   * NOTE(review): returns null but is not annotated @Nullable — callers rely on
   * the null contract (see findLibraryRootInfo).
   */
  private static List<Condition<? super VirtualFile>> findInLibraryProducers(@NotNull VirtualFile root,
                                                                             @NotNull MultiMap<VirtualFile, Object> libraryRoots,
                                                                             @NotNull Set<Object> librariesToIgnore) {
    if (!libraryRoots.containsKey(root)) {
      return Collections.emptyList();
    }
    Collection<Object> producers = libraryRoots.get(root);
    Set</*Library|SyntheticLibrary*/ Object> libraries = new HashSet<>(producers.size());
    List<Condition<? super VirtualFile>> exclusions = new SmartList<>();
    for (Object library : producers) {
      if (librariesToIgnore.contains(library)) continue;
      if (library instanceof SyntheticLibrary) {
        Condition<VirtualFile> exclusion = ((SyntheticLibrary)library).getExcludeFileCondition();
        if (exclusion != null) {
          exclusions.add(exclusion);
          // The library excludes its own root -> it does not actually provide it.
          if (exclusion.value(root)) {
            continue;
          }
        }
      }
      libraries.add(library);
    }
    if (!libraries.isEmpty()) {
      return exclusions;
    }
    return null;
  }

  /**
   * Package prefix for {@code root}: the prefix registered on {@code packageRoot}
   * extended with the dotted relative path from the package root down to the root.
   */
  private String calcPackagePrefix(@NotNull VirtualFile root, VirtualFile packageRoot) {
    String prefix = packagePrefix.get(packageRoot);
    if (prefix != null && !root.equals(packageRoot)) {
      assert packageRoot != null;
      String relative = VfsUtilCore.getRelativePath(root, packageRoot, '.');
      prefix = StringUtil.isEmpty(prefix) ? relative : prefix + '.' + relative;
    }
    return prefix;
  }

  /**
   * Nearest ancestor that acts as the package root for the given combination of
   * module content root and library class/source roots, or {@code null}.
   */
  @Nullable
  private VirtualFile findPackageRootInfo(@NotNull List<? extends VirtualFile> hierarchy,
                                          VirtualFile moduleContentRoot,
                                          VirtualFile libraryClassRoot,
                                          VirtualFile librarySourceRoot) {
    for (VirtualFile root : hierarchy) {
      // A module source root owned by the content root's module wins (unless
      // we are resolving inside library sources).
      if (moduleContentRoot != null &&
          sourceRootOf.get(root).contains(contentRootOf.get(moduleContentRoot)) &&
          librarySourceRoot == null) {
        return root;
      }
      if (root.equals(libraryClassRoot) || root.equals(librarySourceRoot)) {
        return root;
      }
      // Reaching a plain content root with no source/library roots -> no package root.
      if (root.equals(moduleContentRoot) && !sourceRootOf.containsKey(root) && librarySourceRoot == null && libraryClassRoot == null) {
        return null;
      }
    }
    return null;
  }

  /**
   * Order entries applicable to a directory, collected from library class/source
   * root entries along the hierarchy until the innermost owning root is passed.
   */
  @NotNull
  private Set<OrderEntry> getLibraryOrderEntries(@NotNull List<? extends VirtualFile> hierarchy,
                                                 @Nullable VirtualFile libraryClassRoot,
                                                 @Nullable VirtualFile librarySourceRoot,
                                                 @NotNull MultiMap<VirtualFile, OrderEntry> libClassRootEntries,
                                                 @NotNull MultiMap<VirtualFile, OrderEntry> libSourceRootEntries) {
    Set<OrderEntry> orderEntries = new LinkedHashSet<>();
    for (VirtualFile root : hierarchy) {
      if (root.equals(libraryClassRoot) && !sourceRootOf.containsKey(root)) {
        orderEntries.addAll(libClassRootEntries.get(root));
      }
      if (root.equals(librarySourceRoot) && libraryClassRoot == null) {
        orderEntries.addAll(libSourceRootEntries.get(root));
      }
      if (libClassRootEntries.containsKey(root) || sourceRootOf.containsKey(root) && librarySourceRoot == null) {
        break;
      }
    }
    return orderEntries;
  }

  /**
   * The module-source order entry for the module owning {@code moduleContentRoot},
   * found only if a source root of that module appears below any library class root.
   */
  @Nullable
  private ModuleSourceOrderEntry getModuleSourceEntry(@NotNull List<? extends VirtualFile> hierarchy,
                                                      @NotNull VirtualFile moduleContentRoot,
                                                      @NotNull MultiMap<VirtualFile, OrderEntry> libClassRootEntries) {
    Module module = contentRootOf.get(moduleContentRoot);
    for (VirtualFile root : hierarchy) {
      if (sourceRootOf.get(root).contains(module)) {
        return ContainerUtil.findInstance(ModuleRootManager.getInstance(module).getOrderEntries(), ModuleSourceOrderEntry.class);
      }
      // A library class root below the source root shadows module sources.
      if (libClassRootEntries.containsKey(root)) {
        return null;
      }
    }
    return null;
  }
}
/**
 * Builds the {@link DirectoryInfo} for {@code root} from the roots found along
 * its parent hierarchy, together with the package prefix for that directory
 * (second element of the pair, may be {@code null}).
 */
@NotNull
private static Pair<DirectoryInfo, String> calcDirectoryInfoAndPackagePrefix(@NotNull final VirtualFile root,
                                                                             @NotNull final List<? extends VirtualFile> hierarchy,
                                                                             @NotNull RootInfo info) {
  VirtualFile moduleContentRoot = info.findNearestContentRoot(hierarchy);
  Pair<VirtualFile, List<Condition<? super VirtualFile>>> librarySourceRootInfo = info.findLibraryRootInfo(hierarchy, true);
  VirtualFile librarySourceRoot = Pair.getFirst(librarySourceRootInfo);
  Pair<VirtualFile, List<Condition<? super VirtualFile>>> libraryClassRootInfo = info.findLibraryRootInfo(hierarchy, false);
  VirtualFile libraryClassRoot = Pair.getFirst(libraryClassRootInfo);
  // In project = under a module content root, or under a library root that was
  // not explicitly excluded from SDK roots.
  boolean inProject = moduleContentRoot != null ||
                      (libraryClassRoot != null || librarySourceRoot != null) && !info.excludedFromSdkRoots.contains(root);
  VirtualFile nearestContentRoot;
  if (inProject) {
    nearestContentRoot = moduleContentRoot;
  }
  else {
    // Excluded directories still remember their content root, if any;
    // with none at all the directory is plainly EXCLUDED.
    nearestContentRoot = info.findNearestContentRootForExcluded(hierarchy);
    if (nearestContentRoot == null) {
      return new Pair<>(NonProjectDirectoryInfo.EXCLUDED, null);
    }
  }
  VirtualFile sourceRoot = info.findPackageRootInfo(hierarchy, moduleContentRoot, null, librarySourceRoot);
  // Module source root is resolved without library-source influence.
  VirtualFile moduleSourceRoot = librarySourceRoot == null ? sourceRoot :
                                 info.findPackageRootInfo(hierarchy, moduleContentRoot, null, null);
  boolean inModuleSources = moduleSourceRoot != null;
  boolean inLibrarySource = librarySourceRoot != null;
  SourceFolder sourceFolder = moduleSourceRoot != null ? info.sourceFolders.get(moduleSourceRoot) : null;
  Module module = info.contentRootOf.get(nearestContentRoot);
  String unloadedModuleName = info.contentRootOfUnloaded.get(nearestContentRoot);
  FileTypeAssocTable<Boolean> contentExcludePatterns =
    moduleContentRoot != null ? info.excludeFromContentRootTables.get(moduleContentRoot) : null;
  Condition<? super VirtualFile> libraryExclusionPredicate = getLibraryExclusionPredicate(Pair.getSecond(librarySourceRootInfo));
  // Use the pattern-aware implementation only when some exclusion actually applies.
  DirectoryInfo directoryInfo = contentExcludePatterns != null || libraryExclusionPredicate != null
                                ? new DirectoryInfoWithExcludePatterns(root, module, nearestContentRoot, sourceRoot, sourceFolder,
                                                                       libraryClassRoot, inModuleSources, inLibrarySource, !inProject,
                                                                       contentExcludePatterns, libraryExclusionPredicate, unloadedModuleName)
                                : new DirectoryInfoImpl(root, module, nearestContentRoot, sourceRoot, sourceFolder,
                                                        libraryClassRoot, inModuleSources, inLibrarySource,
                                                        !inProject, unloadedModuleName);
  // Package prefix is computed relative to the class root when present.
  VirtualFile packageRoot = libraryClassRoot == null ? sourceRoot :
                            info.findPackageRootInfo(hierarchy, moduleContentRoot, libraryClassRoot, librarySourceRoot);
  String packagePrefix = info.calcPackagePrefix(root, packageRoot);
  return Pair.create(directoryInfo, packagePrefix);
}
/**
 * Combines the given exclusion conditions into one condition via logical OR.
 * Returns {@code null} when there is effectively nothing to exclude.
 */
@Nullable
private static Condition<? super VirtualFile> getLibraryExclusionPredicate(@Nullable List<? extends Condition<? super VirtualFile>> exclusions) {
  if (exclusions == null) {
    return null;
  }
  Condition<VirtualFile> combined = Conditions.alwaysFalse();
  for (Condition<? super VirtualFile> exclusion : exclusions) {
    combined = Conditions.or(combined, exclusion);
  }
  // alwaysFalse() is a shared instance, so identity comparison detects
  // "no condition was actually combined in".
  return combined == Conditions.<VirtualFile>alwaysFalse() ? null : combined;
}
/**
 * Order entries affecting the root directory of {@code info};
 * empty for non-project directory infos.
 */
@NotNull
List<OrderEntry> getOrderEntries(@NotNull DirectoryInfo info) {
  if (info instanceof DirectoryInfoImpl) {
    VirtualFile root = ((DirectoryInfoImpl)info).getRoot();
    return getOrderEntryGraph().getOrderEntries(root);
  }
  return Collections.emptyList();
}
/**
 * Names of the unloaded modules that depend on the given module,
 * as computed by the order-entry graph.
 */
@NotNull
Set<String> getDependentUnloadedModules(@NotNull Module module) {
  return getOrderEntryGraph().getDependentUnloadedModules(module);
}
/**
 * An LRU cache with synchronization around the primary cache operations (get() and insertion
 * of a newly created value). Other map operations are not synchronized.
 */
abstract static class SynchronizedSLRUCache<K, V> extends SLRUMap<K, V> {
  private final Object myLock = ObjectUtils.sentinel("Root index lock");

  SynchronizedSLRUCache(final int protectedQueueSize, final int probationalQueueSize) {
    super(protectedQueueSize, probationalQueueSize);
  }

  /**
   * Computes the value for a missing key. Invoked OUTSIDE the lock, so it must
   * be side-effect-tolerant: two threads may compute the same key concurrently.
   */
  @NotNull
  public abstract V createValue(@NotNull K key);

  @Override
  @NotNull
  public V get(K key) {
    V value;
    synchronized (myLock) {
      value = super.get(key);
      if (value != null) {
        return value;
      }
    }
    // Compute outside the lock to avoid blocking other readers; on a race the
    // last put() wins, which is acceptable for idempotent createValue().
    value = createValue(key);
    synchronized (myLock) {
      put(key, value);
    }
    return value;
  }
}
}
| {
"content_hash": "eec781e211784dd514a0aab1537702e8",
"timestamp": "",
"source": "github",
"line_count": 1139,
"max_line_length": 167,
"avg_line_length": 42.81035996488148,
"alnum_prop": 0.6631939459814196,
"repo_name": "dahlstrom-g/intellij-community",
"id": "6ca13ee5f95e1add9af47cf6fd5d642e1b93e6f8",
"size": "48761",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "platform/projectModel-impl/src/com/intellij/openapi/roots/impl/RootIndex.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?php
namespace GDAPI;
/**
 * A list-like API resource: wraps the `data` array of a collection response and
 * exposes it through ArrayAccess/Iterator/Countable, plus create/remove
 * operations against the collection's `self` link.
 */
class Collection extends Resource implements \ArrayAccess, \Iterator, \Countable
{
    /** @var array The collection members extracted from the response body. */
    private $data = array();

    /** @var int Current iterator position. */
    private $pos = 0;

    /**
     * @param string $clientId Identifier of the owning API client.
     * @param object|false $body Decoded response body; its `data` member, if
     *        present, is detached into $this->data before the parent sees it.
     */
    public function __construct($clientId, $body=false)
    {
        if ( $body && $body->data )
        {
            $this->data = $body->data;
            // Strip the member list so the parent stores only collection metadata.
            unset($body->data);
        }

        parent::__construct($clientId,$body);
    }

    /**
     * Looks up the schema field definition for $name on this collection's type.
     *
     * @param string $name Field name.
     * @return mixed|null Field definition, or null when the type is unknown.
     */
    protected function schemaField($name)
    {
        $type_name = $this->getType();
        $type = $this->getClient()->{$type_name};

        if ( !$type )
        {
            return null;
        }

        $field = $type->collectionField($name);
        return $field;
    }

    /* ArrayAccess */
    public function offsetExists($offset)
    {
        return isset($this->data[$offset]);
    }

    public function offsetGet($offset)
    {
        if ( isset($this->data[$offset]) )
            return $this->data[$offset];

        return null;
    }

    public function offsetSet($offset, $value)
    {
        // $collection[] = $x appends; $collection[$i] = $x assigns.
        if ( is_null($offset) )
        {
            $this->data[] = $value;
        }
        else
        {
            $this->data[$offset] = $value;
        }
    }

    public function offsetUnset($offset)
    {
        unset($this->data[$offset]);
    }
    /* End: ArrayAccess */

    /* Iterator */
    public function current()
    {
        return $this->data[$this->pos];
    }

    public function key()
    {
        return $this->pos;
    }

    public function next()
    {
        $this->pos++;
    }

    public function rewind()
    {
        $this->pos = 0;
    }

    public function valid()
    {
        return isset($this->data[$this->pos]);
    }
    /* End: Iterator */

    /* Countable */
    public function count()
    {
        return count($this->data);
    }
    /* End: Countable */

    /* Operations */

    /**
     * POSTs a new member to the collection.
     *
     * @param Resource|mixed $obj Resource (its meta is sent) or raw payload.
     * @return mixed Client response.
     */
    public function create($obj)
    {
        $data = ( $obj instanceof Resource ? $obj->getMeta() : $obj );

        $url = $this->getLink('self');
        $client = $this->getClient();
        return $client->request('POST', $url, array(), $data, Client::MIME_TYPE_JSON);
    }

    /**
     * DELETEs a member of the collection by id or by Resource instance.
     *
     * @param Resource|string $id_or_obj Resource (its id is used) or raw id.
     * @return mixed Client response.
     */
    public function remove($id_or_obj)
    {
        $id = ( $id_or_obj instanceof Resource ? $id_or_obj->getId() : $id_or_obj );

        $url = $this->getLink('self').'/'. urlencode($id);
        $client = $this->getClient();
        return $client->request('DELETE', $url);
    }
    /* End: Operations */
}
| {
"content_hash": "7c49371031c70739e04163986983daa3",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 82,
"avg_line_length": 18.26890756302521,
"alnum_prop": 0.5611775528978841,
"repo_name": "godaddy/gdapi-php",
"id": "da5a2bfa6698547b1190e25a61a894620cc3d6ef",
"size": "3309",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "class/Collection.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "61856"
}
],
"symlink_target": ""
} |
package merlin.ianusinc.ianusinchq.blocks;
import net.minecraft.block.Block;
import net.minecraft.block.BlockGlass;
import net.minecraft.block.BlockTorch;
import net.minecraft.block.BlockTrapDoor;
import net.minecraft.block.properties.IProperty;
import net.minecraft.block.state.BlockStateContainer;
import net.minecraft.block.state.IBlockState;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.util.BlockRenderLayer;
import net.minecraft.util.EnumBlockRenderType;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
/**
 * A decorative, light-emitting "quantum core" block rendered as a small cube
 * (its bounding box spans 0.4–0.6 on every axis) inside an otherwise empty
 * block space.
 *
 * Created by Seine Eiligkeit on 29.06.2017.
 */
public class IanusIncQuantumCore extends CustomBlock {

  /**
   * @param name         registry/unlocalized name passed to CustomBlock
   * @param hardness     mining hardness
   * @param resistance   explosion resistance
   * @param harvestLevel pickaxe tier required to harvest
   */
  public IanusIncQuantumCore(String name, float hardness, float resistance, int harvestLevel) {
    super(name, hardness, resistance);
    setHarvestLevel("pickaxe", harvestLevel);
    setCreativeTab(CreativeTabs.MISC);
    // NOTE(review): vanilla setLightLevel() expects a 0.0–1.0 fraction that is
    // scaled by 15; 10F is far outside that range — confirm intended brightness.
    setLightLevel(10F);
  }

  // Centered 0.2-wide cube used as both selection and collision box.
  protected static final AxisAlignedBB TOP_AABB = new AxisAlignedBB(0.4D, 0.4D, 0.4D, 0.6D, 0.6D, 0.6D);

  @Override
  public boolean isOpaqueCube(IBlockState state) {
    // Not a full opaque cube: neighbours must render their faces against it.
    return false;
  }

  @Override
  public boolean isFullCube(IBlockState state) {
    return false;
  }

  public BlockRenderLayer getBlockLayer() {
    return BlockRenderLayer.CUTOUT_MIPPED;
  }

  @Override
  public AxisAlignedBB getBoundingBox(IBlockState state, IBlockAccess source, BlockPos pos) {
    return TOP_AABB;
  }
}
| {
"content_hash": "2c6df1c3b8382236eb3011cd2e454209",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 106,
"avg_line_length": 32.132075471698116,
"alnum_prop": 0.7592483852025836,
"repo_name": "ConfusedMerlin/IanusInc",
"id": "75b265c97dd5fcf28749cc46aa20efcb7771e921",
"size": "1703",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/merlin/ianusinc/ianusinchq/blocks/IanusIncQuantumCore.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "47614"
}
],
"symlink_target": ""
} |
"""Tests for compute service with multiple compute nodes"""
from nova import context
from nova import exception
from nova.openstack.common import cfg
from nova.openstack.common import importutils
from nova import test
from nova.virt import fake
# Shared oslo.config object; import_opt pulls in the option definitions this
# module reads (compute_manager, compute_driver) from their declaring modules.
CONF = cfg.CONF
CONF.import_opt('compute_manager', 'nova.service')
CONF.import_opt('compute_driver', 'nova.virt.driver')
class BaseTestCase(test.TestCase):
    """Shared teardown: reset the fake virt driver's node list."""

    def tearDown(self):
        # Undo any fake.set_nodes() calls so node state does not leak
        # between test cases.
        fake.restore_nodes()
        super(BaseTestCase, self).tearDown()
class FakeDriverSingleNodeTestCase(BaseTestCase):
    """FakeDriver behaviour when it exposes exactly one hypervisor node."""

    def setUp(self):
        super(FakeDriverSingleNodeTestCase, self).setUp()
        self.driver = fake.FakeDriver(virtapi=None)
        fake.set_nodes(['xyz'])

    def test_get_host_stats(self):
        stats = self.driver.get_host_stats()
        # With a single node the driver returns a plain dict, not a list.
        # assertIsInstance gives a clearer failure message than
        # assertTrue(isinstance(...)).
        self.assertIsInstance(stats, dict)
        self.assertEqual(stats['hypervisor_hostname'], 'xyz')

    def test_get_available_resource(self):
        res = self.driver.get_available_resource('xyz')
        self.assertEqual(res['hypervisor_hostname'], 'xyz')
class FakeDriverMultiNodeTestCase(BaseTestCase):
    """FakeDriver behaviour when it exposes several hypervisor nodes."""

    def setUp(self):
        super(FakeDriverMultiNodeTestCase, self).setUp()
        self.driver = fake.FakeDriver(virtapi=None)
        fake.set_nodes(['aaa', 'bbb'])

    def test_get_host_stats(self):
        stats = self.driver.get_host_stats()
        # With multiple nodes the driver returns one stats dict per node.
        # assertIsInstance gives a clearer failure message than
        # assertTrue(isinstance(...)).
        self.assertIsInstance(stats, list)
        self.assertEqual(len(stats), 2)
        self.assertEqual(stats[0]['hypervisor_hostname'], 'aaa')
        self.assertEqual(stats[1]['hypervisor_hostname'], 'bbb')

    def test_get_available_resource(self):
        res_a = self.driver.get_available_resource('aaa')
        self.assertEqual(res_a['hypervisor_hostname'], 'aaa')

        res_b = self.driver.get_available_resource('bbb')
        self.assertEqual(res_b['hypervisor_hostname'], 'bbb')

        # An unknown nodename must be rejected.
        self.assertRaises(exception.NovaException,
                          self.driver.get_available_resource, 'xxx')
class MultiNodeComputeTestCase(BaseTestCase):
    """Resource-tracker bookkeeping in the compute manager across nodes."""

    def setUp(self):
        super(MultiNodeComputeTestCase, self).setUp()
        self.flags(compute_driver='nova.virt.fake.FakeDriver')
        self.compute = importutils.import_object(CONF.compute_manager)

    def _tracked_nodes(self):
        # Sorted node names currently known to the manager's per-node
        # resource trackers; sorted(d) iterates the dict's keys directly.
        return sorted(self.compute._resource_tracker_dict)

    def test_update_available_resource_add_remove_node(self):
        ctx = context.get_admin_context()

        # Nodes reported by the driver get a resource tracker each...
        fake.set_nodes(['A', 'B', 'C'])
        self.compute.update_available_resource(ctx)
        self.assertEqual(self._tracked_nodes(), ['A', 'B', 'C'])

        # ...a node removed from the driver loses its tracker...
        fake.set_nodes(['A', 'B'])
        self.compute.update_available_resource(ctx)
        self.assertEqual(self._tracked_nodes(), ['A', 'B'])

        # ...and a re-added node gets a tracker again.
        fake.set_nodes(['A', 'B', 'C'])
        self.compute.update_available_resource(ctx)
        self.assertEqual(self._tracked_nodes(), ['A', 'B', 'C'])
| {
"content_hash": "f3fa6fec8999560d9ea08042d9ec5c25",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 76,
"avg_line_length": 35.95180722891566,
"alnum_prop": 0.6511394101876675,
"repo_name": "fajoy/nova",
"id": "afce7ae908344fbf323de839221cae3483f651ff",
"size": "3664",
"binary": false,
"copies": "1",
"ref": "refs/heads/grizzly-2",
"path": "nova/tests/compute/test_multiple_nodes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16002"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "7567423"
},
{
"name": "Shell",
"bytes": "15428"
}
],
"symlink_target": ""
} |
package ch.awae.mcddpui.exceptions
/**
 * Common base class for all custom exceptions of the MCDDPUI library.
 *
 * @author Andreas Waelchli <andreas.waelchli@me.com>
 * @version 1.1 (0.2.0), 2016-03-01
 * @since MCDDPUI 0.2.0
 *
 * @constructor creates a new exception instance
 * @param msg a descriptive message; may be `null`
 * @param cause the throwable that resulted in this exception; may be `null`
 */
class MCDDPUIException(msg: String = null, cause: Throwable = null) extends Exception(msg, cause)
"content_hash": "cb0c3df59a5a41c55ccb18f9cd241b6c",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 97,
"avg_line_length": 35.42857142857143,
"alnum_prop": 0.7217741935483871,
"repo_name": "ksmonkey123/mcddpui",
"id": "422a1fc1611fd8e27f950ae4b59478c110f414b6",
"size": "496",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/scala/ch/awae/mcddpui/exceptions/MCDDPUIException.scala",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "91"
},
{
"name": "Scala",
"bytes": "69171"
},
{
"name": "Shell",
"bytes": "197"
}
],
"symlink_target": ""
} |
package org.apache.drill.metastore.iceberg.operate;
import com.typesafe.config.ConfigValueFactory;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.metastore.iceberg.IcebergBaseTest;
import org.apache.drill.metastore.iceberg.config.IcebergConfigConstants;
import org.apache.drill.metastore.iceberg.exceptions.IcebergMetastoreException;
import org.apache.iceberg.Table;
import org.junit.Test;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code ExpirationHandler}: parsing of the expiration-period
 * config (per-time-unit sub-keys summed to milliseconds) and the expire()
 * gating behaviour for zero/negative/short periods.
 */
public class TestExpirationHandler extends IcebergBaseTest {

  @Test
  public void testConfigEmpty() {
    // No expiration period configured -> period is 0 (disabled).
    ExpirationHandler expirationHandler = new ExpirationHandler(DrillConfig.create(), baseHadoopConfig());
    assertEquals(0, expirationHandler.expirationPeriod());
  }

  @Test
  public void testConfigOneUnit() {
    // A single "hours" sub-key is converted to milliseconds.
    DrillConfig config = new DrillConfig(DrillConfig.create()
      .withValue(IcebergConfigConstants.EXPIRATION_PERIOD + ".hours",
        ConfigValueFactory.fromAnyRef(5)));

    ExpirationHandler expirationHandler = new ExpirationHandler(config, baseHadoopConfig());
    assertEquals(TimeUnit.HOURS.toMillis(5), expirationHandler.expirationPeriod());
  }

  @Test
  public void testConfigSeveralUnits() {
    // Multiple time-unit sub-keys are summed.
    DrillConfig config = new DrillConfig(DrillConfig.create()
      .withValue(IcebergConfigConstants.EXPIRATION_PERIOD + ".hours",
        ConfigValueFactory.fromAnyRef(5))
      .withValue(IcebergConfigConstants.EXPIRATION_PERIOD + ".minutes",
        ConfigValueFactory.fromAnyRef(10)));

    ExpirationHandler expirationHandler = new ExpirationHandler(config, baseHadoopConfig());
    assertEquals(TimeUnit.HOURS.toMillis(5) + TimeUnit.MINUTES.toMillis(10),
      expirationHandler.expirationPeriod());
  }

  @Test
  public void testConfigNegativeValue() {
    // Negative values are accepted as-is by the parser.
    DrillConfig config = new DrillConfig(DrillConfig.create()
      .withValue(IcebergConfigConstants.EXPIRATION_PERIOD + ".hours",
        ConfigValueFactory.fromAnyRef(-5)));

    ExpirationHandler expirationHandler = new ExpirationHandler(config, baseHadoopConfig());
    assertEquals(TimeUnit.HOURS.toMillis(-5), expirationHandler.expirationPeriod());
  }

  @Test
  public void testConfigIncorrectUnit() {
    // An unknown time-unit key ("hour" vs "hours") must be rejected.
    DrillConfig config = new DrillConfig(DrillConfig.create()
      .withValue(IcebergConfigConstants.EXPIRATION_PERIOD + ".hour",
        ConfigValueFactory.fromAnyRef(5)));

    thrown.expect(IcebergMetastoreException.class);

    new ExpirationHandler(config, baseHadoopConfig());
  }

  @Test
  public void testConfigIncorrectValue() {
    // A non-numeric value must be rejected.
    DrillConfig config = new DrillConfig(DrillConfig.create()
      .withValue(IcebergConfigConstants.EXPIRATION_PERIOD + ".hours",
        ConfigValueFactory.fromAnyRef("abc")));

    thrown.expect(IcebergMetastoreException.class);

    new ExpirationHandler(config, baseHadoopConfig());
  }

  @Test
  public void testExpireZeroExpirationPeriod() {
    // Period 0 disables expiration entirely.
    DrillConfig config = new DrillConfig(DrillConfig.create()
      .withValue(IcebergConfigConstants.EXPIRATION_PERIOD + ".millis",
        ConfigValueFactory.fromAnyRef(0)));

    ExpirationHandler expirationHandler = new ExpirationHandler(config, baseHadoopConfig());

    Table table = mock(Table.class);

    assertFalse(expirationHandler.expire(table));
  }

  @Test
  public void testExpireNegativeExpirationPeriod() {
    // A negative period likewise disables expiration.
    DrillConfig config = new DrillConfig(DrillConfig.create()
      .withValue(IcebergConfigConstants.EXPIRATION_PERIOD + ".millis",
        ConfigValueFactory.fromAnyRef(-10)));

    ExpirationHandler expirationHandler = new ExpirationHandler(config, baseHadoopConfig());

    Table table = mock(Table.class);

    assertFalse(expirationHandler.expire(table));
  }

  @Test
  public void testExpireFirstTime() {
    // The first call for a table only records the timestamp; nothing expires yet.
    DrillConfig config = new DrillConfig(DrillConfig.create()
      .withValue(IcebergConfigConstants.EXPIRATION_PERIOD + ".millis",
        ConfigValueFactory.fromAnyRef(1)));

    ExpirationHandler expirationHandler = new ExpirationHandler(config, baseHadoopConfig());

    Table table = mock(Table.class);
    when(table.location()).thenReturn("/tmp/table");

    assertFalse(expirationHandler.expire(table));
  }

  @Test
  public void testExpireBefore() {
    // With a one-day period, two immediate calls must both decline to expire.
    DrillConfig config = new DrillConfig(DrillConfig.create()
      .withValue(IcebergConfigConstants.EXPIRATION_PERIOD + ".days",
        ConfigValueFactory.fromAnyRef(1)));

    ExpirationHandler expirationHandler = new ExpirationHandler(config, baseHadoopConfig());

    Table table = mock(Table.class);
    when(table.location()).thenReturn("/tmp/table");

    assertFalse(expirationHandler.expire(table));
    assertFalse(expirationHandler.expire(table));
  }
}
| {
"content_hash": "1b148559f6dbfbd1d9a7f781da83c268",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 106,
"avg_line_length": 37.62992125984252,
"alnum_prop": 0.7570621468926554,
"repo_name": "Ben-Zvi/drill",
"id": "90ef55b2b78ec75406e11a8d2f10c2b1a402b1db",
"size": "5580",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "metastore/iceberg-metastore/src/test/java/org/apache/drill/metastore/iceberg/operate/TestExpirationHandler.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "21592"
},
{
"name": "Batchfile",
"bytes": "7649"
},
{
"name": "C",
"bytes": "31425"
},
{
"name": "C++",
"bytes": "592989"
},
{
"name": "CMake",
"bytes": "24975"
},
{
"name": "CSS",
"bytes": "15036"
},
{
"name": "Dockerfile",
"bytes": "1440"
},
{
"name": "FreeMarker",
"bytes": "191783"
},
{
"name": "Java",
"bytes": "29529379"
},
{
"name": "JavaScript",
"bytes": "81650"
},
{
"name": "PLSQL",
"bytes": "2685"
},
{
"name": "Python",
"bytes": "5388"
},
{
"name": "Shell",
"bytes": "100838"
},
{
"name": "TSQL",
"bytes": "6340"
}
],
"symlink_target": ""
} |
layout: model
title: English RoBertaForSequenceClassification Cased model (from aarnphm)
author: John Snow Labs
name: roberta_classifier_finetune_emotion_distilroberta
date: 2022-09-19
tags: [en, open_source, roberta, sequence_classification, classification]
task: Text Classification
language: en
edition: Spark NLP 4.1.0
spark_version: 3.0
supported: true
annotator: RoBertaForSequenceClassification
article_header:
type: cover
use_language_switcher: "Python-Scala-Java"
---
## Description
Pretrained RoBertaForSequenceClassification model, adapted from Hugging Face and curated to provide scalability and production-readiness using Spark NLP. `finetune_emotion_distilroberta` is a English model originally trained by `aarnphm`.
## Predicted Entities
`surprise`, `love`, `joy`, `fear`, `sadness`, `anger`
{:.btn-box}
<button class="button button-orange" disabled>Live Demo</button>
<button class="button button-orange" disabled>Open in Colab</button>
[Download](https://s3.amazonaws.com/auxdata.johnsnowlabs.com/public/models/roberta_classifier_finetune_emotion_distilroberta_en_4.1.0_3.0_1663608624632.zip){:.button.button-orange.button-orange-trans.arr.button-icon}
## How to use
<div class="tabs-box" markdown="1">
{% include programmingLanguageSelectScalaPythonNLU.html %}
```python
documentAssembler = DocumentAssembler() \
.setInputCols(["text"]) \
.setOutputCols("document")
tokenizer = Tokenizer() \
.setInputCols("document") \
.setOutputCol("token")
seq_classifier = RoBertaForSequenceClassification.pretrained("roberta_classifier_finetune_emotion_distilroberta","en") \
.setInputCols(["document", "token"]) \
.setOutputCol("class")
pipeline = Pipeline(stages=[documentAssembler, tokenizer, seq_classifier])
data = spark.createDataFrame([["PUT YOUR STRING HERE"]]).toDF("text")
result = pipeline.fit(data).transform(data)
```
```scala
val documentAssembler = new DocumentAssembler()
.setInputCols(Array("text"))
.setOutputCols(Array("document"))
val tokenizer = new Tokenizer()
.setInputCols("document")
.setOutputCol("token")
val seq_classifier = RoBertaForSequenceClassification.pretrained("roberta_classifier_finetune_emotion_distilroberta","en")
.setInputCols(Array("document", "token"))
.setOutputCol("class")
val pipeline = new Pipeline().setStages(Array(documentAssembler, tokenizer, seq_classifier))
val data = Seq("PUT YOUR STRING HERE").toDS.toDF("text")
val result = pipeline.fit(data).transform(data)
```
</div>
{:.model-param}
## Model Information
{:.table-model}
|---|---|
|Model Name:|roberta_classifier_finetune_emotion_distilroberta|
|Compatibility:|Spark NLP 4.1.0+|
|License:|Open Source|
|Edition:|Official|
|Input Labels:|[document, token]|
|Output Labels:|[class]|
|Language:|en|
|Size:|309.2 MB|
|Case sensitive:|true|
|Max sentence length:|256|
## References
- https://huggingface.co/aarnphm/finetune_emotion_distilroberta | {
"content_hash": "6b87c2c295cdd6e7a638f677d9fea385",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 238,
"avg_line_length": 31.063157894736843,
"alnum_prop": 0.7475432056929854,
"repo_name": "JohnSnowLabs/spark-nlp",
"id": "cef8749dd580ac597ba27b12349024ab8c58dab9",
"size": "2955",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/_posts/murat-gunay/2022-09-19-roberta_classifier_finetune_emotion_distilroberta_en.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "14452"
},
{
"name": "Java",
"bytes": "223289"
},
{
"name": "Makefile",
"bytes": "819"
},
{
"name": "Python",
"bytes": "1694517"
},
{
"name": "Scala",
"bytes": "4116435"
},
{
"name": "Shell",
"bytes": "5286"
}
],
"symlink_target": ""
} |
Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License. | {
"content_hash": "d3f3379f5aead03fbfa2db9ad4ab3d6e",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 78,
"avg_line_length": 57.333333333333336,
"alnum_prop": 0.7214508851093371,
"repo_name": "emrodri/random",
"id": "a31e496531489c8a8b649467ee2f7e2a382cde41",
"size": "11524",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "license.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "204"
}
],
"symlink_target": ""
} |
namespace Trigger.NET
{
    using System;

    /// <summary>
    /// Factory abstraction for obtaining a logger scoped to a single job.
    /// </summary>
    public interface ILoggerFactory
    {
        /// <summary>
        /// Returns a logger for the given job.
        /// </summary>
        /// <param name="jobType">CLR type of the job implementation.</param>
        /// <param name="jobId">Identifier of the job the logger is created for.</param>
        ILogger GetLogger(Type jobType, Guid jobId);
    }
}
| {
"content_hash": "5c35a8a0fd16cac326573ee92da2acc1",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 52,
"avg_line_length": 17.333333333333332,
"alnum_prop": 0.6089743589743589,
"repo_name": "Novakov/trigger.net",
"id": "2809614f0685463d0d1836092a7bc6d0df2cb629",
"size": "158",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/Trigger.NET/ILoggerFactory.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "62452"
},
{
"name": "CSS",
"bytes": "4203"
},
{
"name": "F#",
"bytes": "2463"
},
{
"name": "JavaScript",
"bytes": "1295"
},
{
"name": "Shell",
"bytes": "78"
}
],
"symlink_target": ""
} |
using System;
using System.Net.NetworkInformation.Unit.Tests;
namespace Windows.Networking.Connectivity
{
    /// <summary>
    /// Connectivity levels mirroring the WinRT NetworkConnectivityLevel values.
    /// </summary>
    public enum NetworkConnectivityLevel
    {
        None = 0,
        LocalAccess = 1,
        ConstrainedInternetAccess = 2,
        InternetAccess = 3
    }

    /// <summary>
    /// Fake connection profile for tests; reports whatever connectivity level
    /// <c>FakeNetwork</c> is currently configured with.
    /// </summary>
    public class ConnectionProfile
    {
        public ConnectionProfile()
        {
        }

        /// <summary>Returns the connectivity level configured on FakeNetwork.</summary>
        public NetworkConnectivityLevel GetNetworkConnectivityLevel()
        {
            return FakeNetwork.NetworkConnectivityLevel;
        }
    }

    /// <summary>
    /// Fake replacement for the WinRT NetworkInformation static class.
    /// </summary>
    public class NetworkInformation
    {
        /// <summary>
        /// Returns a profile when FakeNetwork says one is present, otherwise null.
        /// </summary>
        public static ConnectionProfile GetInternetConnectionProfile()
        {
            return FakeNetwork.IsConnectionProfilePresent ? new ConnectionProfile() : null;
        }
    }
}
| {
"content_hash": "837517a1be7f04496a7b0b5027a044a7",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 70,
"avg_line_length": 21.7,
"alnum_prop": 0.5714285714285714,
"repo_name": "alexandrnikitin/corefx",
"id": "c5a3627bc1452a48830c85bb18fb0424a04515cc",
"size": "1020",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "src/System.Net.NetworkInformation/tests/UnitTests/Fakes/FakeWinRT.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1782"
},
{
"name": "C",
"bytes": "151134"
},
{
"name": "C#",
"bytes": "100304726"
},
{
"name": "C++",
"bytes": "275301"
},
{
"name": "CMake",
"bytes": "14013"
},
{
"name": "Groovy",
"bytes": "14325"
},
{
"name": "PowerShell",
"bytes": "457"
},
{
"name": "Python",
"bytes": "1535"
},
{
"name": "Shell",
"bytes": "22240"
},
{
"name": "Smalltalk",
"bytes": "1768"
},
{
"name": "Visual Basic",
"bytes": "827770"
}
],
"symlink_target": ""
} |
// -----------------------------------------------------------------------
// <copyright file="NoteViewModel.cs" company="Nodine Legal, LLC">
// Licensed to Nodine Legal, LLC under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Nodine Legal, LLC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// </copyright>
// -----------------------------------------------------------------------
namespace OpenLawOffice.Web.ViewModels.Notes
{
using System;
using AutoMapper;
using OpenLawOffice.Common.Models;
using System.Collections.Generic;
[MapMe]
public class NoteViewModel : CoreViewModel
{
    public Guid? Id { get; set; }

    public string Title { get; set; }

    public string Body { get; set; }

    public DateTime? Timestamp { get; set; }

    // Contact ids selected for notification in the UI.
    public string[] NotifyContactIds { get; set; }

    public List<NoteNotificationViewModel> NoteNotifications { get; set; }

    /// <summary>
    /// Registers the AutoMapper mappings between Common.Models.Notes.Note and
    /// this view model, in both directions. User references (CreatedBy,
    /// ModifiedBy, DisabledBy) are mapped as stub UsersViewModel instances
    /// carrying only the PId.
    /// </summary>
    public void BuildMappings()
    {
        Mapper.CreateMap<Common.Models.Notes.Note, NoteViewModel>()
            .ForMember(dst => dst.IsStub, opt => opt.UseValue(false))
            .ForMember(dst => dst.Created, opt => opt.MapFrom(src => src.Created))
            .ForMember(dst => dst.Modified, opt => opt.MapFrom(src => src.Modified))
            .ForMember(dst => dst.Disabled, opt => opt.MapFrom(src => src.Disabled))
            // NOTE(review): unlike DisabledBy below, CreatedBy/ModifiedBy are
            // assumed non-null on the model side — confirm upstream invariant.
            .ForMember(dst => dst.CreatedBy, opt => opt.ResolveUsing(db =>
            {
                return new ViewModels.Account.UsersViewModel()
                {
                    PId = db.CreatedBy.PId,
                    IsStub = true
                };
            }))
            .ForMember(dst => dst.ModifiedBy, opt => opt.ResolveUsing(db =>
            {
                return new ViewModels.Account.UsersViewModel()
                {
                    PId = db.ModifiedBy.PId,
                    IsStub = true
                };
            }))
            .ForMember(dst => dst.DisabledBy, opt => opt.ResolveUsing(db =>
            {
                if (db.DisabledBy == null || !db.DisabledBy.PId.HasValue) return null;
                return new ViewModels.Account.UsersViewModel()
                {
                    PId = db.DisabledBy.PId.Value,
                    IsStub = true
                };
            }))
            .ForMember(dst => dst.Id, opt => opt.MapFrom(src => src.Id))
            .ForMember(dst => dst.Title, opt => opt.MapFrom(src => src.Title))
            .ForMember(dst => dst.Body, opt => opt.MapFrom(src => src.Body))
            .ForMember(dst => dst.Timestamp, opt => opt.MapFrom(src => src.Timestamp))
            // Notification fields are populated by the controller, not the mapper.
            .ForMember(dst => dst.NotifyContactIds, opt => opt.Ignore())
            .ForMember(dst => dst.NoteNotifications, opt => opt.Ignore());

        Mapper.CreateMap<NoteViewModel, Common.Models.Notes.Note>()
            .ForMember(dst => dst.Created, opt => opt.MapFrom(src => src.Created))
            .ForMember(dst => dst.Modified, opt => opt.MapFrom(src => src.Modified))
            .ForMember(dst => dst.Disabled, opt => opt.MapFrom(src => src.Disabled))
            .ForMember(dst => dst.CreatedBy, opt => opt.ResolveUsing(x =>
            {
                if (x.CreatedBy == null || !x.CreatedBy.PId.HasValue)
                    return null;
                return new ViewModels.Account.UsersViewModel()
                {
                    PId = x.CreatedBy.PId
                };
            }))
            .ForMember(dst => dst.ModifiedBy, opt => opt.ResolveUsing(x =>
            {
                // BUG FIX: this guard previously tested x.CreatedBy (copy/paste
                // error), which dereferenced x.ModifiedBy.PId when ModifiedBy was
                // null and skipped the mapping whenever CreatedBy was null.
                if (x.ModifiedBy == null || !x.ModifiedBy.PId.HasValue)
                    return null;
                return new ViewModels.Account.UsersViewModel()
                {
                    PId = x.ModifiedBy.PId
                };
            }))
            .ForMember(dst => dst.DisabledBy, opt => opt.ResolveUsing(x =>
            {
                if (x.DisabledBy == null || !x.DisabledBy.PId.HasValue)
                    return null;
                return new ViewModels.Account.UsersViewModel()
                {
                    PId = x.DisabledBy.PId.Value
                };
            }))
            .ForMember(dst => dst.Id, opt => opt.MapFrom(src => src.Id))
            .ForMember(dst => dst.Title, opt => opt.MapFrom(src => src.Title))
            .ForMember(dst => dst.Body, opt => opt.MapFrom(src => src.Body))
            .ForMember(dst => dst.Timestamp, opt => opt.MapFrom(src => src.Timestamp));
    }
}
} | {
"content_hash": "dce4ef83b25575375b019c0465738685",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 91,
"avg_line_length": 46.46218487394958,
"alnum_prop": 0.4966540061493941,
"repo_name": "NodineLegal/OpenLawOffice.Web",
"id": "ebbb9f3e08ff7ecfa24f6db0ef0335d476d6a00d",
"size": "5531",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ViewModels/Notes/NoteViewModel.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "109"
},
{
"name": "C#",
"bytes": "2582354"
},
{
"name": "CSS",
"bytes": "28673"
},
{
"name": "HTML",
"bytes": "213"
},
{
"name": "JavaScript",
"bytes": "2763814"
}
],
"symlink_target": ""
} |
import styled from 'styled-components';
// Styled anchor for clickable text links.
// NOTE(review): the :hover color equals the base color (#727272), so hovering
// produces no visual change — possibly intentional, confirm with design.
const Link = styled.a`
  font-size: 16px;
  color: #727272;
  cursor: pointer;
  font-family: 'Open Sans', sans-serif;
  &:hover {
    color: #727272;
  }
`;

export default Link;
"content_hash": "0961c27fa69ce5d36e37d65419b24431",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 39,
"avg_line_length": 15.928571428571429,
"alnum_prop": 0.6457399103139013,
"repo_name": "amaralflavio/cmovies",
"id": "eab58ce52a532e3fde8964dbb62635a5c60e9d35",
"size": "223",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/components/Link.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "609"
},
{
"name": "HTML",
"bytes": "686"
},
{
"name": "JavaScript",
"bytes": "28536"
}
],
"symlink_target": ""
} |
<link rel="import" href="chrome://resources/polymer/v1_0/polymer/polymer.html">
<link rel="import" href="chrome://resources/polymer/v1_0/paper-radio-button/paper-radio-button.html">
<link rel="import" href="chrome://resources/polymer/v1_0/paper-radio-group/paper-radio-group.html">
<link rel="import" href="chrome://resources/cr_elements/v1_0/cr_button/cr_button.html">
<link rel="import" href="chrome://resources/cr_elements/v1_0/cr_checkbox/cr_checkbox.html">
<link rel="import" href="chrome://md-settings/checkbox/checkbox.html">
<dom-module id="cr-settings-sync-page">
  <link rel="import" type="css"
      href="chrome://md-settings/settings_page/settings_page.css">
  <link rel="import" type="css" href="sync_page.css">
  <template>
    <div i18n-content="syncPageTitle"></div>
    <!-- Choice between syncing everything and picking individual data types. -->
    <select>
      <option value="sync-everything"
          i18n-content="syncEverythingMenuOption">
      </option>
      <option value="choose-what-to-sync"
          i18n-content="chooseWhatToSyncMenuOption">
      </option>
    </select>
    <!-- Per-data-type toggles, grouped into three rows of three checkboxes. -->
    <div class="checkbox-container">
      <div class="checkbox-container-row">
        <cr-checkbox>
          <span i18n-content="appCheckboxLabel"></span>
        </cr-checkbox>
        <cr-checkbox>
          <span i18n-content="extensionsCheckboxLabel"></span>
        </cr-checkbox>
        <cr-checkbox>
          <span i18n-content="settingsCheckboxLabel"></span>
        </cr-checkbox>
      </div>
      <div class="checkbox-container-row">
        <cr-checkbox>
          <span i18n-content="autofillCheckboxLabel"></span>
        </cr-checkbox>
        <cr-checkbox>
          <span i18n-content="historyCheckboxLabel"></span>
        </cr-checkbox>
        <cr-checkbox>
          <span i18n-content="themesAndWallpapersCheckboxLabel"></span>
        </cr-checkbox>
      </div>
      <div class="checkbox-container-row">
        <cr-checkbox>
          <span i18n-content="bookmarksCheckboxLabel"></span>
        </cr-checkbox>
        <cr-checkbox>
          <span i18n-content="passwordsCheckboxLabel"></span>
        </cr-checkbox>
        <cr-checkbox>
          <span i18n-content="openTabsCheckboxLabel"></span>
        </cr-checkbox>
      </div>
    </div>
    <!-- Encryption options; Google-credential encryption is preselected. -->
    <div i18n-content="encryptionOptionsTitle"></div>
    <div i18n-content="syncDataEncryptedText"></div>
    <paper-radio-group selected="encrypt-with-google">
      <paper-radio-button name="encrypt-with-google"
          i18n-content="encryptWithGoogleCredentialsLabel">
      </paper-radio-button>
      <paper-radio-button name="encrypt-with-passphrase"
          i18n-content="encryptWithSyncPassphraseLabel">
      </paper-radio-button>
    </paper-radio-group>
    <div>
      <cr-button i18n-content="useDefaultSettingsButton"></cr-button>
      <cr-button i18n-content="cancelButton"></cr-button>
      <cr-button i18n-content="okButton"></cr-button>
    </div>
  </template>
  <script src="sync_page.js"></script>
</dom-module>
| {
"content_hash": "e9851af5e4149f73823d1b81ee44b049",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 101,
"avg_line_length": 39.82432432432432,
"alnum_prop": 0.6406515100101798,
"repo_name": "ltilve/ChromiumGStreamerBackend",
"id": "e8f89ba434deb2dbb751ca4ddc8ecad9160441ec",
"size": "2947",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "chrome/browser/resources/settings/sync_page/sync_page.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "37073"
},
{
"name": "Batchfile",
"bytes": "8451"
},
{
"name": "C",
"bytes": "9568645"
},
{
"name": "C++",
"bytes": "246813997"
},
{
"name": "CSS",
"bytes": "943687"
},
{
"name": "DM",
"bytes": "60"
},
{
"name": "Groff",
"bytes": "2494"
},
{
"name": "HTML",
"bytes": "27371019"
},
{
"name": "Java",
"bytes": "15348315"
},
{
"name": "JavaScript",
"bytes": "20872607"
},
{
"name": "Makefile",
"bytes": "70983"
},
{
"name": "Objective-C",
"bytes": "2029825"
},
{
"name": "Objective-C++",
"bytes": "10156554"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "182741"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "494625"
},
{
"name": "Python",
"bytes": "8594611"
},
{
"name": "Shell",
"bytes": "486464"
},
{
"name": "Standard ML",
"bytes": "5106"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
} |
package io.leishvl.core.data.mongodb;
import static io.leishvl.core.data.mongodb.MapKeyConverters.escapeMongo;
import static io.leishvl.core.data.mongodb.MapKeyConverters.unescapeMongo;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Arrays.asList;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.List;
import org.openprovenance.prov.interop.InteropFramework;
import org.openprovenance.prov.interop.InteropFramework.ProvFormat;
import org.openprovenance.prov.model.Document;
import org.springframework.core.convert.converter.Converter;
import org.springframework.data.convert.ReadingConverter;
import org.springframework.data.convert.WritingConverter;
import com.mongodb.DBObject;
/**
* A Spring Data converter to convert W3C PROV data model representations.
* @author Erik Torres <ertorser@upv.es>
*/
public class ProvConverters {
/**
 * Lists the W3C PROV converters to be registered with Spring Data MongoDB.
 *
 * @return the reading converter followed by the writing converter
 */
public static List<Converter<?, ?>> getProvConvertersToRegister() {
  final Converter<?, ?> reader = DBObjectToProvDocumentConverter.INSTANCE;
  final Converter<?, ?> writer = ProvDocumentToDBObjectConverter.INSTANCE;
  return asList(reader, writer);
}
@WritingConverter
public static enum ProvDocumentToDBObjectConverter implements Converter<Document, DBObject> {
  INSTANCE;

  /**
   * Serializes a W3C PROV document to JSON with the interoperability framework,
   * then escapes the characters MongoDB does not allow in field names.
   */
  @Override
  public DBObject convert(final Document source) {
    try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
      new InteropFramework().writeDocument(outputStream, ProvFormat.JSON, source);
      final String json = outputStream.toString(UTF_8.name());
      return escapeMongo(json);
    } catch (Exception cause) {
      throw new IllegalStateException("Failed to convert W3C PROV Document to MongoDB DBObject", cause);
    }
  }
}
/**
 * Un-escape MongoDB's unsupported characters and create a W3C PROV document using the interoperability
 * framework.
 * @author Erik Torres <ertorser@upv.es>
 */
@ReadingConverter
public static enum DBObjectToProvDocumentConverter implements Converter<DBObject, Document> {
  INSTANCE;

  @Override
  public Document convert(final DBObject source) {
    // Decode with an explicit UTF-8 charset: the writing converter serializes
    // using UTF-8, so relying on String.getBytes() (platform default charset)
    // here could corrupt non-ASCII content on non-UTF-8 platforms.
    final byte[] json = unescapeMongo(source).getBytes(UTF_8);
    return new InteropFramework().readDocument(new ByteArrayInputStream(json), ProvFormat.JSON, null);
  }
}
} | {
"content_hash": "5c4146f2b4f62d9a3cfa825a50aa9526",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 106,
"avg_line_length": 34.707692307692305,
"alnum_prop": 0.785904255319149,
"repo_name": "eubrazilcc/leishvl",
"id": "27a85df7ae4efad246f6331fe7ccdf6ff36222cb",
"size": "3182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leishvl-core/src/main/java/io/leishvl/core/data/mongodb/ProvConverters.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5006"
},
{
"name": "Java",
"bytes": "2858931"
},
{
"name": "Shell",
"bytes": "7058"
}
],
"symlink_target": ""
} |
"""Open-source TensorFlow Inception v3 Example."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
from absl import app
from absl import flags
import absl.logging as _logging # pylint: disable=unused-import
import tensorflow.compat.v1 as tf
from tensorflow.compat.v1 import estimator as tf_estimator
import inception_preprocessing
import vgg_preprocessing
from tensorflow.contrib import cluster_resolver as contrib_cluster_resolver
from tensorflow.contrib import data as contrib_data
from tensorflow.contrib import framework as contrib_framework
from tensorflow.contrib import summary
from tensorflow.contrib import tpu as contrib_tpu
from tensorflow.contrib.framework.python.ops import arg_scope
from tensorflow.contrib.slim.nets import inception
from tensorflow.contrib.training.python.training import evaluation
# Cloud TPU Cluster Resolvers
flags.DEFINE_string(
    'tpu', default=None,
    help='The Cloud TPU to use for training. This should be either the name '
    'used when creating the Cloud TPU, or a grpc://ip.address.of.tpu:8470 url.')
flags.DEFINE_string(
    'gcp_project', default=None,
    help='Project name for the Cloud TPU-enabled project. If not specified, we '
    'will attempt to automatically detect the GCE project from metadata.')
flags.DEFINE_string(
    'tpu_zone', default=None,
    help='GCE zone where the Cloud TPU is located in. If not specified, we '
    'will attempt to automatically detect the GCE project from metadata.')
# Model specific parameters
flags.DEFINE_string(
    'data_dir', '',
    'Directory where input data is stored')
flags.DEFINE_string(
    'model_dir', None,
    'Directory where model output is stored')
flags.DEFINE_string(
    'export_dir',
    default=None,
    help=('The directory where the exported SavedModel will be stored.'))
flags.DEFINE_integer(
    'num_shards', 8,
    'Number of shards (workers).')
flags.DEFINE_integer(
    'iterations', 100,
    'Number of iterations per TPU training loop.')
flags.DEFINE_bool(
    'skip_host_call', default=True,
    help=('Skip the host call which is executed every training step. This is'
          ' generally used for generating training summaries (train loss,'
          ' learning rate, etc...). When --skip_host_call=false, there could'
          ' be a performance drop if host_call function is slow and cannot'
          ' keep up with the computation running on the TPU.'))
flags.DEFINE_integer(
    'train_batch_size', 1024,
    'Global (not per-shard) batch size for training')
flags.DEFINE_integer(
    'eval_total_size', 0,
    'Total batch size for evaluation, use the entire validation set if 0')
flags.DEFINE_integer(
    'eval_batch_size', 1024,
    'Global (not per-shard) batch size for evaluation')
flags.DEFINE_integer(
    'train_steps', 213000,
    'Number of steps use for training.')
flags.DEFINE_integer(
    'train_steps_per_eval', 2000,
    'Number of training steps to run between evaluations.')
flags.DEFINE_string(
    'mode', 'train_and_eval',
    'Mode to run: train, eval, train_and_eval')
flags.DEFINE_integer(
    'min_eval_interval', 180,
    'Minimum number of seconds between evaluations')
flags.DEFINE_integer(
    'eval_timeout', None,
    'Evaluation timeout: Maximum number of seconds that '
    'may elapse while no new checkpoints are observed')
flags.DEFINE_bool(
    'use_tpu', True,
    'Use TPUs rather than plain CPUs')
flags.DEFINE_string(
    'use_data', 'real',
    'One of "fake","real"')
flags.DEFINE_float(
    'learning_rate', 0.165,
    'Learning rate.')
flags.DEFINE_string(
    'optimizer', 'RMS',
    'Optimizer (one of sgd, RMS, momentum)')
flags.DEFINE_integer(
    'num_classes', 1001,
    'Number of classes to distinguish')
flags.DEFINE_integer(
    'width', 299,
    'Width of input image')
flags.DEFINE_integer(
    'height', 299,
    'Height of input image')
flags.DEFINE_bool(
    'transpose_enabled', False,
    'Boolean to enable/disable explicit I/O transpose')
flags.DEFINE_bool(
    'log_device_placement', False,
    'Boolean to enable/disable log device placement')
flags.DEFINE_integer(
    'save_summary_steps', 100,
    'Number of steps which must have run before showing summaries.')
flags.DEFINE_integer(
    'save_checkpoints_secs', 1000,
    'Interval (in seconds) at which the model data '
    'should be checkpointed. Set to 0 to disable.')
flags.DEFINE_bool(
    'moving_average', True,
    'Whether to enable moving average computation on variables')
flags.DEFINE_string(
    'preprocessing', 'inception',
    'Preprocessing stage to use: one of inception or vgg')
flags.DEFINE_bool(
    'use_annotated_bbox', False,
    'If true, use annotated bounding box as input to cropping function, '
    'else use full image size')
flags.DEFINE_float(
    'learning_rate_decay', 0.94,
    'Exponential decay rate used in learning rate adjustment')
flags.DEFINE_integer(
    'learning_rate_decay_epochs', 3,
    'Exponential decay epochs used in learning rate adjustment')
flags.DEFINE_bool(
    'display_tensors', False,
    'Whether to dump prediction tensors for comparison')
flags.DEFINE_bool(
    'clear_update_collections', True,
    'Set batchnorm update_collections to None if true, else use default value')
flags.DEFINE_integer(
    'cold_epochs', 2,
    'Number of epochs using cold learning rate')
flags.DEFINE_integer(
    'warmup_epochs', 7,
    'Number of epochs using linearly increasing learning rate')
flags.DEFINE_bool(
    'use_learning_rate_warmup', False,
    'Apply learning rate warmup if true')
# Dataset specific paramenters
flags.DEFINE_bool(
    'prefetch_enabled', True,
    'Boolean to enable/disable prefetching')
flags.DEFINE_integer(
    'prefetch_dataset_buffer_size', 8*1024*1024,
    'Number of bytes in read buffer. 0 means no buffering.')
flags.DEFINE_integer(
    'num_files_infeed', 8,
    'Number of training files to read in parallel.')
flags.DEFINE_integer(
    'num_parallel_calls', 64,
    'Number of elements to process in parallel (by mapper)')
flags.DEFINE_integer(
    'initial_shuffle_buffer_size', 1024,
    'Number of elements from dataset that shuffler will sample from. '
    'This shuffling is done before any other operations. '
    'Set to 0 to disable')
flags.DEFINE_integer(
    'followup_shuffle_buffer_size', 1000,
    'Number of elements from dataset that shuffler will sample from. '
    'This shuffling is done after prefetching is done. '
    'Set to 0 to disable')
flags.DEFINE_string(
    'precision', 'float32',
    help=('Precision to use; one of: {bfloat16, float32}'))

FLAGS = flags.FLAGS

# Dataset constants (ImageNet ILSVRC-2012 train/validation split sizes).
_NUM_TRAIN_IMAGES = 1281167
_NUM_EVAL_IMAGES = 50000

# Random cropping constants (shorter-side resize bounds for VGG preprocessing).
_RESIZE_SIDE_MIN = 300
_RESIZE_SIDE_MAX = 600

# Constants dictating the learning rate schedule.
RMSPROP_DECAY = 0.9                # Decay term for RMSProp.
RMSPROP_MOMENTUM = 0.9             # Momentum in RMSProp.
RMSPROP_EPSILON = 1.0              # Epsilon term for RMSProp.

# Constants dictating moving average.
MOVING_AVERAGE_DECAY = 0.995

# Batchnorm moving mean/variance parameters
BATCH_NORM_DECAY = 0.996
BATCH_NORM_EPSILON = 1e-3

# L2 regularization coefficient applied to non-BatchNorm weights.
WEIGHT_DECAY = 0.00004
def preprocess_raw_bytes(image_bytes, is_training=False, bbox=None):
  """Preprocesses a raw JPEG image.

  This implementation is shared in common between train/eval pipelines,
  and when serving the model.

  Args:
    image_bytes: A string Tensor, containing the encoded JPEG.
    is_training: Whether or not to preprocess for training.
    bbox: In inception preprocessing, this bbox can be used for cropping.

  Returns:
    A 3-Tensor [height, width, RGB channels] of type float32.

  Raises:
    ValueError: If FLAGS.preprocessing is neither 'vgg' nor 'inception'.
  """
  image = tf.image.decode_jpeg(image_bytes, channels=3)
  # Convert pixel values to floats in [0, 1) before running preprocessing.
  image = tf.image.convert_image_dtype(image, dtype=tf.float32)
  if FLAGS.preprocessing == 'vgg':
    image = vgg_preprocessing.preprocess_image(
        image=image,
        output_height=FLAGS.height,
        output_width=FLAGS.width,
        is_training=is_training,
        resize_side_min=_RESIZE_SIDE_MIN,
        resize_side_max=_RESIZE_SIDE_MAX)
  elif FLAGS.preprocessing == 'inception':
    image = inception_preprocessing.preprocess_image(
        image=image,
        output_height=FLAGS.height,
        output_width=FLAGS.width,
        is_training=is_training,
        bbox=bbox)
  else:
    # Raise instead of `assert False`: assertions are stripped under `python -O`,
    # which would make this function silently return the un-preprocessed image.
    raise ValueError('Unknown preprocessing type: %s' % FLAGS.preprocessing)
  return image
class InputPipeline(object):
  """Generates ImageNet input_fn for training or evaluation.

  The training data is assumed to be in TFRecord format with keys as specified
  in the dataset_parser below, sharded across 1024 files, named sequentially:
      train-00000-of-01024
      train-00001-of-01024
      ...
      train-01023-of-01024

  The validation data is in the same format but sharded in 128 files.

  The format of the data required is created by the script at:
      https://github.com/tensorflow/tpu/blob/master/tools/datasets/imagenet_to_gcs.py

  Args:
    is_training: `bool` for whether the input is for training
  """

  def __init__(self, is_training, data_dir, use_bfloat16):
    # is_training selects the train-* vs validation-* shards and enables
    # shuffling plus training-mode image preprocessing.
    self.is_training = is_training
    self.data_dir = data_dir
    # When True, parsed images are cast to bfloat16 for TPU efficiency.
    self.use_bfloat16 = use_bfloat16

  def dataset_parser(self, serialized_proto):
    """Parse an Imagenet record from value."""
    keys_to_features = {
        'image/encoded':
            tf.FixedLenFeature((), tf.string, default_value=''),
        'image/format':
            tf.FixedLenFeature((), tf.string, default_value='jpeg'),
        'image/class/label':
            tf.FixedLenFeature([], dtype=tf.int64, default_value=-1),
        'image/class/text':
            tf.FixedLenFeature([], dtype=tf.string, default_value=''),
        'image/object/bbox/xmin':
            tf.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/ymin':
            tf.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/xmax':
            tf.VarLenFeature(dtype=tf.float32),
        'image/object/bbox/ymax':
            tf.VarLenFeature(dtype=tf.float32),
        'image/object/class/label':
            tf.VarLenFeature(dtype=tf.int64),
    }
    features = tf.parse_single_example(serialized_proto, keys_to_features)
    bbox = None
    if FLAGS.use_annotated_bbox:
      xmin = tf.expand_dims(features['image/object/bbox/xmin'].values, 0)
      ymin = tf.expand_dims(features['image/object/bbox/ymin'].values, 0)
      xmax = tf.expand_dims(features['image/object/bbox/xmax'].values, 0)
      ymax = tf.expand_dims(features['image/object/bbox/ymax'].values, 0)
      # Note that we impose an ordering of (y, x) just to make life difficult.
      bbox = tf.concat([ymin, xmin, ymax, xmax], 0)
      # Force the variable number of bounding boxes into the shape
      # [1, num_boxes, coords].
      bbox = tf.expand_dims(bbox, 0)
      bbox = tf.transpose(bbox, [0, 2, 1])
    image = features['image/encoded']
    image = preprocess_raw_bytes(image, is_training=self.is_training, bbox=bbox)
    label = tf.cast(
        tf.reshape(features['image/class/label'], shape=[]), dtype=tf.int32)
    if self.use_bfloat16:
      image = tf.cast(image, tf.bfloat16)
    return image, label

  def input_fn(self, params):
    """Input function which provides a single batch for train or eval.

    Args:
      params: `dict` of parameters passed from the `TPUEstimator`.
        `params['batch_size']` is always provided and should be used as the
        effective batch size.

    Returns:
      A `tf.data.Dataset` object.
    """
    # Retrieves the batch size for the current shard. The # of shards is
    # computed according to the input pipeline deployment. See
    # `tf.contrib.tpu.RunConfig` for details.
    batch_size = params['batch_size']
    if FLAGS.use_data == 'real':
      assert self.data_dir, 'data_dir is required'
      shuffle = self.is_training
      file_pattern = os.path.join(
          self.data_dir, 'train-*' if self.is_training else 'validation-*')
      dataset = tf.data.Dataset.list_files(file_pattern, shuffle=shuffle)
      if self.is_training:
        dataset = dataset.repeat()
      def prefetch_dataset(filename):
        dataset = tf.data.TFRecordDataset(
            filename, buffer_size=FLAGS.prefetch_dataset_buffer_size)
        return dataset
      # Read several shard files concurrently; sloppy=True allows records to
      # be produced in non-deterministic order for throughput.
      dataset = dataset.apply(
          contrib_data.parallel_interleave(
              prefetch_dataset,
              cycle_length=FLAGS.num_files_infeed,
              sloppy=True))
      if shuffle and FLAGS.followup_shuffle_buffer_size > 0:
        dataset = dataset.shuffle(
            buffer_size=FLAGS.followup_shuffle_buffer_size)
      dataset = dataset.map(
          self.dataset_parser, num_parallel_calls=FLAGS.num_parallel_calls)
    else:
      # Fake-data path: a single random image/label repeated forever.
      random_image = tf.random.uniform(
          [FLAGS.height, FLAGS.width, 3],
          minval=-1,
          maxval=1,
          dtype=tf.bfloat16 if self.use_bfloat16 else tf.float32)
      random_label = tf.random.uniform([], minval=0, maxval=999, dtype=tf.int32)
      dataset = tf.data.Dataset.range(1).repeat().map(
          lambda data: (random_image, random_label))
    dataset = dataset.prefetch(batch_size)
    # drop_remainder=True: TPUs require fixed batch shapes.
    dataset = dataset.batch(batch_size, drop_remainder=True)
    dataset = dataset.prefetch(2)  # Prefetch overlaps in-feed with training
    if FLAGS.transpose_enabled:
      def transpose_images(images):
        return tf.transpose(images, params['output_perm'])
      dataset = dataset.map(
          lambda images, labels: (transpose_images(images), labels),
          num_parallel_calls=FLAGS.num_parallel_calls)
    return dataset
def image_serving_input_fn():
  """Serving input fn for raw images.

  Builds the receiver used when exporting a SavedModel: a batch of encoded
  JPEG strings is accepted under the key 'image_bytes' and each element is
  decoded/preprocessed with `preprocess_raw_bytes`.

  Returns:
    A ServingInputReceiver capable of serving MobileNet predictions.
  """
  raw_bytes = tf.placeholder(
      shape=[None],
      dtype=tf.string,
  )
  decoded_images = tf.map_fn(
      preprocess_raw_bytes, raw_bytes, back_prop=False, dtype=tf.float32)
  receiver_tensors = {'image_bytes': raw_bytes}
  return tf_estimator.export.ServingInputReceiver(
      decoded_images, receiver_tensors)
def tensor_transform_fn(data, perm):
  """Conditionally transposes a tensor.

  Used to transpose an image tensor on the host and then perform an inverse
  transpose on the TPU; the TPU-side transpose is effectively elided by the
  compiler, voiding any associated computational cost.

  NOTE: Eventually the compiler will be able to detect when this kind of
  operation may prove beneficial and perform these types of transformations
  implicitly, voiding the need for user intervention.

  Args:
    data: Tensor to be transposed.
    perm: New ordering of dimensions.

  Returns:
    The transposed tensor when --transpose_enabled is set, otherwise `data`
    unchanged.
  """
  if not FLAGS.transpose_enabled:
    return data
  return tf.transpose(data, perm)
def inception_model_fn(features, labels, mode, params):
  """Inception v3 model using Estimator API.

  Args:
    features: Input image batch (or a dict holding it under key 'feature'),
      possibly pre-transposed when --transpose_enabled is set.
    labels: Integer class label batch.
    mode: One of the `tf.estimator.ModeKeys` values (TRAIN, EVAL, PREDICT).
    params: `dict` passed by `TPUEstimator`; must contain 'input_perm'.

  Returns:
    A `TPUEstimatorSpec` for TRAIN/EVAL, or an `EstimatorSpec` for PREDICT.
  """
  num_classes = FLAGS.num_classes
  is_training = (mode == tf_estimator.ModeKeys.TRAIN)
  is_eval = (mode == tf_estimator.ModeKeys.EVAL)
  if isinstance(features, dict):
    features = features['feature']
  # Undo the host-side transpose (no-op unless --transpose_enabled).
  features = tensor_transform_fn(features, params['input_perm'])
  # This nested function allows us to avoid duplicating the logic which
  # builds the network, for different values of --precision.
  def build_network():
    if FLAGS.precision == 'bfloat16':
      with contrib_tpu.bfloat16_scope():
        logits, end_points = inception.inception_v3(
            features,
            num_classes,
            is_training=is_training)
      # Cast back to float32 so the loss/metrics run in full precision.
      logits = tf.cast(logits, tf.float32)
    elif FLAGS.precision == 'float32':
      logits, end_points = inception.inception_v3(
          features,
          num_classes,
          is_training=is_training)
    return logits, end_points
  if FLAGS.clear_update_collections:
    # updates_collections must be set to None in order to use fused batchnorm
    with arg_scope(inception.inception_v3_arg_scope(
        weight_decay=0.0,
        batch_norm_decay=BATCH_NORM_DECAY,
        batch_norm_epsilon=BATCH_NORM_EPSILON,
        updates_collections=None)):
      logits, end_points = build_network()
  else:
    with arg_scope(inception.inception_v3_arg_scope(
        batch_norm_decay=BATCH_NORM_DECAY,
        batch_norm_epsilon=BATCH_NORM_EPSILON)):
      logits, end_points = build_network()
  predictions = {
      'classes': tf.argmax(input=logits, axis=1),
      'probabilities': tf.nn.softmax(logits, name='softmax_tensor')
  }
  if mode == tf_estimator.ModeKeys.PREDICT:
    return tf_estimator.EstimatorSpec(
        mode=mode,
        predictions=predictions,
        export_outputs={
            'classify': tf_estimator.export.PredictOutput(predictions)
        })
  # Optionally print predictions/labels during CPU eval for debugging.
  if mode == tf_estimator.ModeKeys.EVAL and FLAGS.display_tensors and (
      not FLAGS.use_tpu):
    with tf.control_dependencies([
        tf.Print(
            predictions['classes'], [predictions['classes']],
            summarize=FLAGS.eval_batch_size,
            message='prediction: ')
    ]):
      labels = tf.Print(
          labels, [labels], summarize=FLAGS.eval_batch_size, message='label: ')
  one_hot_labels = tf.one_hot(labels, FLAGS.num_classes, dtype=tf.int32)
  # Auxiliary classifier loss (weight 0.4), when the network produced one.
  if 'AuxLogits' in end_points:
    tf.losses.softmax_cross_entropy(
        onehot_labels=one_hot_labels,
        logits=tf.cast(end_points['AuxLogits'], tf.float32),
        weights=0.4,
        label_smoothing=0.1,
        scope='aux_loss')
  tf.losses.softmax_cross_entropy(
      onehot_labels=one_hot_labels,
      logits=logits,
      weights=1.0,
      label_smoothing=0.1)
  losses = tf.add_n(tf.losses.get_losses())
  # Manual L2 regularization over conv/FC weights, excluding BatchNorm params.
  l2_loss = []
  for v in tf.trainable_variables():
    if 'BatchNorm' not in v.name and 'weights' in v.name:
      l2_loss.append(tf.nn.l2_loss(v))
  loss = losses + WEIGHT_DECAY * tf.add_n(l2_loss)
  # Linear-scaling rule: scale base LR by (global batch size / 256).
  initial_learning_rate = FLAGS.learning_rate * FLAGS.train_batch_size / 256
  if FLAGS.use_learning_rate_warmup:
    # Adjust initial learning rate to match final warmup rate
    warmup_decay = FLAGS.learning_rate_decay**(
        (FLAGS.warmup_epochs + FLAGS.cold_epochs) /
        FLAGS.learning_rate_decay_epochs)
    adj_initial_learning_rate = initial_learning_rate * warmup_decay
  # Floor for the decayed learning rate.
  final_learning_rate = 0.0001 * initial_learning_rate
  host_call = None
  train_op = None
  if is_training:
    batches_per_epoch = _NUM_TRAIN_IMAGES / FLAGS.train_batch_size
    global_step = tf.train.get_or_create_global_step()
    current_epoch = tf.cast(
        (tf.cast(global_step, tf.float32) / batches_per_epoch), tf.int32)
    learning_rate = tf.train.exponential_decay(
        learning_rate=initial_learning_rate,
        global_step=global_step,
        decay_steps=int(FLAGS.learning_rate_decay_epochs * batches_per_epoch),
        decay_rate=FLAGS.learning_rate_decay,
        staircase=True)
    if FLAGS.use_learning_rate_warmup:
      # Schedule: constant "cold" LR, then linear ramp-up, then the
      # exponential-decay schedule computed above.
      wlr = 0.1 * adj_initial_learning_rate
      wlr_height = tf.cast(
          0.9 * adj_initial_learning_rate /
          (FLAGS.warmup_epochs + FLAGS.learning_rate_decay_epochs - 1),
          tf.float32)
      epoch_offset = tf.cast(FLAGS.cold_epochs - 1, tf.int32)
      exp_decay_start = (FLAGS.warmup_epochs + FLAGS.cold_epochs +
                         FLAGS.learning_rate_decay_epochs)
      lin_inc_lr = tf.add(
          wlr, tf.multiply(
              tf.cast(tf.subtract(current_epoch, epoch_offset), tf.float32),
              wlr_height))
      learning_rate = tf.where(
          tf.greater_equal(current_epoch, FLAGS.cold_epochs),
          (tf.where(tf.greater_equal(current_epoch, exp_decay_start),
                    learning_rate, lin_inc_lr)),
          wlr)
    # Set a minimum boundary for the learning rate.
    learning_rate = tf.maximum(
        learning_rate, final_learning_rate, name='learning_rate')
    if FLAGS.optimizer == 'sgd':
      tf.logging.info('Using SGD optimizer')
      optimizer = tf.train.GradientDescentOptimizer(
          learning_rate=learning_rate)
    elif FLAGS.optimizer == 'momentum':
      tf.logging.info('Using Momentum optimizer')
      optimizer = tf.train.MomentumOptimizer(
          learning_rate=learning_rate, momentum=0.9)
    elif FLAGS.optimizer == 'RMS':
      tf.logging.info('Using RMS optimizer')
      optimizer = tf.train.RMSPropOptimizer(
          learning_rate,
          RMSPROP_DECAY,
          momentum=RMSPROP_MOMENTUM,
          epsilon=RMSPROP_EPSILON)
    else:
      tf.logging.fatal('Unknown optimizer:', FLAGS.optimizer)
    if FLAGS.use_tpu:
      # Averages gradients across TPU shards before applying them.
      optimizer = contrib_tpu.CrossShardOptimizer(optimizer)
    # Ensure BatchNorm statistics update ops run with each training step.
    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(update_ops):
      train_op = optimizer.minimize(loss, global_step=global_step)
    if FLAGS.moving_average:
      ema = tf.train.ExponentialMovingAverage(
          decay=MOVING_AVERAGE_DECAY, num_updates=global_step)
      variables_to_average = (
          tf.trainable_variables() + tf.moving_average_variables())
      with tf.control_dependencies([train_op]), tf.name_scope('moving_average'):
        train_op = ema.apply(variables_to_average)
    # To log the loss, current learning rate, and epoch for Tensorboard, the
    # summary op needs to be run on the host CPU via host_call. host_call
    # expects [batch_size, ...] Tensors, thus reshape to introduce a batch
    # dimension. These Tensors are implicitly concatenated to
    # [params['batch_size']].
    gs_t = tf.reshape(global_step, [1])
    loss_t = tf.reshape(loss, [1])
    lr_t = tf.reshape(learning_rate, [1])
    ce_t = tf.reshape(current_epoch, [1])
    if not FLAGS.skip_host_call:
      def host_call_fn(gs, loss, lr, ce):
        """Training host call. Creates scalar summaries for training metrics.

        This function is executed on the CPU and should not directly reference
        any Tensors in the rest of the `model_fn`. To pass Tensors from the
        model to the `metric_fn`, provide them as part of the `host_call`. See
        https://www.tensorflow.org/api_docs/python/tf/contrib/tpu/TPUEstimatorSpec
        for more information.

        Arguments should match the list of `Tensor` objects passed as the second
        element in the tuple passed to `host_call`.

        Args:
          gs: `Tensor with shape `[batch]` for the global_step
          loss: `Tensor` with shape `[batch]` for the training loss.
          lr: `Tensor` with shape `[batch]` for the learning_rate.
          ce: `Tensor` with shape `[batch]` for the current_epoch.

        Returns:
          List of summary ops to run on the CPU host.
        """
        gs = gs[0]
        with summary.create_file_writer(FLAGS.model_dir).as_default():
          with summary.always_record_summaries():
            summary.scalar('loss', tf.reduce_mean(loss), step=gs)
            summary.scalar('learning_rate', tf.reduce_mean(lr), step=gs)
            summary.scalar('current_epoch', tf.reduce_mean(ce), step=gs)
            return summary.all_summary_ops()
      host_call = (host_call_fn, [gs_t, loss_t, lr_t, ce_t])
  eval_metrics = None
  if is_eval:
    def metric_fn(labels, logits):
      """Evaluation metric function. Evaluates accuracy.

      This function is executed on the CPU and should not directly reference
      any Tensors in the rest of the `model_fn`. To pass Tensors from the model
      to the `metric_fn`, provide as part of the `eval_metrics`. See
      https://www.tensorflow.org/api_docs/python/tf/contrib/tpu/TPUEstimatorSpec
      for more information.

      Arguments should match the list of `Tensor` objects passed as the second
      element in the tuple passed to `eval_metrics`.

      Args:
        labels: `Tensor` with shape `[batch, ]`.
        logits: `Tensor` with shape `[batch, num_classes]`.

      Returns:
        A dict of the metrics to return from evaluation.
      """
      predictions = tf.argmax(logits, axis=1)
      top_1_accuracy = tf.metrics.accuracy(labels, predictions)
      in_top_5 = tf.cast(tf.nn.in_top_k(logits, labels, 5), tf.float32)
      top_5_accuracy = tf.metrics.mean(in_top_5)
      return {
          'accuracy': top_1_accuracy,
          'accuracy@5': top_5_accuracy,
      }
    eval_metrics = (metric_fn, [labels, logits])
  return contrib_tpu.TPUEstimatorSpec(
      mode=mode,
      loss=loss,
      train_op=train_op,
      host_call=host_call,
      eval_metrics=eval_metrics)
class LoadEMAHook(tf.train.SessionRunHook):
  """Hook to load exponential moving averages into corresponding variables."""

  def __init__(self, model_dir):
    # model_dir: directory holding the checkpoints whose EMA shadow values
    # will be restored into the model variables at session creation.
    super(LoadEMAHook, self).__init__()
    self._model_dir = model_dir

  def begin(self):
    """Builds the assign function mapping EMA shadow variables to variables."""
    ema = tf.train.ExponentialMovingAverage(MOVING_AVERAGE_DECAY)
    variables_to_restore = ema.variables_to_restore()
    self._load_ema = contrib_framework.assign_from_checkpoint_fn(
        tf.train.latest_checkpoint(self._model_dir), variables_to_restore)

  def after_create_session(self, sess, coord):
    """Runs the EMA restore once the session exists."""
    tf.logging.info('Reloading EMA...')
    self._load_ema(sess)
def main(unused_argv):
  """Builds the TPUEstimator and runs train / eval / train_and_eval / export."""
  del unused_argv  # Unused
  tpu_cluster_resolver = contrib_cluster_resolver.TPUClusterResolver(
      FLAGS.tpu, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project)
  assert FLAGS.precision == 'bfloat16' or FLAGS.precision == 'float32', (
      'Invalid value for --precision flag; must be bfloat16 or float32.')
  tf.logging.info('Precision: %s', FLAGS.precision)
  # Identity permutations by default; replaced below when the host-side
  # transpose optimization is enabled.
  params = {
      'input_perm': [0, 1, 2, 3],
      'output_perm': [0, 1, 2, 3],
  }
  batch_axis = 0
  if FLAGS.transpose_enabled:
    params['input_perm'] = [3, 0, 1, 2]
    params['output_perm'] = [1, 2, 3, 0]
    batch_axis = 3
  if FLAGS.eval_total_size > 0:
    eval_size = FLAGS.eval_total_size
  else:
    eval_size = _NUM_EVAL_IMAGES
  eval_steps = eval_size // FLAGS.eval_batch_size
  iterations = (eval_steps if FLAGS.mode == 'eval' else
                FLAGS.iterations)
  eval_batch_size = (None if FLAGS.mode == 'train' else
                     FLAGS.eval_batch_size)
  tpu_config = contrib_tpu.TPUConfig(
      iterations_per_loop=iterations, num_shards=FLAGS.num_shards)
  run_config = contrib_tpu.RunConfig(
      cluster=tpu_cluster_resolver,
      model_dir=FLAGS.model_dir,
      save_checkpoints_secs=FLAGS.save_checkpoints_secs,
      save_summary_steps=FLAGS.save_summary_steps,
      session_config=tf.ConfigProto(
          allow_soft_placement=True,
          log_device_placement=FLAGS.log_device_placement),
      tpu_config=tpu_config)
  inception_classifier = contrib_tpu.TPUEstimator(
      model_fn=inception_model_fn,
      use_tpu=FLAGS.use_tpu,
      config=run_config,
      params=params,
      train_batch_size=FLAGS.train_batch_size,
      eval_batch_size=eval_batch_size,
      batch_axis=(batch_axis, 0))
  # Input pipelines are slightly different (with regards to shuffling and
  # preprocessing) between training and evaluation.
  use_bfloat16 = FLAGS.precision == 'bfloat16'
  imagenet_train = InputPipeline(
      is_training=True,
      data_dir=FLAGS.data_dir,
      use_bfloat16=use_bfloat16)
  imagenet_eval = InputPipeline(
      is_training=False,
      data_dir=FLAGS.data_dir,
      use_bfloat16=use_bfloat16)
  # When moving averages are trained, evaluate against the EMA weights.
  if FLAGS.moving_average:
    eval_hooks = [LoadEMAHook(FLAGS.model_dir)]
  else:
    eval_hooks = []
  if FLAGS.mode == 'eval':
    # Run evaluation when there is a new checkpoint
    for checkpoint in evaluation.checkpoints_iterator(
        FLAGS.model_dir, timeout=FLAGS.eval_timeout):
      tf.logging.info('Starting to evaluate.')
      try:
        start_timestamp = time.time()  # Includes compilation time
        eval_results = inception_classifier.evaluate(
            input_fn=imagenet_eval.input_fn,
            steps=eval_steps,
            hooks=eval_hooks,
            checkpoint_path=checkpoint)
        elapsed_time = int(time.time() - start_timestamp)
        tf.logging.info(
            'Eval results: %s. Elapsed seconds: %d', eval_results, elapsed_time)
        # Terminate eval job when final checkpoint is reached
        current_step = int(os.path.basename(checkpoint).split('-')[1])
        if current_step >= FLAGS.train_steps:
          tf.logging.info(
              'Evaluation finished after training step %d', current_step)
          break
      except tf.errors.NotFoundError:
        # Since the coordinator is on a different job than the TPU worker,
        # sometimes the TPU worker does not finish initializing until long after
        # the CPU job tells it to start evaluating. In this case, the checkpoint
        # file could have been deleted already.
        tf.logging.info(
            'Checkpoint %s no longer exists, skipping checkpoint', checkpoint)
  elif FLAGS.mode == 'train_and_eval':
    # Alternate fixed-length training cycles with full evaluations.
    for cycle in range(FLAGS.train_steps // FLAGS.train_steps_per_eval):
      tf.logging.info('Starting training cycle %d.' % cycle)
      inception_classifier.train(
          input_fn=imagenet_train.input_fn, steps=FLAGS.train_steps_per_eval)
      tf.logging.info('Starting evaluation cycle %d .' % cycle)
      eval_results = inception_classifier.evaluate(
          input_fn=imagenet_eval.input_fn, steps=eval_steps, hooks=eval_hooks)
      tf.logging.info('Evaluation results: %s' % eval_results)
  else:
    tf.logging.info('Starting training ...')
    inception_classifier.train(
        input_fn=imagenet_train.input_fn, max_steps=FLAGS.train_steps)
  if FLAGS.export_dir is not None:
    tf.logging.info('Starting to export model.')
    inception_classifier.export_saved_model(
        export_dir_base=FLAGS.export_dir,
        serving_input_receiver_fn=image_serving_input_fn)
if __name__ == '__main__':
  # Surface INFO-level logs so training/eval progress is visible.
  tf.logging.set_verbosity(tf.logging.INFO)
  app.run(main)
| {
"content_hash": "998f3b952a30afbf28e4dae20ed4e094",
"timestamp": "",
"source": "github",
"line_count": 869,
"max_line_length": 85,
"avg_line_length": 34.09781357882623,
"alnum_prop": 0.6688603152104216,
"repo_name": "tensorflow/tpu",
"id": "d19a9de3c6f163a92401d47361401781e0895dc4",
"size": "30321",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "models/experimental/inception/inception_v3.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "754301"
},
{
"name": "Dockerfile",
"bytes": "2734"
},
{
"name": "Go",
"bytes": "226317"
},
{
"name": "Jupyter Notebook",
"bytes": "56231509"
},
{
"name": "Makefile",
"bytes": "2369"
},
{
"name": "Python",
"bytes": "3444271"
},
{
"name": "Shell",
"bytes": "21032"
},
{
"name": "Starlark",
"bytes": "164"
}
],
"symlink_target": ""
} |
package nl.basjes.parse.useragent.config;
import nl.basjes.parse.useragent.analyze.InvalidParserConfigurationException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import static nl.basjes.parse.useragent.analyze.UserAgentStringMatchMaker.DEFAULT_USER_AGENT_MAX_LENGTH;
public final class AnalyzerConfig implements Serializable {
    // If we want ALL fields this is null. If we only want specific fields this is a list of names.
    private Set<String> wantedFieldNames = null; // NOSONAR: Only accessed via Builder.
    // file+line number --> Config at that location
    private final Map<String, MatcherConfig> matcherConfigs = new LinkedHashMap<>();
    // Lookup name --> keys+values for the lookup
    private final Map<String, Map<String, String>> lookups = new LinkedHashMap<>(128);
    // Lookup SET name --> values of this set
    private final Map<String, Set<String>> lookupSets = new LinkedHashMap<>(128);
    // -1 acts as "not configured"; setUserAgentMaxLength() normalizes negative
    // values to DEFAULT_USER_AGENT_MAX_LENGTH.
    private int userAgentMaxLength = -1;
    // All test cases loaded from the configuration files.
    private final List<TestCase> testCases = new ArrayList<>(8192);
    private AnalyzerConfig() {
        // Instances are only created through the AnalyzerConfigBuilder.
    }

    /** @return A fresh builder for assembling an {@link AnalyzerConfig}. */
    public static AnalyzerConfigBuilder newBuilder() {
        return new AnalyzerConfigBuilder();
    }
public void merge(AnalyzerConfig additionalConfig) {
testCases .addAll(additionalConfig.testCases);
lookups .putAll(additionalConfig.lookups);
lookupSets .putAll(additionalConfig.lookupSets);
matcherConfigs .putAll(additionalConfig.matcherConfigs);
if (additionalConfig.userAgentMaxLength >= 0) {
userAgentMaxLength = Math.min(userAgentMaxLength, additionalConfig.userAgentMaxLength);
}
if (additionalConfig.wantedFieldNames == null) {
wantedFieldNames = null;
} else {
if (wantedFieldNames == null) {
wantedFieldNames = new TreeSet<>(additionalConfig.wantedFieldNames);
} else {
wantedFieldNames.addAll(additionalConfig.wantedFieldNames);
}
}
}
    /** @return The wanted field names, or {@code null} when ALL fields are wanted. */
    public Set<String> getWantedFieldNames() {
        return wantedFieldNames;
    }

    /** @return All matcher configs, keyed by file+line number of their definition. */
    public Map<String, MatcherConfig> getMatcherConfigs() {
        return matcherConfigs;
    }

    /** @return All lookups: lookup name to its key/value pairs. */
    public Map<String, Map<String, String>> getLookups() {
        return lookups;
    }

    /** @return All lookup sets: set name to its values. */
    public Map<String, Set<String>> getLookupSets() {
        return lookupSets;
    }

    /** @return All loaded test cases. */
    public List<TestCase> getTestCases() {
        return testCases;
    }

    /** @return The configured max useragent length; -1 when not configured yet. */
    public int getUserAgentMaxLength() {
        return userAgentMaxLength;
    }
public AnalyzerConfig setUserAgentMaxLength(int newUserAgentMaxLength) {
if (newUserAgentMaxLength < 0) {
this.userAgentMaxLength = DEFAULT_USER_AGENT_MAX_LENGTH;
} else {
this.userAgentMaxLength = newUserAgentMaxLength;
}
return this;
}
public AnalyzerConfig wantedFieldNames(Set<String> newWantedFieldNames) {
if (newWantedFieldNames == null || newWantedFieldNames.isEmpty()) {
this.wantedFieldNames = null;
} else {
this.wantedFieldNames = new TreeSet<>(newWantedFieldNames);
}
return this;
}
public static class AnalyzerConfigBuilder {
        // Lookup name --> names of lookups to merge in it
        private final Map<String, Set<String>> lookupMerge = new LinkedHashMap<>(128); // The names of the lookups that need to be merged
        // Lookup SET name --> names of lookup SETs to merge in it
        private final Map<String, Set<String>> lookupSetMerge = new LinkedHashMap<>(128); // The names of the sets that need to be merged
        // The config under construction; returned (after merge resolution) by build().
        private final AnalyzerConfig analyzerConfig;

        public AnalyzerConfigBuilder() {
            this.analyzerConfig = new AnalyzerConfig();
        }
public void addMatcherConfigs(String filename, MatcherConfig matcherConfig) {
analyzerConfig.matcherConfigs.put(filename, matcherConfig);
}
/**
* Store the keys and values.
*
* @param name The name of the lookup
* @param values The additional keys and values for this lookup.
*/
public AnalyzerConfigBuilder putLookup(String name, Map<String, String> values) {
Map<String, String> existing = analyzerConfig.lookups.get(name);
if (existing == null) {
analyzerConfig.lookups.put(name, values);
} else {
// Perhaps we are overwriting an existing value
// So we must do one by one and fail if already there
for (Map.Entry<String, String> valueEntry : values.entrySet()) {
String existingValue = existing.get(valueEntry.getKey());
if (existingValue != null) {
if (existingValue.equals(valueEntry.getValue())) {
continue; // Ignore this one.
}
throw new InvalidParserConfigurationException(
"For lookup \"" + name + "\" a multiple different values for " +
"the key \"" + valueEntry.getKey() + "\" were found from " +
"separate definitions of this lookup.");
}
existing.put(valueEntry.getKey(), valueEntry.getValue());
}
}
return this;
}
/**
* @param newLookups The additional lookups.
*/
public AnalyzerConfigBuilder putLookups(Map<String, Map<String, String>> newLookups) {
for (Map.Entry<String, Map<String, String>> entry : newLookups.entrySet()) {
putLookup(entry.getKey(), entry.getValue());
}
return this;
}
        /**
         * Store the additional lookups that need to be merged in with a lookup
         * @param name The name of the lookup
         * @param lookupNames The names of the lookups that must be added to the specified lookup.
         * @return This builder (for chaining).
         */
        public AnalyzerConfigBuilder putLookupMerges(String name, Set<String> lookupNames) {
            // Merges are only recorded here; they are resolved during build().
            lookupMerge.put(name, lookupNames);
            return this;
        }
/**
* Store the keys and values.
* @param name The name of the lookupSet
* @param values The additional keys and values for this lookup.
*/
public AnalyzerConfigBuilder putLookupSets(String name, Set<String> values) {
Set<String> existing = analyzerConfig.lookupSets.get(name);
if (existing == null) {
analyzerConfig.lookupSets.put(name, values);
} else {
existing.addAll(values); // This automatically de-duplicates
}
return this;
}
/**
* @param newLookupSets The additional lookup sets.
*/
public AnalyzerConfigBuilder putLookupSets(Map<String, Set<String>> newLookupSets) {
for (Map.Entry<String, Set<String>> entry : newLookupSets.entrySet()) {
putLookupSets(entry.getKey(), entry.getValue());
}
return this;
}
        /**
         * Store the additional lookupSets that need to be merged in with a lookupSet
         * @param name The name of the lookupSet
         * @param setNames The names of the lookupSets that must be added to the specified lookupSet.
         * @return This builder (for chaining).
         */
        public AnalyzerConfigBuilder putLookupSetsMerges(String name, Set<String> setNames) {
            // Merges are only recorded here; they are resolved during build().
            lookupSetMerge.put(name, setNames);
            return this;
        }
/**
 * Remove all test cases registered so far on the configuration under construction.
 *
 * @return the builder itself (fluent interface)
 */
public AnalyzerConfigBuilder clearAllTestCases() {
    analyzerConfig.testCases.clear();
    return this;
}
/**
 * Register an additional test case on the configuration under construction.
 *
 * @param testCase the test case to add
 * @return the builder itself (fluent interface)
 */
public AnalyzerConfigBuilder addTestCase(TestCase testCase) {
    analyzerConfig.testCases.add(testCase);
    return this;
}
/**
 * Set the userAgentMaxLength option on the configuration under construction.
 *
 * @param userAgentMaxLength the maximum useragent length to configure
 *        (interpretation is delegated to {@code AnalyzerConfig#setUserAgentMaxLength})
 * @return the builder itself (fluent interface)
 */
public AnalyzerConfigBuilder withUserAgentMaxLength(int userAgentMaxLength) {
    analyzerConfig.setUserAgentMaxLength(userAgentMaxLength);
    return this;
}
/**
 * Finalize the configuration: apply all registered lookup merges, normalize
 * the lookup keys to lowercase, apply all registered lookupSet merges and
 * return the resulting config instance.
 *
 * <p>NOTE: the ordering here matters. Lookup merges are applied BEFORE the
 * keys are lowercased, so merged-in keys get normalized as well. LookupSet
 * merges run AFTER that, because a set may be fed from the (by then merged
 * and normalized) keys of a lookup.</p>
 *
 * @return the fully built AnalyzerConfig
 * @throws InvalidParserConfigurationException if a merge source is missing
 *         or a merge would be recursive
 */
public AnalyzerConfig build() {
    if (!analyzerConfig.lookups.isEmpty()) {
        if (!lookupMerge.isEmpty()) {
            // Step 1: pull the entries of every requested source lookup into its target lookup.
            lookupMerge.forEach((mapName, allExtraToLoad) -> {
                Map<String, String> theMap = analyzerConfig.lookups.get(mapName);
                if (theMap != null) {
                    allExtraToLoad.forEach(extraToLoad -> {
                        // A source that is itself a merge target would need ordering/cycle
                        // resolution; this is simply rejected as "recursive".
                        if (lookupMerge.containsKey(extraToLoad)) {
                            throw new InvalidParserConfigurationException("Unable to merge lookup '" + extraToLoad + "' into '" + mapName + "' because it is a recursive merge.");
                        }
                        Map<String, String> extraMap = analyzerConfig.lookups.get(extraToLoad);
                        if (extraMap == null) {
                            throw new InvalidParserConfigurationException("Unable to merge lookup '" + extraToLoad + "' into '" + mapName + "'.");
                        }
                        theMap.putAll(extraMap);
                    });
                }
            });
        }
        // All compares are done in a case insensitive way. So we lowercase ALL keys of the lookups beforehand.
        Map<String, Map<String, String>> cleanedLookups = new LinkedHashMap<>(analyzerConfig.lookups.size());
        for (Map.Entry<String, Map<String, String>> lookupsEntry : analyzerConfig.lookups.entrySet()) {
            Map<String, String> cleanedLookup = new LinkedHashMap<>(lookupsEntry.getValue().size());
            for (Map.Entry<String, String> entry : lookupsEntry.getValue().entrySet()) {
                // Locale.ROOT: locale-independent lowercasing (avoids e.g. the Turkish-I problem).
                cleanedLookup.put(entry.getKey().toLowerCase(Locale.ROOT), entry.getValue());
            }
            cleanedLookups.put(lookupsEntry.getKey(), cleanedLookup);
        }
        // Replace the contents in place; the map instance itself is kept.
        analyzerConfig.lookups.clear();
        analyzerConfig.lookups.putAll(cleanedLookups);
    }
    if (!lookupSetMerge.isEmpty()) {
        // Step 2: fill every lookupSet with its requested sources. A source may be
        // either a lookup (its KEYS are taken) or another lookupSet (its values).
        lookupSetMerge.forEach((setName, allExtraToLoad) -> {
            Set<String> theSet = analyzerConfig.lookupSets.get(setName);
            if (theSet != null) {
                allExtraToLoad.forEach(extraToLoad -> {
                    Map<String, String> extralookup = analyzerConfig.lookups.get(extraToLoad);
                    if (extralookup != null) {
                        theSet.addAll(extralookup.keySet());
                    }
                    Set<String> extralookupSet = analyzerConfig.lookupSets.get(extraToLoad);
                    if (extralookupSet != null) {
                        theSet.addAll(extralookupSet);
                    }
                    // Neither a lookup nor a lookupSet with that name exists.
                    if (extralookup == null && extralookupSet == null) {
                        throw new InvalidParserConfigurationException("Unable to merge set '" + extraToLoad + "' into '" + setName + "'.");
                    }
                    // Reject sources that are themselves merge targets (no cycle resolution).
                    if (lookupMerge.containsKey(extraToLoad) || lookupSetMerge.containsKey(extraToLoad)) {
                        throw new InvalidParserConfigurationException("Unable to merge lookupSET '" + extraToLoad + "' into '" + setName + "' because it is a recursive merge.");
                    }
                });
            }
        });
    }
    return analyzerConfig;
}
}
/**
 * Human-readable summary of this configuration.
 * Note: only the SIZES of the lookups/lookupSets/testCases collections are
 * reported, not their contents (they can be very large).
 */
@Override
public String toString() {
    return "AnalyzerConfig {\n" +
        "    matcherConfigs=" + matcherConfigs + ",\n" +
        "    lookups=" + lookups.size() + ",\n" +
        "    lookupSets=" + lookupSets.size() + ",\n" +
        "    testCases=" + testCases.size() + ",\n" +
        "    userAgentMaxLength=" + userAgentMaxLength + ",\n" +
        "\n}";
}
}
| {
"content_hash": "46f7f0d9db519b92ed9796175a702582",
"timestamp": "",
"source": "github",
"line_count": 291,
"max_line_length": 186,
"avg_line_length": 42.446735395189,
"alnum_prop": 0.5699481865284974,
"repo_name": "nielsbasjes/yauaa",
"id": "f938a92b6db231ab12b8af6fea88b101546d06ec",
"size": "12987",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "analyzer/src/main/java/nl/basjes/parse/useragent/config/AnalyzerConfig.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "45134"
},
{
"name": "CSS",
"bytes": "6258"
},
{
"name": "Dockerfile",
"bytes": "10259"
},
{
"name": "Java",
"bytes": "1796519"
},
{
"name": "Kotlin",
"bytes": "2310"
},
{
"name": "Makefile",
"bytes": "2908"
},
{
"name": "NASL",
"bytes": "11460"
},
{
"name": "Ruby",
"bytes": "3392"
},
{
"name": "Scala",
"bytes": "2221"
},
{
"name": "Shell",
"bytes": "147618"
}
],
"symlink_target": ""
} |
package com.amazonaws.services.pinpoint.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.pinpoint.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * GetApnsChannelRequestMarshaller
 *
 * <p>Code-generated (see the {@code @Generated} annotation below) — do not
 * hand-edit the logic; regenerate instead.</p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class GetApnsChannelRequestMarshaller {
    // The single request field: the application id is marshalled into the
    // URI path (MarshallLocation.PATH) under the name "application-id".
    private static final MarshallingInfo<String> APPLICATIONID_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PATH).marshallLocationName("application-id").build();
    // Stateless, hence shared as a singleton.
    private static final GetApnsChannelRequestMarshaller instance = new GetApnsChannelRequestMarshaller();
    public static GetApnsChannelRequestMarshaller getInstance() {
        return instance;
    }
    /**
     * Marshall the given parameter object.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetApnsChannelRequest getApnsChannelRequest, ProtocolMarshaller protocolMarshaller) {
        if (getApnsChannelRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getApnsChannelRequest.getApplicationId(), APPLICATIONID_BINDING);
        } catch (Exception e) {
            // Wrap every marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| {
"content_hash": "060b0e32e210fd6c60d2db74dda20571",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 120,
"avg_line_length": 32.86363636363637,
"alnum_prop": 0.7378976486860305,
"repo_name": "jentfoo/aws-sdk-java",
"id": "21b29facc4f687a78833d2377604a04dc7141f00",
"size": "2026",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-java-sdk-pinpoint/src/main/java/com/amazonaws/services/pinpoint/model/transform/GetApnsChannelRequestMarshaller.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "270"
},
{
"name": "FreeMarker",
"bytes": "173637"
},
{
"name": "Gherkin",
"bytes": "25063"
},
{
"name": "Java",
"bytes": "356214839"
},
{
"name": "Scilab",
"bytes": "3924"
},
{
"name": "Shell",
"bytes": "295"
}
],
"symlink_target": ""
} |
package com.taobao.android.dx.dex.code;
import com.taobao.android.dx.rop.code.RegisterSpec;
import com.taobao.android.dx.rop.code.RegisterSpecList;
import com.taobao.android.dx.rop.cst.Constant;
import com.taobao.android.dx.rop.cst.CstInteger;
import com.taobao.android.dx.rop.cst.CstKnownNull;
import com.taobao.android.dx.rop.cst.CstLiteral64;
import com.taobao.android.dx.rop.cst.CstLiteralBits;
import com.taobao.android.dx.rop.cst.CstString;
import com.taobao.android.dx.util.AnnotatedOutput;
import com.taobao.android.dx.util.Hex;
import java.util.BitSet;
/**
 * Base class for all instruction format handlers. Instruction format
 * handlers know how to translate {@link DalvInsn} instances into
 * streams of code units, as well as human-oriented listing strings
 * representing such translations.
 */
public abstract class InsnFormat {
    /**
     * flag to enable/disable the new extended opcode formats; meant as a
     * temporary measure until VM support for the salient opcodes is
     * added. TODO: Remove this declaration when the VM can deal.
     */
    public static boolean ALLOW_EXTENDED_OPCODES = true;
    /**
     * Returns the string form, suitable for inclusion in a listing
     * dump, of the given instruction. The instruction must be of this
     * instance's format for proper operation.
     *
     * @param insn {@code non-null;} the instruction
     * @param noteIndices whether to include an explicit notation of
     * constant pool indices
     * @return {@code non-null;} the string form
     */
    public final String listingString(DalvInsn insn, boolean noteIndices) {
        String op = insn.getOpcode().getName();
        String arg = insnArgString(insn);
        String comment = insnCommentString(insn, noteIndices);
        StringBuilder sb = new StringBuilder(100);
        sb.append(op);
        if (arg.length() != 0) {
            sb.append(' ');
            sb.append(arg);
        }
        if (comment.length() != 0) {
            sb.append(" // ");
            sb.append(comment);
        }
        return sb.toString();
    }
    /**
     * Returns the string form of the arguments to the given instruction.
     * The instruction must be of this instance's format. If the instruction
     * has no arguments, then the result should be {@code ""}, not
     * {@code null}.
     *
     * <p>Subclasses must override this method.</p>
     *
     * @param insn {@code non-null;} the instruction
     * @return {@code non-null;} the string form
     */
    public abstract String insnArgString(DalvInsn insn);
    /**
     * Returns the associated comment for the given instruction, if any.
     * The instruction must be of this instance's format. If the instruction
     * has no comment, then the result should be {@code ""}, not
     * {@code null}.
     *
     * <p>Subclasses must override this method.</p>
     *
     * @param insn {@code non-null;} the instruction
     * @param noteIndices whether to include an explicit notation of
     * constant pool indices
     * @return {@code non-null;} the string form
     */
    public abstract String insnCommentString(DalvInsn insn,
            boolean noteIndices);
    /**
     * Gets the code size of instructions that use this format. The
     * size is a number of 16-bit code units, not bytes. This should
     * throw an exception if this format is of variable size.
     *
     * @return {@code >= 0;} the instruction length in 16-bit code units
     */
    public abstract int codeSize();
    /**
     * Returns whether or not the given instruction's arguments will
     * fit in this instance's format. This includes such things as
     * counting register arguments, checking register ranges, and
     * making sure that additional arguments are of appropriate types
     * and are in-range. If this format has a branch target but the
     * instruction's branch offset is unknown, this method will simply
     * not check the offset.
     *
     * <p>Subclasses must override this method.</p>
     *
     * @param insn {@code non-null;} the instruction to check
     * @return {@code true} iff the instruction's arguments are
     * appropriate for this instance, or {@code false} if not
     */
    public abstract boolean isCompatible(DalvInsn insn);
    /**
     * Returns which of a given instruction's registers will fit in
     * this instance's format.
     *
     * <p>The default implementation of this method always returns
     * an empty BitSet. Subclasses must override this method if they
     * have registers.</p>
     *
     * @param insn {@code non-null;} the instruction to check
     * @return {@code non-null;} a BitSet flagging registers in the
     * register list that are compatible to this format
     */
    public BitSet compatibleRegs(DalvInsn insn) {
        return new BitSet();
    }
    /**
     * Returns whether or not the given instruction's branch offset will
     * fit in this instance's format. This always returns {@code false}
     * for formats that don't include a branch offset.
     *
     * <p>The default implementation of this method always returns
     * {@code false}. Subclasses must override this method if they
     * include branch offsets.</p>
     *
     * @param insn {@code non-null;} the instruction to check
     * @return {@code true} iff the instruction's branch offset is
     * appropriate for this instance, or {@code false} if not
     */
    public boolean branchFits(TargetInsn insn) {
        return false;
    }
    /**
     * Writes the code units for the given instruction to the given
     * output destination. The instruction must be of this instance's format.
     *
     * <p>Subclasses must override this method.</p>
     *
     * @param out {@code non-null;} the output destination to write to
     * @param insn {@code non-null;} the instruction to write
     */
    public abstract void writeTo(AnnotatedOutput out, DalvInsn insn);
    /**
     * Helper method to return a register list string.
     *
     * @param list {@code non-null;} the list of registers
     * @return {@code non-null;} the string form
     */
    protected static String regListString(RegisterSpecList list) {
        int sz = list.size();
        // StringBuilder (not the legacy synchronized StringBuffer), consistent
        // with the other string-building helpers in this class.
        StringBuilder sb = new StringBuilder(sz * 5 + 2);
        sb.append('{');
        for (int i = 0; i < sz; i++) {
            if (i != 0) {
                sb.append(", ");
            }
            sb.append(list.get(i).regString());
        }
        sb.append('}');
        return sb.toString();
    }
    /**
     * Helper method to return a register range string.
     *
     * @param list {@code non-null;} the list of registers (which must be
     * sequential)
     * @return {@code non-null;} the string form
     */
    protected static String regRangeString(RegisterSpecList list) {
        int size = list.size();
        StringBuilder sb = new StringBuilder(30);
        sb.append("{");
        switch (size) {
            case 0: {
                // Nothing to do.
                break;
            }
            case 1: {
                sb.append(list.get(0).regString());
                break;
            }
            default: {
                RegisterSpec lastReg = list.get(size - 1);
                if (lastReg.getCategory() == 2) {
                    /*
                     * Add one to properly represent a list-final
                     * category-2 register.
                     */
                    lastReg = lastReg.withOffset(1);
                }
                sb.append(list.get(0).regString());
                sb.append("..");
                sb.append(lastReg.regString());
            }
        }
        sb.append("}");
        return sb.toString();
    }
    /**
     * Helper method to return a literal bits argument string.
     *
     * @param value the value
     * @return {@code non-null;} the string form
     */
    protected static String literalBitsString(CstLiteralBits value) {
        // StringBuilder for consistency with the rest of this class.
        StringBuilder sb = new StringBuilder(100);
        sb.append('#');
        if (value instanceof CstKnownNull) {
            sb.append("null");
        } else {
            sb.append(value.typeName());
            sb.append(' ');
            sb.append(value.toHuman());
        }
        return sb.toString();
    }
    /**
     * Helper method to return a literal bits comment string.
     *
     * @param value the value
     * @param width the width of the constant, in bits (used for displaying
     * the uninterpreted bits; one of: {@code 4 8 16 32 64}
     * @return {@code non-null;} the comment
     */
    protected static String literalBitsComment(CstLiteralBits value,
            int width) {
        // StringBuilder for consistency with the rest of this class.
        StringBuilder sb = new StringBuilder(20);
        sb.append("#");
        long bits;
        if (value instanceof CstLiteral64) {
            bits = ((CstLiteral64) value).getLongBits();
        } else {
            bits = value.getIntBits();
        }
        switch (width) {
            case 4: sb.append(Hex.uNibble((int) bits)); break;
            case 8: sb.append(Hex.u1((int) bits)); break;
            case 16: sb.append(Hex.u2((int) bits)); break;
            case 32: sb.append(Hex.u4((int) bits)); break;
            case 64: sb.append(Hex.u8(bits)); break;
            default: {
                throw new RuntimeException("shouldn't happen");
            }
        }
        return sb.toString();
    }
    /**
     * Helper method to return a branch address string.
     *
     * @param insn {@code non-null;} the instruction in question
     * @return {@code non-null;} the string form of the instruction's
     * branch target
     */
    protected static String branchString(DalvInsn insn) {
        TargetInsn ti = (TargetInsn) insn;
        int address = ti.getTargetAddress();
        // Short form when the address fits in an unsigned 16-bit value.
        return (address == (char) address) ? Hex.u2(address) : Hex.u4(address);
    }
    /**
     * Helper method to return the comment for a branch.
     *
     * @param insn {@code non-null;} the instruction in question
     * @return {@code non-null;} the comment
     */
    protected static String branchComment(DalvInsn insn) {
        TargetInsn ti = (TargetInsn) insn;
        int offset = ti.getTargetOffset();
        // Short form when the offset fits in a signed 16-bit value.
        return (offset == (short) offset) ? Hex.s2(offset) : Hex.s4(offset);
    }
    /**
     * Helper method to return the constant string for a {@link CstInsn}
     * in human form.
     *
     * @param insn {@code non-null;} a constant-bearing instruction
     * @return {@code non-null;} the human string form of the contained
     * constant
     */
    protected static String cstString(DalvInsn insn) {
        CstInsn ci = (CstInsn) insn;
        Constant cst = ci.getConstant();
        return cst instanceof CstString ? ((CstString) cst).toQuoted() : cst.toHuman();
    }
    /**
     * Helper method to return an instruction comment for a constant.
     *
     * @param insn {@code non-null;} a constant-bearing instruction
     * @return {@code non-null;} comment string representing the constant
     */
    protected static String cstComment(DalvInsn insn) {
        CstInsn ci = (CstInsn) insn;
        if (! ci.hasIndex()) {
            return "";
        }
        StringBuilder sb = new StringBuilder(20);
        int index = ci.getIndex();
        sb.append(ci.getConstant().typeName());
        sb.append('@');
        if (index < 65536) {
            sb.append(Hex.u2(index));
        } else {
            sb.append(Hex.u4(index));
        }
        return sb.toString();
    }
    /**
     * Helper method to determine if a signed int value fits in a nibble.
     *
     * @param value the value in question
     * @return {@code true} iff it's in the range -8..+7
     */
    protected static boolean signedFitsInNibble(int value) {
        return (value >= -8) && (value <= 7);
    }
    /**
     * Helper method to determine if an unsigned int value fits in a nibble.
     *
     * @param value the value in question
     * @return {@code true} iff it's in the range 0..0xf
     */
    protected static boolean unsignedFitsInNibble(int value) {
        return value == (value & 0xf);
    }
    /**
     * Helper method to determine if a signed int value fits in a byte.
     *
     * @param value the value in question
     * @return {@code true} iff it's in the range -0x80..+0x7f
     */
    protected static boolean signedFitsInByte(int value) {
        return (byte) value == value;
    }
    /**
     * Helper method to determine if an unsigned int value fits in a byte.
     *
     * @param value the value in question
     * @return {@code true} iff it's in the range 0..0xff
     */
    protected static boolean unsignedFitsInByte(int value) {
        return value == (value & 0xff);
    }
    /**
     * Helper method to determine if a signed int value fits in a short.
     *
     * @param value the value in question
     * @return {@code true} iff it's in the range -0x8000..+0x7fff
     */
    protected static boolean signedFitsInShort(int value) {
        return (short) value == value;
    }
    /**
     * Helper method to determine if an unsigned int value fits in a short.
     *
     * @param value the value in question
     * @return {@code true} iff it's in the range 0..0xffff
     */
    protected static boolean unsignedFitsInShort(int value) {
        return value == (value & 0xffff);
    }
    /**
     * Helper method to determine if a list of registers are sequential,
     * including degenerate cases for empty or single-element lists.
     *
     * @param list {@code non-null;} the list of registers
     * @return {@code true} iff the list is sequentially ordered
     */
    protected static boolean isRegListSequential(RegisterSpecList list) {
        int sz = list.size();
        if (sz < 2) {
            return true;
        }
        // Each register must start exactly where the previous one ended
        // (category = number of register slots occupied).
        int next = list.get(0).getReg();
        for (int i = 0; i < sz; i++) {
            RegisterSpec one = list.get(i);
            if (one.getReg() != next) {
                return false;
            }
            next += one.getCategory();
        }
        return true;
    }
    /**
     * Helper method to extract the callout-argument index from an
     * appropriate instruction.
     *
     * @param insn {@code non-null;} the instruction
     * @return {@code >= 0;} the callout argument index
     */
    protected static int argIndex(DalvInsn insn) {
        int arg = ((CstInteger) ((CstInsn) insn).getConstant()).getValue();
        if (arg < 0) {
            throw new IllegalArgumentException("bogus insn");
        }
        return arg;
    }
    /**
     * Helper method to combine an opcode and a second byte of data into
     * the appropriate form for emitting into a code buffer.
     *
     * @param insn {@code non-null;} the instruction containing the opcode
     * @param arg {@code 0..255;} arbitrary other byte value
     * @return combined value
     */
    protected static short opcodeUnit(DalvInsn insn, int arg) {
        if ((arg & 0xff) != arg) {
            throw new IllegalArgumentException("arg out of range 0..255");
        }
        int opcode = insn.getOpcode().getOpcode();
        if ((opcode & 0xff) != opcode) {
            throw new IllegalArgumentException("opcode out of range 0..255");
        }
        return (short) (opcode | (arg << 8));
    }
    /**
     * Helper method to get an extended (16-bit) opcode out of an
     * instruction, returning it as a code unit. The opcode
     * <i>must</i> be an extended opcode.
     *
     * @param insn {@code non-null;} the instruction containing the
     * extended opcode
     * @return the opcode as a code unit
     */
    protected static short opcodeUnit(DalvInsn insn) {
        int opcode = insn.getOpcode().getOpcode();
        // NOTE(review): the check actually requires an *extended* opcode
        // (0x100..0xffff) while the message says "0..65535"; message kept
        // as-is in case anything matches on it — confirm before changing.
        if ((opcode < 0x100) || (opcode > 0xffff)) {
            throw new IllegalArgumentException("opcode out of range 0..65535");
        }
        return (short) opcode;
    }
    /**
     * Helper method to combine two bytes into a code unit.
     *
     * @param low {@code 0..255;} low byte
     * @param high {@code 0..255;} high byte
     * @return combined value
     */
    protected static short codeUnit(int low, int high) {
        if ((low & 0xff) != low) {
            throw new IllegalArgumentException("low out of range 0..255");
        }
        if ((high & 0xff) != high) {
            throw new IllegalArgumentException("high out of range 0..255");
        }
        return (short) (low | (high << 8));
    }
    /**
     * Helper method to combine four nibbles into a code unit.
     *
     * @param n0 {@code 0..15;} low nibble
     * @param n1 {@code 0..15;} medium-low nibble
     * @param n2 {@code 0..15;} medium-high nibble
     * @param n3 {@code 0..15;} high nibble
     * @return combined value
     */
    protected static short codeUnit(int n0, int n1, int n2, int n3) {
        if ((n0 & 0xf) != n0) {
            throw new IllegalArgumentException("n0 out of range 0..15");
        }
        if ((n1 & 0xf) != n1) {
            throw new IllegalArgumentException("n1 out of range 0..15");
        }
        if ((n2 & 0xf) != n2) {
            throw new IllegalArgumentException("n2 out of range 0..15");
        }
        if ((n3 & 0xf) != n3) {
            throw new IllegalArgumentException("n3 out of range 0..15");
        }
        return (short) (n0 | (n1 << 4) | (n2 << 8) | (n3 << 12));
    }
    /**
     * Helper method to combine two nibbles into a byte.
     *
     * @param low {@code 0..15;} low nibble
     * @param high {@code 0..15;} high nibble
     * @return {@code 0..255;} combined value
     */
    protected static int makeByte(int low, int high) {
        if ((low & 0xf) != low) {
            throw new IllegalArgumentException("low out of range 0..15");
        }
        if ((high & 0xf) != high) {
            throw new IllegalArgumentException("high out of range 0..15");
        }
        return low | (high << 4);
    }
    /**
     * Writes one code unit to the given output destination.
     *
     * @param out {@code non-null;} where to write to
     * @param c0 code unit to write
     */
    protected static void write(AnnotatedOutput out, short c0) {
        out.writeShort(c0);
    }
    /**
     * Writes two code units to the given output destination.
     *
     * @param out {@code non-null;} where to write to
     * @param c0 code unit to write
     * @param c1 code unit to write
     */
    protected static void write(AnnotatedOutput out, short c0, short c1) {
        out.writeShort(c0);
        out.writeShort(c1);
    }
    /**
     * Writes three code units to the given output destination.
     *
     * @param out {@code non-null;} where to write to
     * @param c0 code unit to write
     * @param c1 code unit to write
     * @param c2 code unit to write
     */
    protected static void write(AnnotatedOutput out, short c0, short c1,
            short c2) {
        out.writeShort(c0);
        out.writeShort(c1);
        out.writeShort(c2);
    }
    /**
     * Writes four code units to the given output destination.
     *
     * @param out {@code non-null;} where to write to
     * @param c0 code unit to write
     * @param c1 code unit to write
     * @param c2 code unit to write
     * @param c3 code unit to write
     */
    protected static void write(AnnotatedOutput out, short c0, short c1,
            short c2, short c3) {
        out.writeShort(c0);
        out.writeShort(c1);
        out.writeShort(c2);
        out.writeShort(c3);
    }
    /**
     * Writes five code units to the given output destination.
     *
     * @param out {@code non-null;} where to write to
     * @param c0 code unit to write
     * @param c1 code unit to write
     * @param c2 code unit to write
     * @param c3 code unit to write
     * @param c4 code unit to write
     */
    protected static void write(AnnotatedOutput out, short c0, short c1,
            short c2, short c3, short c4) {
        out.writeShort(c0);
        out.writeShort(c1);
        out.writeShort(c2);
        out.writeShort(c3);
        out.writeShort(c4);
    }
    /**
     * Writes three code units to the given output destination, where the
     * second and third are represented as single <code>int</code> and emitted
     * in little-endian order.
     *
     * @param out {@code non-null;} where to write to
     * @param c0 code unit to write
     * @param c1c2 code unit pair to write
     */
    protected static void write(AnnotatedOutput out, short c0, int c1c2) {
        write(out, c0, (short) c1c2, (short) (c1c2 >> 16));
    }
    /**
     * Writes four code units to the given output destination, where the
     * second and third are represented as single <code>int</code> and emitted
     * in little-endian order.
     *
     * @param out {@code non-null;} where to write to
     * @param c0 code unit to write
     * @param c1c2 code unit pair to write
     * @param c3 code unit to write
     */
    protected static void write(AnnotatedOutput out, short c0, int c1c2,
            short c3) {
        write(out, c0, (short) c1c2, (short) (c1c2 >> 16), c3);
    }
    /**
     * Writes five code units to the given output destination, where the
     * second and third are represented as single <code>int</code> and emitted
     * in little-endian order.
     *
     * @param out {@code non-null;} where to write to
     * @param c0 code unit to write
     * @param c1c2 code unit pair to write
     * @param c3 code unit to write
     * @param c4 code unit to write
     */
    protected static void write(AnnotatedOutput out, short c0, int c1c2,
            short c3, short c4) {
        write(out, c0, (short) c1c2, (short) (c1c2 >> 16), c3, c4);
    }
    /**
     * Writes five code units to the given output destination, where the
     * second through fifth are represented as single <code>long</code>
     * and emitted in little-endian order.
     *
     * @param out {@code non-null;} where to write to
     * @param c0 code unit to write
     * @param c1c2c3c4 code unit quad to write
     */
    protected static void write(AnnotatedOutput out, short c0, long c1c2c3c4) {
        write(out, c0, (short) c1c2c3c4, (short) (c1c2c3c4 >> 16),
                (short) (c1c2c3c4 >> 32), (short) (c1c2c3c4 >> 48));
    }
}
| {
"content_hash": "48ee1e25ebac2536f8f5290da04b91d0",
"timestamp": "",
"source": "github",
"line_count": 699,
"max_line_length": 87,
"avg_line_length": 32.035765379113016,
"alnum_prop": 0.5884874737641227,
"repo_name": "alibaba/atlas",
"id": "78cfeb083fdcf4353af14a179a6569ee7c9ff8ac",
"size": "23012",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "atlas-gradle-plugin/dexpatch/src/main/java/com/taobao/android/dx/dex/code/InsnFormat.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "64906"
},
{
"name": "C",
"bytes": "1975639"
},
{
"name": "C++",
"bytes": "5114248"
},
{
"name": "CMake",
"bytes": "3090"
},
{
"name": "CSS",
"bytes": "87762"
},
{
"name": "HTML",
"bytes": "628309"
},
{
"name": "Java",
"bytes": "11894753"
},
{
"name": "JavaScript",
"bytes": "37802"
},
{
"name": "Kotlin",
"bytes": "2217"
},
{
"name": "Makefile",
"bytes": "47196"
},
{
"name": "Python",
"bytes": "1262"
},
{
"name": "Shell",
"bytes": "11347"
}
],
"symlink_target": ""
} |
#!@PHP_BIN@
<?php
require_once realpath(__DIR__ . "/../config/centreon.config.php");
include_once _CENTREON_PATH_ . "/cron/centAcl-Func.php";
include_once _CENTREON_PATH_ . "/www/class/centreonDB.class.php";
include_once _CENTREON_PATH_ . "/www/class/centreonLDAP.class.php";
include_once _CENTREON_PATH_ . "/www/class/centreonMeta.class.php";
include_once _CENTREON_PATH_ . "/www/class/centreonContactgroup.class.php";
include_once _CENTREON_PATH_ . "/www/class/centreonLog.class.php";
$centreonDbName = $conf_centreon['db'];
$centreonLog = new CentreonLog();
/*
* Define the period between two update in second for LDAP user/contactgroup
*/
define('LDAP_UPDATE_PERIOD', 3600);
/**
* CentAcl script
*/
$nbProc = exec('ps -o args -p $(pidof -o $$ -o $PPID -o %PPID -x php || echo 1000000) | grep -c ' . __FILE__);
if ((int) $nbProc > 0) {
programExit("More than one centAcl.php process is currently running. Going to exit...");
}
ini_set('max_execution_time', 0);
try {
/*
* Init values
*/
$debug = 0;
/*
* Init DB connections
*/
$pearDB = new CentreonDB();
$pearDBO = new CentreonDB("centstorage");
$metaObj = new CentreonMeta($pearDB);
$cgObj = new CentreonContactgroup($pearDB);
// checking the state of the Cron
$data = getCentAclRunningState();
$beginTime = time();
if (empty($data)) {
try {
// at first run (eg: after the install), data may be missing.
$pearDB->query(
"INSERT INTO `cron_operation` (`name`, `system`, `activate`) VALUES ('centAcl.php', '1', '1')"
);
} catch (\PDOException $e) {
programExit("Error can't insert centAcl values in the `cron_operation` table.");
}
$data = getCentAclRunningState();
$appId = (int)$data["id"] ?? 0;
$is_running = 0;
} else {
$is_running = $data["running"];
$appId = (int)$data["id"];
}
/*
* Lock in MySQL (ie: by setting the `running` value to 1)
*/
if ($is_running == 0) {
putALock($appId);
} else {
if ($nbProc <= 1) {
$errorMessage = "According to DB another instance of centAcl.php is already running and I found " .
$nbProc . " process...\n";
$errorMessage .= "Correcting the state in the DB, by setting the `running` value to 0 for id = " . $appId;
removeLock($appId);
} else {
$errorMessage = "centAcl marked as running. Exiting...";
}
programExit($errorMessage);
}
/**
* Sync ACL with LDAP's contactgroup
* If the LDAP is enabled and the last check is greater than the update period
*
* @TODO : Synchronize LDAP with contacts data in background to avoid it at login
*/
$ldapEnable = '0';
$ldapLastUpdate = 0;
$queryOptions = "SELECT `key`, `value` FROM `options` WHERE `key` IN ('ldap_auth_enable', 'ldap_last_acl_update')";
$res = $pearDB->query($queryOptions);
while ($row = $res->fetch()) {
switch ($row['key']) {
case 'ldap_auth_enable':
$ldapEnable = $row['value'];
break;
case 'ldap_last_acl_update':
$ldapLastUpdate = $row['value'];
break;
}
}
if ($ldapEnable === '1' && $ldapLastUpdate < (time() - LDAP_UPDATE_PERIOD)) {
$cgObj->syncWithLdap();
}
/**
* Check expected contact data sync on login with the LDAP, depending on last sync time and own sync interval
*/
$pearDB->beginTransaction();
try {
$ldapConf = $pearDB->query(
"SELECT auth.ar_id, auth.ar_sync_base_date, info.ari_value AS `interval`
FROM auth_ressource auth
INNER JOIN auth_ressource_info info ON auth.ar_id = info.ar_id
WHERE auth.ar_enable = '1' AND info.ari_name = 'ldap_sync_interval'"
);
$updateSyncTime = $pearDB->prepare(
'UPDATE auth_ressource SET ar_sync_base_date = :currentTime
WHERE ar_id = :arId'
);
$currentTime = time();
while ($ldapRow = $ldapConf->fetch()) {
if ($currentTime > ($ldapRow['ar_sync_base_date'] + 3600 * $ldapRow['interval'])) {
$updateSyncTime->bindValue(':currentTime', $currentTime, \PDO::PARAM_INT);
$updateSyncTime->bindValue(':arId', (int)$ldapRow['ar_id'], \PDO::PARAM_INT);
$updateSyncTime->execute();
}
}
$pearDB->commit();
} catch (\PDOException $e) {
$pearDB->rollBack();
programExit("Error when updating LDAP's reference date for next synchronization");
}
/**
* Remove data from old groups (deleted groups)
*/
$aclGroupToDelete = "SELECT DISTINCT acl_group_id
FROM `" . $centreonDbName . "`.acl_groups WHERE acl_group_activate = '1'";
$aclGroupToDelete2 = "SELECT DISTINCT acl_group_id FROM `" . $centreonDbName . "`.acl_res_group_relations";
$pearDBO->beginTransaction();
try {
$pearDBO->query("DELETE FROM centreon_acl WHERE group_id NOT IN (" . $aclGroupToDelete . ")");
$pearDBO->query("DELETE FROM centreon_acl WHERE group_id NOT IN (" . $aclGroupToDelete2 . ")");
$pearDBO->commit();
} catch (\PDOException $e) {
$pearDBO->rollBack();
$centreonLog->insertLog(
2,
"CentACL CRON: failed to delete old groups relations"
);
}
/**
* Check if some ACL have global options selected for
* all the resources
*/
$res = $pearDB->query(
"SELECT acl_res_id, all_hosts, all_hostgroups, all_servicegroups
FROM acl_resources WHERE acl_res_activate = '1'
AND (all_hosts IS NOT NULL OR all_hostgroups IS NOT NULL OR all_servicegroups IS NOT NULL)"
);
while ($row = $res->fetch()) {
// manage acl_resources.changed flag
$aclResourcesUpdated = false;
/**
* Add Hosts
*/
if ($row['all_hosts']) {
$pearDB->beginTransaction();
try {
$res1 = $pearDB->prepare(
"SELECT host_id FROM host WHERE host_id NOT IN (SELECT DISTINCT host_host_id
FROM acl_resources_host_relations WHERE acl_res_id = :aclResId)
AND host_register = '1'"
);
$res1->bindValue(':aclResId', $row['acl_res_id'], \PDO::PARAM_INT);
$res1->execute();
if ($res1->rowCount()) {
// set acl_resources.changed flag to 1
$aclResourcesUpdated = true;
}
while ($rowData = $res1->fetch()) {
$stmt = $pearDB->prepare(
"INSERT INTO acl_resources_host_relations (host_host_id, acl_res_id)
VALUES (:hostId, :aclResId)"
);
$stmt->bindValue(':hostId', $rowData['host_id'], \PDO::PARAM_INT);
$stmt->bindValue(':aclResId', $row['acl_res_id'], \PDO::PARAM_INT);
$stmt->execute();
}
$pearDB->commit();
$res1->closeCursor();
} catch (\PDOException $e) {
$pearDB->rollBack();
$centreonLog->insertLog(
2,
"CentACL CRON: failed to add new host"
);
}
}
/**
* Add Hostgroups
*/
if ($row['all_hostgroups']) {
$pearDB->beginTransaction();
try {
$res1 = $pearDB->prepare(
"SELECT hg_id FROM hostgroup
WHERE hg_id NOT IN (
SELECT DISTINCT hg_hg_id FROM acl_resources_hg_relations
WHERE acl_res_id = :aclResId)"
);
$res1->bindValue(':aclResId', $row['acl_res_id'], \PDO::PARAM_INT);
$res1->execute();
if ($res1->rowCount()) {
// set acl_resources.changed flag to 1
$aclResourcesUpdated = true;
}
while ($rowData = $res1->fetch()) {
$stmt = $pearDB->prepare(
"INSERT INTO acl_resources_hg_relations (hg_hg_id, acl_res_id)
VALUES (:hgId, :aclResId)"
);
$stmt->bindValue(':hgId', $rowData['hg_id'], \PDO::PARAM_INT);
$stmt->bindValue(':aclResId', $row['acl_res_id'], \PDO::PARAM_INT);
$stmt->execute();
}
$pearDB->commit();
$res1->closeCursor();
} catch (\PDOException $e) {
$pearDB->rollBack();
$centreonLog->insertLog(
2,
"CentACL CRON: failed to add new hostgroups"
);
}
}
/**
* Add Servicesgroups
*/
$pearDB->beginTransaction();
try {
if ($row['all_servicegroups']) {
$res1 = $pearDB->prepare(
"SELECT sg_id FROM servicegroup
WHERE sg_id NOT IN (
SELECT DISTINCT sg_id FROM acl_resources_sg_relations
WHERE acl_res_id = :aclResId)"
);
$res1->bindValue(':aclResId', $row['acl_res_id'], \PDO::PARAM_INT);
$res1->execute();
if ($res1->rowCount()) {
// set acl_resources.changed flag to 1
$aclResourcesUpdated = true;
}
while ($rowData = $res1->fetch()) {
$stmt = $pearDB->prepare(
"INSERT INTO acl_resources_sg_relations (sg_id, acl_res_id)
VALUES (:sgId, :aclResId)"
);
$stmt->bindValue(':sgId', $rowData['sg_id'], \PDO::PARAM_INT);
$stmt->bindValue(':aclResId', $row['acl_res_id'], \PDO::PARAM_INT);
$stmt->execute();
}
$res1->closeCursor();
}
// as resources has changed we need to save it in the DB
if ($aclResourcesUpdated) {
$stmt = $pearDB->prepare(
"UPDATE acl_resources SET changed = '1' WHERE acl_res_id = :aclResId"
);
$stmt->bindValue(':aclResId', $row['acl_res_id'], \PDO::PARAM_INT);
$stmt->execute();
}
$pearDB->commit();
} catch (\PDOException $e) {
$pearDB->rollBack();
$centreonLog->insertLog(
2,
"CentACL CRON: failed to add new servicegroup"
);
}
}
/**
* Check that the ACL resources have changed
* if no : go away.
* if yes : let's go to build cache and update database
*/
$tabGroups = array();
$dbResult1 = $pearDB->query(
"SELECT DISTINCT acl_groups.acl_group_id
FROM acl_res_group_relations, `acl_groups`, `acl_resources`
WHERE acl_groups.acl_group_id = acl_res_group_relations.acl_group_id
AND acl_res_group_relations.acl_res_id = acl_resources.acl_res_id
AND acl_groups.acl_group_activate = '1'
AND (
acl_groups.acl_group_changed = '1' OR
(acl_resources.changed = '1' AND acl_resources.acl_res_activate IS NOT NULL)
)"
);
while ($result = $dbResult1->fetch()) {
$tabGroups[] = $result['acl_group_id'];
}
unset($result);
if (count($tabGroups)) {
/**
* Cache for hosts and host Templates
*/
$hostTemplateCache = [];
$res = $pearDB->query(
"SELECT host_host_id, host_tpl_id FROM host_template_relation"
);
while ($row = $res->fetch()) {
if (!isset($hostTemplateCache[$row['host_tpl_id']])) {
$hostTemplateCache[$row['host_tpl_id']] = array();
}
$hostTemplateCache[$row['host_tpl_id']][$row['host_host_id']] = $row['host_host_id'];
}
$hostCache = [];
$dbResult = $pearDB->query(
"SELECT host_id, host_name FROM host WHERE host_register IN ('1', '2')"
);
while ($h = $dbResult->fetch()) {
$hostCache[$h["host_id"]] = $h["host_name"];
}
unset($h);
/**
* Cache for host poller relation
*/
$hostPollerCache = array();
$res = $pearDB->query(
"SELECT nagios_server_id, host_host_id FROM ns_host_relation"
);
while ($row = $res->fetch()) {
if (!isset($hostPollerCache[$row['nagios_server_id']])) {
$hostPollerCache[$row['nagios_server_id']] = array();
}
$hostPollerCache[$row['nagios_server_id']][$row['host_host_id']] = $row['host_host_id'];
}
/**
* Get all included Hosts
*/
$hostIncCache = [];
$dbResult = $pearDB->query(
"SELECT host_host_id, acl_res_id
FROM acl_resources_host_relations"
);
while ($h = $dbResult->fetch()) {
if (!isset($hostIncCache[$h["acl_res_id"]])) {
$hostIncCache[$h["acl_res_id"]] = [];
}
$hostIncCache[$h["acl_res_id"]][$h["host_host_id"]] = 1;
}
/**
* Get all excluded Hosts
*/
$hostExclCache = [];
$dbResult = $pearDB->query(
"SELECT host_host_id, acl_res_id
FROM acl_resources_hostex_relations"
);
while ($h = $dbResult->fetch()) {
if (!isset($hostExclCache[$h["acl_res_id"]])) {
$hostExclCache[$h["acl_res_id"]] = [];
}
$hostExclCache[$h["acl_res_id"]][$h["host_host_id"]] = 1;
}
/**
* Service Cache
*/
$svcCache = [];
$dbResult = $pearDB->query(
"SELECT service_id FROM `service`
WHERE service_register = '1'"
);
while ($s = $dbResult->fetch()) {
$svcCache[$s["service_id"]] = 1;
}
/**
* Host Host relation
*/
$hostHGRelation = [];
$dbResult = $pearDB->query("SELECT * FROM hostgroup_relation");
while ($hg = $dbResult->fetch()) {
if (!isset($hostHGRelation[$hg["hostgroup_hg_id"]])) {
$hostHGRelation[$hg["hostgroup_hg_id"]] = [];
}
$hostHGRelation[$hg["hostgroup_hg_id"]][$hg["host_host_id"]] = $hg["host_host_id"];
}
unset($hg);
/**
* Host Service relation
*/
$hsRelation = [];
$dbResult = $pearDB->query(
"SELECT hostgroup_hg_id, host_host_id, service_service_id
FROM host_service_relation"
);
while ($sr = $dbResult->fetch()) {
if (isset($sr["host_host_id"]) && $sr["host_host_id"]) {
if (!isset($hsRelation[$sr["host_host_id"]])) {
$hsRelation[$sr["host_host_id"]] = array();
}
$hsRelation[$sr["host_host_id"]][$sr["service_service_id"]] = 1;
} else {
if (isset($hostHGRelation[$sr["hostgroup_hg_id"]])) {
foreach ($hostHGRelation[$sr["hostgroup_hg_id"]] as $hostId) {
if (!isset($hsRelation[$hostId])) {
$hsRelation[$hostId] = array();
}
$hsRelation[$hostId][$sr["service_service_id"]] = 1;
}
}
}
}
$dbResult->closeCursor();
/**
* Create Service template model Cache
*/
$svcTplCache = [];
$dbResult = $pearDB->query("SELECT service_template_model_stm_id, service_id FROM service");
while ($tpl = $dbResult->fetch()) {
$svcTplCache[$tpl["service_id"]] = $tpl["service_template_model_stm_id"];
}
$dbResult->closeCursor();
unset($tpl);
$svcCatCache = [];
$dbResult = $pearDB->query("SELECT sc_id, service_service_id FROM `service_categories_relation`");
while ($res = $dbResult->fetch()) {
if (!isset($svcCatCache[$res["service_service_id"]])) {
$svcCatCache[$res["service_service_id"]] = [];
}
$svcCatCache[$res["service_service_id"]][$res["sc_id"]] = 1;
}
$dbResult->closeCursor();
unset($res);
$sgCache = [];
$res = $pearDB->query(
"SELECT argr.`acl_res_id`, acl_group_id
FROM `acl_res_group_relations` argr, `acl_resources` ar
WHERE argr.acl_res_id = ar.acl_res_id
AND ar.acl_res_activate = '1'"
);
while ($row = $res->fetch()) {
$sgCache[$row['acl_res_id']] = [];
}
unset($row);
$res = $pearDB->query(
"SELECT service_service_id, sgr.host_host_id, acl_res_id
FROM servicegroup sg, acl_resources_sg_relations acl, servicegroup_relation sgr
WHERE acl.sg_id = sg.sg_id
AND sgr.servicegroup_sg_id = sg.sg_id "
);
while ($row = $res->fetch()) {
foreach (array_keys($sgCache) as $rId) {
if ($rId == $row['acl_res_id']) {
if (!isset($sgCache[$rId][$row['host_host_id']])) {
$sgCache[$rId][$row['host_host_id']] = [];
}
$sgCache[$rId][$row['host_host_id']][$row['service_service_id']] = 1;
}
}
}
unset($row);
$res = $pearDB->query(
"SELECT acl_res_id, hg_id
FROM hostgroup, acl_resources_hg_relations
WHERE acl_resources_hg_relations.hg_hg_id = hostgroup.hg_id"
);
$hgResCache = array();
while ($row = $res->fetch()) {
if (!isset($hgResCache[$row['acl_res_id']])) {
$hgResCache[$row['acl_res_id']] = array();
}
$hgResCache[$row['acl_res_id']][] = $row['hg_id'];
}
unset($row);
// Prepare statement
$deleteHandler = $pearDBO->prepare("DELETE FROM centreon_acl WHERE group_id = ?");
/**
* Begin to build ACL
*/
$cpt = 0;
$resourceCache = array();
foreach ($tabGroups as $aclGroupId) {
/*
* Delete old data for this group
*/
$deleteHandler->execute(array($aclGroupId));
/**
* Select
*/
$dbResult2 = $pearDB->prepare(
"SELECT DISTINCT(`acl_resources`.`acl_res_id`)
FROM `acl_res_group_relations`, `acl_resources`
WHERE `acl_res_group_relations`.`acl_group_id` = :aclGroupId
AND `acl_res_group_relations`.acl_res_id = `acl_resources`.acl_res_id
AND `acl_resources`.acl_res_activate = '1'"
);
$dbResult2->bindValue(':aclGroupId', $aclGroupId, \PDO::PARAM_INT);
$dbResult2->execute();
if ($debug) {
$time_start = microtime_float2();
}
while ($res2 = $dbResult2->fetch()) {
if (!isset($resourceCache[$res2["acl_res_id"]])) {
$resourceCache[$res2["acl_res_id"]] = array();
$host = [];
/*
* Get all Hosts
*/
if (isset($hostIncCache[$res2["acl_res_id"]])) {
foreach (array_keys($hostIncCache[$res2["acl_res_id"]]) as $hostId) {
$host[$hostId] = 1;
}
}
if (isset($hgResCache[$res2['acl_res_id']])) {
foreach ($hgResCache[$res2['acl_res_id']] as $hgId) {
if (isset($hostHGRelation[$hgId])) {
foreach ($hostHGRelation[$hgId] as $hostId) {
if ($hostCache[$hostId]) {
$host[$hostId] = 1;
} else {
print "Host $hostId unknown !\n";
}
}
}
}
}
if (isset($hostExclCache[$res2["acl_res_id"]])) {
foreach (array_keys($hostExclCache[$res2["acl_res_id"]]) as $hostId) {
unset($host[$hostId]);
}
}
/*
* Give Authorized Categories
*/
$authorizedCategories = getAuthorizedCategories($res2["acl_res_id"]);
/*
* get all Service groups
*/
$dbResult3 = $pearDB->prepare(
"SELECT servicegroup_relation.host_host_id, servicegroup_relation.service_service_id
FROM `acl_resources_sg_relations`, `servicegroup_relation`
WHERE acl_res_id = :aclResId
AND servicegroup_relation.servicegroup_sg_id = acl_resources_sg_relations.sg_id
UNION
SELECT servicegroup_relation.host_host_id, servicegroup_relation.service_service_id
FROM `acl_resources_sg_relations`, `servicegroup_relation`, `hostgroup`, `hostgroup_relation`
WHERE acl_res_id = :aclResId
AND hostgroup.hg_id = servicegroup_relation.hostgroup_hg_id
AND servicegroup_relation.hostgroup_hg_id = hostgroup_relation.hostgroup_hg_id
AND servicegroup_relation.servicegroup_sg_id = acl_resources_sg_relations.sg_id"
);
$dbResult3->bindValue(':aclResId', $res2["acl_res_id"], \PDO::PARAM_INT);
$dbResult3->execute();
$sgElem = [];
$tmpH = [];
while ($h = $dbResult3->fetch()) {
if (!isset($sgElem[$h["host_host_id"]])) {
$sgElem[$h["host_host_id"]] = [];
$tmpH[$h['host_host_id']] = 1;
}
$sgElem[$h["host_host_id"]][$h["service_service_id"]] = 1;
}
$tmpH = getFilteredHostCategories($tmpH, $res2["acl_res_id"]);
$tmpH = getFilteredPollers($tmpH, $res2["acl_res_id"]);
foreach ($sgElem as $hostId => $value) {
if (isset($tmpH[$hostId])) {
if (count($authorizedCategories) == 0) { // no category filter
$resourceCache[$res2["acl_res_id"]][$hostId] = $value;
} else {
foreach (array_keys($value) as $serviceId) {
$linkedServiceCategories = getServiceTemplateCategoryList($serviceId);
foreach ($linkedServiceCategories as $linkedServiceCategory) {
// Check if category linked to service is allowed
if (in_array($linkedServiceCategory, $authorizedCategories)) {
$resourceCache[$res2["acl_res_id"]][$hostId][$serviceId] = 1;
break;
}
}
}
}
}
}
unset($tmpH);
unset($sgElem);
// Filter
$host = getFilteredHostCategories($host, $res2["acl_res_id"]);
$host = getFilteredPollers($host, $res2['acl_res_id']);
/*
* Initialize and first filter
*/
foreach (array_keys($host) as $hostId) {
$tab = getAuthorizedServicesHost($hostId, $res2["acl_res_id"], $authorizedCategories);
if (!isset($resourceCache[$res2["acl_res_id"]][$hostId])) {
$resourceCache[$res2["acl_res_id"]][$hostId] = array();
}
foreach (array_keys($tab) as $serviceId) {
$resourceCache[$res2["acl_res_id"]][$hostId][$serviceId] = 1;
}
unset($tab);
}
unset($host);
/*
* Set meta services
*/
$metaServices = getMetaServices($res2['acl_res_id'], $pearDB, $metaObj);
if (count($metaServices)) {
$resourceCache[$res2["acl_res_id"]] += $metaServices;
}
}
$strBegin = "INSERT INTO centreon_acl (host_id, service_id, group_id) VALUES ";
$strEnd = " ON DUPLICATE KEY UPDATE `group_id` = ? ";
$str = "";
$params = [];
$i = 0;
foreach ($resourceCache[$res2["acl_res_id"]] as $hostId => $svcList) {
if (isset($hostCache[$hostId])) {
if ($str != "") {
$str .= ", ";
}
$str .= " (?, NULL, ?) ";
$params[] = $hostId;
$params[] = $aclGroupId;
foreach (array_keys($svcList) as $serviceId) {
if ($str != "") {
$str .= ', ';
}
$i++;
$str .= " (?, ?, ?) ";
$params[] = $hostId;
$params[] = $serviceId;
$params[] = $aclGroupId;
if ($i >= 5000) {
$params[] = $aclGroupId; // argument for $strEnd
$stmt = $pearDBO->prepare($strBegin . $str . $strEnd);
$stmt->execute($params); // inject acl by bulk (1000 relations)
$str = "";
$params = [];
$i = 0;
}
}
}
}
// inject remaining acl (bulk of less than 1000 relations)
if ($str != "") {
$params[] = $aclGroupId; // argument for $strEnd
$stmt = $pearDBO->prepare($strBegin . $str . $strEnd);
$stmt->execute($params);
$str = "";
}
// reset flags of acl_resources
$stmt = $pearDB->prepare("UPDATE `acl_resources` SET `changed` = '0' WHERE acl_res_id = :aclResId");
$stmt->bindValue(':aclResId', $res2["acl_res_id"], \PDO::PARAM_INT);
$stmt->execute();
}
if ($debug) {
$time_end = microtime_float2();
$now = $time_end - $time_start;
print round($now, 3) . " " . _("seconds") . "\n";
}
$cpt++;
// reset flags of acl_groups
$stmt = $pearDB->prepare("UPDATE acl_groups SET acl_group_changed = '0' WHERE acl_group_id = :aclGroupId");
$stmt->bindValue(':aclGroupId', $aclGroupId, \PDO::PARAM_INT);
$stmt->execute();
}
/**
* Include module specific ACL evaluation
*/
$extensionsPaths = getModulesExtensionsPaths($pearDB);
foreach ($extensionsPaths as $extensionPath) {
require_once $extensionPath . 'centAcl.php';
}
}
/**
* Remove lock
*/
$dbResult = $pearDB->prepare(
"UPDATE cron_operation
SET running = '0', last_execution_time = :time
WHERE id = :appId"
);
$dbResult->bindValue(':time', (time() - $beginTime), \PDO::PARAM_INT);
$dbResult->bindValue(':appId', $appId, \PDO::PARAM_INT);
$dbResult->execute();
/*
* Close connection to databases
*/
$pearDB = null;
$pearDBO = null;
} catch (Exception $e) {
programExit($e->getMessage());
}
| {
"content_hash": "37a790088b52a0b58fc1374a560bb467",
"timestamp": "",
"source": "github",
"line_count": 754,
"max_line_length": 119,
"avg_line_length": 38.59946949602122,
"alnum_prop": 0.45540131940626716,
"repo_name": "centreon/centreon",
"id": "320231a2d4321a02db41d78f10ab7860a4b3b958",
"size": "30849",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "centreon/cron/centAcl.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "210043"
},
{
"name": "Gherkin",
"bytes": "174313"
},
{
"name": "HTML",
"bytes": "1276734"
},
{
"name": "JavaScript",
"bytes": "865312"
},
{
"name": "Makefile",
"bytes": "25883"
},
{
"name": "NewLisp",
"bytes": "621"
},
{
"name": "PHP",
"bytes": "15602217"
},
{
"name": "Perl",
"bytes": "1866808"
},
{
"name": "Python",
"bytes": "32748"
},
{
"name": "Raku",
"bytes": "122"
},
{
"name": "Shell",
"bytes": "473416"
},
{
"name": "Smarty",
"bytes": "42689"
},
{
"name": "TypeScript",
"bytes": "1698281"
},
{
"name": "XSLT",
"bytes": "124586"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using Baseline;
using Marten.Schema;
using Remotion.Linq.Clauses.Expressions;
using Remotion.Linq.Parsing;
namespace Marten.Linq
{
    // Partial class: the where-clause translation half of MartenExpressionParser.
    public partial class MartenExpressionParser
    {
        /// <summary>
        /// Walks a re-linq expression tree for a LINQ Where() clause and assembles
        /// the matching <see cref="IWhereFragment"/> tree (SQL where fragments).
        /// Convention used throughout: a Visit* override that fully handles a node
        /// delivers its fragment to the callback currently on top of
        /// <c>_register</c> and returns <c>null</c> so the base visitor does not
        /// descend further into that subtree; unhandled nodes fall through to
        /// <c>base.Visit*</c> or throw <see cref="NotSupportedException"/>.
        /// </summary>
        public class WhereClauseVisitor : RelinqExpressionVisitor
        {
            // Document mapping used to resolve member expressions to JSON locators.
            private readonly IQueryableDocument _mapping;
            // Owning parser; supplies operator translation, serializer, and options.
            private readonly MartenExpressionParser _parent;
            // Stack of fragment "sinks". The top callback receives the fragment
            // produced for the subtree currently being visited; compound (and/or)
            // nodes push a callback that appends children to their own fragment.
            private readonly Stack<Action<IWhereFragment>> _register = new Stack<Action<IWhereFragment>>();
            // Root of the assembled fragment tree, captured by the initial sink
            // pushed in the constructor.
            private IWhereFragment _top;
            public WhereClauseVisitor(MartenExpressionParser parent, IQueryableDocument mapping)
            {
                _parent = parent;
                _mapping = mapping;
                // Bottom-of-stack sink: whatever fragment arrives first at this
                // level becomes the root fragment.
                _register.Push(x => _top = x);
            }
            /// <summary>Returns the root fragment built during visitation.</summary>
            public IWhereFragment ToWhereFragment()
            {
                return _top;
            }
            protected override Expression VisitBinary(BinaryExpression binary)
            {
                // Simple comparison (==, >, etc.) known to the operator map:
                // translate directly to a single where fragment.
                if (_operators.ContainsKey(binary.NodeType))
                {
                    var fragment = _parent.buildSimpleWhereClause(_mapping, binary);
                    _register.Peek()(fragment);
                    return null;
                }
                // Logical composition: emit a compound fragment, then route the
                // children's fragments into it by pushing a new sink for the
                // duration of visiting both operands.
                if ((binary.NodeType == ExpressionType.AndAlso) || (binary.NodeType == ExpressionType.OrElse))
                {
                    var separator = binary.NodeType == ExpressionType.AndAlso
                        ? "and"
                        : "or";
                    var compound = new CompoundWhereFragment(separator);
                    _register.Peek()(compound);
                    _register.Push(child => compound.Add(child));
                    Visit(binary.Left);
                    Visit(binary.Right);
                    _register.Pop();
                    return null;
                }
                throw new NotSupportedException($"Marten does not support the BinaryExpression {binary} (yet).");
            }
            protected override Expression VisitMethodCall(MethodCallExpression expression)
            {
                // User-registered parsers (from StoreOptions.Linq) take precedence
                // over the built-in method call parsers.
                var parser = _parent._options.Linq.MethodCallParsers.FirstOrDefault(x => x.Matches(expression))
                             ?? _parsers.FirstOrDefault(x => x.Matches(expression));
                if (parser != null)
                {
                    var where = parser.Parse(_mapping, _parent._serializer, expression);
                    _register.Peek()(where);
                    return null;
                }
                throw new NotSupportedException(
                    $"Marten does not (yet) support Linq queries using the {expression.Method.DeclaringType.FullName}.{expression.Method.Name}() method");
            }
            protected override Expression VisitUnary(UnaryExpression node)
            {
                switch (node.NodeType)
                {
                    case ExpressionType.Not:
                        // !subquery (e.g. !xs.Any(...)): build the inner fragment
                        // with a fresh visitor and negate the whole result.
                        if (node.Operand is SubQueryExpression)
                        {
                            var nested = new WhereClauseVisitor(_parent, _mapping);
                            nested.Visit(node.Operand);
                            var @where = new NotWhereFragment(nested.ToWhereFragment());
                            _register.Peek()(@where);
                        }
                        else
                        {
                            // Other negations are delegated to NotVisitor, which
                            // writes its result into the current sink.
                            var visitor = new NotVisitor(this, _mapping, _register.Peek(), _parent._serializer);
                            visitor.Visit(node);
                        }
                        return null;
                }
                return base.VisitUnary(node);
            }
            protected override Expression VisitConstant(ConstantExpression node)
            {
                // A bare boolean constant in the predicate becomes literal
                // "true"/"false" SQL text.
                if ((node.Type == typeof(bool)))
                    _register.Peek()(new WhereFragment(node.Value.ToString().ToLower()));
                return base.VisitConstant(node);
            }
            protected override Expression VisitSubQuery(SubQueryExpression expression)
            {
                // Child-collection queries (e.g. Any/Contains on a nested list)
                // are parsed by a dedicated visitor that reports into our sink.
                Action<IWhereFragment> register = w => _register.Peek()(w);
                var visitor = new ChildCollectionWhereVisitor(_parent._serializer, expression, register);
                visitor.Parse();
                return null;
            }
            protected override Expression VisitMember(MemberExpression expression)
            {
                // A bare boolean member (e.g. Where(x => x.IsActive)) is treated
                // as an equality test against True on its JSON locator.
                if (expression.Type == typeof(bool))
                {
                    var locator = _mapping.JsonLocator(expression);
                    var where = new WhereFragment("{0} = True".ToFormat(locator), true);
                    _register.Peek()(where);
                    return null;
                }
                return base.VisitMember(expression);
            }
        }
    }
} | {
"content_hash": "0266954c7c2a35ac129e97fbaddb4616",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 154,
"avg_line_length": 35.614285714285714,
"alnum_prop": 0.5100280786201363,
"repo_name": "jokokko/marten",
"id": "2f2cca41576186bda388efb14349be9dc96e2f6d",
"size": "4988",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Marten/Linq/WhereClauseVisitor.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "2437995"
},
{
"name": "CSS",
"bytes": "50263"
},
{
"name": "HTML",
"bytes": "21635"
},
{
"name": "JavaScript",
"bytes": "456029"
},
{
"name": "PLpgSQL",
"bytes": "3910"
},
{
"name": "Ruby",
"bytes": "5242"
},
{
"name": "SQLPL",
"bytes": "193"
},
{
"name": "Shell",
"bytes": "302"
},
{
"name": "Smalltalk",
"bytes": "3"
}
],
"symlink_target": ""
} |
require "log4r"
module VagrantPlugins
  module Puppet
    module Provisioner
      # Error type for puppet provisioner failures. Messages are looked up in
      # the "vagrant.provisioners.puppet" I18n namespace via :error_key.
      class PuppetError < Vagrant::Errors::VagrantError
        error_namespace("vagrant.provisioners.puppet")
      end
      # Provisioner that shares manifests/modules from the host into the guest
      # and runs `puppet apply` there over SSH.
      class Puppet < Vagrant.plugin("1", :provisioner)
        # Vagrantfile-facing configuration for the puppet provisioner.
        class Config < Vagrant.plugin("1", :config)
          attr_accessor :manifest_file
          attr_accessor :manifests_path
          attr_accessor :module_path
          attr_accessor :pp_path
          attr_accessor :options
          attr_accessor :facter
          # Reader overrides supplying defaults when the user set nothing.
          def manifest_file; @manifest_file || "default.pp"; end
          def manifests_path; @manifests_path || "manifests"; end
          def pp_path; @pp_path || "/tmp/vagrant-puppet"; end
          def options; @options ||= []; end
          def facter; @facter ||= {}; end
          # Returns the manifests path expanded relative to the root path of the
          # environment.
          def expanded_manifests_path(root_path)
            Pathname.new(manifests_path).expand_path(root_path)
          end
          # Returns the module paths as an array of paths expanded relative to the
          # root path.
          def expanded_module_paths(root_path)
            return [] if !module_path
            # Get all the paths and expand them relative to the root path, returning
            # the array of expanded paths. A single path is normalized to a
            # one-element array first.
            paths = module_path
            paths = [paths] if !paths.is_a?(Array)
            paths.map do |path|
              Pathname.new(path).expand_path(root_path)
            end
          end
          # Adds I18n-translated messages to +errors+ for any missing manifests
          # directory, manifest file, or module path on the host.
          def validate(env, errors)
            # Calculate the manifests and module paths based on env
            this_expanded_manifests_path = expanded_manifests_path(env.root_path)
            this_expanded_module_paths = expanded_module_paths(env.root_path)
            # Manifests path/file validation
            if !this_expanded_manifests_path.directory?
              errors.add(I18n.t("vagrant.provisioners.puppet.manifests_path_missing",
                                :path => this_expanded_manifests_path))
            else
              expanded_manifest_file = this_expanded_manifests_path.join(manifest_file)
              if !expanded_manifest_file.file?
                errors.add(I18n.t("vagrant.provisioners.puppet.manifest_missing",
                                  :manifest => expanded_manifest_file.to_s))
              end
            end
            # Module paths validation
            this_expanded_module_paths.each do |path|
              if !path.directory?
                errors.add(I18n.t("vagrant.provisioners.puppet.module_path_missing", :path => path))
              end
            end
          end
        end
        # Tells the plugin framework which config class pairs with this provisioner.
        def self.config_class
          Config
        end
        def initialize(env, config)
          super
          @logger = Log4r::Logger.new("vagrant::provisioners::puppet")
        end
        # Pre-boot hook: compute host/guest paths and register the shared folders
        # so they are mounted when the machine comes up.
        def prepare
          # Calculate the paths we're going to use based on the environment
          @expanded_manifests_path = config.expanded_manifests_path(env[:root_path])
          @expanded_module_paths = config.expanded_module_paths(env[:root_path])
          @manifest_file = File.join(manifests_guest_path, config.manifest_file)
          set_module_paths
          share_manifests
          share_module_paths
        end
        # Main entry point: verify mounts and the puppet binary, then apply.
        def provision!
          # Check that the shared folders are properly shared
          check = [manifests_guest_path]
          @module_paths.each do |host_path, guest_path|
            check << guest_path
          end
          verify_shared_folders(check)
          # Verify Puppet is installed and run it
          verify_binary("puppet")
          run_puppet_apply
        end
        # Registers the manifests directory as a shared folder in the guest.
        def share_manifests
          env[:machine].config.vm.share_folder("manifests", manifests_guest_path, @expanded_manifests_path)
        end
        # Registers each module path as its own shared folder.
        def share_module_paths
          count = 0
          @module_paths.each do |from, to|
            # Sorry for the cryptic key here, but VirtualBox has a strange limit on
            # maximum size for it and its something small (around 10)
            env[:machine].config.vm.share_folder("v-pp-m#{count}", to, from)
            count += 1
          end
        end
        # Builds @module_paths as [host_path, guest_path] pairs; guest side lands
        # under <pp_path>/modules-<i>.
        def set_module_paths
          @module_paths = []
          @expanded_module_paths.each_with_index do |path, i|
            @module_paths << [path, File.join(config.pp_path, "modules-#{i}")]
          end
        end
        # Guest-side directory where the manifests share is mounted.
        def manifests_guest_path
          File.join(config.pp_path, "manifests")
        end
        # Raises PuppetError (:not_detected) unless `which <binary>` succeeds
        # in the guest.
        def verify_binary(binary)
          env[:machine].communicate.sudo("which #{binary}",
                                         :error_class => PuppetError,
                                         :error_key => :not_detected,
                                         :binary => binary)
        end
        # Assembles and runs the `puppet apply` command in the guest, streaming
        # its output back through the UI.
        def run_puppet_apply
          options = [config.options].flatten
          module_paths = @module_paths.map { |_, to| to }
          options << "--modulepath '#{module_paths.join(':')}'" if !@module_paths.empty?
          options << @manifest_file
          options = options.join(" ")
          # Build up the custom facts if we have any; they are injected as
          # FACTER_* environment variables prefixed to the command.
          facter = ""
          if !config.facter.empty?
            facts = []
            config.facter.each do |key, value|
              facts << "FACTER_#{key}='#{value}'"
            end
            facter = "#{facts.join(" ")} "
          end
          command = "cd #{manifests_guest_path} && #{facter}puppet apply #{options}"
          env[:ui].info I18n.t("vagrant.provisioners.puppet.running_puppet",
                               :manifest => @manifest_file)
          env[:machine].communicate.sudo(command) do |type, data|
            env[:ui].info(data.chomp, :prefix => false)
          end
        end
        # Raises PuppetError (:missing_shared_folders) if any expected guest
        # directory is not present.
        def verify_shared_folders(folders)
          folders.each do |folder|
            @logger.debug("Checking for shared folder: #{folder}")
            if !env[:machine].communicate.test("test -d #{folder}")
              raise PuppetError, :missing_shared_folders
            end
          end
        end
      end
    end
  end
end
| {
"content_hash": "caef09eb08a77bdc5a859125b79627b5",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 107,
"avg_line_length": 35.152542372881356,
"alnum_prop": 0.5506268081002893,
"repo_name": "webcoyote/vagrant",
"id": "c0693257f054378dbc163c93b14e192c8b610d31",
"size": "6222",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins/provisioners/puppet/provisioner/puppet.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Emacs Lisp",
"bytes": "420"
},
{
"name": "JavaScript",
"bytes": "2165"
},
{
"name": "Python",
"bytes": "14300"
},
{
"name": "Ruby",
"bytes": "600581"
},
{
"name": "Shell",
"bytes": "4152"
},
{
"name": "VimL",
"bytes": "227"
}
],
"symlink_target": ""
} |
// Private ivars for PiPageViewController (class extension).
@interface PiPageViewController () {
    UIStoryboard *_storyboardPassedIn;    // storyboard used to instantiate pages by identifier
    NSArray *_identifiers;                // storyboard identifiers, one per page index
    NSMutableDictionary *_viewControllers; // lazy cache: @(index) -> UIViewController
    BOOL _inTransation;                   // YES while a page transition is in flight (sic: "transition")
    int _futureIndex;                     // index of the page a user-driven swipe is heading toward
    BOOL _pageEnabled;                    // NO disables both programmatic and gesture paging
}
@end
@implementation PiPageViewController
// Initializer: scroll-style horizontal page controller whose pages are
// instantiated from `storyboard` using the given identifiers. Acts as its own
// UIPageViewController delegate and data source.
- (id)initWithStoryboard:(UIStoryboard *)storyboard andViewControllerIdentifiers:(NSArray *)identifiers {
    self = [super initWithTransitionStyle:UIPageViewControllerTransitionStyleScroll navigationOrientation:UIPageViewControllerNavigationOrientationHorizontal options:nil];
    if (self) {
        _pageEnabled = YES;
        _storyboardPassedIn = storyboard;
        _identifiers = identifiers;
        _viewControllers = [[NSMutableDictionary alloc] init];
        _inTransation = NO;
        self.delegate = self;
        self.dataSource = self;
    }
    return self;
}
// Same as above but also wires up the PiPageViewControllerDelegate, which may
// supply pages itself (see -shouldUseDelegateViewControllers).
- (id)initWithStoryboard:(UIStoryboard *)storyboard viewControllerIdentifiers:(NSArray *)identifiers andPageDelegate:(NSObject <PiPageViewControllerDelegate> *)pageDelegate {
    self = [super initWithTransitionStyle:UIPageViewControllerTransitionStyleScroll navigationOrientation:UIPageViewControllerNavigationOrientationHorizontal options:nil];
    if (self) {
        self.pageDelegate = pageDelegate;
        _pageEnabled = YES;
        _storyboardPassedIn = storyboard;
        _identifiers = identifiers;
        _viewControllers = [[NSMutableDictionary alloc] init];
        _inTransation = NO;
        self.delegate = self;
        self.dataSource = self;
    }
    return self;
}
- (void)viewDidLoad {
    [super viewDidLoad];
    // setIndex: early-returns when the new index equals _index, so to force the
    // initial page-0 setup we temporarily move _index off 0 first.
    if (_index == 0) {
        _index = -1;
        self.index = 0;
    }
    // Tweak the internal scroll view so touches reach page content promptly.
    for (UIView *v in self.view.subviews) {
        if ([v isKindOfClass:[UIScrollView class]]) {
            UIScrollView *s = (UIScrollView *)v;
            s.delaysContentTouches = NO;
            s.canCancelContentTouches = YES;
        }
    }
}
// UIPageViewControllerDelegate: a user swipe is starting. Remember where it is
// heading, flag the transition, and schedule a safety reset of the flag.
// NOTE(review): kPiPageViewControllerResetTransitionFlagDelay is defined
// elsewhere in this file -- confirm its value when reviewing timing.
- (void)pageViewController:(UIPageViewController *)pageViewController willTransitionToViewControllers:(NSArray *)pendingViewControllers {
    _futureIndex = (int)[self indexOfViewController:[pendingViewControllers objectAtIndex:pendingViewControllers.count - 1]];
    [self onVisitedViewControllerAtIndex:_futureIndex];
    _inTransation = YES;
    [NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(resetInTransationFlag) object:nil];
    [self performSelector:@selector(resetInTransationFlag) withObject:nil afterDelay:kPiPageViewControllerResetTransitionFlagDelay];
}
// UIPageViewControllerDelegate: swipe finished. Only commit the new index when
// the transition actually completed (not when the user snapped back).
- (void)pageViewController:(UIPageViewController *)pageViewController didFinishAnimating:(BOOL)finished previousViewControllers:(NSArray *)previousViewControllers transitionCompleted:(BOOL)completed {
    _inTransation = NO;
    if (!completed) {
        return;
    }
    [self onIndexSet:_futureIndex];
}
// UIPageViewControllerDataSource: page before/after the given one (nil at the ends).
- (UIViewController *)pageViewController:(UIPageViewController *)pageViewController viewControllerBeforeViewController:(UIViewController *)viewController {
    int index = (int)[self indexOfViewController:viewController];
    return [self loadViewControllerAtIndex:index - 1];
}
- (UIViewController *)pageViewController:(UIPageViewController *)pageViewController viewControllerAfterViewController:(UIViewController *)viewController {
    int index = (int)[self indexOfViewController:viewController];
    return [self loadViewControllerAtIndex:index + 1];
}
- (int)index {
    return _index;
}
- (void)setIndex:(int)index {
    [self setIndex:index animated:NO];
}
// Programmatically page to `index`, stepping one page at a time so the page
// controller's internal cache stays consistent. No-op while a transition is in
// flight, when paging is disabled, or when already at `index`.
- (void)setIndex:(int)index animated:(BOOL)animated {
    if (index == _index || _inTransation) {
        return;
    }
    if (!_pageEnabled) {
        return;
    }
    _inTransation = YES;
    // Safety net: clear the flag after a delay in case the completion block
    // never fires.
    [NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(resetInTransationFlag) object:nil];
    [self performSelector:@selector(resetInTransationFlag) withObject:nil afterDelay:kPiPageViewControllerResetTransitionFlagDelay];
    UIPageViewControllerNavigationDirection direction = UIPageViewControllerNavigationDirectionForward;
    if (index < _index) {
        direction = UIPageViewControllerNavigationDirectionReverse;
    }
    // Walk through every intermediate page (non-animated steps), then set the
    // final page, committing the index in the completion block when animated.
    int step = direction == UIPageViewControllerNavigationDirectionForward ? 1 : -1;
    for (int i = _index + step; (index - i) * step > 0; i += step) {
        NSArray *vcs = [[NSArray alloc] initWithObjects:[self loadViewControllerAtIndex:i], nil];
        [self onVisitedViewControllerAtIndex:i];
        [self setViewControllers:vcs direction:direction animated:animated completion:nil];
    }
    NSArray *vcs = [[NSArray alloc] initWithObjects:[self loadViewControllerAtIndex:index], nil];
    __weak PiPageViewController *vc = self;
    [self onVisitedViewControllerAtIndex:index];
    [self setViewControllers:vcs direction:direction animated:animated completion:^(BOOL finished) {
        [vc onIndexSet:index];
    }];
    if (!animated) {
        [self onIndexSet:index];
    }
}
- (void)resetInTransationFlag {
    _inTransation = NO;
}
// Commit a completed page change and notify the page delegate.
- (void)onIndexSet:(int)index {
    _index = index;
    _inTransation = NO;
    [self sendPageIndexChangedMessage];
}
- (UIViewController *)currentViewController {
    return [self viewControllerAtIndex:self.index];
}
- (UIViewController *)viewControllerAtIndex:(int)index {
    return [self loadViewControllerAtIndex:index];
}
// Returns the cached page for `index`, creating it on first access either from
// the page delegate or from the storyboard. Returns nil for out-of-range
// indexes (which is how the data source signals "no page here").
- (UIViewController *)loadViewControllerAtIndex:(int)index {
    if (![self shouldUseDelegateViewControllers]) {
        if (index < 0 || index >= _identifiers.count) {
            return nil;
        }
    }
    UIViewController *vc = [_viewControllers objectForKey:[NSNumber numberWithInt:index]];
    if (!vc) {
        if ([self shouldUseDelegateViewControllers]) {
            vc = [self.pageDelegate loadViewControllerAtIndex:index];
        } else {
            vc = [_storyboardPassedIn instantiateViewControllerWithIdentifier:[_identifiers objectAtIndex:index]];
        }
        if (vc) {
            if (self.pageDelegate && [self.pageDelegate respondsToSelector:@selector(onViewController:loadedAtIndex:)]) {
                [self.pageDelegate onViewController:vc loadedAtIndex:index];
            }
            [_viewControllers setObject:vc forKey:[NSNumber numberWithInt:index]];
        }
    }
    return vc;
}
- (void)sendPageIndexChangedMessage {
    if (self.pageDelegate && [self.pageDelegate respondsToSelector:@selector(pageIndexChanged:)]) {
        [self.pageDelegate pageIndexChanged:_index];
    }
}
// Reverse lookup: index of a cached page, or NSNotFound if it isn't cached.
- (NSUInteger)indexOfViewController:(UIViewController *)viewController {
    __block NSUInteger index = NSNotFound;
    [_viewControllers enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop) {
        if (obj == viewController) {
            index = [key integerValue];
        }
    }];
    return index;
}
// YES when the page delegate supplies view controllers instead of the storyboard.
- (BOOL)shouldUseDelegateViewControllers {
    return self.pageDelegate && [self.pageDelegate respondsToSelector:@selector(loadViewControllerAtIndex:)];
}
- (UIInterfaceOrientationMask)pageViewControllerSupportedInterfaceOrientations:(UIPageViewController *)pageViewController {
    return UIInterfaceOrientationMaskPortrait;
}
- (void)onVisitedViewControllerAtIndex:(int)index {
    if (self.pageDelegate && [self.pageDelegate respondsToSelector:@selector(onViewController:visitedAtIndex:)]) {
        [self.pageDelegate onViewController:[self loadViewControllerAtIndex:index] visitedAtIndex:index];
    }
}
// Only allow a rightward pan (back-swipe) and only when there is no previous
// page -- presumably so an enclosing navigation gesture can take over; confirm
// against the call site that installs this as a gesture delegate.
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer {
    if ([gestureRecognizer isKindOfClass:[UIPanGestureRecognizer class]]) {
        UIPanGestureRecognizer *pan = (UIPanGestureRecognizer *) gestureRecognizer;
        if ([pan translationInView:self.view].x <= 0) {
            return NO;
        }
        return ![self pageViewController:self viewControllerBeforeViewController:[self currentViewController]];
    }
    return YES;
}
// The internal scroll views, exposed for external gesture coordination.
- (NSArray *)viewsToHandle {
    NSMutableArray *array = [[NSMutableArray alloc] init];
    for (UIView *view in self.view.subviews) {
        if ([view isKindOfClass:[UIScrollView class]]) {
            [array addObject:view];
        }
    }
    return array;
}
// Disabling paging also disables all user interaction on the controller's view.
- (void)setPageEnabled:(BOOL)pageEnabled {
    _pageEnabled = pageEnabled;
    self.view.userInteractionEnabled = pageEnabled;
}
- (BOOL)pageEnabled {
    return _pageEnabled;
}
@end
| {
"content_hash": "f1e07087bbe1134e418e429f986ef74c",
"timestamp": "",
"source": "github",
"line_count": 228,
"max_line_length": 200,
"avg_line_length": 36.35964912280702,
"alnum_prop": 0.7115802171290712,
"repo_name": "bither/bither-ios",
"id": "3eaeacd37bb4e7f7df02f14c06950739a0716172",
"size": "9034",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bither-ios/UIBase/PiPageViewController.m",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Objective-C",
"bytes": "2185426"
},
{
"name": "Ruby",
"bytes": "714"
}
],
"symlink_target": ""
} |
package com.eagree.data.domain.mysql;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToOne;
import javax.validation.constraints.NotNull;
@Entity
// Per-organization key/value setting stored as a JPA entity.
// Audit columns and the primary key come from BaseAuditDomain.
public class OrganizationSettings extends BaseAuditDomain {

    // Owning organization; fetched lazily through the ORG_ID foreign-key column.
    @ManyToOne(fetch=FetchType.LAZY)
    @JoinColumn(name="ORG_ID")
    Organization org;

    // Setting key. SettingName is a project type — presumably an enum;
    // NOTE(review): if it is an enum, confirm the intended @Enumerated mapping.
    @NotNull
    SettingName name;

    // Setting value, persisted as a plain string.
    @NotNull
    String value;
}
| {
"content_hash": "f65bd98cd850d207a7afc66431ef8949",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 59,
"avg_line_length": 19.12,
"alnum_prop": 0.801255230125523,
"repo_name": "liberaldart/Learn.Spring",
"id": "a4d1e21654a0945c5a9b6b5deabca95683fc9645",
"size": "478",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/eagree/data/domain/mysql/OrganizationSettings.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "17917"
}
],
"symlink_target": ""
} |
---
layout: post
title: "Meeting at the Cup"
modified:
categories:
excerpt:
tags: []
image:
feature:
date: 2015-06-19T23:32:52-06:00
---
Our next meeting is going to be on Wednesday, June 24th between 9 and 12
at [The Cup](http://thecupboulder.com)'s back room.
| {
"content_hash": "f0b1ea3934d79475b23990557e4f09a9",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 72,
"avg_line_length": 21.666666666666668,
"alnum_prop": 0.7230769230769231,
"repo_name": "bouldercodencoffee/bouldercodencoffee.github.io",
"id": "43d86d06cc65a12bdfb1f64a233def489d7073d4",
"size": "264",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2015-06-19-meeting-at-the-cup.markdown",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "50067"
},
{
"name": "HTML",
"bytes": "20309"
},
{
"name": "JavaScript",
"bytes": "78237"
},
{
"name": "Ruby",
"bytes": "1563"
}
],
"symlink_target": ""
} |
using System.Linq;
using System.Reflection;
using System.Security.Claims;
using System.Web;
using System.Web.Mvc;
using Claims.Services.IdentitySecurity;
using Microsoft.AspNet.Identity.Owin;
namespace Claims.Web.SecurityInfrastructure
{
/// <summary>
/// HtmlHelper extension methods for rendering ASP.NET Identity data in views.
/// </summary>
public static class IdentityHelpers
{
    /// <summary>
    /// Resolves a user id to its user name via the request's OWIN-scoped
    /// <c>AppUserManager</c>.
    /// </summary>
    /// <param name="html">The helper the extension is invoked on (unused).</param>
    /// <param name="id">The identity user id to look up.</param>
    /// <returns>The user name wrapped in an <see cref="MvcHtmlString"/>.</returns>
    public static MvcHtmlString GetUserName(this HtmlHelper html, string id)
    {
        AppUserManager mgr
            = HttpContext.Current.GetOwinContext().GetUserManager<AppUserManager>();
        // NOTE(review): .Result blocks synchronously on the async lookup; this is
        // the established pattern here, but would deadlock under a restrictive
        // SynchronizationContext.
        return new MvcHtmlString(mgr.FindByIdAsync(id).Result.UserName);
    }

    /// <summary>
    /// Maps a claim type URI to the matching constant name declared on
    /// <see cref="ClaimTypes"/>; claim types without a well-known constant
    /// fall back to the last '/'- or '.'-separated segment of the URI.
    /// </summary>
    public static MvcHtmlString ClaimType(this HtmlHelper html, string claimType)
    {
        foreach (FieldInfo field in typeof(ClaimTypes).GetFields())
        {
            if (field.GetValue(null).ToString() == claimType)
                return new MvcHtmlString(field.Name);
        }
        // Not a well-known claim type: use the last URI segment as a readable
        // label. (string.Format("{0}", x) on an already-string value was a
        // redundant no-op and has been removed.)
        return new MvcHtmlString(claimType.Split('/', '.').Last());
    }
}
}
| {
"content_hash": "8ca1855e6f8588bd6a41ea62e75f2da0",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 93,
"avg_line_length": 33.09375,
"alnum_prop": 0.6421152030217187,
"repo_name": "7ohnn1/MVC_Security_Authorisation",
"id": "aa03c43db071aeb5cc3cd1599168db3563180ec9",
"size": "1061",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Presentation/Claims.Web/SecurityInfrastructure/IdentityHelpers.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "101"
},
{
"name": "C#",
"bytes": "44486"
},
{
"name": "CSS",
"bytes": "316"
},
{
"name": "JavaScript",
"bytes": "214014"
}
],
"symlink_target": ""
} |
Opscode Chef cookbook for installing the graph database Titan server including the Rexster server for HTTP access in version 0.4.1 and utilizing the Apache Cassandra backend in version 1.2.2.
Target system: Ubuntu/Linux
Please note that when trying to glue together different versions of Titan and Apache Cassandra, the combination must conform to the [compatibility index](http://thinkaurelius.github.io/titan/wikidoc/0.4.1/Version-Compatibility.html).
Cookbook dependencies are managed by Berkshelf (see the Berksfile meta file)
## Attributes
### Default recipe
{
"titan": {
"graph": "sample_graph",
"download_url": "http://s3.thinkaurelius.com/downloads/titan/titan-server-0.4.1.zip",
"bundle": "titan-server-0.4.1.zip",
"installation_dir": "/usr/local/titan",
"user": "titan"
},
"java": {
"install_flavor": "oracle",
    "jdk_version": "7",
"oracle": {
"accept_oracle_download_terms": true
}
}
}
### OpsCenter recipe
{
"opscenter": {
"clusters": {
"seeds": "10.0.1.0,10.0.1.1,10.0.1.2"
}
}
}
## Using vagrant
1. Install [Vagrant](http://www.vagrantup.com/)
2. Install [Berkshelf](http://berkshelf.com/)
3. `$ vagrant box add precise64 http://files.vagrantup.com/precise64.box`
4. `$ vagrant plugin install vagrant-omnibus`
5. `$ vagrant plugin install vagrant-berkshelf`
6. `$ berks install`
7. `$ vagrant up`
## Test
Cookbook testing is powered by [Test-Kitchen](https://github.com/test-kitchen/test-kitchen/wiki/Getting-Started) using Vagrant via kitchen-vagrant driver. To run the tests, follow these steps:
1. Install bundler via `gem install bundler`
2. Install test-kitchen from the `Gemfile` using bundler via `bundle install`
3. Run test-kitchen to test the configuration setup within all platforms specified in the `.kitchen.yml` configuration file.
## License
The MIT License (MIT)
Copyright (c) 2013 Martin Biermann
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
| {
"content_hash": "85c66d0d26a33e9789255db2c982227a",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 220,
"avg_line_length": 36.4375,
"alnum_prop": 0.7451114922813036,
"repo_name": "mbiermann/chef-titan",
"id": "a6b2a365ce6469e8c72bdf8bae574b0920c337e0",
"size": "2953",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "4552"
},
{
"name": "Shell",
"bytes": "272"
}
],
"symlink_target": ""
} |
<?php
declare(strict_types=1);
namespace Sonata\PageBundle\Form\Type;
use Sonata\PageBundle\Page\PageServiceManagerInterface;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\Extension\Core\Type\ChoiceType;
use Symfony\Component\Form\FormTypeInterface;
use Symfony\Component\OptionsResolver\OptionsResolver;
use Symfony\Component\OptionsResolver\OptionsResolverInterface;
/**
* Select a page type.
*
* @author Olivier Paradis <paradis.olivier@gmail.com>
*/
class PageTypeChoiceType extends AbstractType
{
    /**
     * @var PageServiceManagerInterface
     */
    protected $manager;

    public function __construct(PageServiceManagerInterface $manager)
    {
        $this->manager = $manager;
    }

    public function configureOptions(OptionsResolver $resolver)
    {
        $defaults = [
            'choices' => $this->getPageTypes(),
            'choice_translation_domain' => false,
        ];

        // NEXT_MAJOR: Remove (when requirement of Symfony is >= 3.0)
        $legacySymfony = method_exists(FormTypeInterface::class, 'setDefaultOptions');
        if ($legacySymfony) {
            $defaults['choices_as_values'] = true;
        }

        $resolver->setDefaults($defaults);
    }

    public function setDefaultOptions(OptionsResolverInterface $resolver)
    {
        // Legacy (Symfony < 3.0) entry point; delegates to configureOptions().
        $this->configureOptions($resolver);
    }

    /**
     * Builds the choice list: page service name => service id, sorted by name.
     *
     * @return string[]
     */
    public function getPageTypes()
    {
        $types = [];

        foreach ($this->manager->getAll() as $serviceId => $pageService) {
            $types[$pageService->getName()] = $serviceId;
        }

        ksort($types);

        return $types;
    }

    public function getParent()
    {
        return ChoiceType::class;
    }

    public function getBlockPrefix()
    {
        return 'sonata_page_type_choice';
    }

    public function getName()
    {
        // Legacy alias kept for pre-3.0 Symfony form naming.
        return $this->getBlockPrefix();
    }
}
| {
"content_hash": "fac38fddd7bd542f85288510de04f6cb",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 75,
"avg_line_length": 22.746987951807228,
"alnum_prop": 0.6297669491525424,
"repo_name": "Th3Mouk/SonataPageBundle",
"id": "ecbad6fd79bb5c7a373477b2642d81ca2278cdaf",
"size": "2135",
"binary": false,
"copies": "1",
"ref": "refs/heads/3.x",
"path": "src/Form/Type/PageTypeChoiceType.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "37060"
},
{
"name": "HTML",
"bytes": "54795"
},
{
"name": "JavaScript",
"bytes": "108254"
},
{
"name": "Makefile",
"bytes": "1651"
},
{
"name": "PHP",
"bytes": "726314"
},
{
"name": "Shell",
"bytes": "2573"
}
],
"symlink_target": ""
} |
package edu.iu.informatics.omics.impl.fragmentation;
import java.util.List;
import junit.framework.TestCase;
import cn.ac.rcpa.bio.proteomics.IsotopicType;
import edu.iu.informatics.omics.AdductType;
import edu.iu.informatics.omics.DerivativeType;
import edu.iu.informatics.omics.Glycan;
import edu.iu.informatics.omics.IMassProxy;
import edu.iu.informatics.omics.MassProxyFactory;
import edu.iu.informatics.omics.IMonosaccharide;
// JUnit 3 test for the B-fragment mass calculation of an oligosaccharide.
public class OligosaccharideFragmentationBBuilderTest extends TestCase {

    OligosaccharideFragmentationBBuilder builder = new OligosaccharideFragmentationBBuilder();

    public void testBuild() {
        // Average-isotope, underivatised, sodium-adduct mass proxy.
        IMassProxy proxy = MassProxyFactory.getMassProxy(IsotopicType.Average,
                DerivativeType.Underivatised, AdductType.Na);
        List<IMonosaccharide> residues =
                new Glycan("Temp", "Glc6-1GlcNAc").getOligosaccharidesFromNonreducingTerm();
        // B-ion m/z of the first residue from the non-reducing terminus.
        assertEquals(226.1847, builder.build(proxy, residues.get(0)).getMz(), 0.01);
    }
}
| {
"content_hash": "f8efd4aaa16f9f03f2c19d26aaa6b6c1",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 91,
"avg_line_length": 39.84,
"alnum_prop": 0.7951807228915663,
"repo_name": "shengqh/GlycanIdentification",
"id": "7aa304f2d87af289329b4cc362506be47593b960",
"size": "996",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/edu/iu/informatics/omics/impl/fragmentation/OligosaccharideFragmentationBBuilderTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "379808"
}
],
"symlink_target": ""
} |
require 'digest/md5'
require 'find'
require 'tmpdir'
require 'etc'
module RuboCop
  # Provides functionality for caching rubocop runs.
  class ResultCache
    # CLI options that have no effect on which offenses are reported, and
    # therefore must not contribute to the cache key (see #relevant_options).
    NON_CHANGING = [:color, :format, :formatters, :out, :debug, :fail_level,
                    :cache, :fail_fast, :stdin].freeze

    # Remove old files so that the cache doesn't grow too big. When the
    # threshold MaxFilesInCache has been exceeded, the oldest 50% of all the
    # files in the cache are removed. The reason for removing so much is that
    # cleaning should be done relatively seldom, since there is a slight risk
    # that some other RuboCop process was just about to read the file, when
    # there's parallel execution and the cache is shared.
    def self.cleanup(config_store, verbose, cache_root = nil)
      return if inhibit_cleanup # OPTIMIZE: For faster testing
      cache_root ||= cache_root(config_store)
      return unless File.exist?(cache_root)

      # Split the cache tree into regular files (cache entries) and the
      # directories containing them.
      files, dirs = Find.find(cache_root).partition { |path| File.file?(path) }
      if files.length > config_store.for('.')['AllCops']['MaxFilesInCache'] &&
         files.length > 1
        # Add 1 to half the number of files, so that we remove the file if
        # there's only 1 left.
        remove_count = 1 + files.length / 2
        if verbose
          puts "Removing the #{remove_count} oldest files from #{cache_root}"
        end
        # Oldest first, by modification time.
        sorted = files.sort_by { |path| File.mtime(path) }
        begin
          File.delete(*sorted[0, remove_count])
          # Prune directories that became empty after the deletion.
          dirs.each { |dir| Dir.rmdir(dir) if Dir["#{dir}/*"].empty? }
        rescue Errno::ENOENT
          # This can happen if parallel RuboCop invocations try to remove the
          # same files. No problem.
          puts $ERROR_INFO if verbose
        end
      end
    end

    # Root directory for all cached results. '/tmp' is special-cased to a
    # per-user subdirectory so each user has write access on shared systems.
    def self.cache_root(config_store)
      root = config_store.for('.')['AllCops']['CacheRootDirectory']
      if root == '/tmp'
        tmpdir = File.realpath(Dir.tmpdir)
        # Include user ID in the path to make sure the user has write access.
        root = File.join(tmpdir, Process.uid.to_s)
      end
      File.join(root, 'rubocop_cache')
    end

    # Computes the cache entry path for +file+. The path encodes everything
    # the result depends on: the RuboCop source checksum, the Ruby version,
    # the offense-relevant CLI options, and a checksum of the inspected file
    # plus its configuration.
    def initialize(file, options, config_store, cache_root = nil)
      cache_root ||= ResultCache.cache_root(config_store)
      @path = File.join(cache_root, rubocop_checksum, RUBY_VERSION,
                        relevant_options(options),
                        file_checksum(file, config_store))
      @cached_data = CachedData.new(file)
    end

    # Whether a cached result exists for this file/options/configuration.
    def valid?
      File.exist?(@path)
    end

    # Reads and deserializes the cached offenses.
    def load
      @cached_data.from_json(IO.binread(@path))
    end

    # Serializes +offenses+ to the cache entry for this file.
    def save(offenses)
      dir = File.dirname(@path)
      FileUtils.mkdir_p(dir)
      preliminary_path = "#{@path}_#{rand(1_000_000_000)}"
      # RuboCop must be in control of where its cached data is stored. A
      # symbolic link anywhere in the cache directory tree is an indication
      # that a symlink attack is being waged.
      return if any_symlink?(dir)

      File.open(preliminary_path, 'wb') do |f|
        f.write(@cached_data.to_json(offenses))
      end
      # The preliminary path is used so that if there are multiple RuboCop
      # processes trying to save data for the same inspected file
      # simultaneously, the only problem we run in to is a competition who gets
      # to write to the final file. The contents are the same, so no corruption
      # of data should occur.
      FileUtils.mv(preliminary_path, @path)
    end

    private

    # True if any path component from +path+ up to the filesystem root is a
    # symlink; used to refuse writing through a potential symlink attack.
    def any_symlink?(path)
      while path != File.dirname(path)
        if File.symlink?(path)
          warn "Warning: #{path} is a symlink, which is not allowed."
          return true
        end
        path = File.dirname(path)
      end
      false
    end

    # Checksum covering the working directory, the file path, the file's
    # contents, and its effective configuration.
    def file_checksum(file, config_store)
      Digest::MD5.hexdigest(Dir.pwd + file + IO.read(file) +
                            config_store.for(file).to_s)
    rescue Errno::ENOENT
      # Spurious files that come and go should not cause a crash, at least not
      # here.
      '_'
    end

    class << self
      # source_checksum caches the RuboCop source digest across instances;
      # inhibit_cleanup disables .cleanup (used to speed up tests).
      attr_accessor :source_checksum, :inhibit_cleanup
    end

    # The checksum of the rubocop program running the inspection.
    def rubocop_checksum
      ResultCache.source_checksum ||=
        begin
          lib_root = File.join(File.dirname(__FILE__), '..')
          bin_root = File.join(lib_root, '..', 'bin')
          # Digest every source file under lib/ and bin/ in sorted order so
          # the checksum is stable across runs.
          source = Find.find(lib_root, bin_root).sort.map do |path|
            IO.read(path) if File.file?(path)
          end
          Digest::MD5.hexdigest(source.join)
        end
    end

    # Return the options given at invocation, minus the ones that have no
    # effect on which offenses and disabled line ranges are found, and thus
    # don't affect caching.
    def relevant_options(options)
      options = options.reject { |key, _| NON_CHANGING.include?(key) }
      options.to_s.gsub(/[^a-z]+/i, '_')
    end
  end
end
| {
"content_hash": "735d1e4b9d22231597e3eadd77a89848",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 79,
"avg_line_length": 36.043795620437955,
"alnum_prop": 0.6217091940056703,
"repo_name": "mrb/rubocop",
"id": "c111140e660056051d99d5606d53b2f4f039260c",
"size": "4987",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/rubocop/result_cache.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "6594"
},
{
"name": "Ruby",
"bytes": "2740617"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>AdminLTE 2 | Dashboard</title>
<!-- Tell the browser to be responsive to screen width -->
<meta content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no" name="viewport">
<!-- Bootstrap 3.3.5 -->
<link rel="stylesheet" href="bootstrap/css/bootstrap.min.css">
<!-- Font Awesome -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.5.0/css/font-awesome.min.css">
<!-- Ionicons -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/ionicons/2.0.1/css/ionicons.min.css">
<!-- Theme style -->
<link rel="stylesheet" href="dist/css/AdminLTE.min.css">
  <link rel="stylesheet" href="dist/css/style.css">
<!-- AdminLTE Skins. Choose a skin from the css/skins
folder instead of downloading all of them to reduce the load. -->
<link rel="stylesheet" href="dist/css/skins/_all-skins.min.css">
<!-- iCheck -->
<link rel="stylesheet" href="plugins/iCheck/flat/blue.css">
<!-- Morris chart -->
<link rel="stylesheet" href="plugins/morris/morris.css">
<!-- jvectormap -->
<link rel="stylesheet" href="plugins/jvectormap/jquery-jvectormap-1.2.2.css">
<!-- Date Picker -->
<link rel="stylesheet" href="plugins/datepicker/datepicker3.css">
<!-- Daterange picker -->
<link rel="stylesheet" href="plugins/daterangepicker/daterangepicker-bs3.css">
<!-- bootstrap wysihtml5 - text editor -->
<link rel="stylesheet" href="plugins/bootstrap-wysihtml5/bootstrap3-wysihtml5.min.css">
<link rel="stylesheet" href="plugins/fullcalendar/fullcalendar.min.css">
<link rel="stylesheet" href="plugins/fullcalendar/fullcalendar.print.css" media="print">
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body class="hold-transition skin-blue sidebar-mini">
<div class="wrapper">
<header class="main-header">
<!-- Logo -->
<a href="index2.html" class="logo">
<!-- mini logo for sidebar mini 50x50 pixels -->
<span class="logo-mini"><b>A</b>LT</span>
<!-- logo for regular state and mobile devices -->
<span class="logo-lg"><b>Admin</b>LTE</span>
</a>
<!-- Header Navbar: style can be found in header.less -->
<nav class="navbar navbar-static-top" role="navigation">
<!-- Sidebar toggle button-->
<a href="#" class="sidebar-toggle" data-toggle="offcanvas" role="button">
<span class="sr-only">Toggle navigation</span>
</a>
<div class="navbar-custom-menu">
<ul class="nav navbar-nav">
<!-- Messages: style can be found in dropdown.less-->
<li class="dropdown messages-menu">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="fa fa-envelope-o"></i>
<span class="label label-success">4</span>
</a>
<ul class="dropdown-menu">
<li class="header">You have 4 messages</li>
<li>
<!-- inner menu: contains the actual data -->
<ul class="menu">
<li><!-- start message -->
<a href="#">
<div class="pull-left">
<img src="dist/img/user2-160x160.jpg" class="img-circle" alt="User Image">
</div>
<h4>
Support Team
<small><i class="fa fa-clock-o"></i> 5 mins</small>
</h4>
<p>Why not buy a new awesome theme?</p>
</a>
</li>
<!-- end message -->
<li>
<a href="#">
<div class="pull-left">
<img src="dist/img/user3-128x128.jpg" class="img-circle" alt="User Image">
</div>
<h4>
AdminLTE Design Team
<small><i class="fa fa-clock-o"></i> 2 hours</small>
</h4>
<p>Why not buy a new awesome theme?</p>
</a>
</li>
<li>
<a href="#">
<div class="pull-left">
<img src="dist/img/user4-128x128.jpg" class="img-circle" alt="User Image">
</div>
<h4>
Developers
<small><i class="fa fa-clock-o"></i> Today</small>
</h4>
<p>Why not buy a new awesome theme?</p>
</a>
</li>
<li>
<a href="#">
<div class="pull-left">
<img src="dist/img/user3-128x128.jpg" class="img-circle" alt="User Image">
</div>
<h4>
Sales Department
<small><i class="fa fa-clock-o"></i> Yesterday</small>
</h4>
<p>Why not buy a new awesome theme?</p>
</a>
</li>
<li>
<a href="#">
<div class="pull-left">
<img src="dist/img/user4-128x128.jpg" class="img-circle" alt="User Image">
</div>
<h4>
Reviewers
<small><i class="fa fa-clock-o"></i> 2 days</small>
</h4>
<p>Why not buy a new awesome theme?</p>
</a>
</li>
</ul>
</li>
<li class="footer"><a href="#">See All Messages</a></li>
</ul>
</li>
<!-- Notifications: style can be found in dropdown.less -->
<li class="dropdown notifications-menu">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="fa fa-bell-o"></i>
<span class="label label-warning">10</span>
</a>
<ul class="dropdown-menu">
<li class="header">You have 10 notifications</li>
<li>
<!-- inner menu: contains the actual data -->
<ul class="menu">
<li>
<a href="#">
<i class="fa fa-users text-aqua"></i> 5 new members joined today
</a>
</li>
<li>
<a href="#">
<i class="fa fa-warning text-yellow"></i> Very long description here that may not fit into the
page and may cause design problems
</a>
</li>
<li>
<a href="#">
<i class="fa fa-users text-red"></i> 5 new members joined
</a>
</li>
<li>
<a href="#">
<i class="fa fa-shopping-cart text-green"></i> 25 sales made
</a>
</li>
<li>
<a href="#">
<i class="fa fa-user text-red"></i> You changed your username
</a>
</li>
</ul>
</li>
<li class="footer"><a href="#">View all</a></li>
</ul>
</li>
<!-- Tasks: style can be found in dropdown.less -->
<li class="dropdown tasks-menu">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<i class="fa fa-flag-o"></i>
<span class="label label-danger">9</span>
</a>
<ul class="dropdown-menu">
<li class="header">You have 9 tasks</li>
<li>
<!-- inner menu: contains the actual data -->
<ul class="menu">
<li><!-- Task item -->
<a href="#">
<h3>
Design some buttons
<small class="pull-right">20%</small>
</h3>
<div class="progress xs">
<div class="progress-bar progress-bar-aqua" style="width: 20%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100">
<span class="sr-only">20% Complete</span>
</div>
</div>
</a>
</li>
<!-- end task item -->
<li><!-- Task item -->
<a href="#">
<h3>
Create a nice theme
<small class="pull-right">40%</small>
</h3>
<div class="progress xs">
<div class="progress-bar progress-bar-green" style="width: 40%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100">
<span class="sr-only">40% Complete</span>
</div>
</div>
</a>
</li>
<!-- end task item -->
<li><!-- Task item -->
<a href="#">
<h3>
Some task I need to do
<small class="pull-right">60%</small>
</h3>
<div class="progress xs">
<div class="progress-bar progress-bar-red" style="width: 60%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100">
<span class="sr-only">60% Complete</span>
</div>
</div>
</a>
</li>
<!-- end task item -->
<li><!-- Task item -->
<a href="#">
<h3>
Make beautiful transitions
<small class="pull-right">80%</small>
</h3>
<div class="progress xs">
<div class="progress-bar progress-bar-yellow" style="width: 80%" role="progressbar" aria-valuenow="20" aria-valuemin="0" aria-valuemax="100">
<span class="sr-only">80% Complete</span>
</div>
</div>
</a>
</li>
<!-- end task item -->
</ul>
</li>
<li class="footer">
<a href="#">View all tasks</a>
</li>
</ul>
</li>
<!-- User Account: style can be found in dropdown.less -->
<li class="dropdown user user-menu">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">
<img src="dist/img/user2-160x160.jpg" class="user-image" alt="User Image">
<span class="hidden-xs">Alexander Pierce</span>
</a>
<ul class="dropdown-menu">
<!-- User image -->
<li class="user-header">
<img src="dist/img/user2-160x160.jpg" class="img-circle" alt="User Image">
<p>
Alexander Pierce - Web Developer
<small>Member since Nov. 2012</small>
</p>
</li>
<!-- Menu Body -->
<li class="user-body">
<div class="row">
<div class="col-xs-4 text-center">
<a href="#">Followers</a>
</div>
<div class="col-xs-4 text-center">
<a href="#">Sales</a>
</div>
<div class="col-xs-4 text-center">
<a href="#">Friends</a>
</div>
</div>
<!-- /.row -->
</li>
<!-- Menu Footer-->
<li class="user-footer">
<div class="pull-left">
<a href="#" class="btn btn-default btn-flat">Profile</a>
</div>
<div class="pull-right">
<a href="#" class="btn btn-default btn-flat">Sign out</a>
</div>
</li>
</ul>
</li>
<!-- Control Sidebar Toggle Button -->
<li>
<a href="#" data-toggle="control-sidebar"><i class="fa fa-gears"></i></a>
</li>
</ul>
</div>
</nav>
</header>
<!-- Left side column. contains the logo and sidebar -->
<aside class="main-sidebar">
<!-- sidebar: style can be found in sidebar.less -->
<section class="sidebar">
<!-- Sidebar user panel -->
<div class="user-panel">
<div class="pull-left image">
<img src="dist/img/user2-160x160.jpg" class="img-circle" alt="User Image">
</div>
<div class="pull-left info">
<p>Alexander Pierce</p>
<a href="#"><i class="fa fa-circle text-success"></i> Online</a>
</div>
</div>
<!-- search form -->
<form action="#" method="get" class="sidebar-form">
<div class="input-group">
<input type="text" name="q" class="form-control" placeholder="Search...">
<span class="input-group-btn">
<button type="submit" name="search" id="search-btn" class="btn btn-flat"><i class="fa fa-search"></i>
</button>
</span>
</div>
</form>
<!-- /.search form -->
<!-- sidebar menu: : style can be found in sidebar.less -->
<ul class="sidebar-menu">
<li class="header">MAIN NAVIGATION</li>
<li class="active treeview">
<a href="#">
<i class="fa fa-dashboard"></i> <span>Dashboard</span> <i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li class="active"><a href="index.html"><i class="fa fa-circle-o"></i> Dashboard v1</a></li>
<li><a href="index2.html"><i class="fa fa-circle-o"></i> Dashboard v2</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-files-o"></i>
<span>Layout Options</span>
<span class="label label-primary pull-right">4</span>
</a>
<ul class="treeview-menu">
<li><a href="pages/layout/top-nav.html"><i class="fa fa-circle-o"></i> Top Navigation</a></li>
<li><a href="pages/layout/boxed.html"><i class="fa fa-circle-o"></i> Boxed</a></li>
<li><a href="pages/layout/fixed.html"><i class="fa fa-circle-o"></i> Fixed</a></li>
<li><a href="pages/layout/collapsed-sidebar.html"><i class="fa fa-circle-o"></i> Collapsed Sidebar</a></li>
</ul>
</li>
<li>
<a href="pages/widgets.html">
<i class="fa fa-th"></i> <span>Widgets</span>
<small class="label pull-right bg-green">new</small>
</a>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-pie-chart"></i>
<span>Charts</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="pages/charts/chartjs.html"><i class="fa fa-circle-o"></i> ChartJS</a></li>
<li><a href="pages/charts/morris.html"><i class="fa fa-circle-o"></i> Morris</a></li>
<li><a href="pages/charts/flot.html"><i class="fa fa-circle-o"></i> Flot</a></li>
<li><a href="pages/charts/inline.html"><i class="fa fa-circle-o"></i> Inline charts</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-laptop"></i>
<span>UI Elements</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="pages/UI/general.html"><i class="fa fa-circle-o"></i> General</a></li>
<li><a href="pages/UI/icons.html"><i class="fa fa-circle-o"></i> Icons</a></li>
<li><a href="pages/UI/buttons.html"><i class="fa fa-circle-o"></i> Buttons</a></li>
<li><a href="pages/UI/sliders.html"><i class="fa fa-circle-o"></i> Sliders</a></li>
<li><a href="pages/UI/timeline.html"><i class="fa fa-circle-o"></i> Timeline</a></li>
<li><a href="pages/UI/modals.html"><i class="fa fa-circle-o"></i> Modals</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-edit"></i> <span>Forms</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="pages/forms/general.html"><i class="fa fa-circle-o"></i> General Elements</a></li>
<li><a href="pages/forms/advanced.html"><i class="fa fa-circle-o"></i> Advanced Elements</a></li>
<li><a href="pages/forms/editors.html"><i class="fa fa-circle-o"></i> Editors</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-table"></i> <span>Tables</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="pages/tables/simple.html"><i class="fa fa-circle-o"></i> Simple tables</a></li>
<li><a href="pages/tables/data.html"><i class="fa fa-circle-o"></i> Data tables</a></li>
</ul>
</li>
<li>
<a href="pages/calendar.html">
<i class="fa fa-calendar"></i> <span>Calendar</span>
<small class="label pull-right bg-red">3</small>
</a>
</li>
<li>
<a href="pages/mailbox/mailbox.html">
<i class="fa fa-envelope"></i> <span>Mailbox</span>
<small class="label pull-right bg-yellow">12</small>
</a>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-folder"></i> <span>Examples</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="pages/examples/invoice.html"><i class="fa fa-circle-o"></i> Invoice</a></li>
<li><a href="pages/examples/profile.html"><i class="fa fa-circle-o"></i> Profile</a></li>
<li><a href="pages/examples/login.html"><i class="fa fa-circle-o"></i> Login</a></li>
<li><a href="pages/examples/register.html"><i class="fa fa-circle-o"></i> Register</a></li>
<li><a href="pages/examples/lockscreen.html"><i class="fa fa-circle-o"></i> Lockscreen</a></li>
<li><a href="pages/examples/404.html"><i class="fa fa-circle-o"></i> 404 Error</a></li>
<li><a href="pages/examples/500.html"><i class="fa fa-circle-o"></i> 500 Error</a></li>
<li><a href="pages/examples/blank.html"><i class="fa fa-circle-o"></i> Blank Page</a></li>
<li><a href="pages/examples/pace.html"><i class="fa fa-circle-o"></i> Pace Page</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-share"></i> <span>Multilevel</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="#"><i class="fa fa-circle-o"></i> Level One</a></li>
<li>
<a href="#"><i class="fa fa-circle-o"></i> Level One <i class="fa fa-angle-left pull-right"></i></a>
<ul class="treeview-menu">
<li><a href="#"><i class="fa fa-circle-o"></i> Level Two</a></li>
<li>
<a href="#"><i class="fa fa-circle-o"></i> Level Two <i class="fa fa-angle-left pull-right"></i></a>
<ul class="treeview-menu">
<li><a href="#"><i class="fa fa-circle-o"></i> Level Three</a></li>
<li><a href="#"><i class="fa fa-circle-o"></i> Level Three</a></li>
</ul>
</li>
</ul>
</li>
<li><a href="#"><i class="fa fa-circle-o"></i> Level One</a></li>
</ul>
</li>
<li><a href="documentation/index.html"><i class="fa fa-book"></i> <span>Documentation</span></a></li>
<li class="header">LABELS</li>
<li><a href="#"><i class="fa fa-circle-o text-red"></i> <span>Important</span></a></li>
<li><a href="#"><i class="fa fa-circle-o text-yellow"></i> <span>Warning</span></a></li>
<li><a href="#"><i class="fa fa-circle-o text-aqua"></i> <span>Information</span></a></li>
</ul>
</section>
<!-- /.sidebar -->
</aside>
<!-- Content Wrapper. Contains page content -->
<div class="content-wrapper">
<!-- Content Header (Page header) -->
<section class="content-header">
<h1>
Dashboard
<small>Control panel</small>
</h1>
<ol class="breadcrumb">
<li><a href="#"><i class="fa fa-dashboard"></i> Home</a></li>
<li class="active">Dashboard</li>
</ol>
</section>
<!-- Main content -->
<section class="content">
<div class="row">
<section class="col-lg-3 connectedSortable">
<div class="box box-primary">
<div class="box-body no-padding">
<!-- THE CALENDAR -->
<div id="calendar"></div>
</div>
<!-- /.box-body -->
</div>
</section>
<section class="col-lg-6 connectedSortable">
<div class="box">
<div class="box-header">
<h3 class="box-title">Task</h3>
</div>
<!-- /.box-header -->
<div class="box-body table-responsive no-padding">
<table class="table table-bordered table-hover" id="example2">
<tr>
<th>Task</th>
<th>Operation </th>
<th>Time</th>
<th></th>
</tr>
<tr>
<td>Task 1</td>
<td>12364556</td>
<td>9:30</td>
<td><input type="checkbox" /></td>
</tr>
<tr>
<td>Task 2</td>
<td>569832</td>
<td>10:30</td>
<td><input type="checkbox" /></td>
</tr>
<tr>
<td>Task 3</td>
<td>96552255</td>
<td>11:00</td>
<td><input type="checkbox" /></td>
</tr>
<tr>
<td>Task 4</td>
<td>9865327</td>
<td>12:30</td>
<td><input type="checkbox" /></td>
</tr>
<tr>
<td>Task 5</td>
<td>9865232</td>
<td>5:45</td>
<td><input type="checkbox" /></td>
</tr>
</table>
</div>
<!-- /.box-body -->
</div>
</section>
<!-- /.Left col -->
<!-- right col (We are only adding the ID to make the widgets sortable)-->
<section class="col-lg-3 connectedSortable">
  <div class="box box-primary">
    <div class="box-body no-padding">
      <!-- Goal donut chart, rendered into #piechart by
           dist/js/pages/piagoal.js (loaded at the bottom of this file). -->
      <div id="piechart" class="piagoal"></div>
      <!-- NOTE(review): the "37%" below is hard-coded markup while the chart
           itself is script-rendered -- presumably kept in sync manually;
           verify against piagoal.js. -->
      <h4 class="goalpercentage">Goal%</h4>
      <h2 class="goalpercentage">37%</h2>
    </div>
    <!-- /.box-body -->
  </div>
</section>
<!-- right col -->
<section class="col-lg-12 connectedSortable op">
  <div class="box">
    <div class="box-header">
      <h3 class="box-title">Assigned Operation</h3>
      <!-- Status filter for the table below. NOTE(review): it has no
           name/id and no change handler visible in this file -- presumably
           wired up elsewhere (select2 init?); verify. -->
      <select class="form-control select2 op_filter">
        <option>On Agreement</option>
        <option>Agreement</option>
      </select>
    </div>
    <!-- /.box-header -->
    <div class="box-body table-responsive no-padding">
      <!-- Initialized as a DataTable by the page script ($('#example2')).
           NOTE(review): the id "example2" is also used by the Task table
           above; duplicate ids are invalid HTML -- confirm which table the
           script is meant to target. -->
      <table id="example2" class="table table-bordered table-hover">
        <thead>
          <tr>
            <th>Operation</th>
            <th>ID</th>
            <th>Name</th>
            <th>Status</th>
            <th>Action</th>
            <th>More</th>
          </tr>
        </thead>
        <!-- Placeholder demo rows (identical dummy data). -->
        <tbody>
          <tr>
            <td>985652321</td>
            <td>123654789</td>
            <td>abc</td>
            <td>On Agreement</td>
            <td>Contact</td>
            <td><a href="#">Details</a></td>
          </tr>
          <tr>
            <td>985652321</td>
            <td>123654789</td>
            <td>abc</td>
            <td>On Agreement</td>
            <td>Contact</td>
            <td><a href="#">Details</a></td>
          </tr>
          <tr>
            <td>985652321</td>
            <td>123654789</td>
            <td>abc</td>
            <td>On Agreement</td>
            <td>Contact</td>
            <td><a href="#">Details</a></td>
          </tr>
          <tr>
            <td>985652321</td>
            <td>123654789</td>
            <td>abc</td>
            <td>On Agreement</td>
            <td>Contact</td>
            <td><a href="#">Details</a></td>
          </tr>
          <tr>
            <td>985652321</td>
            <td>123654789</td>
            <td>abc</td>
            <td>On Agreement</td>
            <td>Contact</td>
            <td><a href="#">Details</a></td>
          </tr>
          <tr>
            <td>985652321</td>
            <td>123654789</td>
            <td>abc</td>
            <td>On Agreement</td>
            <td>Contact</td>
            <td><a href="#">Details</a></td>
          </tr>
          <tr>
            <td>985652321</td>
            <td>123654789</td>
            <td>abc</td>
            <td>On Agreement</td>
            <td>Contact</td>
            <td><a href="#">Details</a></td>
          </tr>
          <tr>
            <td>985652321</td>
            <td>123654789</td>
            <td>abc</td>
            <td>On Agreement</td>
            <td>Contact</td>
            <td><a href="#">Details</a></td>
          </tr>
        </tbody>
        <tfoot>
          <tr>
            <th>Operation</th>
            <th>ID</th>
            <th>Name</th>
            <th>Status</th>
            <th>Action</th>
            <th>More</th>
          </tr>
        </tfoot>
      </table>
    </div>
    <!-- /.box-body -->
  </div>
</section>
</div>
<!-- /.row (main row) -->
</section>
<!-- /.content -->
</div>
<!-- /.content-wrapper -->
<footer class="main-footer">
  <!-- AdminLTE template version, right-aligned and hidden on phones -->
  <div class="pull-right hidden-xs">
    <b>Version</b> 2.3.2
  </div>
  <strong>Copyright © 2014-2015 <a href="http://almsaeedstudio.com">Almsaeed Studio</a>.</strong> All rights
  reserved.
</footer>
<!-- Control Sidebar -->
<aside class="control-sidebar control-sidebar-dark">
<!-- Create the tabs -->
<ul class="nav nav-tabs nav-justified control-sidebar-tabs">
  <!-- "active" added to the first tab: no li (or pane) carried the active
       class, so the control sidebar opened onto an empty pane. -->
  <li class="active"><a href="#control-sidebar-home-tab" data-toggle="tab"><i class="fa fa-home"></i></a></li>
  <li><a href="#control-sidebar-settings-tab" data-toggle="tab"><i class="fa fa-gears"></i></a></li>
</ul>
<!-- Tab panes -->
<div class="tab-content">
<!-- Home tab content -->
<!-- Home tab pane. Fixes: "active" added so this pane is visible when the
     sidebar opens (no pane had it); the dead-link hrefs used
     "javascript::;" -- the doubled colon is a JavaScript syntax error when
     clicked -- replaced with javascript:void(0). -->
<div class="tab-pane active" id="control-sidebar-home-tab">
  <h3 class="control-sidebar-heading">Recent Activity</h3>
  <ul class="control-sidebar-menu">
    <li>
      <a href="javascript:void(0)">
        <i class="menu-icon fa fa-birthday-cake bg-red"></i>
        <div class="menu-info">
          <h4 class="control-sidebar-subheading">Langdon's Birthday</h4>
          <p>Will be 23 on April 24th</p>
        </div>
      </a>
    </li>
    <li>
      <a href="javascript:void(0)">
        <i class="menu-icon fa fa-user bg-yellow"></i>
        <div class="menu-info">
          <h4 class="control-sidebar-subheading">Frodo Updated His Profile</h4>
          <p>New phone +1(800)555-1234</p>
        </div>
      </a>
    </li>
    <li>
      <a href="javascript:void(0)">
        <i class="menu-icon fa fa-envelope-o bg-light-blue"></i>
        <div class="menu-info">
          <h4 class="control-sidebar-subheading">Nora Joined Mailing List</h4>
          <p>nora@example.com</p>
        </div>
      </a>
    </li>
    <li>
      <a href="javascript:void(0)">
        <i class="menu-icon fa fa-file-code-o bg-green"></i>
        <div class="menu-info">
          <h4 class="control-sidebar-subheading">Cron Job 254 Executed</h4>
          <p>Execution time 5 seconds</p>
        </div>
      </a>
    </li>
  </ul>
  <!-- /.control-sidebar-menu -->
  <h3 class="control-sidebar-heading">Tasks Progress</h3>
  <ul class="control-sidebar-menu">
    <li>
      <a href="javascript:void(0)">
        <h4 class="control-sidebar-subheading">
          Custom Template Design
          <span class="label label-danger pull-right">70%</span>
        </h4>
        <div class="progress progress-xxs">
          <div class="progress-bar progress-bar-danger" style="width: 70%"></div>
        </div>
      </a>
    </li>
    <li>
      <a href="javascript:void(0)">
        <h4 class="control-sidebar-subheading">
          Update Resume
          <span class="label label-success pull-right">95%</span>
        </h4>
        <div class="progress progress-xxs">
          <div class="progress-bar progress-bar-success" style="width: 95%"></div>
        </div>
      </a>
    </li>
    <li>
      <a href="javascript:void(0)">
        <h4 class="control-sidebar-subheading">
          Laravel Integration
          <span class="label label-warning pull-right">50%</span>
        </h4>
        <div class="progress progress-xxs">
          <div class="progress-bar progress-bar-warning" style="width: 50%"></div>
        </div>
      </a>
    </li>
    <li>
      <a href="javascript:void(0)">
        <h4 class="control-sidebar-subheading">
          Back End Framework
          <span class="label label-primary pull-right">68%</span>
        </h4>
        <div class="progress progress-xxs">
          <div class="progress-bar progress-bar-primary" style="width: 68%"></div>
        </div>
      </a>
    </li>
  </ul>
  <!-- /.control-sidebar-menu -->
</div>
<!-- /.tab-pane -->
<!-- Stats tab content -->
<div class="tab-pane" id="control-sidebar-stats-tab">Stats Tab Content</div>
<!-- /.tab-pane -->
<!-- Settings tab content -->
<!-- Settings tab pane. Fix: the delete-chat-history href used
     "javascript::;" (doubled colon = JavaScript syntax error when clicked);
     replaced with javascript:void(0).
     NOTE(review): the form has no action and the checkboxes have no name
     attributes -- presumably demo-only markup; verify before wiring up. -->
<div class="tab-pane" id="control-sidebar-settings-tab">
  <form method="post">
    <h3 class="control-sidebar-heading">General Settings</h3>
    <div class="form-group">
      <label class="control-sidebar-subheading">
        Report panel usage
        <input type="checkbox" class="pull-right" checked>
      </label>
      <p>
        Some information about this general settings option
      </p>
    </div>
    <!-- /.form-group -->
    <div class="form-group">
      <label class="control-sidebar-subheading">
        Allow mail redirect
        <input type="checkbox" class="pull-right" checked>
      </label>
      <p>
        Other sets of options are available
      </p>
    </div>
    <!-- /.form-group -->
    <div class="form-group">
      <label class="control-sidebar-subheading">
        Expose author name in posts
        <input type="checkbox" class="pull-right" checked>
      </label>
      <p>
        Allow the user to show his name in blog posts
      </p>
    </div>
    <!-- /.form-group -->
    <h3 class="control-sidebar-heading">Chat Settings</h3>
    <div class="form-group">
      <label class="control-sidebar-subheading">
        Show me as online
        <input type="checkbox" class="pull-right" checked>
      </label>
    </div>
    <!-- /.form-group -->
    <div class="form-group">
      <label class="control-sidebar-subheading">
        Turn off notifications
        <input type="checkbox" class="pull-right">
      </label>
    </div>
    <!-- /.form-group -->
    <div class="form-group">
      <label class="control-sidebar-subheading">
        Delete chat history
        <a href="javascript:void(0)" class="text-red pull-right"><i class="fa fa-trash-o"></i></a>
      </label>
    </div>
    <!-- /.form-group -->
  </form>
</div>
<!-- /.tab-pane -->
</div>
</aside>
<!-- /.control-sidebar -->
<!-- Add the sidebar's background. This div must be placed
immediately after the control sidebar -->
<div class="control-sidebar-bg"></div>
</div>
<!-- ./wrapper -->
<!-- jQuery 2.2.0 -->
<script src="plugins/jQuery/jQuery-2.2.0.min.js"></script>
<!-- jQuery UI 1.11.4 -->
<script src="https://code.jquery.com/ui/1.11.4/jquery-ui.min.js"></script>
<!-- Resolve conflict in jQuery UI tooltip with Bootstrap tooltip -->
<script>
  // jQuery UI's button widget and Bootstrap's button plugin both claim
  // $.fn.button; re-expose the jQuery UI widget as $.fn.uibutton so the
  // two libraries can coexist on this page.
  $.widget.bridge('uibutton', $.ui.button);
</script>
<!-- Bootstrap 3.3.5 -->
<script src="bootstrap/js/bootstrap.min.js"></script>
<!-- Morris.js charts -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/raphael/2.1.0/raphael-min.js"></script>
<script src="plugins/morris/morris.min.js"></script>
<!-- Sparkline -->
<script src="plugins/sparkline/jquery.sparkline.min.js"></script>
<!-- jvectormap -->
<script src="plugins/jvectormap/jquery-jvectormap-1.2.2.min.js"></script>
<script src="plugins/jvectormap/jquery-jvectormap-world-mill-en.js"></script>
<!-- jQuery Knob Chart -->
<script src="plugins/knob/jquery.knob.js"></script>
<!-- daterangepicker -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.11.2/moment.min.js"></script>
<script src="plugins/daterangepicker/daterangepicker.js"></script>
<!-- datepicker -->
<script src="plugins/datepicker/bootstrap-datepicker.js"></script>
<!-- Bootstrap WYSIHTML5 -->
<script src="plugins/bootstrap-wysihtml5/bootstrap3-wysihtml5.all.min.js"></script>
<!-- Slimscroll -->
<script src="plugins/slimScroll/jquery.slimscroll.min.js"></script>
<!-- FastClick -->
<script src="plugins/fastclick/fastclick.js"></script>
<!-- AdminLTE App -->
<script src="dist/js/app.min.js"></script>
<!-- AdminLTE dashboard demo (This is only for demo purposes) -->
<script src="dist/js/pages/dashboard.js"></script>
<script src="dist/js/pages/piagoal.js"></script>
<!-- AdminLTE for demo purposes -->
<script src="dist/js/demo.js"></script>
<script src="dist/js/canvasjs.min.js"></script>
<script src="plugins/fullcalendar/fullcalendar.min.js"></script>
<!-- Page specific script -->
<script>
$(function () {
/* initialize the calendar
-----------------------------------------------------------------*/
//Date for the calendar events (dummy data)
var date = new Date();
var d = date.getDate(),
m = date.getMonth(),
y = date.getFullYear();
$('#calendar').fullCalendar({
header: {
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek,agendaDay'
},
buttonText: {
today: 'today',
month: 'month',
week: 'week',
day: 'day'
},
//Random default events
events: [
{
title: 'All Day Event',
start: new Date(y, m, 1),
backgroundColor: "#f56954", //red
borderColor: "#f56954" //red
},
{
title: 'Long Event',
start: new Date(y, m, d - 5),
end: new Date(y, m, d - 2),
backgroundColor: "#f39c12", //yellow
borderColor: "#f39c12" //yellow
},
{
title: 'Meeting',
start: new Date(y, m, d, 10, 30),
allDay: false,
backgroundColor: "#0073b7", //Blue
borderColor: "#0073b7" //Blue
},
{
title: 'Lunch',
start: new Date(y, m, d, 12, 0),
end: new Date(y, m, d, 14, 0),
allDay: false,
backgroundColor: "#00c0ef", //Info (aqua)
borderColor: "#00c0ef" //Info (aqua)
},
{
title: 'Birthday Party',
start: new Date(y, m, d + 1, 19, 0),
end: new Date(y, m, d + 1, 22, 30),
allDay: false,
backgroundColor: "#00a65a", //Success (green)
borderColor: "#00a65a" //Success (green)
},
{
title: 'Click for Google',
start: new Date(y, m, 28),
end: new Date(y, m, 29),
url: 'http://google.com/',
backgroundColor: "#3c8dbc", //Primary (light-blue)
borderColor: "#3c8dbc" //Primary (light-blue)
}
],
editable: true,
droppable: true, // this allows things to be dropped onto the calendar !!!
drop: function (date, allDay) { // this function is called when something is dropped
// retrieve the dropped element's stored Event Object
var originalEventObject = $(this).data('eventObject');
// we need to copy it, so that multiple events don't have a reference to the same object
var copiedEventObject = $.extend({}, originalEventObject);
// assign it the date that was reported
copiedEventObject.start = date;
copiedEventObject.allDay = allDay;
copiedEventObject.backgroundColor = $(this).css("background-color");
copiedEventObject.borderColor = $(this).css("border-color");
// render the event on the calendar
// the last `true` argument determines if the event "sticks" (http://arshaw.com/fullcalendar/docs/event_rendering/renderEvent/)
$('#calendar').fullCalendar('renderEvent', copiedEventObject, true);
// is the "remove after drop" checkbox checked?
if ($('#drop-remove').is(':checked')) {
// if so, remove the element from the "Draggable Events" list
$(this).remove();
}
}
});
</script>
<script>
$(function () {
$('#example2').DataTable({
"paging": true,
"lengthChange": false,
"searching": false,
"ordering": true,
"info": true,
"autoWidth": false
});
});
</script>
</body>
</html>
| {
"content_hash": "a178b7ecdd42d8c6d5a4552d75ce9c32",
"timestamp": "",
"source": "github",
"line_count": 1039,
"max_line_length": 165,
"avg_line_length": 38.60731472569778,
"alnum_prop": 0.46174556876822975,
"repo_name": "jalvaradoec/Poligresa-3.0",
"id": "7cfea796e4d2ee302c844b26f8ccd9b8e1a00b17",
"size": "40113",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backup/Operator.html",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "703867"
},
{
"name": "HTML",
"bytes": "4510535"
},
{
"name": "JavaScript",
"bytes": "2907789"
},
{
"name": "PHP",
"bytes": "860316"
}
],
"symlink_target": ""
} |
namespace arangodb {
namespace velocypack {
class AttributeTranslator;
class Dumper;
struct Options;
class Slice;
// Callback interface for filtering out attributes while a VPack Object is
// being built by the Parser (see Options::attributeExcludeHandler).
struct AttributeExcludeHandler {
  virtual ~AttributeExcludeHandler() {}

  // Return true if the attribute with the given key should be omitted.
  // `nesting` is the object nesting depth of the attribute -- base level
  // not documented here; confirm against the Parser implementation.
  virtual bool shouldExclude(Slice const& key, int nesting) = 0;
};
// Hook for handling VPack values of custom types, used by Dumper and Slicer
// (see Options::customTypeHandler). Both callbacks throw
// Exception::NotImplemented unless overridden by a subclass.
struct CustomTypeHandler {
  virtual ~CustomTypeHandler() {}
  // Serialize the custom value via the given Dumper.
  virtual void dump(Slice const&, Dumper*, Slice const&) {
    throw Exception(Exception::NotImplemented);
  }
  // Produce a string representation of the custom value.
  virtual std::string toString(Slice const&, Options const*, Slice const&) {
    throw Exception(Exception::NotImplemented);
  }
};
// Options bundles the tunable settings consulted by Parser, Builder and
// Dumper. A default-constructed instance uses the member initializers
// below; the shared static `Defaults` instance carries the same values.
struct Options {
  // What the Dumper does when it encounters a VPack value that has no JSON
  // equivalent: emit it as null, convert it, or fail (throw).
  enum UnsupportedTypeBehavior {
    NullifyUnsupportedType,
    ConvertUnsupportedType,
    FailOnUnsupportedType
  };
  Options() {}
  // Dumper behavior when a VPack value is serialized to JSON that
  // has no JSON equivalent
  UnsupportedTypeBehavior unsupportedTypeBehavior = FailOnUnsupportedType;
  // callback for excluding attributes from being built by the Parser
  AttributeExcludeHandler* attributeExcludeHandler = nullptr;
  // optional translator applied to attribute names; nullptr disables it
  // (see AttributeTranslator for the exact mapping semantics)
  AttributeTranslator* attributeTranslator = nullptr;
  // custom type handler used for processing custom types by Dumper and Slicer
  CustomTypeHandler* customTypeHandler = nullptr;
  // allow building Arrays without index table?
  bool buildUnindexedArrays = false;
  // allow building Objects without index table?
  bool buildUnindexedObjects = false;
  // pretty-print JSON output when dumping with Dumper
  bool prettyPrint = false;
  // keep top-level object/array open when building objects with the Parser
  bool keepTopLevelOpen = false;
  // clear builder before starting to parse in Parser
  bool clearBuilderBeforeParse = true;
  // validate UTF-8 strings when JSON-parsing with Parser
  bool validateUtf8Strings = false;
  // validate that attribute names in Object values are actually
  // unique when creating objects via Builder. This also includes
  // creation of Object values via a Parser
  bool checkAttributeUniqueness = false;
  // escape forward slashes when serializing VPack values into
  // JSON with a Dumper
  bool escapeForwardSlashes = false;
  // escape multi-byte Unicode characters when dumping them to JSON
  // with a Dumper (creates \uxxxx sequences)
  bool escapeUnicode = false;
  // disallow using type External (to prevent injection of arbitrary pointer
  // values as a security precaution)
  bool disallowExternals = false;
  // default options with the above settings (defined elsewhere)
  static Options Defaults;
};
} // namespace arangodb::velocypack
} // namespace arangodb
#endif
| {
"content_hash": "c2f11115687ffcc4534233116e63ed4a",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 78,
"avg_line_length": 28.555555555555557,
"alnum_prop": 0.7517509727626459,
"repo_name": "baslr/ArangoDB",
"id": "58d7e73e1a1a50a51a4e8bcff1c4b737b25556de",
"size": "3778",
"binary": false,
"copies": "4",
"ref": "refs/heads/3.1-silent",
"path": "3rdParty/velocypack/include/velocypack/Options.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ada",
"bytes": "89080"
},
{
"name": "Assembly",
"bytes": "391227"
},
{
"name": "Awk",
"bytes": "4272"
},
{
"name": "Batchfile",
"bytes": "62892"
},
{
"name": "C",
"bytes": "7932707"
},
{
"name": "C#",
"bytes": "96430"
},
{
"name": "C++",
"bytes": "284363933"
},
{
"name": "CLIPS",
"bytes": "5291"
},
{
"name": "CMake",
"bytes": "681903"
},
{
"name": "CSS",
"bytes": "1036656"
},
{
"name": "CWeb",
"bytes": "174166"
},
{
"name": "Cuda",
"bytes": "52444"
},
{
"name": "DIGITAL Command Language",
"bytes": "259402"
},
{
"name": "Emacs Lisp",
"bytes": "14637"
},
{
"name": "Fortran",
"bytes": "1856"
},
{
"name": "Groovy",
"bytes": "131"
},
{
"name": "HTML",
"bytes": "2318016"
},
{
"name": "Java",
"bytes": "2325801"
},
{
"name": "JavaScript",
"bytes": "67878359"
},
{
"name": "LLVM",
"bytes": "24129"
},
{
"name": "Lex",
"bytes": "1231"
},
{
"name": "Lua",
"bytes": "16189"
},
{
"name": "M4",
"bytes": "600550"
},
{
"name": "Makefile",
"bytes": "509612"
},
{
"name": "Max",
"bytes": "36857"
},
{
"name": "Module Management System",
"bytes": "1545"
},
{
"name": "NSIS",
"bytes": "28404"
},
{
"name": "Objective-C",
"bytes": "19321"
},
{
"name": "Objective-C++",
"bytes": "2503"
},
{
"name": "PHP",
"bytes": "98503"
},
{
"name": "Pascal",
"bytes": "145688"
},
{
"name": "Perl",
"bytes": "720157"
},
{
"name": "Perl 6",
"bytes": "9918"
},
{
"name": "Python",
"bytes": "5859911"
},
{
"name": "QMake",
"bytes": "16692"
},
{
"name": "R",
"bytes": "5123"
},
{
"name": "Rebol",
"bytes": "354"
},
{
"name": "Roff",
"bytes": "1010686"
},
{
"name": "Ruby",
"bytes": "922159"
},
{
"name": "SAS",
"bytes": "1847"
},
{
"name": "Scheme",
"bytes": "10604"
},
{
"name": "Shell",
"bytes": "511077"
},
{
"name": "Swift",
"bytes": "116"
},
{
"name": "Tcl",
"bytes": "1172"
},
{
"name": "TeX",
"bytes": "32117"
},
{
"name": "Vim script",
"bytes": "4075"
},
{
"name": "Visual Basic",
"bytes": "11568"
},
{
"name": "XSLT",
"bytes": "551977"
},
{
"name": "Yacc",
"bytes": "53005"
}
],
"symlink_target": ""
} |
<?php
namespace App\Http\Middleware;
use Closure;
use Illuminate\Support\Facades\Auth;
use Validator;
use App\Models\Mirror;
use Hash;
class AuthenticateMirror
{
    /**
     * Authenticate an incoming request as a registered mirror device.
     *
     * The request must carry a mirror `id` and a plain-text `password`;
     * the password is checked against the mirror's stored (hashed)
     * `authentication_code`. Any failure aborts with HTTP 403 and the
     * first error message.
     *
     * @param  \Illuminate\Http\Request  $request
     * @param  \Closure  $next
     * @param  string|null  $guard
     * @return mixed
     */
    public function handle($request, Closure $next, $guard = null)
    {
        $validator = Validator::make($request->all(), [
            'id' => 'required|exists:mirrors',
            'password' => 'required',
        ]);

        // Validate before loading the model: previously Mirror::findOrFail()
        // ran first, so a missing/unknown id produced a 404 instead of the
        // intended 403 with the validation message.
        if ($validator->fails()) {
            abort(403, $validator->errors()->first());
        }

        $mirror = Mirror::findOrFail($request->id);

        // Typo fixed in the user-facing message ("Authentiation").
        if (!Hash::check($request->password, $mirror->authentication_code)) {
            abort(403, 'Invalid Authentication code');
        }

        return $next($request);
    }
}
| {
"content_hash": "d87aa2ba2a880fef27c055e007e5e446",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 84,
"avg_line_length": 24.928571428571427,
"alnum_prop": 0.5635148042024832,
"repo_name": "jlndk/smart-mirror",
"id": "b353a547b429595156632cfbb3290207e490ca0c",
"size": "1047",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/Http/Middleware/AuthenticateMirror.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "553"
},
{
"name": "HTML",
"bytes": "27841"
},
{
"name": "PHP",
"bytes": "80210"
},
{
"name": "Vue",
"bytes": "563"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.